Linux Audio

Check our new training course

Loading...
v6.9.4
   1/*
   2 * Copyright 2007-11 Advanced Micro Devices, Inc.
   3 * Copyright 2008 Red Hat Inc.
   4 *
   5 * Permission is hereby granted, free of charge, to any person obtaining a
   6 * copy of this software and associated documentation files (the "Software"),
   7 * to deal in the Software without restriction, including without limitation
   8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
   9 * and/or sell copies of the Software, and to permit persons to whom the
  10 * Software is furnished to do so, subject to the following conditions:
  11 *
  12 * The above copyright notice and this permission notice shall be included in
  13 * all copies or substantial portions of the Software.
  14 *
  15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
  18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  21 * OTHER DEALINGS IN THE SOFTWARE.
  22 *
  23 * Authors: Dave Airlie
  24 *          Alex Deucher
  25 */
  26
  27#include <linux/pci.h>
  28
  29#include <acpi/video.h>
  30
  31#include <drm/drm_edid.h>
  32#include <drm/amdgpu_drm.h>
  33#include "amdgpu.h"
  34#include "amdgpu_connectors.h"
  35#include "amdgpu_display.h"
  36#include "atom.h"
  37#include "atombios_encoders.h"
  38#include "atombios_dp.h"
  39#include <linux/backlight.h>
  40#include "bif/bif_4_1_d.h"
  41
  42u8
  43amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
  44{
  45	u8 backlight_level;
  46	u32 bios_2_scratch;
  47
  48	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  49
  50	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
  51			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
  52
  53	return backlight_level;
  54}
  55
  56void
  57amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
  58					    u8 backlight_level)
  59{
  60	u32 bios_2_scratch;
  61
  62	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  63
  64	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
  65	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
  66			   ATOM_S2_CURRENT_BL_LEVEL_MASK);
  67
  68	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
  69}
  70
  71u8
  72amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
  73{
  74	struct drm_device *dev = amdgpu_encoder->base.dev;
  75	struct amdgpu_device *adev = drm_to_adev(dev);
  76
  77	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  78		return 0;
  79
  80	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  81}
  82
  83void
  84amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
  85				     u8 level)
  86{
  87	struct drm_encoder *encoder = &amdgpu_encoder->base;
  88	struct drm_device *dev = amdgpu_encoder->base.dev;
  89	struct amdgpu_device *adev = drm_to_adev(dev);
  90	struct amdgpu_encoder_atom_dig *dig;
  91
  92	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  93		return;
  94
  95	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
  96	    amdgpu_encoder->enc_priv) {
  97		dig = amdgpu_encoder->enc_priv;
  98		dig->backlight_level = level;
  99		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
 100
 101		switch (amdgpu_encoder->encoder_id) {
 102		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 103		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 104		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 105		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 106		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 107			if (dig->backlight_level == 0)
 108				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 109								       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
 110			else {
 111				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 112								       ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
 113				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 114								       ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
 115			}
 116			break;
 117		default:
 118			break;
 119		}
 120	}
 121}
 122
 
 
 123static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
 124{
 125	u8 level;
 126
 127	/* Convert brightness to hardware level */
 128	if (bd->props.brightness < 0)
 129		level = 0;
 130	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
 131		level = AMDGPU_MAX_BL_LEVEL;
 132	else
 133		level = bd->props.brightness;
 134
 135	return level;
 136}
 137
 138static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
 139{
 140	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
 141	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
 142
 143	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
 144					     amdgpu_atombios_encoder_backlight_level(bd));
 145
 146	return 0;
 147}
 148
 149static int
 150amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
 151{
 152	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
 153	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
 154	struct drm_device *dev = amdgpu_encoder->base.dev;
 155	struct amdgpu_device *adev = drm_to_adev(dev);
 156
 157	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
 158}
 159
 160static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
 161	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
 162	.update_status	= amdgpu_atombios_encoder_update_backlight_status,
 163};
 164
 165void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
 166				     struct drm_connector *drm_connector)
 167{
 168	struct drm_device *dev = amdgpu_encoder->base.dev;
 169	struct amdgpu_device *adev = drm_to_adev(dev);
 170	struct backlight_device *bd;
 171	struct backlight_properties props;
 172	struct amdgpu_backlight_privdata *pdata;
 173	struct amdgpu_encoder_atom_dig *dig;
 
 174	char bl_name[16];
 175
 176	/* Mac laptops with multiple GPUs use the gmux driver for backlight
 177	 * so don't register a backlight device
 178	 */
 179	if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
 180	    (adev->pdev->device == 0x6741))
 181		return;
 182
 183	if (!amdgpu_encoder->enc_priv)
 184		return;
 185
 186	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
 187		goto register_acpi_backlight;
 188
 189	if (!acpi_video_backlight_use_native()) {
 190		drm_info(dev, "Skipping amdgpu atom DIG backlight registration\n");
 191		goto register_acpi_backlight;
 192	}
 193
 194	pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
 195	if (!pdata) {
 196		DRM_ERROR("Memory allocation failed\n");
 197		goto error;
 198	}
 199
 200	memset(&props, 0, sizeof(props));
 201	props.max_brightness = AMDGPU_MAX_BL_LEVEL;
 202	props.type = BACKLIGHT_RAW;
 203	snprintf(bl_name, sizeof(bl_name),
 204		 "amdgpu_bl%d", dev->primary->index);
 205	bd = backlight_device_register(bl_name, drm_connector->kdev,
 206				       pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
 207	if (IS_ERR(bd)) {
 208		DRM_ERROR("Backlight registration failed\n");
 209		goto error;
 210	}
 211
 212	pdata->encoder = amdgpu_encoder;
 213
 
 
 214	dig = amdgpu_encoder->enc_priv;
 215	dig->bl_dev = bd;
 216
 217	bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
 218	bd->props.power = FB_BLANK_UNBLANK;
 219	backlight_update_status(bd);
 220
 221	DRM_INFO("amdgpu atom DIG backlight initialized\n");
 222
 223	return;
 224
 225error:
 226	kfree(pdata);
 227	return;
 228
 229register_acpi_backlight:
 230	/* Try registering an ACPI video backlight device instead. */
 231	acpi_video_register_backlight();
 232}
 233
 234void
 235amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
 236{
 237	struct drm_device *dev = amdgpu_encoder->base.dev;
 238	struct amdgpu_device *adev = drm_to_adev(dev);
 239	struct backlight_device *bd = NULL;
 240	struct amdgpu_encoder_atom_dig *dig;
 241
 242	if (!amdgpu_encoder->enc_priv)
 243		return;
 244
 245	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
 246		return;
 247
 248	dig = amdgpu_encoder->enc_priv;
 249	bd = dig->bl_dev;
 250	dig->bl_dev = NULL;
 251
 252	if (bd) {
 253		struct amdgpu_legacy_backlight_privdata *pdata;
 254
 255		pdata = bl_get_data(bd);
 256		backlight_device_unregister(bd);
 257		kfree(pdata);
 258
 259		DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
 260	}
 261}
 262
 
 
 
 
 
 
 
 
 
 
 
 
 263bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
 264{
 265	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 266	switch (amdgpu_encoder->encoder_id) {
 267	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
 268	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 269	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 270	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 271	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 272		return true;
 273	default:
 274		return false;
 275	}
 276}
 277
 278bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
 279				 const struct drm_display_mode *mode,
 280				 struct drm_display_mode *adjusted_mode)
 281{
 282	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 283
 284	/* set the active encoder to connector routing */
 285	amdgpu_encoder_set_active_device(encoder);
 286	drm_mode_set_crtcinfo(adjusted_mode, 0);
 287
 288	/* hw bug */
 289	if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
 290	    && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
 291		adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
 292
 293	/* vertical FP must be at least 1 */
 294	if (mode->crtc_vsync_start == mode->crtc_vdisplay)
 295		adjusted_mode->crtc_vsync_start++;
 296
 297	/* get the native mode for scaling */
 298	if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
 299		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
 300	else if (amdgpu_encoder->rmx_type != RMX_OFF)
 301		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
 302
 303	if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
 304	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
 305		struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
 306		amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
 307	}
 308
 309	return true;
 310}
 311
 312static void
 313amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
 314{
 315	struct drm_device *dev = encoder->dev;
 316	struct amdgpu_device *adev = drm_to_adev(dev);
 317	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 318	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
 319	int index = 0;
 320
 321	memset(&args, 0, sizeof(args));
 322
 323	switch (amdgpu_encoder->encoder_id) {
 324	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
 325	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
 326		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
 327		break;
 328	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
 329	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
 330		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
 331		break;
 332	}
 333
 334	args.ucAction = action;
 335	args.ucDacStandard = ATOM_DAC1_PS2;
 336	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 337
 338	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
 339
 340}
 341
 342static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
 343{
 344	int bpc = 8;
 345
 346	if (encoder->crtc) {
 347		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
 348		bpc = amdgpu_crtc->bpc;
 349	}
 350
 351	switch (bpc) {
 352	case 0:
 353		return PANEL_BPC_UNDEFINE;
 354	case 6:
 355		return PANEL_6BIT_PER_COLOR;
 356	case 8:
 357	default:
 358		return PANEL_8BIT_PER_COLOR;
 359	case 10:
 360		return PANEL_10BIT_PER_COLOR;
 361	case 12:
 362		return PANEL_12BIT_PER_COLOR;
 363	case 16:
 364		return PANEL_16BIT_PER_COLOR;
 365	}
 366}
 367
 368union dvo_encoder_control {
 369	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
 370	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
 371	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
 372	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
 373};
 374
 375static void
 376amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
 377{
 378	struct drm_device *dev = encoder->dev;
 379	struct amdgpu_device *adev = drm_to_adev(dev);
 380	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 381	union dvo_encoder_control args;
 382	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
 383	uint8_t frev, crev;
 384
 385	memset(&args, 0, sizeof(args));
 386
 387	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 388		return;
 389
 390	switch (frev) {
 391	case 1:
 392		switch (crev) {
 393		case 1:
 394			/* R4xx, R5xx */
 395			args.ext_tmds.sXTmdsEncoder.ucEnable = action;
 396
 397			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 398				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
 399
 400			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
 401			break;
 402		case 2:
 403			/* RS600/690/740 */
 404			args.dvo.sDVOEncoder.ucAction = action;
 405			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 406			/* DFP1, CRT1, TV1 depending on the type of port */
 407			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
 408
 409			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 410				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
 411			break;
 412		case 3:
 413			/* R6xx */
 414			args.dvo_v3.ucAction = action;
 415			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 416			args.dvo_v3.ucDVOConfig = 0; /* XXX */
 417			break;
 418		case 4:
 419			/* DCE8 */
 420			args.dvo_v4.ucAction = action;
 421			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 422			args.dvo_v4.ucDVOConfig = 0; /* XXX */
 423			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 424			break;
 425		default:
 426			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 427			break;
 428		}
 429		break;
 430	default:
 431		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 432		break;
 433	}
 434
 435	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
 436}
 437
 438int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
 439{
 440	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 441	struct drm_connector *connector;
 442	struct amdgpu_connector *amdgpu_connector;
 443	struct amdgpu_connector_atom_dig *dig_connector;
 444
 445	/* dp bridges are always DP */
 446	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
 447		return ATOM_ENCODER_MODE_DP;
 448
 449	/* DVO is always DVO */
 450	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
 451	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
 452		return ATOM_ENCODER_MODE_DVO;
 453
 454	connector = amdgpu_get_connector_for_encoder(encoder);
 455	/* if we don't have an active device yet, just use one of
 456	 * the connectors tied to the encoder.
 457	 */
 458	if (!connector)
 459		connector = amdgpu_get_connector_for_encoder_init(encoder);
 460	amdgpu_connector = to_amdgpu_connector(connector);
 461
 462	switch (connector->connector_type) {
 463	case DRM_MODE_CONNECTOR_DVII:
 464	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
 465		if (amdgpu_audio != 0) {
 466			if (amdgpu_connector->use_digital &&
 467			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
 468				return ATOM_ENCODER_MODE_HDMI;
 469			else if (connector->display_info.is_hdmi &&
 470				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 471				return ATOM_ENCODER_MODE_HDMI;
 472			else if (amdgpu_connector->use_digital)
 473				return ATOM_ENCODER_MODE_DVI;
 474			else
 475				return ATOM_ENCODER_MODE_CRT;
 476		} else if (amdgpu_connector->use_digital) {
 477			return ATOM_ENCODER_MODE_DVI;
 478		} else {
 479			return ATOM_ENCODER_MODE_CRT;
 480		}
 481		break;
 482	case DRM_MODE_CONNECTOR_DVID:
 483	case DRM_MODE_CONNECTOR_HDMIA:
 484	default:
 485		if (amdgpu_audio != 0) {
 486			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
 487				return ATOM_ENCODER_MODE_HDMI;
 488			else if (connector->display_info.is_hdmi &&
 489				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 490				return ATOM_ENCODER_MODE_HDMI;
 491			else
 492				return ATOM_ENCODER_MODE_DVI;
 493		} else {
 494			return ATOM_ENCODER_MODE_DVI;
 495		}
 
 496	case DRM_MODE_CONNECTOR_LVDS:
 497		return ATOM_ENCODER_MODE_LVDS;
 
 498	case DRM_MODE_CONNECTOR_DisplayPort:
 499		dig_connector = amdgpu_connector->con_priv;
 500		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
 501		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
 502			return ATOM_ENCODER_MODE_DP;
 503		} else if (amdgpu_audio != 0) {
 504			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
 505				return ATOM_ENCODER_MODE_HDMI;
 506			else if (connector->display_info.is_hdmi &&
 507				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 508				return ATOM_ENCODER_MODE_HDMI;
 509			else
 510				return ATOM_ENCODER_MODE_DVI;
 511		} else {
 512			return ATOM_ENCODER_MODE_DVI;
 513		}
 
 514	case DRM_MODE_CONNECTOR_eDP:
 515		return ATOM_ENCODER_MODE_DP;
 516	case DRM_MODE_CONNECTOR_DVIA:
 517	case DRM_MODE_CONNECTOR_VGA:
 518		return ATOM_ENCODER_MODE_CRT;
 
 519	case DRM_MODE_CONNECTOR_Composite:
 520	case DRM_MODE_CONNECTOR_SVIDEO:
 521	case DRM_MODE_CONNECTOR_9PinDIN:
 522		/* fix me */
 523		return ATOM_ENCODER_MODE_TV;
 
 
 524	}
 525}
 526
 527/*
 528 * DIG Encoder/Transmitter Setup
 529 *
 530 * DCE 6.0
 531 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
 532 * Supports up to 6 digital outputs
 533 * - 6 DIG encoder blocks.
 534 * - DIG to PHY mapping is hardcoded
 535 * DIG1 drives UNIPHY0 link A, A+B
 536 * DIG2 drives UNIPHY0 link B
 537 * DIG3 drives UNIPHY1 link A, A+B
 538 * DIG4 drives UNIPHY1 link B
 539 * DIG5 drives UNIPHY2 link A, A+B
 540 * DIG6 drives UNIPHY2 link B
 541 *
 542 * Routing
 543 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
 544 * Examples:
 545 * crtc0 -> dig2 -> LVTMA   links A+B -> TMDS/HDMI
 546 * crtc1 -> dig1 -> UNIPHY0 link  B   -> DP
 547 * crtc0 -> dig1 -> UNIPHY2 link  A   -> LVDS
 548 * crtc1 -> dig2 -> UNIPHY1 link  B+A -> TMDS/HDMI
 549 */
 550
 551union dig_encoder_control {
 552	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
 553	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
 554	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
 555	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
 556	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
 557};
 558
 559void
 560amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
 561				   int action, int panel_mode)
 562{
 563	struct drm_device *dev = encoder->dev;
 564	struct amdgpu_device *adev = drm_to_adev(dev);
 565	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 566	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
 567	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
 568	union dig_encoder_control args;
 569	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
 570	uint8_t frev, crev;
 571	int dp_clock = 0;
 572	int dp_lane_count = 0;
 573	int hpd_id = AMDGPU_HPD_NONE;
 574
 575	if (connector) {
 576		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
 577		struct amdgpu_connector_atom_dig *dig_connector =
 578			amdgpu_connector->con_priv;
 579
 580		dp_clock = dig_connector->dp_clock;
 581		dp_lane_count = dig_connector->dp_lane_count;
 582		hpd_id = amdgpu_connector->hpd.hpd;
 583	}
 584
 585	/* no dig encoder assigned */
 586	if (dig->dig_encoder == -1)
 587		return;
 588
 589	memset(&args, 0, sizeof(args));
 590
 591	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 592		return;
 593
 594	switch (frev) {
 595	case 1:
 596		switch (crev) {
 597		case 1:
 598			args.v1.ucAction = action;
 599			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 600			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 601				args.v3.ucPanelMode = panel_mode;
 602			else
 603				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 604
 605			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
 606				args.v1.ucLaneNum = dp_lane_count;
 607			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 608				args.v1.ucLaneNum = 8;
 609			else
 610				args.v1.ucLaneNum = 4;
 611
 612			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
 613				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
 614			switch (amdgpu_encoder->encoder_id) {
 615			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 616				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
 617				break;
 618			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 619			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 620				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
 621				break;
 622			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 623				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
 624				break;
 625			}
 626			if (dig->linkb)
 627				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
 628			else
 629				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
 630			break;
 631		case 2:
 632		case 3:
 633			args.v3.ucAction = action;
 634			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 635			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 636				args.v3.ucPanelMode = panel_mode;
 637			else
 638				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 639
 640			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
 641				args.v3.ucLaneNum = dp_lane_count;
 642			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 643				args.v3.ucLaneNum = 8;
 644			else
 645				args.v3.ucLaneNum = 4;
 646
 647			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
 648				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
 649			args.v3.acConfig.ucDigSel = dig->dig_encoder;
 650			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 651			break;
 652		case 4:
 653			args.v4.ucAction = action;
 654			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 655			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 656				args.v4.ucPanelMode = panel_mode;
 657			else
 658				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 659
 660			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
 661				args.v4.ucLaneNum = dp_lane_count;
 662			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 663				args.v4.ucLaneNum = 8;
 664			else
 665				args.v4.ucLaneNum = 4;
 666
 667			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
 668				if (dp_clock == 540000)
 669					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
 670				else if (dp_clock == 324000)
 671					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
 672				else if (dp_clock == 270000)
 673					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
 674				else
 675					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
 676			}
 677			args.v4.acConfig.ucDigSel = dig->dig_encoder;
 678			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 679			if (hpd_id == AMDGPU_HPD_NONE)
 680				args.v4.ucHPD_ID = 0;
 681			else
 682				args.v4.ucHPD_ID = hpd_id + 1;
 683			break;
 684		case 5:
 685			switch (action) {
 686			case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
 687				args.v5.asDPPanelModeParam.ucAction = action;
 688				args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
 689				args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
 690				break;
 691			case ATOM_ENCODER_CMD_STREAM_SETUP:
 692				args.v5.asStreamParam.ucAction = action;
 693				args.v5.asStreamParam.ucDigId = dig->dig_encoder;
 694				args.v5.asStreamParam.ucDigMode =
 695					amdgpu_atombios_encoder_get_encoder_mode(encoder);
 696				if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
 697					args.v5.asStreamParam.ucLaneNum = dp_lane_count;
 698				else if (amdgpu_dig_monitor_is_duallink(encoder,
 699									amdgpu_encoder->pixel_clock))
 700					args.v5.asStreamParam.ucLaneNum = 8;
 701				else
 702					args.v5.asStreamParam.ucLaneNum = 4;
 703				args.v5.asStreamParam.ulPixelClock =
 704					cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
 705				args.v5.asStreamParam.ucBitPerColor =
 706					amdgpu_atombios_encoder_get_bpc(encoder);
 707				args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
 708				break;
 709			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
 710			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
 711			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
 712			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
 713			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
 714			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
 715			case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
 716			case ATOM_ENCODER_CMD_DP_VIDEO_ON:
 717				args.v5.asCmdParam.ucAction = action;
 718				args.v5.asCmdParam.ucDigId = dig->dig_encoder;
 719				break;
 720			default:
 721				DRM_ERROR("Unsupported action 0x%x\n", action);
 722				break;
 723			}
 724			break;
 725		default:
 726			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 727			break;
 728		}
 729		break;
 730	default:
 731		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 732		break;
 733	}
 734
 735	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
 736
 737}
 738
 739union dig_transmitter_control {
 740	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
 741	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
 742	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
 743	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
 744	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
 745	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
 746};
 747
 748void
 749amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
 750					      uint8_t lane_num, uint8_t lane_set)
 751{
 752	struct drm_device *dev = encoder->dev;
 753	struct amdgpu_device *adev = drm_to_adev(dev);
 754	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 755	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
 756	struct drm_connector *connector;
 757	union dig_transmitter_control args;
 758	int index = 0;
 759	uint8_t frev, crev;
 760	bool is_dp = false;
 761	int pll_id = 0;
 762	int dp_clock = 0;
 763	int dp_lane_count = 0;
 764	int connector_object_id = 0;
 
 765	int dig_encoder = dig->dig_encoder;
 766	int hpd_id = AMDGPU_HPD_NONE;
 767
 768	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 769		connector = amdgpu_get_connector_for_encoder_init(encoder);
 770		/* just needed to avoid bailing in the encoder check.  the encoder
 771		 * isn't used for init
 772		 */
 773		dig_encoder = 0;
 774	} else
 775		connector = amdgpu_get_connector_for_encoder(encoder);
 776
 777	if (connector) {
 778		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
 779		struct amdgpu_connector_atom_dig *dig_connector =
 780			amdgpu_connector->con_priv;
 781
 782		hpd_id = amdgpu_connector->hpd.hpd;
 783		dp_clock = dig_connector->dp_clock;
 784		dp_lane_count = dig_connector->dp_lane_count;
 785		connector_object_id =
 786			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
 787	}
 788
 789	if (encoder->crtc) {
 790		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
 791		pll_id = amdgpu_crtc->pll_id;
 792	}
 793
 794	/* no dig encoder assigned */
 795	if (dig_encoder == -1)
 796		return;
 797
 798	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
 799		is_dp = true;
 800
 801	memset(&args, 0, sizeof(args));
 802
 803	switch (amdgpu_encoder->encoder_id) {
 804	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
 805		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
 806		break;
 807	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 808	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 809	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 810	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 811		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
 812		break;
 813	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 814		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
 815		break;
 816	}
 817
 818	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 819		return;
 820
 821	switch (frev) {
 822	case 1:
 823		switch (crev) {
 824		case 1:
 825			args.v1.ucAction = action;
 826			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 827				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
 828			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 829				args.v1.asMode.ucLaneSel = lane_num;
 830				args.v1.asMode.ucLaneSet = lane_set;
 831			} else {
 832				if (is_dp)
 833					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
 834				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 835					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 836				else
 837					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 838			}
 839
 840			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
 841
 842			if (dig_encoder)
 843				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
 844			else
 845				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
 846
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 847			if (dig->linkb)
 848				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
 849			else
 850				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
 851
 852			if (is_dp)
 853				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
 854			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 855				if (dig->coherent_mode)
 856					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
 857				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 858					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
 859			}
 860			break;
 861		case 2:
 862			args.v2.ucAction = action;
 863			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 864				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
 865			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 866				args.v2.asMode.ucLaneSel = lane_num;
 867				args.v2.asMode.ucLaneSet = lane_set;
 868			} else {
 869				if (is_dp)
 870					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
 871				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 872					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 873				else
 874					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 875			}
 876
 877			args.v2.acConfig.ucEncoderSel = dig_encoder;
 878			if (dig->linkb)
 879				args.v2.acConfig.ucLinkSel = 1;
 880
 881			switch (amdgpu_encoder->encoder_id) {
 882			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 883				args.v2.acConfig.ucTransmitterSel = 0;
 884				break;
 885			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 886				args.v2.acConfig.ucTransmitterSel = 1;
 887				break;
 888			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 889				args.v2.acConfig.ucTransmitterSel = 2;
 890				break;
 891			}
 892
 893			if (is_dp) {
 894				args.v2.acConfig.fCoherentMode = 1;
 895				args.v2.acConfig.fDPConnector = 1;
 896			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 897				if (dig->coherent_mode)
 898					args.v2.acConfig.fCoherentMode = 1;
 899				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 900					args.v2.acConfig.fDualLinkConnector = 1;
 901			}
 902			break;
 903		case 3:
 904			args.v3.ucAction = action;
 905			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 906				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
 907			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 908				args.v3.asMode.ucLaneSel = lane_num;
 909				args.v3.asMode.ucLaneSet = lane_set;
 910			} else {
 911				if (is_dp)
 912					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
 913				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 914					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 915				else
 916					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 917			}
 918
 919			if (is_dp)
 920				args.v3.ucLaneNum = dp_lane_count;
 921			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 922				args.v3.ucLaneNum = 8;
 923			else
 924				args.v3.ucLaneNum = 4;
 925
 926			if (dig->linkb)
 927				args.v3.acConfig.ucLinkSel = 1;
 928			if (dig_encoder & 1)
 929				args.v3.acConfig.ucEncoderSel = 1;
 930
 931			/* Select the PLL for the PHY
 932			 * DP PHY should be clocked from external src if there is
 933			 * one.
 934			 */
 935			/* On DCE4, if there is an external clock, it generates the DP ref clock */
 936			if (is_dp && adev->clock.dp_extclk)
 937				args.v3.acConfig.ucRefClkSource = 2; /* external src */
 938			else
 939				args.v3.acConfig.ucRefClkSource = pll_id;
 940
 941			switch (amdgpu_encoder->encoder_id) {
 942			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 943				args.v3.acConfig.ucTransmitterSel = 0;
 944				break;
 945			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 946				args.v3.acConfig.ucTransmitterSel = 1;
 947				break;
 948			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 949				args.v3.acConfig.ucTransmitterSel = 2;
 950				break;
 951			}
 952
 953			if (is_dp)
 954				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
 955			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 956				if (dig->coherent_mode)
 957					args.v3.acConfig.fCoherentMode = 1;
 958				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 959					args.v3.acConfig.fDualLinkConnector = 1;
 960			}
 961			break;
 962		case 4:
 963			args.v4.ucAction = action;
 964			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 965				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
 966			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 967				args.v4.asMode.ucLaneSel = lane_num;
 968				args.v4.asMode.ucLaneSet = lane_set;
 969			} else {
 970				if (is_dp)
 971					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
 972				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 973					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 974				else
 975					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 976			}
 977
 978			if (is_dp)
 979				args.v4.ucLaneNum = dp_lane_count;
 980			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 981				args.v4.ucLaneNum = 8;
 982			else
 983				args.v4.ucLaneNum = 4;
 984
 985			if (dig->linkb)
 986				args.v4.acConfig.ucLinkSel = 1;
 987			if (dig_encoder & 1)
 988				args.v4.acConfig.ucEncoderSel = 1;
 989
 990			/* Select the PLL for the PHY
 991			 * DP PHY should be clocked from external src if there is
 992			 * one.
 993			 */
 994			/* On DCE5 DCPLL usually generates the DP ref clock */
 995			if (is_dp) {
 996				if (adev->clock.dp_extclk)
 997					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
 998				else
 999					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
1000			} else
1001				args.v4.acConfig.ucRefClkSource = pll_id;
1002
1003			switch (amdgpu_encoder->encoder_id) {
1004			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1005				args.v4.acConfig.ucTransmitterSel = 0;
1006				break;
1007			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1008				args.v4.acConfig.ucTransmitterSel = 1;
1009				break;
1010			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1011				args.v4.acConfig.ucTransmitterSel = 2;
1012				break;
1013			}
1014
1015			if (is_dp)
1016				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
1017			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1018				if (dig->coherent_mode)
1019					args.v4.acConfig.fCoherentMode = 1;
1020				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1021					args.v4.acConfig.fDualLinkConnector = 1;
1022			}
1023			break;
1024		case 5:
1025			args.v5.ucAction = action;
1026			if (is_dp)
1027				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
1028			else
1029				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1030
1031			switch (amdgpu_encoder->encoder_id) {
1032			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1033				if (dig->linkb)
1034					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1035				else
1036					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1037				break;
1038			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1039				if (dig->linkb)
1040					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1041				else
1042					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1043				break;
1044			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1045				if (dig->linkb)
1046					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1047				else
1048					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1049				break;
1050			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1051				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1052				break;
1053			}
1054			if (is_dp)
1055				args.v5.ucLaneNum = dp_lane_count;
1056			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1057				args.v5.ucLaneNum = 8;
1058			else
1059				args.v5.ucLaneNum = 4;
1060			args.v5.ucConnObjId = connector_object_id;
1061			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1062
1063			if (is_dp && adev->clock.dp_extclk)
1064				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
1065			else
1066				args.v5.asConfig.ucPhyClkSrcId = pll_id;
1067
1068			if (is_dp)
1069				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
1070			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1071				if (dig->coherent_mode)
1072					args.v5.asConfig.ucCoherentMode = 1;
1073			}
1074			if (hpd_id == AMDGPU_HPD_NONE)
1075				args.v5.asConfig.ucHPDSel = 0;
1076			else
1077				args.v5.asConfig.ucHPDSel = hpd_id + 1;
1078			args.v5.ucDigEncoderSel = 1 << dig_encoder;
1079			args.v5.ucDPLaneSet = lane_set;
1080			break;
1081		case 6:
1082			args.v6.ucAction = action;
1083			if (is_dp)
1084				args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
1085			else
1086				args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
1087
1088			switch (amdgpu_encoder->encoder_id) {
1089			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1090				if (dig->linkb)
1091					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1092				else
1093					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1094				break;
1095			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1096				if (dig->linkb)
1097					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1098				else
1099					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1100				break;
1101			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1102				if (dig->linkb)
1103					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1104				else
1105					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1106				break;
1107			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1108				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1109				break;
1110			}
1111			if (is_dp)
1112				args.v6.ucLaneNum = dp_lane_count;
1113			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1114				args.v6.ucLaneNum = 8;
1115			else
1116				args.v6.ucLaneNum = 4;
1117			args.v6.ucConnObjId = connector_object_id;
1118			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
1119				args.v6.ucDPLaneSet = lane_set;
1120			else
1121				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1122
1123			if (hpd_id == AMDGPU_HPD_NONE)
1124				args.v6.ucHPDSel = 0;
1125			else
1126				args.v6.ucHPDSel = hpd_id + 1;
1127			args.v6.ucDigEncoderSel = 1 << dig_encoder;
1128			break;
1129		default:
1130			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1131			break;
1132		}
1133		break;
1134	default:
1135		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1136		break;
1137	}
1138
1139	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
1140}
1141
1142bool
1143amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
1144				     int action)
1145{
1146	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1147	struct drm_device *dev = amdgpu_connector->base.dev;
1148	struct amdgpu_device *adev = drm_to_adev(dev);
1149	union dig_transmitter_control args;
1150	int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
1151	uint8_t frev, crev;
1152
1153	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
1154		goto done;
1155
1156	if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
1157	    (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
1158		goto done;
1159
1160	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1161		goto done;
1162
1163	memset(&args, 0, sizeof(args));
1164
1165	args.v1.ucAction = action;
1166
1167	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
1168
1169	/* wait for the panel to power up */
1170	if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
1171		int i;
1172
1173		for (i = 0; i < 300; i++) {
1174			if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
1175				return true;
1176			mdelay(1);
1177		}
1178		return false;
1179	}
1180done:
1181	return true;
1182}
1183
1184union external_encoder_control {
1185	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
1186	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
1187};
1188
1189static void
1190amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
1191					struct drm_encoder *ext_encoder,
1192					int action)
1193{
1194	struct drm_device *dev = encoder->dev;
1195	struct amdgpu_device *adev = drm_to_adev(dev);
1196	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1197	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
1198	union external_encoder_control args;
1199	struct drm_connector *connector;
1200	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
1201	u8 frev, crev;
1202	int dp_clock = 0;
1203	int dp_lane_count = 0;
1204	int connector_object_id = 0;
1205	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1206
1207	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1208		connector = amdgpu_get_connector_for_encoder_init(encoder);
1209	else
1210		connector = amdgpu_get_connector_for_encoder(encoder);
1211
1212	if (connector) {
1213		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1214		struct amdgpu_connector_atom_dig *dig_connector =
1215			amdgpu_connector->con_priv;
1216
1217		dp_clock = dig_connector->dp_clock;
1218		dp_lane_count = dig_connector->dp_lane_count;
1219		connector_object_id =
1220			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
1221	}
1222
1223	memset(&args, 0, sizeof(args));
1224
1225	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1226		return;
1227
1228	switch (frev) {
1229	case 1:
1230		/* no params on frev 1 */
1231		break;
1232	case 2:
1233		switch (crev) {
1234		case 1:
1235		case 2:
1236			args.v1.sDigEncoder.ucAction = action;
1237			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1238			args.v1.sDigEncoder.ucEncoderMode =
1239				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1240
1241			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
1242				if (dp_clock == 270000)
1243					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
1244				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
1245			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1246				args.v1.sDigEncoder.ucLaneNum = 8;
1247			else
1248				args.v1.sDigEncoder.ucLaneNum = 4;
1249			break;
1250		case 3:
1251			args.v3.sExtEncoder.ucAction = action;
1252			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1253				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
1254			else
1255				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1256			args.v3.sExtEncoder.ucEncoderMode =
1257				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1258
1259			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
1260				if (dp_clock == 270000)
1261					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
1262				else if (dp_clock == 540000)
1263					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
1264				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
1265			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1266				args.v3.sExtEncoder.ucLaneNum = 8;
1267			else
1268				args.v3.sExtEncoder.ucLaneNum = 4;
1269			switch (ext_enum) {
1270			case GRAPH_OBJECT_ENUM_ID1:
1271				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
1272				break;
1273			case GRAPH_OBJECT_ENUM_ID2:
1274				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
1275				break;
1276			case GRAPH_OBJECT_ENUM_ID3:
1277				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
1278				break;
1279			}
1280			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
1281			break;
1282		default:
1283			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1284			return;
1285		}
1286		break;
1287	default:
1288		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1289		return;
1290	}
1291	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
1292}
1293
1294static void
1295amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
1296{
1297	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1298	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1299	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1300	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1301	struct amdgpu_connector *amdgpu_connector = NULL;
1302	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;
1303
1304	if (connector) {
1305		amdgpu_connector = to_amdgpu_connector(connector);
1306		amdgpu_dig_connector = amdgpu_connector->con_priv;
1307	}
1308
1309	if (action == ATOM_ENABLE) {
1310		if (!connector)
1311			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
1312		else
1313			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);
1314
1315		/* setup and enable the encoder */
1316		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
1317		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1318						   ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
1319						   dig->panel_mode);
1320		if (ext_encoder)
1321			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1322								EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
1323		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1324		    connector) {
1325			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1326				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1327								     ATOM_TRANSMITTER_ACTION_POWER_ON);
1328				amdgpu_dig_connector->edp_on = true;
1329			}
1330		}
1331		/* enable the transmitter */
1332		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1333						       ATOM_TRANSMITTER_ACTION_ENABLE,
1334						       0, 0);
1335		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1336		    connector) {
1337			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
1338			amdgpu_atombios_dp_link_train(encoder, connector);
1339			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
1340		}
1341		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1342			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
1343		if (ext_encoder)
1344			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
1345	} else {
1346		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1347		    connector)
1348			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1349							   ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
1350		if (ext_encoder)
1351			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
1352		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1353			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1354							       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
1355
1356		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1357		    connector)
1358			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
1359		/* disable the transmitter */
1360		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1361						       ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
1362		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1363		    connector) {
1364			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1365				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1366								     ATOM_TRANSMITTER_ACTION_POWER_OFF);
1367				amdgpu_dig_connector->edp_on = false;
1368			}
1369		}
1370	}
1371}
1372
1373void
1374amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
1375{
1376	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1377
1378	DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
1379		  amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
1380		  amdgpu_encoder->active_device);
1381	switch (amdgpu_encoder->encoder_id) {
1382	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1383	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1384	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1385	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1386		switch (mode) {
1387		case DRM_MODE_DPMS_ON:
1388			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
1389			break;
1390		case DRM_MODE_DPMS_STANDBY:
1391		case DRM_MODE_DPMS_SUSPEND:
1392		case DRM_MODE_DPMS_OFF:
1393			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
1394			break;
1395		}
1396		break;
1397	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1398		switch (mode) {
1399		case DRM_MODE_DPMS_ON:
1400			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
1401			break;
1402		case DRM_MODE_DPMS_STANDBY:
1403		case DRM_MODE_DPMS_SUSPEND:
1404		case DRM_MODE_DPMS_OFF:
1405			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
1406			break;
1407		}
1408		break;
1409	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1410		switch (mode) {
1411		case DRM_MODE_DPMS_ON:
1412			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
1413			break;
1414		case DRM_MODE_DPMS_STANDBY:
1415		case DRM_MODE_DPMS_SUSPEND:
1416		case DRM_MODE_DPMS_OFF:
1417			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
1418			break;
1419		}
1420		break;
1421	default:
1422		return;
1423	}
1424}
1425
1426union crtc_source_param {
1427	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
1428	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
1429	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
1430};
1431
1432void
1433amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1434{
1435	struct drm_device *dev = encoder->dev;
1436	struct amdgpu_device *adev = drm_to_adev(dev);
1437	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1438	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1439	union crtc_source_param args;
1440	int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
1441	uint8_t frev, crev;
1442	struct amdgpu_encoder_atom_dig *dig;
1443
1444	memset(&args, 0, sizeof(args));
1445
1446	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1447		return;
1448
1449	switch (frev) {
1450	case 1:
1451		switch (crev) {
1452		case 1:
1453		default:
1454			args.v1.ucCRTC = amdgpu_crtc->crtc_id;
1455			switch (amdgpu_encoder->encoder_id) {
1456			case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
1457			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
1458				args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
1459				break;
1460			case ENCODER_OBJECT_ID_INTERNAL_LVDS:
1461			case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
1462				if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
1463					args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
1464				else
1465					args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
1466				break;
1467			case ENCODER_OBJECT_ID_INTERNAL_DVO1:
1468			case ENCODER_OBJECT_ID_INTERNAL_DDI:
1469			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1470				args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
1471				break;
1472			case ENCODER_OBJECT_ID_INTERNAL_DAC1:
1473			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1474				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1475					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1476				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1477					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1478				else
1479					args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
1480				break;
1481			case ENCODER_OBJECT_ID_INTERNAL_DAC2:
1482			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1483				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1484					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1485				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1486					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1487				else
1488					args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
1489				break;
1490			}
1491			break;
1492		case 2:
1493			args.v2.ucCRTC = amdgpu_crtc->crtc_id;
1494			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1495				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1496
1497				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1498					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1499				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1500					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1501				else
1502					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1503			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1504				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1505			} else {
1506				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1507			}
1508			switch (amdgpu_encoder->encoder_id) {
1509			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1510			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1511			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1512			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1513			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1514				dig = amdgpu_encoder->enc_priv;
1515				switch (dig->dig_encoder) {
1516				case 0:
1517					args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1518					break;
1519				case 1:
1520					args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1521					break;
1522				case 2:
1523					args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1524					break;
1525				case 3:
1526					args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1527					break;
1528				case 4:
1529					args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1530					break;
1531				case 5:
1532					args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1533					break;
1534				case 6:
1535					args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1536					break;
1537				}
1538				break;
1539			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1540				args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1541				break;
1542			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1543				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1544					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1545				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1546					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1547				else
1548					args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1549				break;
1550			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1551				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1552					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1553				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1554					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1555				else
1556					args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1557				break;
1558			}
1559			break;
1560		case 3:
1561			args.v3.ucCRTC = amdgpu_crtc->crtc_id;
1562			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1563				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1564
1565				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1566					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1567				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1568					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1569				else
1570					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1571			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1572				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1573			} else {
1574				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1575			}
1576			args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
1577			switch (amdgpu_encoder->encoder_id) {
1578			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1579			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1580			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1581			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1582			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1583				dig = amdgpu_encoder->enc_priv;
1584				switch (dig->dig_encoder) {
1585				case 0:
1586					args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1587					break;
1588				case 1:
1589					args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1590					break;
1591				case 2:
1592					args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1593					break;
1594				case 3:
1595					args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1596					break;
1597				case 4:
1598					args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1599					break;
1600				case 5:
1601					args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1602					break;
1603				case 6:
1604					args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1605					break;
1606				}
1607				break;
1608			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1609				args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1610				break;
1611			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1612				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1613					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1614				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1615					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1616				else
1617					args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1618				break;
1619			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1620				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1621					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1622				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1623					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1624				else
1625					args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1626				break;
1627			}
1628			break;
1629		}
1630		break;
1631	default:
1632		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1633		return;
1634	}
1635
1636	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
1637}
1638
1639/* This only needs to be called once at startup */
1640void
1641amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
1642{
1643	struct drm_device *dev = adev_to_drm(adev);
1644	struct drm_encoder *encoder;
1645
1646	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1647		struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1648		struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1649
1650		switch (amdgpu_encoder->encoder_id) {
1651		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1652		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1653		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1654		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1655			amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
1656							       0, 0);
1657			break;
1658		}
1659
1660		if (ext_encoder)
1661			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1662								EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
1663	}
1664}
1665
1666static bool
1667amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
1668				 struct drm_connector *connector)
1669{
1670	struct drm_device *dev = encoder->dev;
1671	struct amdgpu_device *adev = drm_to_adev(dev);
1672	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1673	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1674
1675	if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
1676				       ATOM_DEVICE_CV_SUPPORT |
1677				       ATOM_DEVICE_CRT_SUPPORT)) {
1678		DAC_LOAD_DETECTION_PS_ALLOCATION args;
1679		int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
1680		uint8_t frev, crev;
1681
1682		memset(&args, 0, sizeof(args));
1683
1684		if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1685			return false;
1686
1687		args.sDacload.ucMisc = 0;
1688
1689		if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
1690		    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
1691			args.sDacload.ucDacType = ATOM_DAC_A;
1692		else
1693			args.sDacload.ucDacType = ATOM_DAC_B;
1694
1695		if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
1696			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
1697		else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
1698			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
1699		else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1700			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
1701			if (crev >= 3)
1702				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1703		} else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1704			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
1705			if (crev >= 3)
1706				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1707		}
1708
1709		amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args, sizeof(args));
1710
1711		return true;
1712	} else
1713		return false;
1714}
1715
1716enum drm_connector_status
1717amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
1718			    struct drm_connector *connector)
1719{
1720	struct drm_device *dev = encoder->dev;
1721	struct amdgpu_device *adev = drm_to_adev(dev);
1722	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1723	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1724	uint32_t bios_0_scratch;
1725
1726	if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
1727		DRM_DEBUG_KMS("detect returned false \n");
1728		return connector_status_unknown;
1729	}
1730
1731	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1732
1733	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1734	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1735		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1736			return connector_status_connected;
1737	}
1738	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1739		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1740			return connector_status_connected;
1741	}
1742	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1743		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1744			return connector_status_connected;
1745	}
1746	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1747		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1748			return connector_status_connected; /* CTV */
1749		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1750			return connector_status_connected; /* STV */
1751	}
1752	return connector_status_disconnected;
1753}
1754
1755enum drm_connector_status
1756amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
1757			    struct drm_connector *connector)
1758{
1759	struct drm_device *dev = encoder->dev;
1760	struct amdgpu_device *adev = drm_to_adev(dev);
1761	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1762	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1763	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1764	u32 bios_0_scratch;
1765
1766	if (!ext_encoder)
1767		return connector_status_unknown;
1768
1769	if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
1770		return connector_status_unknown;
1771
1772	/* load detect on the dp bridge */
1773	amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1774						EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
1775
1776	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1777
1778	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1779	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1780		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1781			return connector_status_connected;
1782	}
1783	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1784		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1785			return connector_status_connected;
1786	}
1787	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1788		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1789			return connector_status_connected;
1790	}
1791	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1792		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1793			return connector_status_connected; /* CTV */
1794		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1795			return connector_status_connected; /* STV */
1796	}
1797	return connector_status_disconnected;
1798}
1799
1800void
1801amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
1802{
1803	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1804
1805	if (ext_encoder)
1806		/* ddc_setup on the dp bridge */
1807		amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1808							EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
1809
1810}
1811
1812void
1813amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
1814				       struct drm_encoder *encoder,
1815				       bool connected)
1816{
1817	struct drm_device *dev = connector->dev;
1818	struct amdgpu_device *adev = drm_to_adev(dev);
1819	struct amdgpu_connector *amdgpu_connector =
1820	    to_amdgpu_connector(connector);
1821	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1822	uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
1823
1824	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1825	bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
1826	bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
1827
1828	if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
1829	    (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
1830		if (connected) {
1831			DRM_DEBUG_KMS("LCD1 connected\n");
1832			bios_0_scratch |= ATOM_S0_LCD1;
1833			bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
1834			bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
1835		} else {
1836			DRM_DEBUG_KMS("LCD1 disconnected\n");
1837			bios_0_scratch &= ~ATOM_S0_LCD1;
1838			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
1839			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
1840		}
1841	}
1842	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
1843	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
1844		if (connected) {
1845			DRM_DEBUG_KMS("CRT1 connected\n");
1846			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
1847			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
1848			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
1849		} else {
1850			DRM_DEBUG_KMS("CRT1 disconnected\n");
1851			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
1852			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
1853			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
1854		}
1855	}
1856	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
1857	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
1858		if (connected) {
1859			DRM_DEBUG_KMS("CRT2 connected\n");
1860			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
1861			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
1862			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
1863		} else {
1864			DRM_DEBUG_KMS("CRT2 disconnected\n");
1865			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
1866			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
1867			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
1868		}
1869	}
1870	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
1871	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
1872		if (connected) {
1873			DRM_DEBUG_KMS("DFP1 connected\n");
1874			bios_0_scratch |= ATOM_S0_DFP1;
1875			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
1876			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
1877		} else {
1878			DRM_DEBUG_KMS("DFP1 disconnected\n");
1879			bios_0_scratch &= ~ATOM_S0_DFP1;
1880			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
1881			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
1882		}
1883	}
1884	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
1885	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
1886		if (connected) {
1887			DRM_DEBUG_KMS("DFP2 connected\n");
1888			bios_0_scratch |= ATOM_S0_DFP2;
1889			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
1890			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
1891		} else {
1892			DRM_DEBUG_KMS("DFP2 disconnected\n");
1893			bios_0_scratch &= ~ATOM_S0_DFP2;
1894			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
1895			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
1896		}
1897	}
1898	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
1899	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
1900		if (connected) {
1901			DRM_DEBUG_KMS("DFP3 connected\n");
1902			bios_0_scratch |= ATOM_S0_DFP3;
1903			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
1904			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
1905		} else {
1906			DRM_DEBUG_KMS("DFP3 disconnected\n");
1907			bios_0_scratch &= ~ATOM_S0_DFP3;
1908			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
1909			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
1910		}
1911	}
1912	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
1913	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
1914		if (connected) {
1915			DRM_DEBUG_KMS("DFP4 connected\n");
1916			bios_0_scratch |= ATOM_S0_DFP4;
1917			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
1918			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
1919		} else {
1920			DRM_DEBUG_KMS("DFP4 disconnected\n");
1921			bios_0_scratch &= ~ATOM_S0_DFP4;
1922			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
1923			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
1924		}
1925	}
1926	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
1927	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
1928		if (connected) {
1929			DRM_DEBUG_KMS("DFP5 connected\n");
1930			bios_0_scratch |= ATOM_S0_DFP5;
1931			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
1932			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
1933		} else {
1934			DRM_DEBUG_KMS("DFP5 disconnected\n");
1935			bios_0_scratch &= ~ATOM_S0_DFP5;
1936			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
1937			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
1938		}
1939	}
1940	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
1941	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
1942		if (connected) {
1943			DRM_DEBUG_KMS("DFP6 connected\n");
1944			bios_0_scratch |= ATOM_S0_DFP6;
1945			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
1946			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
1947		} else {
1948			DRM_DEBUG_KMS("DFP6 disconnected\n");
1949			bios_0_scratch &= ~ATOM_S0_DFP6;
1950			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
1951			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
1952		}
1953	}
1954
1955	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
1956	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
1957	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
1958}
1959
1960union lvds_info {
1961	struct _ATOM_LVDS_INFO info;
1962	struct _ATOM_LVDS_INFO_V12 info_12;
1963};
1964
1965struct amdgpu_encoder_atom_dig *
1966amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
1967{
1968	struct drm_device *dev = encoder->base.dev;
1969	struct amdgpu_device *adev = drm_to_adev(dev);
1970	struct amdgpu_mode_info *mode_info = &adev->mode_info;
1971	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
1972	uint16_t data_offset, misc;
1973	union lvds_info *lvds_info;
1974	uint8_t frev, crev;
1975	struct amdgpu_encoder_atom_dig *lvds = NULL;
1976	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1977
1978	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
1979				   &frev, &crev, &data_offset)) {
1980		lvds_info =
1981			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
1982		lvds =
1983		    kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
1984
1985		if (!lvds)
1986			return NULL;
1987
1988		lvds->native_mode.clock =
1989		    le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
1990		lvds->native_mode.hdisplay =
1991		    le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
1992		lvds->native_mode.vdisplay =
1993		    le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
1994		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
1995			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
1996		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
1997			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
1998		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
1999			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
2000		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
2001			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
2002		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
2003			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
2004		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
2005			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
2006		lvds->panel_pwr_delay =
2007		    le16_to_cpu(lvds_info->info.usOffDelayInMs);
2008		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
2009
2010		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
2011		if (misc & ATOM_VSYNC_POLARITY)
2012			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
2013		if (misc & ATOM_HSYNC_POLARITY)
2014			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
2015		if (misc & ATOM_COMPOSITESYNC)
2016			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
2017		if (misc & ATOM_INTERLACE)
2018			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
2019		if (misc & ATOM_DOUBLE_CLOCK_MODE)
2020			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
2021
2022		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
2023		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
2024
2025		/* set crtc values */
2026		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
2027
2028		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
2029
2030		encoder->native_mode = lvds->native_mode;
2031
2032		if (encoder_enum == 2)
2033			lvds->linkb = true;
2034		else
2035			lvds->linkb = false;
2036
2037		/* parse the lcd record table */
2038		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
2039			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
2040			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
2041			bool bad_record = false;
2042			u8 *record;
2043
2044			if ((frev == 1) && (crev < 2))
2045				/* absolute */
2046				record = (u8 *)(mode_info->atom_context->bios +
2047						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2048			else
2049				/* relative */
2050				record = (u8 *)(mode_info->atom_context->bios +
2051						data_offset +
2052						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2053			while (*record != ATOM_RECORD_END_TYPE) {
2054				switch (*record) {
2055				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
2056					record += sizeof(ATOM_PATCH_RECORD_MODE);
2057					break;
2058				case LCD_RTS_RECORD_TYPE:
2059					record += sizeof(ATOM_LCD_RTS_RECORD);
2060					break;
2061				case LCD_CAP_RECORD_TYPE:
2062					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
2063					break;
2064				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
2065					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
2066					if (fake_edid_record->ucFakeEDIDLength) {
2067						struct edid *edid;
2068						int edid_size =
2069							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
2070						edid = kmalloc(edid_size, GFP_KERNEL);
2071						if (edid) {
2072							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
2073							       fake_edid_record->ucFakeEDIDLength);
2074
2075							if (drm_edid_is_valid(edid)) {
2076								adev->mode_info.bios_hardcoded_edid = edid;
2077								adev->mode_info.bios_hardcoded_edid_size = edid_size;
2078							} else
2079								kfree(edid);
2080						}
2081					}
2082					record += fake_edid_record->ucFakeEDIDLength ?
2083						  struct_size(fake_edid_record,
2084							      ucFakeEDIDString,
2085							      fake_edid_record->ucFakeEDIDLength) :
2086						  /* empty fake edid record must be 3 bytes long */
2087						  sizeof(ATOM_FAKE_EDID_PATCH_RECORD) + 1;
2088					break;
2089				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
2090					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
2091					lvds->native_mode.width_mm = panel_res_record->usHSize;
2092					lvds->native_mode.height_mm = panel_res_record->usVSize;
2093					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
2094					break;
2095				default:
2096					DRM_ERROR("Bad LCD record %d\n", *record);
2097					bad_record = true;
2098					break;
2099				}
2100				if (bad_record)
2101					break;
2102			}
2103		}
2104	}
2105	return lvds;
2106}
2107
2108struct amdgpu_encoder_atom_dig *
2109amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
2110{
2111	int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2112	struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2113
2114	if (!dig)
2115		return NULL;
2116
2117	/* coherent mode by default */
2118	dig->coherent_mode = true;
2119	dig->dig_encoder = -1;
2120
2121	if (encoder_enum == 2)
2122		dig->linkb = true;
2123	else
2124		dig->linkb = false;
2125
2126	return dig;
2127}
2128
v4.17
   1/*
   2 * Copyright 2007-11 Advanced Micro Devices, Inc.
   3 * Copyright 2008 Red Hat Inc.
   4 *
   5 * Permission is hereby granted, free of charge, to any person obtaining a
   6 * copy of this software and associated documentation files (the "Software"),
   7 * to deal in the Software without restriction, including without limitation
   8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
   9 * and/or sell copies of the Software, and to permit persons to whom the
  10 * Software is furnished to do so, subject to the following conditions:
  11 *
  12 * The above copyright notice and this permission notice shall be included in
  13 * all copies or substantial portions of the Software.
  14 *
  15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
  18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  21 * OTHER DEALINGS IN THE SOFTWARE.
  22 *
  23 * Authors: Dave Airlie
  24 *          Alex Deucher
  25 */
  26#include <drm/drmP.h>
  27#include <drm/drm_crtc_helper.h>
 
 
 
 
  28#include <drm/amdgpu_drm.h>
  29#include "amdgpu.h"
  30#include "amdgpu_connectors.h"
 
  31#include "atom.h"
  32#include "atombios_encoders.h"
  33#include "atombios_dp.h"
  34#include <linux/backlight.h>
  35#include "bif/bif_4_1_d.h"
  36
  37u8
  38amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
  39{
  40	u8 backlight_level;
  41	u32 bios_2_scratch;
  42
  43	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  44
  45	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
  46			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
  47
  48	return backlight_level;
  49}
  50
  51void
  52amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
  53					    u8 backlight_level)
  54{
  55	u32 bios_2_scratch;
  56
  57	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  58
  59	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
  60	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
  61			   ATOM_S2_CURRENT_BL_LEVEL_MASK);
  62
  63	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
  64}
  65
  66u8
  67amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
  68{
  69	struct drm_device *dev = amdgpu_encoder->base.dev;
  70	struct amdgpu_device *adev = dev->dev_private;
  71
  72	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  73		return 0;
  74
  75	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  76}
  77
  78void
  79amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
  80				     u8 level)
  81{
  82	struct drm_encoder *encoder = &amdgpu_encoder->base;
  83	struct drm_device *dev = amdgpu_encoder->base.dev;
  84	struct amdgpu_device *adev = dev->dev_private;
  85	struct amdgpu_encoder_atom_dig *dig;
  86
  87	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  88		return;
  89
  90	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
  91	    amdgpu_encoder->enc_priv) {
  92		dig = amdgpu_encoder->enc_priv;
  93		dig->backlight_level = level;
  94		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
  95
  96		switch (amdgpu_encoder->encoder_id) {
  97		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  98		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  99		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 100		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 101		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 102			if (dig->backlight_level == 0)
 103				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 104								       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
 105			else {
 106				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 107								       ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
 108				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 109								       ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
 110			}
 111			break;
 112		default:
 113			break;
 114		}
 115	}
 116}
 117
 118#if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
 119
 120static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
 121{
 122	u8 level;
 123
 124	/* Convert brightness to hardware level */
 125	if (bd->props.brightness < 0)
 126		level = 0;
 127	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
 128		level = AMDGPU_MAX_BL_LEVEL;
 129	else
 130		level = bd->props.brightness;
 131
 132	return level;
 133}
 134
 135static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
 136{
 137	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
 138	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
 139
 140	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
 141					     amdgpu_atombios_encoder_backlight_level(bd));
 142
 143	return 0;
 144}
 145
 146static int
 147amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
 148{
 149	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
 150	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
 151	struct drm_device *dev = amdgpu_encoder->base.dev;
 152	struct amdgpu_device *adev = dev->dev_private;
 153
 154	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
 155}
 156
 157static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
 158	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
 159	.update_status	= amdgpu_atombios_encoder_update_backlight_status,
 160};
 161
 162void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
 163				     struct drm_connector *drm_connector)
 164{
 165	struct drm_device *dev = amdgpu_encoder->base.dev;
 166	struct amdgpu_device *adev = dev->dev_private;
 167	struct backlight_device *bd;
 168	struct backlight_properties props;
 169	struct amdgpu_backlight_privdata *pdata;
 170	struct amdgpu_encoder_atom_dig *dig;
 171	u8 backlight_level;
 172	char bl_name[16];
 173
 174	/* Mac laptops with multiple GPUs use the gmux driver for backlight
 175	 * so don't register a backlight device
 176	 */
 177	if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
 178	    (adev->pdev->device == 0x6741))
 179		return;
 180
 181	if (!amdgpu_encoder->enc_priv)
 182		return;
 183
 184	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
 185		return;
 
 
 
 
 
 186
 187	pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
 188	if (!pdata) {
 189		DRM_ERROR("Memory allocation failed\n");
 190		goto error;
 191	}
 192
 193	memset(&props, 0, sizeof(props));
 194	props.max_brightness = AMDGPU_MAX_BL_LEVEL;
 195	props.type = BACKLIGHT_RAW;
 196	snprintf(bl_name, sizeof(bl_name),
 197		 "amdgpu_bl%d", dev->primary->index);
 198	bd = backlight_device_register(bl_name, drm_connector->kdev,
 199				       pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
 200	if (IS_ERR(bd)) {
 201		DRM_ERROR("Backlight registration failed\n");
 202		goto error;
 203	}
 204
 205	pdata->encoder = amdgpu_encoder;
 206
 207	backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
 208
 209	dig = amdgpu_encoder->enc_priv;
 210	dig->bl_dev = bd;
 211
 212	bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
 213	bd->props.power = FB_BLANK_UNBLANK;
 214	backlight_update_status(bd);
 215
 216	DRM_INFO("amdgpu atom DIG backlight initialized\n");
 217
 218	return;
 219
 220error:
 221	kfree(pdata);
 222	return;
 
 
 
 
 223}
 224
 225void
 226amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
 227{
 228	struct drm_device *dev = amdgpu_encoder->base.dev;
 229	struct amdgpu_device *adev = dev->dev_private;
 230	struct backlight_device *bd = NULL;
 231	struct amdgpu_encoder_atom_dig *dig;
 232
 233	if (!amdgpu_encoder->enc_priv)
 234		return;
 235
 236	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
 237		return;
 238
 239	dig = amdgpu_encoder->enc_priv;
 240	bd = dig->bl_dev;
 241	dig->bl_dev = NULL;
 242
 243	if (bd) {
 244		struct amdgpu_legacy_backlight_privdata *pdata;
 245
 246		pdata = bl_get_data(bd);
 247		backlight_device_unregister(bd);
 248		kfree(pdata);
 249
 250		DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
 251	}
 252}
 253
 254#else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
 255
 256void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder)
 257{
 258}
 259
 260void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
 261{
 262}
 263
 264#endif
 265
 266bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
 267{
 268	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 269	switch (amdgpu_encoder->encoder_id) {
 270	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
 271	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 272	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 273	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 274	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 275		return true;
 276	default:
 277		return false;
 278	}
 279}
 280
 281bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
 282				 const struct drm_display_mode *mode,
 283				 struct drm_display_mode *adjusted_mode)
 284{
 285	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 286
 287	/* set the active encoder to connector routing */
 288	amdgpu_encoder_set_active_device(encoder);
 289	drm_mode_set_crtcinfo(adjusted_mode, 0);
 290
 291	/* hw bug */
 292	if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
 293	    && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
 294		adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
 295
 296	/* vertical FP must be at least 1 */
 297	if (mode->crtc_vsync_start == mode->crtc_vdisplay)
 298		adjusted_mode->crtc_vsync_start++;
 299
 300	/* get the native mode for scaling */
 301	if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
 302		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
 303	else if (amdgpu_encoder->rmx_type != RMX_OFF)
 304		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
 305
 306	if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
 307	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
 308		struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
 309		amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
 310	}
 311
 312	return true;
 313}
 314
 315static void
 316amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
 317{
 318	struct drm_device *dev = encoder->dev;
 319	struct amdgpu_device *adev = dev->dev_private;
 320	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 321	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
 322	int index = 0;
 323
 324	memset(&args, 0, sizeof(args));
 325
 326	switch (amdgpu_encoder->encoder_id) {
 327	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
 328	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
 329		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
 330		break;
 331	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
 332	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
 333		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
 334		break;
 335	}
 336
 337	args.ucAction = action;
 338	args.ucDacStandard = ATOM_DAC1_PS2;
 339	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 340
 341	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 342
 343}
 344
 345static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
 346{
 347	int bpc = 8;
 348
 349	if (encoder->crtc) {
 350		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
 351		bpc = amdgpu_crtc->bpc;
 352	}
 353
 354	switch (bpc) {
 355	case 0:
 356		return PANEL_BPC_UNDEFINE;
 357	case 6:
 358		return PANEL_6BIT_PER_COLOR;
 359	case 8:
 360	default:
 361		return PANEL_8BIT_PER_COLOR;
 362	case 10:
 363		return PANEL_10BIT_PER_COLOR;
 364	case 12:
 365		return PANEL_12BIT_PER_COLOR;
 366	case 16:
 367		return PANEL_16BIT_PER_COLOR;
 368	}
 369}
 370
 371union dvo_encoder_control {
 372	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
 373	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
 374	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
 375	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
 376};
 377
 378static void
 379amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
 380{
 381	struct drm_device *dev = encoder->dev;
 382	struct amdgpu_device *adev = dev->dev_private;
 383	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 384	union dvo_encoder_control args;
 385	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
 386	uint8_t frev, crev;
 387
 388	memset(&args, 0, sizeof(args));
 389
 390	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 391		return;
 392
 393	switch (frev) {
 394	case 1:
 395		switch (crev) {
 396		case 1:
 397			/* R4xx, R5xx */
 398			args.ext_tmds.sXTmdsEncoder.ucEnable = action;
 399
 400			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 401				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
 402
 403			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
 404			break;
 405		case 2:
 406			/* RS600/690/740 */
 407			args.dvo.sDVOEncoder.ucAction = action;
 408			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 409			/* DFP1, CRT1, TV1 depending on the type of port */
 410			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
 411
 412			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 413				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
 414			break;
 415		case 3:
 416			/* R6xx */
 417			args.dvo_v3.ucAction = action;
 418			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 419			args.dvo_v3.ucDVOConfig = 0; /* XXX */
 420			break;
 421		case 4:
 422			/* DCE8 */
 423			args.dvo_v4.ucAction = action;
 424			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 425			args.dvo_v4.ucDVOConfig = 0; /* XXX */
 426			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 427			break;
 428		default:
 429			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 430			break;
 431		}
 432		break;
 433	default:
 434		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 435		break;
 436	}
 437
 438	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 439}
 440
 441int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
 442{
 443	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 444	struct drm_connector *connector;
 445	struct amdgpu_connector *amdgpu_connector;
 446	struct amdgpu_connector_atom_dig *dig_connector;
 447
 448	/* dp bridges are always DP */
 449	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
 450		return ATOM_ENCODER_MODE_DP;
 451
 452	/* DVO is always DVO */
 453	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
 454	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
 455		return ATOM_ENCODER_MODE_DVO;
 456
 457	connector = amdgpu_get_connector_for_encoder(encoder);
 458	/* if we don't have an active device yet, just use one of
 459	 * the connectors tied to the encoder.
 460	 */
 461	if (!connector)
 462		connector = amdgpu_get_connector_for_encoder_init(encoder);
 463	amdgpu_connector = to_amdgpu_connector(connector);
 464
 465	switch (connector->connector_type) {
 466	case DRM_MODE_CONNECTOR_DVII:
 467	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
 468		if (amdgpu_audio != 0) {
 469			if (amdgpu_connector->use_digital &&
 470			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
 471				return ATOM_ENCODER_MODE_HDMI;
 472			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
 473				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 474				return ATOM_ENCODER_MODE_HDMI;
 475			else if (amdgpu_connector->use_digital)
 476				return ATOM_ENCODER_MODE_DVI;
 477			else
 478				return ATOM_ENCODER_MODE_CRT;
 479		} else if (amdgpu_connector->use_digital) {
 480			return ATOM_ENCODER_MODE_DVI;
 481		} else {
 482			return ATOM_ENCODER_MODE_CRT;
 483		}
 484		break;
 485	case DRM_MODE_CONNECTOR_DVID:
 486	case DRM_MODE_CONNECTOR_HDMIA:
 487	default:
 488		if (amdgpu_audio != 0) {
 489			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
 490				return ATOM_ENCODER_MODE_HDMI;
 491			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
 492				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 493				return ATOM_ENCODER_MODE_HDMI;
 494			else
 495				return ATOM_ENCODER_MODE_DVI;
 496		} else {
 497			return ATOM_ENCODER_MODE_DVI;
 498		}
 499		break;
 500	case DRM_MODE_CONNECTOR_LVDS:
 501		return ATOM_ENCODER_MODE_LVDS;
 502		break;
 503	case DRM_MODE_CONNECTOR_DisplayPort:
 504		dig_connector = amdgpu_connector->con_priv;
 505		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
 506		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
 507			return ATOM_ENCODER_MODE_DP;
 508		} else if (amdgpu_audio != 0) {
 509			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
 510				return ATOM_ENCODER_MODE_HDMI;
 511			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
 512				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 513				return ATOM_ENCODER_MODE_HDMI;
 514			else
 515				return ATOM_ENCODER_MODE_DVI;
 516		} else {
 517			return ATOM_ENCODER_MODE_DVI;
 518		}
 519		break;
 520	case DRM_MODE_CONNECTOR_eDP:
 521		return ATOM_ENCODER_MODE_DP;
 522	case DRM_MODE_CONNECTOR_DVIA:
 523	case DRM_MODE_CONNECTOR_VGA:
 524		return ATOM_ENCODER_MODE_CRT;
 525		break;
 526	case DRM_MODE_CONNECTOR_Composite:
 527	case DRM_MODE_CONNECTOR_SVIDEO:
 528	case DRM_MODE_CONNECTOR_9PinDIN:
 529		/* fix me */
 530		return ATOM_ENCODER_MODE_TV;
 531		/*return ATOM_ENCODER_MODE_CV;*/
 532		break;
 533	}
 534}
 535
 536/*
 537 * DIG Encoder/Transmitter Setup
 538 *
 539 * DCE 6.0
 540 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
 541 * Supports up to 6 digital outputs
 542 * - 6 DIG encoder blocks.
 543 * - DIG to PHY mapping is hardcoded
 544 * DIG1 drives UNIPHY0 link A, A+B
 545 * DIG2 drives UNIPHY0 link B
 546 * DIG3 drives UNIPHY1 link A, A+B
 547 * DIG4 drives UNIPHY1 link B
 548 * DIG5 drives UNIPHY2 link A, A+B
 549 * DIG6 drives UNIPHY2 link B
 550 *
 551 * Routing
 552 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
 553 * Examples:
 554 * crtc0 -> dig2 -> LVTMA   links A+B -> TMDS/HDMI
 555 * crtc1 -> dig1 -> UNIPHY0 link  B   -> DP
 556 * crtc0 -> dig1 -> UNIPHY2 link  A   -> LVDS
 557 * crtc1 -> dig2 -> UNIPHY1 link  B+A -> TMDS/HDMI
 558 */
 559
 560union dig_encoder_control {
 561	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
 562	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
 563	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
 564	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
 565	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
 566};
 567
 568void
 569amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
 570				   int action, int panel_mode)
 571{
 572	struct drm_device *dev = encoder->dev;
 573	struct amdgpu_device *adev = dev->dev_private;
 574	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 575	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
 576	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
 577	union dig_encoder_control args;
 578	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
 579	uint8_t frev, crev;
 580	int dp_clock = 0;
 581	int dp_lane_count = 0;
 582	int hpd_id = AMDGPU_HPD_NONE;
 583
 584	if (connector) {
 585		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
 586		struct amdgpu_connector_atom_dig *dig_connector =
 587			amdgpu_connector->con_priv;
 588
 589		dp_clock = dig_connector->dp_clock;
 590		dp_lane_count = dig_connector->dp_lane_count;
 591		hpd_id = amdgpu_connector->hpd.hpd;
 592	}
 593
 594	/* no dig encoder assigned */
 595	if (dig->dig_encoder == -1)
 596		return;
 597
 598	memset(&args, 0, sizeof(args));
 599
 600	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 601		return;
 602
 603	switch (frev) {
 604	case 1:
 605		switch (crev) {
 606		case 1:
 607			args.v1.ucAction = action;
 608			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 609			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 610				args.v3.ucPanelMode = panel_mode;
 611			else
 612				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 613
 614			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
 615				args.v1.ucLaneNum = dp_lane_count;
 616			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 617				args.v1.ucLaneNum = 8;
 618			else
 619				args.v1.ucLaneNum = 4;
 620
 621			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
 622				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
 623			switch (amdgpu_encoder->encoder_id) {
 624			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 625				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
 626				break;
 627			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 628			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 629				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
 630				break;
 631			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 632				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
 633				break;
 634			}
 635			if (dig->linkb)
 636				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
 637			else
 638				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
 639			break;
 640		case 2:
 641		case 3:
 642			args.v3.ucAction = action;
 643			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 644			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 645				args.v3.ucPanelMode = panel_mode;
 646			else
 647				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 648
 649			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
 650				args.v3.ucLaneNum = dp_lane_count;
 651			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 652				args.v3.ucLaneNum = 8;
 653			else
 654				args.v3.ucLaneNum = 4;
 655
 656			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
 657				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
 658			args.v3.acConfig.ucDigSel = dig->dig_encoder;
 659			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 660			break;
 661		case 4:
 662			args.v4.ucAction = action;
 663			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 664			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 665				args.v4.ucPanelMode = panel_mode;
 666			else
 667				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 668
 669			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
 670				args.v4.ucLaneNum = dp_lane_count;
 671			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 672				args.v4.ucLaneNum = 8;
 673			else
 674				args.v4.ucLaneNum = 4;
 675
 676			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
 677				if (dp_clock == 540000)
 678					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
 679				else if (dp_clock == 324000)
 680					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
 681				else if (dp_clock == 270000)
 682					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
 683				else
 684					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
 685			}
 686			args.v4.acConfig.ucDigSel = dig->dig_encoder;
 687			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 688			if (hpd_id == AMDGPU_HPD_NONE)
 689				args.v4.ucHPD_ID = 0;
 690			else
 691				args.v4.ucHPD_ID = hpd_id + 1;
 692			break;
 693		case 5:
 694			switch (action) {
 695			case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
 696				args.v5.asDPPanelModeParam.ucAction = action;
 697				args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
 698				args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
 699				break;
 700			case ATOM_ENCODER_CMD_STREAM_SETUP:
 701				args.v5.asStreamParam.ucAction = action;
 702				args.v5.asStreamParam.ucDigId = dig->dig_encoder;
 703				args.v5.asStreamParam.ucDigMode =
 704					amdgpu_atombios_encoder_get_encoder_mode(encoder);
 705				if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
 706					args.v5.asStreamParam.ucLaneNum = dp_lane_count;
 707				else if (amdgpu_dig_monitor_is_duallink(encoder,
 708									amdgpu_encoder->pixel_clock))
 709					args.v5.asStreamParam.ucLaneNum = 8;
 710				else
 711					args.v5.asStreamParam.ucLaneNum = 4;
 712				args.v5.asStreamParam.ulPixelClock =
 713					cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
 714				args.v5.asStreamParam.ucBitPerColor =
 715					amdgpu_atombios_encoder_get_bpc(encoder);
 716				args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
 717				break;
 718			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
 719			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
 720			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
 721			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
 722			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
 723			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
 724			case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
 725			case ATOM_ENCODER_CMD_DP_VIDEO_ON:
 726				args.v5.asCmdParam.ucAction = action;
 727				args.v5.asCmdParam.ucDigId = dig->dig_encoder;
 728				break;
 729			default:
 730				DRM_ERROR("Unsupported action 0x%x\n", action);
 731				break;
 732			}
 733			break;
 734		default:
 735			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 736			break;
 737		}
 738		break;
 739	default:
 740		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 741		break;
 742	}
 743
 744	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 745
 746}
 747
 748union dig_transmitter_control {
 749	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
 750	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
 751	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
 752	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
 753	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
 754	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
 755};
 756
 757void
 758amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
 759					      uint8_t lane_num, uint8_t lane_set)
 760{
 761	struct drm_device *dev = encoder->dev;
 762	struct amdgpu_device *adev = dev->dev_private;
 763	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 764	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
 765	struct drm_connector *connector;
 766	union dig_transmitter_control args;
 767	int index = 0;
 768	uint8_t frev, crev;
 769	bool is_dp = false;
 770	int pll_id = 0;
 771	int dp_clock = 0;
 772	int dp_lane_count = 0;
 773	int connector_object_id = 0;
 774	int igp_lane_info = 0;
 775	int dig_encoder = dig->dig_encoder;
 776	int hpd_id = AMDGPU_HPD_NONE;
 777
 778	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 779		connector = amdgpu_get_connector_for_encoder_init(encoder);
 780		/* just needed to avoid bailing in the encoder check.  the encoder
 781		 * isn't used for init
 782		 */
 783		dig_encoder = 0;
 784	} else
 785		connector = amdgpu_get_connector_for_encoder(encoder);
 786
 787	if (connector) {
 788		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
 789		struct amdgpu_connector_atom_dig *dig_connector =
 790			amdgpu_connector->con_priv;
 791
 792		hpd_id = amdgpu_connector->hpd.hpd;
 793		dp_clock = dig_connector->dp_clock;
 794		dp_lane_count = dig_connector->dp_lane_count;
 795		connector_object_id =
 796			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
 797	}
 798
 799	if (encoder->crtc) {
 800		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
 801		pll_id = amdgpu_crtc->pll_id;
 802	}
 803
 804	/* no dig encoder assigned */
 805	if (dig_encoder == -1)
 806		return;
 807
 808	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
 809		is_dp = true;
 810
 811	memset(&args, 0, sizeof(args));
 812
 813	switch (amdgpu_encoder->encoder_id) {
 814	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
 815		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
 816		break;
 817	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 818	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 819	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 820	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 821		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
 822		break;
 823	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 824		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
 825		break;
 826	}
 827
 828	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 829		return;
 830
 831	switch (frev) {
 832	case 1:
 833		switch (crev) {
 834		case 1:
 835			args.v1.ucAction = action;
 836			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 837				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
 838			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 839				args.v1.asMode.ucLaneSel = lane_num;
 840				args.v1.asMode.ucLaneSet = lane_set;
 841			} else {
 842				if (is_dp)
 843					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
 844				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 845					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 846				else
 847					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 848			}
 849
 850			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
 851
 852			if (dig_encoder)
 853				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
 854			else
 855				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
 856
 857			if ((adev->flags & AMD_IS_APU) &&
 858			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
 859				if (is_dp ||
 860				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
 861					if (igp_lane_info & 0x1)
 862						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
 863					else if (igp_lane_info & 0x2)
 864						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
 865					else if (igp_lane_info & 0x4)
 866						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
 867					else if (igp_lane_info & 0x8)
 868						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
 869				} else {
 870					if (igp_lane_info & 0x3)
 871						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
 872					else if (igp_lane_info & 0xc)
 873						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
 874				}
 875			}
 876
 877			if (dig->linkb)
 878				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
 879			else
 880				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
 881
 882			if (is_dp)
 883				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
 884			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 885				if (dig->coherent_mode)
 886					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
 887				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 888					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
 889			}
 890			break;
 891		case 2:
 892			args.v2.ucAction = action;
 893			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 894				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
 895			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 896				args.v2.asMode.ucLaneSel = lane_num;
 897				args.v2.asMode.ucLaneSet = lane_set;
 898			} else {
 899				if (is_dp)
 900					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
 901				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 902					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 903				else
 904					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 905			}
 906
 907			args.v2.acConfig.ucEncoderSel = dig_encoder;
 908			if (dig->linkb)
 909				args.v2.acConfig.ucLinkSel = 1;
 910
 911			switch (amdgpu_encoder->encoder_id) {
 912			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 913				args.v2.acConfig.ucTransmitterSel = 0;
 914				break;
 915			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 916				args.v2.acConfig.ucTransmitterSel = 1;
 917				break;
 918			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 919				args.v2.acConfig.ucTransmitterSel = 2;
 920				break;
 921			}
 922
 923			if (is_dp) {
 924				args.v2.acConfig.fCoherentMode = 1;
 925				args.v2.acConfig.fDPConnector = 1;
 926			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 927				if (dig->coherent_mode)
 928					args.v2.acConfig.fCoherentMode = 1;
 929				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 930					args.v2.acConfig.fDualLinkConnector = 1;
 931			}
 932			break;
 933		case 3:
 934			args.v3.ucAction = action;
 935			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 936				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
 937			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 938				args.v3.asMode.ucLaneSel = lane_num;
 939				args.v3.asMode.ucLaneSet = lane_set;
 940			} else {
 941				if (is_dp)
 942					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
 943				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 944					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 945				else
 946					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 947			}
 948
 949			if (is_dp)
 950				args.v3.ucLaneNum = dp_lane_count;
 951			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 952				args.v3.ucLaneNum = 8;
 953			else
 954				args.v3.ucLaneNum = 4;
 955
 956			if (dig->linkb)
 957				args.v3.acConfig.ucLinkSel = 1;
 958			if (dig_encoder & 1)
 959				args.v3.acConfig.ucEncoderSel = 1;
 960
 961			/* Select the PLL for the PHY
 962			 * DP PHY should be clocked from external src if there is
 963			 * one.
 964			 */
 965			/* On DCE4, if there is an external clock, it generates the DP ref clock */
 966			if (is_dp && adev->clock.dp_extclk)
 967				args.v3.acConfig.ucRefClkSource = 2; /* external src */
 968			else
 969				args.v3.acConfig.ucRefClkSource = pll_id;
 970
 971			switch (amdgpu_encoder->encoder_id) {
 972			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 973				args.v3.acConfig.ucTransmitterSel = 0;
 974				break;
 975			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 976				args.v3.acConfig.ucTransmitterSel = 1;
 977				break;
 978			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 979				args.v3.acConfig.ucTransmitterSel = 2;
 980				break;
 981			}
 982
 983			if (is_dp)
 984				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
 985			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 986				if (dig->coherent_mode)
 987					args.v3.acConfig.fCoherentMode = 1;
 988				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 989					args.v3.acConfig.fDualLinkConnector = 1;
 990			}
 991			break;
 992		case 4:
 993			args.v4.ucAction = action;
 994			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 995				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
 996			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 997				args.v4.asMode.ucLaneSel = lane_num;
 998				args.v4.asMode.ucLaneSet = lane_set;
 999			} else {
1000				if (is_dp)
1001					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
1002				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1003					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
1004				else
1005					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1006			}
1007
1008			if (is_dp)
1009				args.v4.ucLaneNum = dp_lane_count;
1010			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1011				args.v4.ucLaneNum = 8;
1012			else
1013				args.v4.ucLaneNum = 4;
1014
1015			if (dig->linkb)
1016				args.v4.acConfig.ucLinkSel = 1;
1017			if (dig_encoder & 1)
1018				args.v4.acConfig.ucEncoderSel = 1;
1019
1020			/* Select the PLL for the PHY
1021			 * DP PHY should be clocked from external src if there is
1022			 * one.
1023			 */
1024			/* On DCE5 DCPLL usually generates the DP ref clock */
1025			if (is_dp) {
1026				if (adev->clock.dp_extclk)
1027					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
1028				else
1029					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
1030			} else
1031				args.v4.acConfig.ucRefClkSource = pll_id;
1032
1033			switch (amdgpu_encoder->encoder_id) {
1034			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1035				args.v4.acConfig.ucTransmitterSel = 0;
1036				break;
1037			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1038				args.v4.acConfig.ucTransmitterSel = 1;
1039				break;
1040			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1041				args.v4.acConfig.ucTransmitterSel = 2;
1042				break;
1043			}
1044
1045			if (is_dp)
1046				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
1047			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1048				if (dig->coherent_mode)
1049					args.v4.acConfig.fCoherentMode = 1;
1050				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1051					args.v4.acConfig.fDualLinkConnector = 1;
1052			}
1053			break;
1054		case 5:
1055			args.v5.ucAction = action;
1056			if (is_dp)
1057				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
1058			else
1059				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1060
1061			switch (amdgpu_encoder->encoder_id) {
1062			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1063				if (dig->linkb)
1064					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1065				else
1066					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1067				break;
1068			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1069				if (dig->linkb)
1070					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1071				else
1072					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1073				break;
1074			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1075				if (dig->linkb)
1076					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1077				else
1078					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1079				break;
1080			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1081				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1082				break;
1083			}
1084			if (is_dp)
1085				args.v5.ucLaneNum = dp_lane_count;
1086			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1087				args.v5.ucLaneNum = 8;
1088			else
1089				args.v5.ucLaneNum = 4;
1090			args.v5.ucConnObjId = connector_object_id;
1091			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1092
1093			if (is_dp && adev->clock.dp_extclk)
1094				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
1095			else
1096				args.v5.asConfig.ucPhyClkSrcId = pll_id;
1097
1098			if (is_dp)
1099				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
1100			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1101				if (dig->coherent_mode)
1102					args.v5.asConfig.ucCoherentMode = 1;
1103			}
1104			if (hpd_id == AMDGPU_HPD_NONE)
1105				args.v5.asConfig.ucHPDSel = 0;
1106			else
1107				args.v5.asConfig.ucHPDSel = hpd_id + 1;
1108			args.v5.ucDigEncoderSel = 1 << dig_encoder;
1109			args.v5.ucDPLaneSet = lane_set;
1110			break;
1111		case 6:
1112			args.v6.ucAction = action;
1113			if (is_dp)
1114				args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
1115			else
1116				args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
1117
1118			switch (amdgpu_encoder->encoder_id) {
1119			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1120				if (dig->linkb)
1121					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1122				else
1123					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1124				break;
1125			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1126				if (dig->linkb)
1127					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1128				else
1129					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1130				break;
1131			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1132				if (dig->linkb)
1133					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1134				else
1135					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1136				break;
1137			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1138				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1139				break;
1140			}
1141			if (is_dp)
1142				args.v6.ucLaneNum = dp_lane_count;
1143			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1144				args.v6.ucLaneNum = 8;
1145			else
1146				args.v6.ucLaneNum = 4;
1147			args.v6.ucConnObjId = connector_object_id;
1148			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
1149				args.v6.ucDPLaneSet = lane_set;
1150			else
1151				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1152
1153			if (hpd_id == AMDGPU_HPD_NONE)
1154				args.v6.ucHPDSel = 0;
1155			else
1156				args.v6.ucHPDSel = hpd_id + 1;
1157			args.v6.ucDigEncoderSel = 1 << dig_encoder;
1158			break;
1159		default:
1160			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1161			break;
1162		}
1163		break;
1164	default:
1165		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1166		break;
1167	}
1168
1169	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1170}
1171
1172bool
1173amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
1174				     int action)
1175{
1176	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1177	struct drm_device *dev = amdgpu_connector->base.dev;
1178	struct amdgpu_device *adev = dev->dev_private;
1179	union dig_transmitter_control args;
1180	int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
1181	uint8_t frev, crev;
1182
1183	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
1184		goto done;
1185
1186	if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
1187	    (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
1188		goto done;
1189
1190	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1191		goto done;
1192
1193	memset(&args, 0, sizeof(args));
1194
1195	args.v1.ucAction = action;
1196
1197	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1198
1199	/* wait for the panel to power up */
1200	if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
1201		int i;
1202
1203		for (i = 0; i < 300; i++) {
1204			if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
1205				return true;
1206			mdelay(1);
1207		}
1208		return false;
1209	}
1210done:
1211	return true;
1212}
1213
1214union external_encoder_control {
1215	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
1216	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
1217};
1218
1219static void
1220amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
1221					struct drm_encoder *ext_encoder,
1222					int action)
1223{
1224	struct drm_device *dev = encoder->dev;
1225	struct amdgpu_device *adev = dev->dev_private;
1226	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1227	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
1228	union external_encoder_control args;
1229	struct drm_connector *connector;
1230	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
1231	u8 frev, crev;
1232	int dp_clock = 0;
1233	int dp_lane_count = 0;
1234	int connector_object_id = 0;
1235	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1236
1237	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1238		connector = amdgpu_get_connector_for_encoder_init(encoder);
1239	else
1240		connector = amdgpu_get_connector_for_encoder(encoder);
1241
1242	if (connector) {
1243		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1244		struct amdgpu_connector_atom_dig *dig_connector =
1245			amdgpu_connector->con_priv;
1246
1247		dp_clock = dig_connector->dp_clock;
1248		dp_lane_count = dig_connector->dp_lane_count;
1249		connector_object_id =
1250			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
1251	}
1252
1253	memset(&args, 0, sizeof(args));
1254
1255	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1256		return;
1257
1258	switch (frev) {
1259	case 1:
1260		/* no params on frev 1 */
1261		break;
1262	case 2:
1263		switch (crev) {
1264		case 1:
1265		case 2:
1266			args.v1.sDigEncoder.ucAction = action;
1267			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1268			args.v1.sDigEncoder.ucEncoderMode =
1269				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1270
1271			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
1272				if (dp_clock == 270000)
1273					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
1274				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
1275			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1276				args.v1.sDigEncoder.ucLaneNum = 8;
1277			else
1278				args.v1.sDigEncoder.ucLaneNum = 4;
1279			break;
1280		case 3:
1281			args.v3.sExtEncoder.ucAction = action;
1282			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1283				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
1284			else
1285				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1286			args.v3.sExtEncoder.ucEncoderMode =
1287				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1288
1289			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
1290				if (dp_clock == 270000)
1291					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
1292				else if (dp_clock == 540000)
1293					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
1294				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
1295			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1296				args.v3.sExtEncoder.ucLaneNum = 8;
1297			else
1298				args.v3.sExtEncoder.ucLaneNum = 4;
1299			switch (ext_enum) {
1300			case GRAPH_OBJECT_ENUM_ID1:
1301				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
1302				break;
1303			case GRAPH_OBJECT_ENUM_ID2:
1304				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
1305				break;
1306			case GRAPH_OBJECT_ENUM_ID3:
1307				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
1308				break;
1309			}
1310			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
1311			break;
1312		default:
1313			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1314			return;
1315		}
1316		break;
1317	default:
1318		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1319		return;
1320	}
1321	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1322}
1323
1324static void
1325amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
1326{
1327	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1328	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1329	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1330	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1331	struct amdgpu_connector *amdgpu_connector = NULL;
1332	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;
1333
1334	if (connector) {
1335		amdgpu_connector = to_amdgpu_connector(connector);
1336		amdgpu_dig_connector = amdgpu_connector->con_priv;
1337	}
1338
1339	if (action == ATOM_ENABLE) {
1340		if (!connector)
1341			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
1342		else
1343			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);
1344
1345		/* setup and enable the encoder */
1346		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
1347		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1348						   ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
1349						   dig->panel_mode);
1350		if (ext_encoder)
1351			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1352								EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
1353		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1354		    connector) {
1355			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1356				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1357								     ATOM_TRANSMITTER_ACTION_POWER_ON);
1358				amdgpu_dig_connector->edp_on = true;
1359			}
1360		}
1361		/* enable the transmitter */
1362		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1363						       ATOM_TRANSMITTER_ACTION_ENABLE,
1364						       0, 0);
1365		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1366		    connector) {
1367			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
1368			amdgpu_atombios_dp_link_train(encoder, connector);
1369			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
1370		}
1371		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1372			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
1373		if (ext_encoder)
1374			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
1375	} else {
1376		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1377		    connector)
1378			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1379							   ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
1380		if (ext_encoder)
1381			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
1382		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1383			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1384							       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
1385
1386		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1387		    connector)
1388			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
1389		/* disable the transmitter */
1390		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1391						       ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
1392		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1393		    connector) {
1394			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1395				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1396								     ATOM_TRANSMITTER_ACTION_POWER_OFF);
1397				amdgpu_dig_connector->edp_on = false;
1398			}
1399		}
1400	}
1401}
1402
1403void
1404amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
1405{
1406	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1407
1408	DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
1409		  amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
1410		  amdgpu_encoder->active_device);
1411	switch (amdgpu_encoder->encoder_id) {
1412	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1413	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1414	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1415	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1416		switch (mode) {
1417		case DRM_MODE_DPMS_ON:
1418			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
1419			break;
1420		case DRM_MODE_DPMS_STANDBY:
1421		case DRM_MODE_DPMS_SUSPEND:
1422		case DRM_MODE_DPMS_OFF:
1423			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
1424			break;
1425		}
1426		break;
1427	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1428		switch (mode) {
1429		case DRM_MODE_DPMS_ON:
1430			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
1431			break;
1432		case DRM_MODE_DPMS_STANDBY:
1433		case DRM_MODE_DPMS_SUSPEND:
1434		case DRM_MODE_DPMS_OFF:
1435			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
1436			break;
1437		}
1438		break;
1439	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1440		switch (mode) {
1441		case DRM_MODE_DPMS_ON:
1442			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
1443			break;
1444		case DRM_MODE_DPMS_STANDBY:
1445		case DRM_MODE_DPMS_SUSPEND:
1446		case DRM_MODE_DPMS_OFF:
1447			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
1448			break;
1449		}
1450		break;
1451	default:
1452		return;
1453	}
1454}
1455
1456union crtc_source_param {
1457	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
1458	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
1459	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
1460};
1461
1462void
1463amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1464{
1465	struct drm_device *dev = encoder->dev;
1466	struct amdgpu_device *adev = dev->dev_private;
1467	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1468	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1469	union crtc_source_param args;
1470	int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
1471	uint8_t frev, crev;
1472	struct amdgpu_encoder_atom_dig *dig;
1473
1474	memset(&args, 0, sizeof(args));
1475
1476	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1477		return;
1478
1479	switch (frev) {
1480	case 1:
1481		switch (crev) {
1482		case 1:
1483		default:
1484			args.v1.ucCRTC = amdgpu_crtc->crtc_id;
1485			switch (amdgpu_encoder->encoder_id) {
1486			case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
1487			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
1488				args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
1489				break;
1490			case ENCODER_OBJECT_ID_INTERNAL_LVDS:
1491			case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
1492				if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
1493					args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
1494				else
1495					args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
1496				break;
1497			case ENCODER_OBJECT_ID_INTERNAL_DVO1:
1498			case ENCODER_OBJECT_ID_INTERNAL_DDI:
1499			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1500				args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
1501				break;
1502			case ENCODER_OBJECT_ID_INTERNAL_DAC1:
1503			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1504				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1505					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1506				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1507					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1508				else
1509					args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
1510				break;
1511			case ENCODER_OBJECT_ID_INTERNAL_DAC2:
1512			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1513				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1514					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1515				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1516					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1517				else
1518					args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
1519				break;
1520			}
1521			break;
1522		case 2:
1523			args.v2.ucCRTC = amdgpu_crtc->crtc_id;
1524			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1525				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1526
1527				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1528					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1529				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1530					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1531				else
1532					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1533			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1534				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1535			} else {
1536				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1537			}
1538			switch (amdgpu_encoder->encoder_id) {
1539			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1540			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1541			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1542			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1543			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1544				dig = amdgpu_encoder->enc_priv;
1545				switch (dig->dig_encoder) {
1546				case 0:
1547					args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1548					break;
1549				case 1:
1550					args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1551					break;
1552				case 2:
1553					args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1554					break;
1555				case 3:
1556					args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1557					break;
1558				case 4:
1559					args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1560					break;
1561				case 5:
1562					args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1563					break;
1564				case 6:
1565					args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1566					break;
1567				}
1568				break;
1569			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1570				args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1571				break;
1572			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1573				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1574					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1575				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1576					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1577				else
1578					args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1579				break;
1580			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1581				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1582					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1583				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1584					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1585				else
1586					args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1587				break;
1588			}
1589			break;
1590		case 3:
1591			args.v3.ucCRTC = amdgpu_crtc->crtc_id;
1592			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1593				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1594
1595				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1596					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1597				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1598					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1599				else
1600					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1601			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1602				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1603			} else {
1604				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1605			}
1606			args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
1607			switch (amdgpu_encoder->encoder_id) {
1608			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1609			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1610			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1611			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1612			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1613				dig = amdgpu_encoder->enc_priv;
1614				switch (dig->dig_encoder) {
1615				case 0:
1616					args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1617					break;
1618				case 1:
1619					args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1620					break;
1621				case 2:
1622					args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1623					break;
1624				case 3:
1625					args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1626					break;
1627				case 4:
1628					args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1629					break;
1630				case 5:
1631					args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1632					break;
1633				case 6:
1634					args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1635					break;
1636				}
1637				break;
1638			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1639				args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1640				break;
1641			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1642				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1643					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1644				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1645					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1646				else
1647					args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1648				break;
1649			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1650				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1651					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1652				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1653					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1654				else
1655					args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1656				break;
1657			}
1658			break;
1659		}
1660		break;
1661	default:
1662		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1663		return;
1664	}
1665
1666	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1667}
1668
1669/* This only needs to be called once at startup */
1670void
1671amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
1672{
1673	struct drm_device *dev = adev->ddev;
1674	struct drm_encoder *encoder;
1675
1676	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1677		struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1678		struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1679
1680		switch (amdgpu_encoder->encoder_id) {
1681		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1682		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1683		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1684		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1685			amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
1686							       0, 0);
1687			break;
1688		}
1689
1690		if (ext_encoder)
1691			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1692								EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
1693	}
1694}
1695
1696static bool
1697amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
1698				 struct drm_connector *connector)
1699{
1700	struct drm_device *dev = encoder->dev;
1701	struct amdgpu_device *adev = dev->dev_private;
1702	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1703	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1704
1705	if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
1706				       ATOM_DEVICE_CV_SUPPORT |
1707				       ATOM_DEVICE_CRT_SUPPORT)) {
1708		DAC_LOAD_DETECTION_PS_ALLOCATION args;
1709		int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
1710		uint8_t frev, crev;
1711
1712		memset(&args, 0, sizeof(args));
1713
1714		if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1715			return false;
1716
1717		args.sDacload.ucMisc = 0;
1718
1719		if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
1720		    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
1721			args.sDacload.ucDacType = ATOM_DAC_A;
1722		else
1723			args.sDacload.ucDacType = ATOM_DAC_B;
1724
1725		if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
1726			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
1727		else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
1728			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
1729		else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1730			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
1731			if (crev >= 3)
1732				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1733		} else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1734			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
1735			if (crev >= 3)
1736				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1737		}
1738
1739		amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1740
1741		return true;
1742	} else
1743		return false;
1744}
1745
1746enum drm_connector_status
1747amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
1748			    struct drm_connector *connector)
1749{
1750	struct drm_device *dev = encoder->dev;
1751	struct amdgpu_device *adev = dev->dev_private;
1752	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1753	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1754	uint32_t bios_0_scratch;
1755
1756	if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
1757		DRM_DEBUG_KMS("detect returned false \n");
1758		return connector_status_unknown;
1759	}
1760
1761	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1762
1763	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1764	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1765		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1766			return connector_status_connected;
1767	}
1768	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1769		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1770			return connector_status_connected;
1771	}
1772	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1773		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1774			return connector_status_connected;
1775	}
1776	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1777		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1778			return connector_status_connected; /* CTV */
1779		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1780			return connector_status_connected; /* STV */
1781	}
1782	return connector_status_disconnected;
1783}
1784
1785enum drm_connector_status
1786amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
1787			    struct drm_connector *connector)
1788{
1789	struct drm_device *dev = encoder->dev;
1790	struct amdgpu_device *adev = dev->dev_private;
1791	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1792	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1793	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1794	u32 bios_0_scratch;
1795
1796	if (!ext_encoder)
1797		return connector_status_unknown;
1798
1799	if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
1800		return connector_status_unknown;
1801
1802	/* load detect on the dp bridge */
1803	amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1804						EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
1805
1806	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1807
1808	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1809	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1810		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1811			return connector_status_connected;
1812	}
1813	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1814		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1815			return connector_status_connected;
1816	}
1817	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1818		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1819			return connector_status_connected;
1820	}
1821	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1822		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1823			return connector_status_connected; /* CTV */
1824		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1825			return connector_status_connected; /* STV */
1826	}
1827	return connector_status_disconnected;
1828}
1829
1830void
1831amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
1832{
1833	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1834
1835	if (ext_encoder)
1836		/* ddc_setup on the dp bridge */
1837		amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1838							EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
1839
1840}
1841
1842void
1843amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
1844				       struct drm_encoder *encoder,
1845				       bool connected)
1846{
1847	struct drm_device *dev = connector->dev;
1848	struct amdgpu_device *adev = dev->dev_private;
1849	struct amdgpu_connector *amdgpu_connector =
1850	    to_amdgpu_connector(connector);
1851	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1852	uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
1853
1854	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1855	bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
1856	bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
1857
1858	if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
1859	    (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
1860		if (connected) {
1861			DRM_DEBUG_KMS("LCD1 connected\n");
1862			bios_0_scratch |= ATOM_S0_LCD1;
1863			bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
1864			bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
1865		} else {
1866			DRM_DEBUG_KMS("LCD1 disconnected\n");
1867			bios_0_scratch &= ~ATOM_S0_LCD1;
1868			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
1869			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
1870		}
1871	}
1872	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
1873	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
1874		if (connected) {
1875			DRM_DEBUG_KMS("CRT1 connected\n");
1876			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
1877			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
1878			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
1879		} else {
1880			DRM_DEBUG_KMS("CRT1 disconnected\n");
1881			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
1882			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
1883			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
1884		}
1885	}
1886	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
1887	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
1888		if (connected) {
1889			DRM_DEBUG_KMS("CRT2 connected\n");
1890			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
1891			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
1892			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
1893		} else {
1894			DRM_DEBUG_KMS("CRT2 disconnected\n");
1895			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
1896			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
1897			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
1898		}
1899	}
1900	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
1901	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
1902		if (connected) {
1903			DRM_DEBUG_KMS("DFP1 connected\n");
1904			bios_0_scratch |= ATOM_S0_DFP1;
1905			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
1906			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
1907		} else {
1908			DRM_DEBUG_KMS("DFP1 disconnected\n");
1909			bios_0_scratch &= ~ATOM_S0_DFP1;
1910			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
1911			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
1912		}
1913	}
1914	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
1915	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
1916		if (connected) {
1917			DRM_DEBUG_KMS("DFP2 connected\n");
1918			bios_0_scratch |= ATOM_S0_DFP2;
1919			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
1920			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
1921		} else {
1922			DRM_DEBUG_KMS("DFP2 disconnected\n");
1923			bios_0_scratch &= ~ATOM_S0_DFP2;
1924			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
1925			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
1926		}
1927	}
1928	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
1929	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
1930		if (connected) {
1931			DRM_DEBUG_KMS("DFP3 connected\n");
1932			bios_0_scratch |= ATOM_S0_DFP3;
1933			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
1934			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
1935		} else {
1936			DRM_DEBUG_KMS("DFP3 disconnected\n");
1937			bios_0_scratch &= ~ATOM_S0_DFP3;
1938			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
1939			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
1940		}
1941	}
1942	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
1943	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
1944		if (connected) {
1945			DRM_DEBUG_KMS("DFP4 connected\n");
1946			bios_0_scratch |= ATOM_S0_DFP4;
1947			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
1948			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
1949		} else {
1950			DRM_DEBUG_KMS("DFP4 disconnected\n");
1951			bios_0_scratch &= ~ATOM_S0_DFP4;
1952			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
1953			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
1954		}
1955	}
1956	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
1957	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
1958		if (connected) {
1959			DRM_DEBUG_KMS("DFP5 connected\n");
1960			bios_0_scratch |= ATOM_S0_DFP5;
1961			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
1962			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
1963		} else {
1964			DRM_DEBUG_KMS("DFP5 disconnected\n");
1965			bios_0_scratch &= ~ATOM_S0_DFP5;
1966			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
1967			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
1968		}
1969	}
1970	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
1971	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
1972		if (connected) {
1973			DRM_DEBUG_KMS("DFP6 connected\n");
1974			bios_0_scratch |= ATOM_S0_DFP6;
1975			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
1976			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
1977		} else {
1978			DRM_DEBUG_KMS("DFP6 disconnected\n");
1979			bios_0_scratch &= ~ATOM_S0_DFP6;
1980			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
1981			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
1982		}
1983	}
1984
1985	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
1986	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
1987	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
1988}
1989
1990union lvds_info {
1991	struct _ATOM_LVDS_INFO info;
1992	struct _ATOM_LVDS_INFO_V12 info_12;
1993};
1994
1995struct amdgpu_encoder_atom_dig *
1996amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
1997{
1998	struct drm_device *dev = encoder->base.dev;
1999	struct amdgpu_device *adev = dev->dev_private;
2000	struct amdgpu_mode_info *mode_info = &adev->mode_info;
2001	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
2002	uint16_t data_offset, misc;
2003	union lvds_info *lvds_info;
2004	uint8_t frev, crev;
2005	struct amdgpu_encoder_atom_dig *lvds = NULL;
2006	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2007
2008	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
2009				   &frev, &crev, &data_offset)) {
2010		lvds_info =
2011			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
2012		lvds =
2013		    kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2014
2015		if (!lvds)
2016			return NULL;
2017
2018		lvds->native_mode.clock =
2019		    le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
2020		lvds->native_mode.hdisplay =
2021		    le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
2022		lvds->native_mode.vdisplay =
2023		    le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
2024		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
2025			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
2026		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
2027			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
2028		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
2029			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
2030		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
2031			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
2032		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
2033			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
2034		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
2035			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
2036		lvds->panel_pwr_delay =
2037		    le16_to_cpu(lvds_info->info.usOffDelayInMs);
2038		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
2039
2040		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
2041		if (misc & ATOM_VSYNC_POLARITY)
2042			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
2043		if (misc & ATOM_HSYNC_POLARITY)
2044			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
2045		if (misc & ATOM_COMPOSITESYNC)
2046			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
2047		if (misc & ATOM_INTERLACE)
2048			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
2049		if (misc & ATOM_DOUBLE_CLOCK_MODE)
2050			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
2051
2052		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
2053		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
2054
2055		/* set crtc values */
2056		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
2057
2058		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
2059
2060		encoder->native_mode = lvds->native_mode;
2061
2062		if (encoder_enum == 2)
2063			lvds->linkb = true;
2064		else
2065			lvds->linkb = false;
2066
2067		/* parse the lcd record table */
2068		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
2069			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
2070			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
2071			bool bad_record = false;
2072			u8 *record;
2073
2074			if ((frev == 1) && (crev < 2))
2075				/* absolute */
2076				record = (u8 *)(mode_info->atom_context->bios +
2077						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2078			else
2079				/* relative */
2080				record = (u8 *)(mode_info->atom_context->bios +
2081						data_offset +
2082						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2083			while (*record != ATOM_RECORD_END_TYPE) {
2084				switch (*record) {
2085				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
2086					record += sizeof(ATOM_PATCH_RECORD_MODE);
2087					break;
2088				case LCD_RTS_RECORD_TYPE:
2089					record += sizeof(ATOM_LCD_RTS_RECORD);
2090					break;
2091				case LCD_CAP_RECORD_TYPE:
2092					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
2093					break;
2094				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
2095					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
2096					if (fake_edid_record->ucFakeEDIDLength) {
2097						struct edid *edid;
2098						int edid_size =
2099							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
2100						edid = kmalloc(edid_size, GFP_KERNEL);
2101						if (edid) {
2102							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
2103							       fake_edid_record->ucFakeEDIDLength);
2104
2105							if (drm_edid_is_valid(edid)) {
2106								adev->mode_info.bios_hardcoded_edid = edid;
2107								adev->mode_info.bios_hardcoded_edid_size = edid_size;
2108							} else
2109								kfree(edid);
2110						}
2111					}
2112					record += fake_edid_record->ucFakeEDIDLength ?
2113						fake_edid_record->ucFakeEDIDLength + 2 :
2114						sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
 
 
 
2115					break;
2116				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
2117					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
2118					lvds->native_mode.width_mm = panel_res_record->usHSize;
2119					lvds->native_mode.height_mm = panel_res_record->usVSize;
2120					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
2121					break;
2122				default:
2123					DRM_ERROR("Bad LCD record %d\n", *record);
2124					bad_record = true;
2125					break;
2126				}
2127				if (bad_record)
2128					break;
2129			}
2130		}
2131	}
2132	return lvds;
2133}
2134
2135struct amdgpu_encoder_atom_dig *
2136amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
2137{
2138	int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2139	struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2140
2141	if (!dig)
2142		return NULL;
2143
2144	/* coherent mode by default */
2145	dig->coherent_mode = true;
2146	dig->dig_encoder = -1;
2147
2148	if (encoder_enum == 2)
2149		dig->linkb = true;
2150	else
2151		dig->linkb = false;
2152
2153	return dig;
2154}
2155