   1/*
   2 * Copyright 2007-11 Advanced Micro Devices, Inc.
   3 * Copyright 2008 Red Hat Inc.
   4 *
   5 * Permission is hereby granted, free of charge, to any person obtaining a
   6 * copy of this software and associated documentation files (the "Software"),
   7 * to deal in the Software without restriction, including without limitation
   8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
   9 * and/or sell copies of the Software, and to permit persons to whom the
  10 * Software is furnished to do so, subject to the following conditions:
  11 *
  12 * The above copyright notice and this permission notice shall be included in
  13 * all copies or substantial portions of the Software.
  14 *
  15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
  18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  21 * OTHER DEALINGS IN THE SOFTWARE.
  22 *
  23 * Authors: Dave Airlie
  24 *          Alex Deucher
  25 */
   26#include <drm/drmP.h>
   27#include <drm/drm_crtc_helper.h>
  28#include <drm/amdgpu_drm.h>
  29#include "amdgpu.h"
  30#include "amdgpu_connectors.h"
 
  31#include "atom.h"
  32#include "atombios_encoders.h"
  33#include "atombios_dp.h"
  34#include <linux/backlight.h>
  35#include "bif/bif_4_1_d.h"
  36
  37static u8
  38amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
  39{
  40	u8 backlight_level;
  41	u32 bios_2_scratch;
  42
  43	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  44
  45	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
  46			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
  47
  48	return backlight_level;
  49}
  50
  51static void
  52amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
  53					    u8 backlight_level)
  54{
  55	u32 bios_2_scratch;
  56
  57	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  58
  59	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
  60	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
  61			   ATOM_S2_CURRENT_BL_LEVEL_MASK);
  62
  63	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
  64}
  65
  66u8
  67amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
  68{
  69	struct drm_device *dev = amdgpu_encoder->base.dev;
  70	struct amdgpu_device *adev = dev->dev_private;
  71
  72	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  73		return 0;
  74
  75	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  76}
  77
  78void
  79amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
  80				     u8 level)
  81{
  82	struct drm_encoder *encoder = &amdgpu_encoder->base;
  83	struct drm_device *dev = amdgpu_encoder->base.dev;
  84	struct amdgpu_device *adev = dev->dev_private;
  85	struct amdgpu_encoder_atom_dig *dig;
  86
  87	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  88		return;
  89
  90	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
  91	    amdgpu_encoder->enc_priv) {
  92		dig = amdgpu_encoder->enc_priv;
  93		dig->backlight_level = level;
  94		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
  95
  96		switch (amdgpu_encoder->encoder_id) {
  97		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
  98		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
  99		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
  100		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
  101			if (dig->backlight_level == 0)
 102				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 103								       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
 104			else {
 105				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 106								       ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
 107				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 108								       ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
 109			}
 110			break;
 111		default:
 112			break;
 113		}
 114	}
 115}
 116
 117#if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
 118
 119static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
 120{
 121	u8 level;
 122
 123	/* Convert brightness to hardware level */
 124	if (bd->props.brightness < 0)
 125		level = 0;
 126	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
 127		level = AMDGPU_MAX_BL_LEVEL;
 128	else
 129		level = bd->props.brightness;
 130
 131	return level;
 132}
 133
 134static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
 135{
 136	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
 137	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
 138
 139	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
 140					     amdgpu_atombios_encoder_backlight_level(bd));
 141
 142	return 0;
 143}
 144
 145static int
 146amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
 147{
 148	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
 149	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
 150	struct drm_device *dev = amdgpu_encoder->base.dev;
 151	struct amdgpu_device *adev = dev->dev_private;
 152
 153	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
 154}
 155
 156static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
 157	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
 158	.update_status	= amdgpu_atombios_encoder_update_backlight_status,
 159};
 160
 161void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
 162				     struct drm_connector *drm_connector)
 163{
 164	struct drm_device *dev = amdgpu_encoder->base.dev;
 165	struct amdgpu_device *adev = dev->dev_private;
 166	struct backlight_device *bd;
 167	struct backlight_properties props;
 168	struct amdgpu_backlight_privdata *pdata;
 169	struct amdgpu_encoder_atom_dig *dig;
 170	u8 backlight_level;
 171	char bl_name[16];
 172
 173	/* Mac laptops with multiple GPUs use the gmux driver for backlight
 174	 * so don't register a backlight device
 175	 */
 176	if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
 177	    (adev->pdev->device == 0x6741))
 178		return;
 179
 180	if (!amdgpu_encoder->enc_priv)
 181		return;
 182
 183	if (!adev->is_atom_bios)
 184		return;
 185
 186	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
 187		return;
 188
 189	pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
 190	if (!pdata) {
 191		DRM_ERROR("Memory allocation failed\n");
 192		goto error;
 193	}
 194
 195	memset(&props, 0, sizeof(props));
 196	props.max_brightness = AMDGPU_MAX_BL_LEVEL;
 197	props.type = BACKLIGHT_RAW;
 198	snprintf(bl_name, sizeof(bl_name),
 199		 "amdgpu_bl%d", dev->primary->index);
 200	bd = backlight_device_register(bl_name, drm_connector->kdev,
 201				       pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
 202	if (IS_ERR(bd)) {
 203		DRM_ERROR("Backlight registration failed\n");
 204		goto error;
 205	}
 206
 207	pdata->encoder = amdgpu_encoder;
 208
 209	backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
 210
 211	dig = amdgpu_encoder->enc_priv;
 212	dig->bl_dev = bd;
 213
 214	bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
 215	bd->props.power = FB_BLANK_UNBLANK;
 216	backlight_update_status(bd);
 217
 218	DRM_INFO("amdgpu atom DIG backlight initialized\n");
 219
 220	return;
 221
 222error:
 223	kfree(pdata);
 224	return;
 225}
 226
 227void
 228amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
 229{
 230	struct drm_device *dev = amdgpu_encoder->base.dev;
 231	struct amdgpu_device *adev = dev->dev_private;
 232	struct backlight_device *bd = NULL;
 233	struct amdgpu_encoder_atom_dig *dig;
 234
 235	if (!amdgpu_encoder->enc_priv)
 236		return;
 237
 238	if (!adev->is_atom_bios)
 239		return;
 240
 241	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
 242		return;
 243
 244	dig = amdgpu_encoder->enc_priv;
 245	bd = dig->bl_dev;
 246	dig->bl_dev = NULL;
 247
 248	if (bd) {
 249		struct amdgpu_legacy_backlight_privdata *pdata;
 250
 251		pdata = bl_get_data(bd);
 252		backlight_device_unregister(bd);
 253		kfree(pdata);
 254
 255		DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
 256	}
 257}
 258
 259#else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
 260
 261void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder)
 262{
 263}
 264
 265void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
 266{
 267}
 268
 269#endif
 270
 271bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
 272{
 273	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 274	switch (amdgpu_encoder->encoder_id) {
 275	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
 276	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 277	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 278	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 279	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 280		return true;
 281	default:
 282		return false;
 283	}
 284}
 285
 286bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
 287				 const struct drm_display_mode *mode,
 288				 struct drm_display_mode *adjusted_mode)
 289{
 290	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 291
 292	/* set the active encoder to connector routing */
 293	amdgpu_encoder_set_active_device(encoder);
 294	drm_mode_set_crtcinfo(adjusted_mode, 0);
 295
 296	/* hw bug */
 297	if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
 298	    && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
 299		adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
 300
 301	/* vertical FP must be at least 1 */
 302	if (mode->crtc_vsync_start == mode->crtc_vdisplay)
 303		adjusted_mode->crtc_vsync_start++;
 304
 305	/* get the native mode for scaling */
 306	if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
 307		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
 308	else if (amdgpu_encoder->rmx_type != RMX_OFF)
 309		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
 310
 311	if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
 312	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
 313		struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
 314		amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
 315	}
 316
 317	return true;
 318}
 319
 320static void
 321amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
 322{
 323	struct drm_device *dev = encoder->dev;
 324	struct amdgpu_device *adev = dev->dev_private;
 325	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 326	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
 327	int index = 0;
 328
 329	memset(&args, 0, sizeof(args));
 330
 331	switch (amdgpu_encoder->encoder_id) {
 332	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
 333	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
 334		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
 335		break;
 336	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
 337	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
 338		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
 339		break;
 340	}
 341
 342	args.ucAction = action;
 343	args.ucDacStandard = ATOM_DAC1_PS2;
 344	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 345
 346	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 347
 348}
 349
 350static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
 351{
 352	int bpc = 8;
 353
 354	if (encoder->crtc) {
 355		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
 356		bpc = amdgpu_crtc->bpc;
 357	}
 358
 359	switch (bpc) {
 360	case 0:
 361		return PANEL_BPC_UNDEFINE;
 362	case 6:
 363		return PANEL_6BIT_PER_COLOR;
 364	case 8:
 365	default:
 366		return PANEL_8BIT_PER_COLOR;
 367	case 10:
 368		return PANEL_10BIT_PER_COLOR;
 369	case 12:
 370		return PANEL_12BIT_PER_COLOR;
 371	case 16:
 372		return PANEL_16BIT_PER_COLOR;
 373	}
 374}
 375
 376union dvo_encoder_control {
 377	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
 378	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
 379	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
 380	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
 381};
 382
 383static void
 384amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
 385{
 386	struct drm_device *dev = encoder->dev;
 387	struct amdgpu_device *adev = dev->dev_private;
 388	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 389	union dvo_encoder_control args;
 390	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
 391	uint8_t frev, crev;
 392
 393	memset(&args, 0, sizeof(args));
 394
 395	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 396		return;
 397
 398	switch (frev) {
 399	case 1:
 400		switch (crev) {
 401		case 1:
 402			/* R4xx, R5xx */
 403			args.ext_tmds.sXTmdsEncoder.ucEnable = action;
 404
 405			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 406				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
 407
 408			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
 409			break;
 410		case 2:
 411			/* RS600/690/740 */
 412			args.dvo.sDVOEncoder.ucAction = action;
 413			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 414			/* DFP1, CRT1, TV1 depending on the type of port */
 415			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
 416
 417			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 418				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
 419			break;
 420		case 3:
 421			/* R6xx */
 422			args.dvo_v3.ucAction = action;
 423			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 424			args.dvo_v3.ucDVOConfig = 0; /* XXX */
 425			break;
 426		case 4:
 427			/* DCE8 */
 428			args.dvo_v4.ucAction = action;
 429			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 430			args.dvo_v4.ucDVOConfig = 0; /* XXX */
 431			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 432			break;
 433		default:
 434			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 435			break;
 436		}
 437		break;
 438	default:
 439		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 440		break;
 441	}
 442
 443	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 444}
 445
 446int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
 447{
 448	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 449	struct drm_connector *connector;
 450	struct amdgpu_connector *amdgpu_connector;
 451	struct amdgpu_connector_atom_dig *dig_connector;
 452
 453	/* dp bridges are always DP */
 454	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
 455		return ATOM_ENCODER_MODE_DP;
 456
 457	/* DVO is always DVO */
 458	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
 459	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
 460		return ATOM_ENCODER_MODE_DVO;
 461
 462	connector = amdgpu_get_connector_for_encoder(encoder);
 463	/* if we don't have an active device yet, just use one of
 464	 * the connectors tied to the encoder.
 465	 */
 466	if (!connector)
 467		connector = amdgpu_get_connector_for_encoder_init(encoder);
 468	amdgpu_connector = to_amdgpu_connector(connector);
 469
 470	switch (connector->connector_type) {
 471	case DRM_MODE_CONNECTOR_DVII:
 472	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
 473		if (amdgpu_audio != 0) {
 474			if (amdgpu_connector->use_digital &&
 475			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
 476				return ATOM_ENCODER_MODE_HDMI;
 477			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
 478				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 479				return ATOM_ENCODER_MODE_HDMI;
 480			else if (amdgpu_connector->use_digital)
 481				return ATOM_ENCODER_MODE_DVI;
 482			else
 483				return ATOM_ENCODER_MODE_CRT;
 484		} else if (amdgpu_connector->use_digital) {
 485			return ATOM_ENCODER_MODE_DVI;
 486		} else {
 487			return ATOM_ENCODER_MODE_CRT;
 488		}
 489		break;
 490	case DRM_MODE_CONNECTOR_DVID:
 491	case DRM_MODE_CONNECTOR_HDMIA:
 492	default:
 493		if (amdgpu_audio != 0) {
 494			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
 495				return ATOM_ENCODER_MODE_HDMI;
 496			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
 497				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 498				return ATOM_ENCODER_MODE_HDMI;
 499			else
 500				return ATOM_ENCODER_MODE_DVI;
 501		} else {
 502			return ATOM_ENCODER_MODE_DVI;
 503		}
 504		break;
 505	case DRM_MODE_CONNECTOR_LVDS:
 506		return ATOM_ENCODER_MODE_LVDS;
 507		break;
 508	case DRM_MODE_CONNECTOR_DisplayPort:
 509		dig_connector = amdgpu_connector->con_priv;
 510		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
 511		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
 512			return ATOM_ENCODER_MODE_DP;
 513		} else if (amdgpu_audio != 0) {
 514			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
 515				return ATOM_ENCODER_MODE_HDMI;
 516			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
 517				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 518				return ATOM_ENCODER_MODE_HDMI;
 519			else
 520				return ATOM_ENCODER_MODE_DVI;
 521		} else {
 522			return ATOM_ENCODER_MODE_DVI;
 523		}
 524		break;
 525	case DRM_MODE_CONNECTOR_eDP:
 526		return ATOM_ENCODER_MODE_DP;
 527	case DRM_MODE_CONNECTOR_DVIA:
 528	case DRM_MODE_CONNECTOR_VGA:
 529		return ATOM_ENCODER_MODE_CRT;
 530		break;
 531	case DRM_MODE_CONNECTOR_Composite:
 532	case DRM_MODE_CONNECTOR_SVIDEO:
 533	case DRM_MODE_CONNECTOR_9PinDIN:
 534		/* fix me */
 535		return ATOM_ENCODER_MODE_TV;
 536		/*return ATOM_ENCODER_MODE_CV;*/
 537		break;
 538	}
 539}
 540
 541/*
 542 * DIG Encoder/Transmitter Setup
 543 *
 544 * DCE 6.0
 545 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
 546 * Supports up to 6 digital outputs
 547 * - 6 DIG encoder blocks.
 548 * - DIG to PHY mapping is hardcoded
 549 * DIG1 drives UNIPHY0 link A, A+B
 550 * DIG2 drives UNIPHY0 link B
 551 * DIG3 drives UNIPHY1 link A, A+B
 552 * DIG4 drives UNIPHY1 link B
 553 * DIG5 drives UNIPHY2 link A, A+B
 554 * DIG6 drives UNIPHY2 link B
 555 *
 556 * Routing
 557 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
 558 * Examples:
 559 * crtc0 -> dig2 -> LVTMA   links A+B -> TMDS/HDMI
 560 * crtc1 -> dig1 -> UNIPHY0 link  B   -> DP
 561 * crtc0 -> dig1 -> UNIPHY2 link  A   -> LVDS
 562 * crtc1 -> dig2 -> UNIPHY1 link  B+A -> TMDS/HDMI
 563 */
 564
 565union dig_encoder_control {
 566	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
 567	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
 568	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
  569	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
  570};
 571
 572void
 573amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
 574				   int action, int panel_mode)
 575{
 576	struct drm_device *dev = encoder->dev;
 577	struct amdgpu_device *adev = dev->dev_private;
 578	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 579	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
 580	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
 581	union dig_encoder_control args;
 582	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
 583	uint8_t frev, crev;
 584	int dp_clock = 0;
 585	int dp_lane_count = 0;
 586	int hpd_id = AMDGPU_HPD_NONE;
 587
 588	if (connector) {
 589		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
 590		struct amdgpu_connector_atom_dig *dig_connector =
 591			amdgpu_connector->con_priv;
 592
 593		dp_clock = dig_connector->dp_clock;
 594		dp_lane_count = dig_connector->dp_lane_count;
 595		hpd_id = amdgpu_connector->hpd.hpd;
 596	}
 597
 598	/* no dig encoder assigned */
 599	if (dig->dig_encoder == -1)
 600		return;
 601
 602	memset(&args, 0, sizeof(args));
 603
 604	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 605		return;
 606
 607	switch (frev) {
 608	case 1:
 609		switch (crev) {
 610		case 1:
 611			args.v1.ucAction = action;
 612			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 613			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 614				args.v3.ucPanelMode = panel_mode;
 615			else
 616				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 617
 618			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
 619				args.v1.ucLaneNum = dp_lane_count;
 620			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 621				args.v1.ucLaneNum = 8;
 622			else
 623				args.v1.ucLaneNum = 4;
 624
 625			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
 626				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
 627			switch (amdgpu_encoder->encoder_id) {
 628			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 629				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
 630				break;
 631			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 632			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 633				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
 634				break;
 635			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 636				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
 637				break;
 638			}
 639			if (dig->linkb)
 640				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
 641			else
 642				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
 643			break;
 644		case 2:
 645		case 3:
 646			args.v3.ucAction = action;
 647			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 648			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 649				args.v3.ucPanelMode = panel_mode;
 650			else
 651				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 652
 653			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
 654				args.v3.ucLaneNum = dp_lane_count;
 655			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 656				args.v3.ucLaneNum = 8;
 657			else
 658				args.v3.ucLaneNum = 4;
 659
 660			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
 661				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
 662			args.v3.acConfig.ucDigSel = dig->dig_encoder;
 663			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 664			break;
 665		case 4:
 666			args.v4.ucAction = action;
 667			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 668			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 669				args.v4.ucPanelMode = panel_mode;
 670			else
 671				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 672
 673			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
 674				args.v4.ucLaneNum = dp_lane_count;
 675			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 676				args.v4.ucLaneNum = 8;
 677			else
 678				args.v4.ucLaneNum = 4;
 679
 680			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
 681				if (dp_clock == 540000)
 682					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
 683				else if (dp_clock == 324000)
 684					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
 685				else if (dp_clock == 270000)
 686					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
 687				else
 688					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
 689			}
 690			args.v4.acConfig.ucDigSel = dig->dig_encoder;
 691			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 692			if (hpd_id == AMDGPU_HPD_NONE)
 693				args.v4.ucHPD_ID = 0;
 694			else
 695				args.v4.ucHPD_ID = hpd_id + 1;
  696			break;
  697		default:
 698			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 699			break;
 700		}
 701		break;
 702	default:
 703		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 704		break;
 705	}
 706
 707	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 708
 709}
 710
 711union dig_transmitter_control {
 712	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
 713	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
 714	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
 715	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
  716	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
  717};
 718
 719void
 720amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
 721				       uint8_t lane_num, uint8_t lane_set)
 722{
 723	struct drm_device *dev = encoder->dev;
 724	struct amdgpu_device *adev = dev->dev_private;
 725	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 726	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
 727	struct drm_connector *connector;
 728	union dig_transmitter_control args;
 729	int index = 0;
 730	uint8_t frev, crev;
 731	bool is_dp = false;
 732	int pll_id = 0;
 733	int dp_clock = 0;
 734	int dp_lane_count = 0;
 735	int connector_object_id = 0;
 736	int igp_lane_info = 0;
 737	int dig_encoder = dig->dig_encoder;
 738	int hpd_id = AMDGPU_HPD_NONE;
 739
 740	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 741		connector = amdgpu_get_connector_for_encoder_init(encoder);
 742		/* just needed to avoid bailing in the encoder check.  the encoder
 743		 * isn't used for init
 744		 */
 745		dig_encoder = 0;
 746	} else
 747		connector = amdgpu_get_connector_for_encoder(encoder);
 748
 749	if (connector) {
 750		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
 751		struct amdgpu_connector_atom_dig *dig_connector =
 752			amdgpu_connector->con_priv;
 753
 754		hpd_id = amdgpu_connector->hpd.hpd;
 755		dp_clock = dig_connector->dp_clock;
 756		dp_lane_count = dig_connector->dp_lane_count;
 757		connector_object_id =
 758			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
 759	}
 760
 761	if (encoder->crtc) {
 762		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
 763		pll_id = amdgpu_crtc->pll_id;
 764	}
 765
 766	/* no dig encoder assigned */
 767	if (dig_encoder == -1)
 768		return;
 769
 770	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
 771		is_dp = true;
 772
 773	memset(&args, 0, sizeof(args));
 774
 775	switch (amdgpu_encoder->encoder_id) {
 776	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
 777		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
 778		break;
 779	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 780	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 781	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 782	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 783		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
 784		break;
 785	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 786		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
 787		break;
 788	}
 789
 790	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 791		return;
 792
 793	switch (frev) {
 794	case 1:
 795		switch (crev) {
 796		case 1:
 797			args.v1.ucAction = action;
 798			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 799				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
 800			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 801				args.v1.asMode.ucLaneSel = lane_num;
 802				args.v1.asMode.ucLaneSet = lane_set;
 803			} else {
 804				if (is_dp)
 805					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
 806				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 807					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 808				else
 809					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 810			}
 811
 812			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
 813
 814			if (dig_encoder)
 815				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
 816			else
 817				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
 818
 819			if ((adev->flags & AMD_IS_APU) &&
 820			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
 821				if (is_dp ||
 822				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
 823					if (igp_lane_info & 0x1)
 824						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
 825					else if (igp_lane_info & 0x2)
 826						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
 827					else if (igp_lane_info & 0x4)
 828						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
 829					else if (igp_lane_info & 0x8)
 830						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
 831				} else {
 832					if (igp_lane_info & 0x3)
 833						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
 834					else if (igp_lane_info & 0xc)
 835						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
 836				}
 837			}
 838
 839			if (dig->linkb)
 840				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
 841			else
 842				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
 843
 844			if (is_dp)
 845				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
 846			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 847				if (dig->coherent_mode)
 848					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
 849				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 850					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
 851			}
 852			break;
 853		case 2:
 854			args.v2.ucAction = action;
 855			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 856				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
 857			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 858				args.v2.asMode.ucLaneSel = lane_num;
 859				args.v2.asMode.ucLaneSet = lane_set;
 860			} else {
 861				if (is_dp)
 862					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
 863				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 864					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 865				else
 866					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 867			}
 868
 869			args.v2.acConfig.ucEncoderSel = dig_encoder;
 870			if (dig->linkb)
 871				args.v2.acConfig.ucLinkSel = 1;
 872
 873			switch (amdgpu_encoder->encoder_id) {
 874			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 875				args.v2.acConfig.ucTransmitterSel = 0;
 876				break;
 877			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 878				args.v2.acConfig.ucTransmitterSel = 1;
 879				break;
 880			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 881				args.v2.acConfig.ucTransmitterSel = 2;
 882				break;
 883			}
 884
 885			if (is_dp) {
 886				args.v2.acConfig.fCoherentMode = 1;
 887				args.v2.acConfig.fDPConnector = 1;
 888			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 889				if (dig->coherent_mode)
 890					args.v2.acConfig.fCoherentMode = 1;
 891				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 892					args.v2.acConfig.fDualLinkConnector = 1;
 893			}
 894			break;
 895		case 3:
 896			args.v3.ucAction = action;
 897			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 898				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
 899			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 900				args.v3.asMode.ucLaneSel = lane_num;
 901				args.v3.asMode.ucLaneSet = lane_set;
 902			} else {
 903				if (is_dp)
 904					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
 905				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 906					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 907				else
 908					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 909			}
 910
 911			if (is_dp)
 912				args.v3.ucLaneNum = dp_lane_count;
 913			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 914				args.v3.ucLaneNum = 8;
 915			else
 916				args.v3.ucLaneNum = 4;
 917
 918			if (dig->linkb)
 919				args.v3.acConfig.ucLinkSel = 1;
 920			if (dig_encoder & 1)
 921				args.v3.acConfig.ucEncoderSel = 1;
 922
 923			/* Select the PLL for the PHY
 924			 * DP PHY should be clocked from external src if there is
 925			 * one.
 926			 */
 927			/* On DCE4, if there is an external clock, it generates the DP ref clock */
 928			if (is_dp && adev->clock.dp_extclk)
 929				args.v3.acConfig.ucRefClkSource = 2; /* external src */
 930			else
 931				args.v3.acConfig.ucRefClkSource = pll_id;
 932
 933			switch (amdgpu_encoder->encoder_id) {
 934			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 935				args.v3.acConfig.ucTransmitterSel = 0;
 936				break;
 937			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 938				args.v3.acConfig.ucTransmitterSel = 1;
 939				break;
 940			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 941				args.v3.acConfig.ucTransmitterSel = 2;
 942				break;
 943			}
 944
 945			if (is_dp)
 946				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
 947			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 948				if (dig->coherent_mode)
 949					args.v3.acConfig.fCoherentMode = 1;
 950				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 951					args.v3.acConfig.fDualLinkConnector = 1;
 952			}
 953			break;
 954		case 4:
 955			args.v4.ucAction = action;
 956			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 957				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
 958			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 959				args.v4.asMode.ucLaneSel = lane_num;
 960				args.v4.asMode.ucLaneSet = lane_set;
 961			} else {
 962				if (is_dp)
 963					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
 964				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 965					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 966				else
 967					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 968			}
 969
 970			if (is_dp)
 971				args.v4.ucLaneNum = dp_lane_count;
 972			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 973				args.v4.ucLaneNum = 8;
 974			else
 975				args.v4.ucLaneNum = 4;
 976
 977			if (dig->linkb)
 978				args.v4.acConfig.ucLinkSel = 1;
 979			if (dig_encoder & 1)
 980				args.v4.acConfig.ucEncoderSel = 1;
 981
 982			/* Select the PLL for the PHY
 983			 * DP PHY should be clocked from external src if there is
 984			 * one.
 985			 */
 986			/* On DCE5 DCPLL usually generates the DP ref clock */
 987			if (is_dp) {
 988				if (adev->clock.dp_extclk)
 989					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
 990				else
 991					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
 992			} else
 993				args.v4.acConfig.ucRefClkSource = pll_id;
 994
 995			switch (amdgpu_encoder->encoder_id) {
 996			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 997				args.v4.acConfig.ucTransmitterSel = 0;
 998				break;
 999			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1000				args.v4.acConfig.ucTransmitterSel = 1;
1001				break;
1002			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1003				args.v4.acConfig.ucTransmitterSel = 2;
1004				break;
1005			}
1006
1007			if (is_dp)
1008				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
1009			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1010				if (dig->coherent_mode)
1011					args.v4.acConfig.fCoherentMode = 1;
1012				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1013					args.v4.acConfig.fDualLinkConnector = 1;
1014			}
1015			break;
1016		case 5:
1017			args.v5.ucAction = action;
1018			if (is_dp)
1019				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
1020			else
1021				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1022
1023			switch (amdgpu_encoder->encoder_id) {
1024			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1025				if (dig->linkb)
1026					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1027				else
1028					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1029				break;
1030			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1031				if (dig->linkb)
1032					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1033				else
1034					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1035				break;
1036			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1037				if (dig->linkb)
1038					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1039				else
1040					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1041				break;
1042			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1043				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1044				break;
1045			}
1046			if (is_dp)
1047				args.v5.ucLaneNum = dp_lane_count;
1048			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1049				args.v5.ucLaneNum = 8;
1050			else
1051				args.v5.ucLaneNum = 4;
1052			args.v5.ucConnObjId = connector_object_id;
1053			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1054
1055			if (is_dp && adev->clock.dp_extclk)
1056				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
1057			else
1058				args.v5.asConfig.ucPhyClkSrcId = pll_id;
1059
1060			if (is_dp)
1061				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
1062			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1063				if (dig->coherent_mode)
1064					args.v5.asConfig.ucCoherentMode = 1;
1065			}
1066			if (hpd_id == AMDGPU_HPD_NONE)
1067				args.v5.asConfig.ucHPDSel = 0;
1068			else
1069				args.v5.asConfig.ucHPDSel = hpd_id + 1;
1070			args.v5.ucDigEncoderSel = 1 << dig_encoder;
1071			args.v5.ucDPLaneSet = lane_set;
 1072			break;
 1073		default:
1074			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1075			break;
1076		}
1077		break;
1078	default:
1079		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1080		break;
1081	}
1082
1083	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1084}
1085
1086bool
1087amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
1088				     int action)
1089{
1090	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1091	struct drm_device *dev = amdgpu_connector->base.dev;
1092	struct amdgpu_device *adev = dev->dev_private;
1093	union dig_transmitter_control args;
1094	int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
1095	uint8_t frev, crev;
1096
1097	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
1098		goto done;
1099
1100	if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
1101	    (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
1102		goto done;
1103
1104	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1105		goto done;
1106
1107	memset(&args, 0, sizeof(args));
1108
1109	args.v1.ucAction = action;
1110
1111	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1112
1113	/* wait for the panel to power up */
1114	if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
1115		int i;
1116
1117		for (i = 0; i < 300; i++) {
1118			if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
1119				return true;
1120			mdelay(1);
1121		}
1122		return false;
1123	}
1124done:
1125	return true;
1126}
1127
1128union external_encoder_control {
1129	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
1130	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
1131};
1132
1133static void
1134amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
1135					struct drm_encoder *ext_encoder,
1136					int action)
1137{
1138	struct drm_device *dev = encoder->dev;
1139	struct amdgpu_device *adev = dev->dev_private;
1140	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1141	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
1142	union external_encoder_control args;
1143	struct drm_connector *connector;
1144	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
1145	u8 frev, crev;
1146	int dp_clock = 0;
1147	int dp_lane_count = 0;
1148	int connector_object_id = 0;
1149	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1150
1151	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1152		connector = amdgpu_get_connector_for_encoder_init(encoder);
1153	else
1154		connector = amdgpu_get_connector_for_encoder(encoder);
1155
1156	if (connector) {
1157		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1158		struct amdgpu_connector_atom_dig *dig_connector =
1159			amdgpu_connector->con_priv;
1160
1161		dp_clock = dig_connector->dp_clock;
1162		dp_lane_count = dig_connector->dp_lane_count;
1163		connector_object_id =
1164			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
1165	}
1166
1167	memset(&args, 0, sizeof(args));
1168
1169	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1170		return;
1171
1172	switch (frev) {
1173	case 1:
1174		/* no params on frev 1 */
1175		break;
1176	case 2:
1177		switch (crev) {
1178		case 1:
1179		case 2:
1180			args.v1.sDigEncoder.ucAction = action;
1181			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1182			args.v1.sDigEncoder.ucEncoderMode =
1183				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1184
1185			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
1186				if (dp_clock == 270000)
1187					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
1188				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
1189			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1190				args.v1.sDigEncoder.ucLaneNum = 8;
1191			else
1192				args.v1.sDigEncoder.ucLaneNum = 4;
1193			break;
1194		case 3:
1195			args.v3.sExtEncoder.ucAction = action;
1196			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1197				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
1198			else
1199				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1200			args.v3.sExtEncoder.ucEncoderMode =
1201				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1202
1203			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
1204				if (dp_clock == 270000)
1205					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
1206				else if (dp_clock == 540000)
1207					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
1208				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
1209			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1210				args.v3.sExtEncoder.ucLaneNum = 8;
1211			else
1212				args.v3.sExtEncoder.ucLaneNum = 4;
1213			switch (ext_enum) {
1214			case GRAPH_OBJECT_ENUM_ID1:
1215				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
1216				break;
1217			case GRAPH_OBJECT_ENUM_ID2:
1218				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
1219				break;
1220			case GRAPH_OBJECT_ENUM_ID3:
1221				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
1222				break;
1223			}
1224			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
1225			break;
1226		default:
1227			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1228			return;
1229		}
1230		break;
1231	default:
1232		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1233		return;
1234	}
1235	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1236}
1237
1238static void
1239amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
1240{
1241	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1242	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1243	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1244	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1245	struct amdgpu_connector *amdgpu_connector = NULL;
1246	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;
1247
1248	if (connector) {
1249		amdgpu_connector = to_amdgpu_connector(connector);
1250		amdgpu_dig_connector = amdgpu_connector->con_priv;
1251	}
1252
1253	if (action == ATOM_ENABLE) {
1254		if (!connector)
1255			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
1256		else
1257			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);
1258
1259		/* setup and enable the encoder */
1260		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
1261		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1262						   ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
1263						   dig->panel_mode);
1264		if (ext_encoder)
1265			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1266								EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
1267		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1268		    connector) {
1269			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1270				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1271								     ATOM_TRANSMITTER_ACTION_POWER_ON);
1272				amdgpu_dig_connector->edp_on = true;
1273			}
1274		}
1275		/* enable the transmitter */
1276		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1277						       ATOM_TRANSMITTER_ACTION_ENABLE,
1278						       0, 0);
1279		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1280		    connector) {
1281			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
1282			amdgpu_atombios_dp_link_train(encoder, connector);
1283			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
1284		}
1285		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1286			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
1287		if (ext_encoder)
1288			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
1289	} else {
1290		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1291		    connector)
1292			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1293							   ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
1294		if (ext_encoder)
1295			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
1296		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1297			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1298							       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
1299
1300		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1301		    connector)
1302			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
1303		/* disable the transmitter */
1304		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1305						       ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
1306		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1307		    connector) {
1308			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1309				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1310								     ATOM_TRANSMITTER_ACTION_POWER_OFF);
1311				amdgpu_dig_connector->edp_on = false;
1312			}
1313		}
1314	}
1315}
1316
1317void
1318amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
1319{
1320	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1321
1322	DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
1323		  amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
1324		  amdgpu_encoder->active_device);
1325	switch (amdgpu_encoder->encoder_id) {
1326	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1327	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1328	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1329	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1330		switch (mode) {
1331		case DRM_MODE_DPMS_ON:
1332			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
1333			break;
1334		case DRM_MODE_DPMS_STANDBY:
1335		case DRM_MODE_DPMS_SUSPEND:
1336		case DRM_MODE_DPMS_OFF:
1337			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
1338			break;
1339		}
1340		break;
1341	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1342		switch (mode) {
1343		case DRM_MODE_DPMS_ON:
1344			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
1345			break;
1346		case DRM_MODE_DPMS_STANDBY:
1347		case DRM_MODE_DPMS_SUSPEND:
1348		case DRM_MODE_DPMS_OFF:
1349			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
1350			break;
1351		}
1352		break;
1353	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1354		switch (mode) {
1355		case DRM_MODE_DPMS_ON:
1356			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
1357			break;
1358		case DRM_MODE_DPMS_STANDBY:
1359		case DRM_MODE_DPMS_SUSPEND:
1360		case DRM_MODE_DPMS_OFF:
1361			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
1362			break;
1363		}
1364		break;
1365	default:
1366		return;
1367	}
1368}
1369
1370union crtc_source_param {
1371	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
1372	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
1373	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
1374};
1375
1376void
1377amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1378{
1379	struct drm_device *dev = encoder->dev;
1380	struct amdgpu_device *adev = dev->dev_private;
1381	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1382	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1383	union crtc_source_param args;
1384	int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
1385	uint8_t frev, crev;
1386	struct amdgpu_encoder_atom_dig *dig;
1387
1388	memset(&args, 0, sizeof(args));
1389
1390	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1391		return;
1392
1393	switch (frev) {
1394	case 1:
1395		switch (crev) {
1396		case 1:
1397		default:
1398			args.v1.ucCRTC = amdgpu_crtc->crtc_id;
1399			switch (amdgpu_encoder->encoder_id) {
1400			case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
1401			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
1402				args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
1403				break;
1404			case ENCODER_OBJECT_ID_INTERNAL_LVDS:
1405			case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
1406				if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
1407					args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
1408				else
1409					args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
1410				break;
1411			case ENCODER_OBJECT_ID_INTERNAL_DVO1:
1412			case ENCODER_OBJECT_ID_INTERNAL_DDI:
1413			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1414				args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
1415				break;
1416			case ENCODER_OBJECT_ID_INTERNAL_DAC1:
1417			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1418				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1419					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1420				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1421					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1422				else
1423					args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
1424				break;
1425			case ENCODER_OBJECT_ID_INTERNAL_DAC2:
1426			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1427				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1428					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1429				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1430					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1431				else
1432					args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
1433				break;
1434			}
1435			break;
1436		case 2:
1437			args.v2.ucCRTC = amdgpu_crtc->crtc_id;
1438			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1439				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1440
1441				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1442					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1443				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1444					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1445				else
1446					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1447			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1448				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1449			} else {
1450				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1451			}
1452			switch (amdgpu_encoder->encoder_id) {
1453			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1454			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1455			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1456			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1457			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1458				dig = amdgpu_encoder->enc_priv;
1459				switch (dig->dig_encoder) {
1460				case 0:
1461					args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1462					break;
1463				case 1:
1464					args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1465					break;
1466				case 2:
1467					args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1468					break;
1469				case 3:
1470					args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1471					break;
1472				case 4:
1473					args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1474					break;
1475				case 5:
1476					args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1477					break;
1478				case 6:
1479					args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1480					break;
1481				}
1482				break;
1483			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1484				args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1485				break;
1486			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1487				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1488					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1489				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1490					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1491				else
1492					args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1493				break;
1494			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1495				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1496					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1497				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1498					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1499				else
1500					args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1501				break;
1502			}
1503			break;
1504		case 3:
1505			args.v3.ucCRTC = amdgpu_crtc->crtc_id;
1506			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1507				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1508
1509				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1510					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1511				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1512					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1513				else
1514					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1515			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1516				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1517			} else {
1518				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1519			}
1520			args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
1521			switch (amdgpu_encoder->encoder_id) {
1522			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1523			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1524			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1525			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1526			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1527				dig = amdgpu_encoder->enc_priv;
1528				switch (dig->dig_encoder) {
1529				case 0:
1530					args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1531					break;
1532				case 1:
1533					args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1534					break;
1535				case 2:
1536					args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1537					break;
1538				case 3:
1539					args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1540					break;
1541				case 4:
1542					args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1543					break;
1544				case 5:
1545					args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1546					break;
1547				case 6:
1548					args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1549					break;
1550				}
1551				break;
1552			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1553				args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1554				break;
1555			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1556				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1557					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1558				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1559					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1560				else
1561					args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1562				break;
1563			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1564				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1565					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1566				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1567					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1568				else
1569					args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1570				break;
1571			}
1572			break;
1573		}
1574		break;
1575	default:
1576		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1577		return;
1578	}
1579
1580	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1581}
1582
1583/* This only needs to be called once at startup */
1584void
1585amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
1586{
1587	struct drm_device *dev = adev->ddev;
1588	struct drm_encoder *encoder;
1589
1590	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1591		struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1592		struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1593
1594		switch (amdgpu_encoder->encoder_id) {
1595		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1596		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1597		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1598		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1599			amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
1600							       0, 0);
1601			break;
1602		}
1603
1604		if (ext_encoder)
1605			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1606								EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
1607	}
1608}
1609
1610static bool
1611amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
1612				 struct drm_connector *connector)
1613{
1614	struct drm_device *dev = encoder->dev;
1615	struct amdgpu_device *adev = dev->dev_private;
1616	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1617	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1618
1619	if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
1620				       ATOM_DEVICE_CV_SUPPORT |
1621				       ATOM_DEVICE_CRT_SUPPORT)) {
1622		DAC_LOAD_DETECTION_PS_ALLOCATION args;
1623		int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
1624		uint8_t frev, crev;
1625
1626		memset(&args, 0, sizeof(args));
1627
1628		if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1629			return false;
1630
1631		args.sDacload.ucMisc = 0;
1632
1633		if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
1634		    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
1635			args.sDacload.ucDacType = ATOM_DAC_A;
1636		else
1637			args.sDacload.ucDacType = ATOM_DAC_B;
1638
1639		if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
1640			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
1641		else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
1642			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
1643		else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1644			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
1645			if (crev >= 3)
1646				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1647		} else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1648			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
1649			if (crev >= 3)
1650				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1651		}
1652
1653		amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1654
1655		return true;
1656	} else
1657		return false;
1658}
1659
1660enum drm_connector_status
1661amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
1662			    struct drm_connector *connector)
1663{
1664	struct drm_device *dev = encoder->dev;
1665	struct amdgpu_device *adev = dev->dev_private;
1666	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1667	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1668	uint32_t bios_0_scratch;
1669
1670	if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
1671		DRM_DEBUG_KMS("detect returned false\n");
1672		return connector_status_unknown;
1673	}
1674
1675	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1676
1677	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1678	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1679		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1680			return connector_status_connected;
1681	}
1682	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1683		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1684			return connector_status_connected;
1685	}
1686	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1687		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1688			return connector_status_connected;
1689	}
1690	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1691		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1692			return connector_status_connected; /* CTV */
1693		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1694			return connector_status_connected; /* STV */
1695	}
1696	return connector_status_disconnected;
1697}
1698
1699enum drm_connector_status
1700amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
1701			    struct drm_connector *connector)
1702{
1703	struct drm_device *dev = encoder->dev;
1704	struct amdgpu_device *adev = dev->dev_private;
1705	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1706	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1707	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1708	u32 bios_0_scratch;
1709
1710	if (!ext_encoder)
1711		return connector_status_unknown;
1712
1713	if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
1714		return connector_status_unknown;
1715
1716	/* load detect on the dp bridge */
1717	amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1718						EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
1719
1720	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1721
1722	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1723	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1724		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1725			return connector_status_connected;
1726	}
1727	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1728		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1729			return connector_status_connected;
1730	}
1731	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1732		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1733			return connector_status_connected;
1734	}
1735	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1736		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1737			return connector_status_connected; /* CTV */
1738		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1739			return connector_status_connected; /* STV */
1740	}
1741	return connector_status_disconnected;
1742}
1743
1744void
1745amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
1746{
1747	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1748
1749	if (ext_encoder)
1750		/* ddc_setup on the dp bridge */
1751		amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1752							EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
1753
1754}
1755
1756void
1757amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
1758				       struct drm_encoder *encoder,
1759				       bool connected)
1760{
1761	struct drm_device *dev = connector->dev;
1762	struct amdgpu_device *adev = dev->dev_private;
1763	struct amdgpu_connector *amdgpu_connector =
1764	    to_amdgpu_connector(connector);
1765	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1766	uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
1767
1768	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1769	bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
1770	bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
1771
1772	if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
1773	    (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
1774		if (connected) {
1775			DRM_DEBUG_KMS("LCD1 connected\n");
1776			bios_0_scratch |= ATOM_S0_LCD1;
1777			bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
1778			bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
1779		} else {
1780			DRM_DEBUG_KMS("LCD1 disconnected\n");
1781			bios_0_scratch &= ~ATOM_S0_LCD1;
1782			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
1783			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
1784		}
1785	}
1786	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
1787	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
1788		if (connected) {
1789			DRM_DEBUG_KMS("CRT1 connected\n");
1790			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
1791			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
1792			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
1793		} else {
1794			DRM_DEBUG_KMS("CRT1 disconnected\n");
1795			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
1796			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
1797			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
1798		}
1799	}
1800	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
1801	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
1802		if (connected) {
1803			DRM_DEBUG_KMS("CRT2 connected\n");
1804			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
1805			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
1806			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
1807		} else {
1808			DRM_DEBUG_KMS("CRT2 disconnected\n");
1809			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
1810			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
1811			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
1812		}
1813	}
1814	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
1815	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
1816		if (connected) {
1817			DRM_DEBUG_KMS("DFP1 connected\n");
1818			bios_0_scratch |= ATOM_S0_DFP1;
1819			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
1820			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
1821		} else {
1822			DRM_DEBUG_KMS("DFP1 disconnected\n");
1823			bios_0_scratch &= ~ATOM_S0_DFP1;
1824			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
1825			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
1826		}
1827	}
1828	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
1829	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
1830		if (connected) {
1831			DRM_DEBUG_KMS("DFP2 connected\n");
1832			bios_0_scratch |= ATOM_S0_DFP2;
1833			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
1834			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
1835		} else {
1836			DRM_DEBUG_KMS("DFP2 disconnected\n");
1837			bios_0_scratch &= ~ATOM_S0_DFP2;
1838			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
1839			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
1840		}
1841	}
1842	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
1843	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
1844		if (connected) {
1845			DRM_DEBUG_KMS("DFP3 connected\n");
1846			bios_0_scratch |= ATOM_S0_DFP3;
1847			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
1848			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
1849		} else {
1850			DRM_DEBUG_KMS("DFP3 disconnected\n");
1851			bios_0_scratch &= ~ATOM_S0_DFP3;
1852			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
1853			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
1854		}
1855	}
1856	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
1857	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
1858		if (connected) {
1859			DRM_DEBUG_KMS("DFP4 connected\n");
1860			bios_0_scratch |= ATOM_S0_DFP4;
1861			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
1862			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
1863		} else {
1864			DRM_DEBUG_KMS("DFP4 disconnected\n");
1865			bios_0_scratch &= ~ATOM_S0_DFP4;
1866			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
1867			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
1868		}
1869	}
1870	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
1871	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
1872		if (connected) {
1873			DRM_DEBUG_KMS("DFP5 connected\n");
1874			bios_0_scratch |= ATOM_S0_DFP5;
1875			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
1876			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
1877		} else {
1878			DRM_DEBUG_KMS("DFP5 disconnected\n");
1879			bios_0_scratch &= ~ATOM_S0_DFP5;
1880			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
1881			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
1882		}
1883	}
1884	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
1885	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
1886		if (connected) {
1887			DRM_DEBUG_KMS("DFP6 connected\n");
1888			bios_0_scratch |= ATOM_S0_DFP6;
1889			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
1890			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
1891		} else {
1892			DRM_DEBUG_KMS("DFP6 disconnected\n");
1893			bios_0_scratch &= ~ATOM_S0_DFP6;
1894			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
1895			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
1896		}
1897	}
1898
1899	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
1900	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
1901	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
1902}
1903
1904union lvds_info {
1905	struct _ATOM_LVDS_INFO info;
1906	struct _ATOM_LVDS_INFO_V12 info_12;
1907};
1908
1909struct amdgpu_encoder_atom_dig *
1910amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
1911{
1912	struct drm_device *dev = encoder->base.dev;
1913	struct amdgpu_device *adev = dev->dev_private;
1914	struct amdgpu_mode_info *mode_info = &adev->mode_info;
1915	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
1916	uint16_t data_offset, misc;
1917	union lvds_info *lvds_info;
1918	uint8_t frev, crev;
1919	struct amdgpu_encoder_atom_dig *lvds = NULL;
1920	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1921
1922	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
1923				   &frev, &crev, &data_offset)) {
1924		lvds_info =
1925			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
1926		lvds =
1927		    kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
1928
1929		if (!lvds)
1930			return NULL;
1931
1932		lvds->native_mode.clock =
1933		    le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
1934		lvds->native_mode.hdisplay =
1935		    le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
1936		lvds->native_mode.vdisplay =
1937		    le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
1938		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
1939			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
1940		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
1941			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
1942		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
1943			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
1944		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
1945			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
1946		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
1947			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
1948		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
1949			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
1950		lvds->panel_pwr_delay =
1951		    le16_to_cpu(lvds_info->info.usOffDelayInMs);
1952		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
1953
1954		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
1955		if (misc & ATOM_VSYNC_POLARITY)
1956			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
1957		if (misc & ATOM_HSYNC_POLARITY)
1958			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
1959		if (misc & ATOM_COMPOSITESYNC)
1960			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
1961		if (misc & ATOM_INTERLACE)
1962			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
1963		if (misc & ATOM_DOUBLE_CLOCK_MODE)
1964			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
1965
1966		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
1967		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
1968
1969		/* set crtc values */
1970		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
1971
1972		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
1973
1974		encoder->native_mode = lvds->native_mode;
1975
1976		if (encoder_enum == 2)
1977			lvds->linkb = true;
1978		else
1979			lvds->linkb = false;
1980
1981		/* parse the lcd record table */
1982		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
1983			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
1984			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
1985			bool bad_record = false;
1986			u8 *record;
1987
1988			if ((frev == 1) && (crev < 2))
1989				/* absolute */
1990				record = (u8 *)(mode_info->atom_context->bios +
1991						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
1992			else
1993				/* relative */
1994				record = (u8 *)(mode_info->atom_context->bios +
1995						data_offset +
1996						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
1997			while (*record != ATOM_RECORD_END_TYPE) {
1998				switch (*record) {
1999				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
2000					record += sizeof(ATOM_PATCH_RECORD_MODE);
2001					break;
2002				case LCD_RTS_RECORD_TYPE:
2003					record += sizeof(ATOM_LCD_RTS_RECORD);
2004					break;
2005				case LCD_CAP_RECORD_TYPE:
2006					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
2007					break;
2008				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
2009					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
2010					if (fake_edid_record->ucFakeEDIDLength) {
2011						struct edid *edid;
2012						int edid_size =
2013							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
2014						edid = kmalloc(edid_size, GFP_KERNEL);
2015						if (edid) {
2016							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
2017							       fake_edid_record->ucFakeEDIDLength);
2018
2019							if (drm_edid_is_valid(edid)) {
2020								adev->mode_info.bios_hardcoded_edid = edid;
2021								adev->mode_info.bios_hardcoded_edid_size = edid_size;
2022							} else
2023								kfree(edid);
2024						}
2025					}
2026					record += fake_edid_record->ucFakeEDIDLength ?
2027						fake_edid_record->ucFakeEDIDLength + 2 :
2028						sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
2029					break;
2030				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
2031					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
2032					lvds->native_mode.width_mm = panel_res_record->usHSize;
2033					lvds->native_mode.height_mm = panel_res_record->usVSize;
2034					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
2035					break;
2036				default:
2037					DRM_ERROR("Bad LCD record %d\n", *record);
2038					bad_record = true;
2039					break;
2040				}
2041				if (bad_record)
2042					break;
2043			}
2044		}
2045	}
2046	return lvds;
2047}
2048
2049struct amdgpu_encoder_atom_dig *
2050amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
2051{
2052	int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2053	struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2054
2055	if (!dig)
2056		return NULL;
2057
2058	/* coherent mode by default */
2059	dig->coherent_mode = true;
2060	dig->dig_encoder = -1;
2061
2062	if (encoder_enum == 2)
2063		dig->linkb = true;
2064	else
2065		dig->linkb = false;
2066
2067	return dig;
2068}
2069
v5.14.15
  26
  27#include <linux/pci.h>
  28
  29#include <drm/drm_crtc_helper.h>
  30#include <drm/amdgpu_drm.h>
  31#include "amdgpu.h"
  32#include "amdgpu_connectors.h"
  33#include "amdgpu_display.h"
  34#include "atom.h"
  35#include "atombios_encoders.h"
  36#include "atombios_dp.h"
  37#include <linux/backlight.h>
  38#include "bif/bif_4_1_d.h"
  39
  40u8
  41amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
  42{
  43	u8 backlight_level;
  44	u32 bios_2_scratch;
  45
  46	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  47
  48	backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
  49			   ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
  50
  51	return backlight_level;
  52}
  53
  54void
  55amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
  56					    u8 backlight_level)
  57{
  58	u32 bios_2_scratch;
  59
  60	bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
  61
  62	bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
  63	bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
  64			   ATOM_S2_CURRENT_BL_LEVEL_MASK);
  65
  66	WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
  67}
  68
  69u8
  70amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
  71{
  72	struct drm_device *dev = amdgpu_encoder->base.dev;
  73	struct amdgpu_device *adev = drm_to_adev(dev);
  74
  75	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  76		return 0;
  77
  78	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
  79}
  80
  81void
  82amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
  83				     u8 level)
  84{
  85	struct drm_encoder *encoder = &amdgpu_encoder->base;
  86	struct drm_device *dev = amdgpu_encoder->base.dev;
  87	struct amdgpu_device *adev = drm_to_adev(dev);
  88	struct amdgpu_encoder_atom_dig *dig;
  89
  90	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
  91		return;
  92
  93	if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
  94	    amdgpu_encoder->enc_priv) {
  95		dig = amdgpu_encoder->enc_priv;
  96		dig->backlight_level = level;
  97		amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
  98
  99		switch (amdgpu_encoder->encoder_id) {
 100		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 101		case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 102		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 103		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 104		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 105			if (dig->backlight_level == 0)
 106				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 107								       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
 108			else {
 109				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 110								       ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
 111				amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
 112								       ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
 113			}
 114			break;
 115		default:
 116			break;
 117		}
 118	}
 119}
 120
 121#if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
 122
 123static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
 124{
 125	u8 level;
 126
 127	/* Convert brightness to hardware level */
 128	if (bd->props.brightness < 0)
 129		level = 0;
 130	else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
 131		level = AMDGPU_MAX_BL_LEVEL;
 132	else
 133		level = bd->props.brightness;
 134
 135	return level;
 136}
 137
 138static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
 139{
 140	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
 141	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
 142
 143	amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
 144					     amdgpu_atombios_encoder_backlight_level(bd));
 145
 146	return 0;
 147}
 148
 149static int
 150amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
 151{
 152	struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
 153	struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
 154	struct drm_device *dev = amdgpu_encoder->base.dev;
 155	struct amdgpu_device *adev = drm_to_adev(dev);
 156
 157	return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
 158}
 159
 160static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
 161	.get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
 162	.update_status	= amdgpu_atombios_encoder_update_backlight_status,
 163};
 164
 165void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
 166				     struct drm_connector *drm_connector)
 167{
 168	struct drm_device *dev = amdgpu_encoder->base.dev;
 169	struct amdgpu_device *adev = drm_to_adev(dev);
 170	struct backlight_device *bd;
 171	struct backlight_properties props;
 172	struct amdgpu_backlight_privdata *pdata;
 173	struct amdgpu_encoder_atom_dig *dig;
 174	char bl_name[16];
 175
 176	/* Mac laptops with multiple GPUs use the gmux driver for backlight
 177	 * so don't register a backlight device
 178	 */
 179	if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
 180	    (adev->pdev->device == 0x6741))
 181		return;
 182
 183	if (!amdgpu_encoder->enc_priv)
 184		return;
 185
 186	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
 187		return;
 188
 189	pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
 190	if (!pdata) {
 191		DRM_ERROR("Memory allocation failed\n");
 192		goto error;
 193	}
 194
 195	memset(&props, 0, sizeof(props));
 196	props.max_brightness = AMDGPU_MAX_BL_LEVEL;
 197	props.type = BACKLIGHT_RAW;
 198	snprintf(bl_name, sizeof(bl_name),
 199		 "amdgpu_bl%d", dev->primary->index);
 200	bd = backlight_device_register(bl_name, drm_connector->kdev,
 201				       pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
 202	if (IS_ERR(bd)) {
 203		DRM_ERROR("Backlight registration failed\n");
 204		goto error;
 205	}
 206
 207	pdata->encoder = amdgpu_encoder;
 208
 209	dig = amdgpu_encoder->enc_priv;
 210	dig->bl_dev = bd;
 211
 212	bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
 213	bd->props.power = FB_BLANK_UNBLANK;
 214	backlight_update_status(bd);
 215
 216	DRM_INFO("amdgpu atom DIG backlight initialized\n");
 217
 218	return;
 219
 220error:
 221	kfree(pdata);
 222	return;
 223}
 224
 225void
 226amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
 227{
 228	struct drm_device *dev = amdgpu_encoder->base.dev;
 229	struct amdgpu_device *adev = drm_to_adev(dev);
 230	struct backlight_device *bd = NULL;
 231	struct amdgpu_encoder_atom_dig *dig;
 232
 233	if (!amdgpu_encoder->enc_priv)
 234		return;
 235
 236	if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
 237		return;
 238
 239	dig = amdgpu_encoder->enc_priv;
 240	bd = dig->bl_dev;
 241	dig->bl_dev = NULL;
 242
 243	if (bd) {
  244		struct amdgpu_backlight_privdata *pdata;
 245
 246		pdata = bl_get_data(bd);
 247		backlight_device_unregister(bd);
 248		kfree(pdata);
 249
 250		DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
 251	}
 252}
 253
 254#else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
 255
  256void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder, struct drm_connector *drm_connector)
 257{
 258}
 259
 260void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
 261{
 262}
 263
 264#endif
 265
 266bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
 267{
 268	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 269	switch (amdgpu_encoder->encoder_id) {
 270	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
 271	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 272	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 273	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 274	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 275		return true;
 276	default:
 277		return false;
 278	}
 279}
 280
 281bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
 282				 const struct drm_display_mode *mode,
 283				 struct drm_display_mode *adjusted_mode)
 284{
 285	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 286
 287	/* set the active encoder to connector routing */
 288	amdgpu_encoder_set_active_device(encoder);
 289	drm_mode_set_crtcinfo(adjusted_mode, 0);
 290
 291	/* hw bug */
 292	if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
 293	    && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
 294		adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
 295
 296	/* vertical FP must be at least 1 */
 297	if (mode->crtc_vsync_start == mode->crtc_vdisplay)
 298		adjusted_mode->crtc_vsync_start++;
 299
 300	/* get the native mode for scaling */
 301	if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
 302		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
 303	else if (amdgpu_encoder->rmx_type != RMX_OFF)
 304		amdgpu_panel_mode_fixup(encoder, adjusted_mode);
 305
 306	if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
 307	    (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
 308		struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
 309		amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
 310	}
 311
 312	return true;
 313}
 314
 315static void
 316amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
 317{
 318	struct drm_device *dev = encoder->dev;
 319	struct amdgpu_device *adev = drm_to_adev(dev);
 320	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 321	DAC_ENCODER_CONTROL_PS_ALLOCATION args;
 322	int index = 0;
 323
 324	memset(&args, 0, sizeof(args));
 325
 326	switch (amdgpu_encoder->encoder_id) {
 327	case ENCODER_OBJECT_ID_INTERNAL_DAC1:
 328	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
 329		index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
 330		break;
 331	case ENCODER_OBJECT_ID_INTERNAL_DAC2:
 332	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
 333		index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
 334		break;
 335	}
 336
 337	args.ucAction = action;
 338	args.ucDacStandard = ATOM_DAC1_PS2;
 339	args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 340
 341	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 342
 343}
 344
 345static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
 346{
 347	int bpc = 8;
 348
 349	if (encoder->crtc) {
 350		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
 351		bpc = amdgpu_crtc->bpc;
 352	}
 353
 354	switch (bpc) {
 355	case 0:
 356		return PANEL_BPC_UNDEFINE;
 357	case 6:
 358		return PANEL_6BIT_PER_COLOR;
 359	case 8:
 360	default:
 361		return PANEL_8BIT_PER_COLOR;
 362	case 10:
 363		return PANEL_10BIT_PER_COLOR;
 364	case 12:
 365		return PANEL_12BIT_PER_COLOR;
 366	case 16:
 367		return PANEL_16BIT_PER_COLOR;
 368	}
 369}
 370
 371union dvo_encoder_control {
 372	ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
 373	DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
 374	DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
 375	DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
 376};
 377
 378static void
 379amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
 380{
 381	struct drm_device *dev = encoder->dev;
 382	struct amdgpu_device *adev = drm_to_adev(dev);
 383	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 384	union dvo_encoder_control args;
 385	int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
 386	uint8_t frev, crev;
 387
 388	memset(&args, 0, sizeof(args));
 389
 390	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 391		return;
 392
 393	switch (frev) {
 394	case 1:
 395		switch (crev) {
 396		case 1:
 397			/* R4xx, R5xx */
 398			args.ext_tmds.sXTmdsEncoder.ucEnable = action;
 399
 400			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 401				args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
 402
 403			args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
 404			break;
 405		case 2:
 406			/* RS600/690/740 */
 407			args.dvo.sDVOEncoder.ucAction = action;
 408			args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 409			/* DFP1, CRT1, TV1 depending on the type of port */
 410			args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
 411
 412			if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 413				args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
 414			break;
 415		case 3:
 416			/* R6xx */
 417			args.dvo_v3.ucAction = action;
 418			args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 419			args.dvo_v3.ucDVOConfig = 0; /* XXX */
 420			break;
 421		case 4:
 422			/* DCE8 */
 423			args.dvo_v4.ucAction = action;
 424			args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 425			args.dvo_v4.ucDVOConfig = 0; /* XXX */
 426			args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 427			break;
 428		default:
 429			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 430			break;
 431		}
 432		break;
 433	default:
 434		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 435		break;
 436	}
 437
 438	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 439}
 440
 441int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
 442{
 443	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 444	struct drm_connector *connector;
 445	struct amdgpu_connector *amdgpu_connector;
 446	struct amdgpu_connector_atom_dig *dig_connector;
 447
 448	/* dp bridges are always DP */
 449	if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
 450		return ATOM_ENCODER_MODE_DP;
 451
 452	/* DVO is always DVO */
 453	if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
 454	    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
 455		return ATOM_ENCODER_MODE_DVO;
 456
 457	connector = amdgpu_get_connector_for_encoder(encoder);
 458	/* if we don't have an active device yet, just use one of
 459	 * the connectors tied to the encoder.
 460	 */
 461	if (!connector)
 462		connector = amdgpu_get_connector_for_encoder_init(encoder);
 463	amdgpu_connector = to_amdgpu_connector(connector);
 464
 465	switch (connector->connector_type) {
 466	case DRM_MODE_CONNECTOR_DVII:
 467	case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
 468		if (amdgpu_audio != 0) {
 469			if (amdgpu_connector->use_digital &&
 470			    (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
 471				return ATOM_ENCODER_MODE_HDMI;
 472			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
 473				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 474				return ATOM_ENCODER_MODE_HDMI;
 475			else if (amdgpu_connector->use_digital)
 476				return ATOM_ENCODER_MODE_DVI;
 477			else
 478				return ATOM_ENCODER_MODE_CRT;
 479		} else if (amdgpu_connector->use_digital) {
 480			return ATOM_ENCODER_MODE_DVI;
 481		} else {
 482			return ATOM_ENCODER_MODE_CRT;
 483		}
 484		break;
 485	case DRM_MODE_CONNECTOR_DVID:
 486	case DRM_MODE_CONNECTOR_HDMIA:
 487	default:
 488		if (amdgpu_audio != 0) {
 489			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
 490				return ATOM_ENCODER_MODE_HDMI;
 491			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
 492				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 493				return ATOM_ENCODER_MODE_HDMI;
 494			else
 495				return ATOM_ENCODER_MODE_DVI;
 496		} else {
 497			return ATOM_ENCODER_MODE_DVI;
 498		}
 499	case DRM_MODE_CONNECTOR_LVDS:
 500		return ATOM_ENCODER_MODE_LVDS;
 501	case DRM_MODE_CONNECTOR_DisplayPort:
 502		dig_connector = amdgpu_connector->con_priv;
 503		if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
 504		    (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
 505			return ATOM_ENCODER_MODE_DP;
 506		} else if (amdgpu_audio != 0) {
 507			if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
 508				return ATOM_ENCODER_MODE_HDMI;
 509			else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
 510				 (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
 511				return ATOM_ENCODER_MODE_HDMI;
 512			else
 513				return ATOM_ENCODER_MODE_DVI;
 514		} else {
 515			return ATOM_ENCODER_MODE_DVI;
 516		}
 517	case DRM_MODE_CONNECTOR_eDP:
 518		return ATOM_ENCODER_MODE_DP;
 519	case DRM_MODE_CONNECTOR_DVIA:
 520	case DRM_MODE_CONNECTOR_VGA:
 521		return ATOM_ENCODER_MODE_CRT;
 522	case DRM_MODE_CONNECTOR_Composite:
 523	case DRM_MODE_CONNECTOR_SVIDEO:
 524	case DRM_MODE_CONNECTOR_9PinDIN:
 525		/* fix me */
 526		return ATOM_ENCODER_MODE_TV;
 527	}
 528}
 529
 530/*
 531 * DIG Encoder/Transmitter Setup
 532 *
 533 * DCE 6.0
 534 * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
 535 * Supports up to 6 digital outputs
 536 * - 6 DIG encoder blocks.
 537 * - DIG to PHY mapping is hardcoded
 538 * DIG1 drives UNIPHY0 link A, A+B
 539 * DIG2 drives UNIPHY0 link B
 540 * DIG3 drives UNIPHY1 link A, A+B
 541 * DIG4 drives UNIPHY1 link B
 542 * DIG5 drives UNIPHY2 link A, A+B
 543 * DIG6 drives UNIPHY2 link B
 544 *
 545 * Routing
 546 * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
 547 * Examples:
 548 * crtc0 -> dig2 -> LVTMA   links A+B -> TMDS/HDMI
 549 * crtc1 -> dig1 -> UNIPHY0 link  B   -> DP
 550 * crtc0 -> dig1 -> UNIPHY2 link  A   -> LVDS
 551 * crtc1 -> dig2 -> UNIPHY1 link  B+A -> TMDS/HDMI
 552 */
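/*
 * Illustrative sketch only (not part of the driver): the hardcoded DCE 6.0
 * DIG-to-PHY routing described above, restated as a lookup table.  The
 * struct dce6_dig_route and the uniphy_block/link_b fields are hypothetical
 * names used purely for clarity; the real routing is programmed through the
 * DIGxEncoderControl and UNIPHYTransmitterControl ATOM command tables below.
 */
struct dce6_dig_route {
	int uniphy_block;	/* 0 = UNIPHY0, 1 = UNIPHY1, 2 = UNIPHY2 */
	bool link_b;		/* false = link A (or A+B dual link), true = link B */
};

static const struct dce6_dig_route dce6_dig_routes[] = {
	{ 0, false },	/* DIG1 -> UNIPHY0 link A (A+B) */
	{ 0, true  },	/* DIG2 -> UNIPHY0 link B */
	{ 1, false },	/* DIG3 -> UNIPHY1 link A (A+B) */
	{ 1, true  },	/* DIG4 -> UNIPHY1 link B */
	{ 2, false },	/* DIG5 -> UNIPHY2 link A (A+B) */
	{ 2, true  },	/* DIG6 -> UNIPHY2 link B */
};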
 553
 554union dig_encoder_control {
 555	DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
 556	DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
 557	DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
 558	DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
 559	DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
 560};
 561
 562void
 563amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
 564				   int action, int panel_mode)
 565{
 566	struct drm_device *dev = encoder->dev;
 567	struct amdgpu_device *adev = drm_to_adev(dev);
 568	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 569	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
 570	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
 571	union dig_encoder_control args;
 572	int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
 573	uint8_t frev, crev;
 574	int dp_clock = 0;
 575	int dp_lane_count = 0;
 576	int hpd_id = AMDGPU_HPD_NONE;
 577
 578	if (connector) {
 579		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
 580		struct amdgpu_connector_atom_dig *dig_connector =
 581			amdgpu_connector->con_priv;
 582
 583		dp_clock = dig_connector->dp_clock;
 584		dp_lane_count = dig_connector->dp_lane_count;
 585		hpd_id = amdgpu_connector->hpd.hpd;
 586	}
 587
 588	/* no dig encoder assigned */
 589	if (dig->dig_encoder == -1)
 590		return;
 591
 592	memset(&args, 0, sizeof(args));
 593
 594	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 595		return;
 596
 597	switch (frev) {
 598	case 1:
 599		switch (crev) {
 600		case 1:
 601			args.v1.ucAction = action;
 602			args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 603			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 604				args.v3.ucPanelMode = panel_mode;
 605			else
 606				args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 607
 608			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
 609				args.v1.ucLaneNum = dp_lane_count;
 610			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 611				args.v1.ucLaneNum = 8;
 612			else
 613				args.v1.ucLaneNum = 4;
 614
 615			if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
 616				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
 617			switch (amdgpu_encoder->encoder_id) {
 618			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 619				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
 620				break;
 621			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 622			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 623				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
 624				break;
 625			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 626				args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
 627				break;
 628			}
 629			if (dig->linkb)
 630				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
 631			else
 632				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
 633			break;
 634		case 2:
 635		case 3:
 636			args.v3.ucAction = action;
 637			args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 638			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 639				args.v3.ucPanelMode = panel_mode;
 640			else
 641				args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 642
 643			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
 644				args.v3.ucLaneNum = dp_lane_count;
 645			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 646				args.v3.ucLaneNum = 8;
 647			else
 648				args.v3.ucLaneNum = 4;
 649
 650			if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
 651				args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
 652			args.v3.acConfig.ucDigSel = dig->dig_encoder;
 653			args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 654			break;
 655		case 4:
 656			args.v4.ucAction = action;
 657			args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 658			if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
 659				args.v4.ucPanelMode = panel_mode;
 660			else
 661				args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 662
 663			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
 664				args.v4.ucLaneNum = dp_lane_count;
 665			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 666				args.v4.ucLaneNum = 8;
 667			else
 668				args.v4.ucLaneNum = 4;
 669
 670			if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
 671				if (dp_clock == 540000)
 672					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
 673				else if (dp_clock == 324000)
 674					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
 675				else if (dp_clock == 270000)
 676					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
 677				else
 678					args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
 679			}
 680			args.v4.acConfig.ucDigSel = dig->dig_encoder;
 681			args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
 682			if (hpd_id == AMDGPU_HPD_NONE)
 683				args.v4.ucHPD_ID = 0;
 684			else
 685				args.v4.ucHPD_ID = hpd_id + 1;
 686			break;
 687		case 5:
 688			switch (action) {
 689			case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
 690				args.v5.asDPPanelModeParam.ucAction = action;
 691				args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
 692				args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
 693				break;
 694			case ATOM_ENCODER_CMD_STREAM_SETUP:
 695				args.v5.asStreamParam.ucAction = action;
 696				args.v5.asStreamParam.ucDigId = dig->dig_encoder;
 697				args.v5.asStreamParam.ucDigMode =
 698					amdgpu_atombios_encoder_get_encoder_mode(encoder);
 699				if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
 700					args.v5.asStreamParam.ucLaneNum = dp_lane_count;
 701				else if (amdgpu_dig_monitor_is_duallink(encoder,
 702									amdgpu_encoder->pixel_clock))
 703					args.v5.asStreamParam.ucLaneNum = 8;
 704				else
 705					args.v5.asStreamParam.ucLaneNum = 4;
 706				args.v5.asStreamParam.ulPixelClock =
 707					cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
 708				args.v5.asStreamParam.ucBitPerColor =
 709					amdgpu_atombios_encoder_get_bpc(encoder);
 710				args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
 711				break;
 712			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
 713			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
 714			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
 715			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
 716			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
 717			case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
 718			case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
 719			case ATOM_ENCODER_CMD_DP_VIDEO_ON:
 720				args.v5.asCmdParam.ucAction = action;
 721				args.v5.asCmdParam.ucDigId = dig->dig_encoder;
 722				break;
 723			default:
 724				DRM_ERROR("Unsupported action 0x%x\n", action);
 725				break;
 726			}
 727			break;
 728		default:
 729			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 730			break;
 731		}
 732		break;
 733	default:
 734		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
 735		break;
 736	}
 737
 738	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
 739
 740}
 741
 742union dig_transmitter_control {
 743	DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
 744	DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
 745	DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
 746	DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
 747	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
 748	DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
 749};
 750
 751void
 752amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
 753					      uint8_t lane_num, uint8_t lane_set)
 754{
 755	struct drm_device *dev = encoder->dev;
 756	struct amdgpu_device *adev = drm_to_adev(dev);
 757	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
 758	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
 759	struct drm_connector *connector;
 760	union dig_transmitter_control args;
 761	int index = 0;
 762	uint8_t frev, crev;
 763	bool is_dp = false;
 764	int pll_id = 0;
 765	int dp_clock = 0;
 766	int dp_lane_count = 0;
 767	int connector_object_id = 0;
 768	int igp_lane_info = 0;
 769	int dig_encoder = dig->dig_encoder;
 770	int hpd_id = AMDGPU_HPD_NONE;
 771
 772	if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 773		connector = amdgpu_get_connector_for_encoder_init(encoder);
 774		/* just needed to avoid bailing in the encoder check.  the encoder
 775		 * isn't used for init
 776		 */
 777		dig_encoder = 0;
 778	} else
 779		connector = amdgpu_get_connector_for_encoder(encoder);
 780
 781	if (connector) {
 782		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
 783		struct amdgpu_connector_atom_dig *dig_connector =
 784			amdgpu_connector->con_priv;
 785
 786		hpd_id = amdgpu_connector->hpd.hpd;
 787		dp_clock = dig_connector->dp_clock;
 788		dp_lane_count = dig_connector->dp_lane_count;
 789		connector_object_id =
 790			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
 791	}
 792
 793	if (encoder->crtc) {
 794		struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
 795		pll_id = amdgpu_crtc->pll_id;
 796	}
 797
 798	/* no dig encoder assigned */
 799	if (dig_encoder == -1)
 800		return;
 801
 802	if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
 803		is_dp = true;
 804
 805	memset(&args, 0, sizeof(args));
 806
 807	switch (amdgpu_encoder->encoder_id) {
 808	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
 809		index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
 810		break;
 811	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 812	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 813	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 814	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
 815		index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
 816		break;
 817	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
 818		index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
 819		break;
 820	}
 821
 822	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
 823		return;
 824
 825	switch (frev) {
 826	case 1:
 827		switch (crev) {
 828		case 1:
 829			args.v1.ucAction = action;
 830			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 831				args.v1.usInitInfo = cpu_to_le16(connector_object_id);
 832			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 833				args.v1.asMode.ucLaneSel = lane_num;
 834				args.v1.asMode.ucLaneSet = lane_set;
 835			} else {
 836				if (is_dp)
 837					args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
 838				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 839					args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 840				else
 841					args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 842			}
 843
 844			args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
 845
 846			if (dig_encoder)
 847				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
 848			else
 849				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
 850
 851			if ((adev->flags & AMD_IS_APU) &&
 852			    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
 853				if (is_dp ||
 854				    !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
 855					if (igp_lane_info & 0x1)
 856						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
 857					else if (igp_lane_info & 0x2)
 858						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
 859					else if (igp_lane_info & 0x4)
 860						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
 861					else if (igp_lane_info & 0x8)
 862						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
 863				} else {
 864					if (igp_lane_info & 0x3)
 865						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
 866					else if (igp_lane_info & 0xc)
 867						args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
 868				}
 869			}
 870
 871			if (dig->linkb)
 872				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
 873			else
 874				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
 875
 876			if (is_dp)
 877				args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
 878			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 879				if (dig->coherent_mode)
 880					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
 881				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 882					args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
 883			}
 884			break;
 885		case 2:
 886			args.v2.ucAction = action;
 887			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 888				args.v2.usInitInfo = cpu_to_le16(connector_object_id);
 889			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 890				args.v2.asMode.ucLaneSel = lane_num;
 891				args.v2.asMode.ucLaneSet = lane_set;
 892			} else {
 893				if (is_dp)
 894					args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
 895				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 896					args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 897				else
 898					args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 899			}
 900
 901			args.v2.acConfig.ucEncoderSel = dig_encoder;
 902			if (dig->linkb)
 903				args.v2.acConfig.ucLinkSel = 1;
 904
 905			switch (amdgpu_encoder->encoder_id) {
 906			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 907				args.v2.acConfig.ucTransmitterSel = 0;
 908				break;
 909			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 910				args.v2.acConfig.ucTransmitterSel = 1;
 911				break;
 912			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 913				args.v2.acConfig.ucTransmitterSel = 2;
 914				break;
 915			}
 916
 917			if (is_dp) {
 918				args.v2.acConfig.fCoherentMode = 1;
 919				args.v2.acConfig.fDPConnector = 1;
 920			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 921				if (dig->coherent_mode)
 922					args.v2.acConfig.fCoherentMode = 1;
 923				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 924					args.v2.acConfig.fDualLinkConnector = 1;
 925			}
 926			break;
 927		case 3:
 928			args.v3.ucAction = action;
 929			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 930				args.v3.usInitInfo = cpu_to_le16(connector_object_id);
 931			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 932				args.v3.asMode.ucLaneSel = lane_num;
 933				args.v3.asMode.ucLaneSet = lane_set;
 934			} else {
 935				if (is_dp)
 936					args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
 937				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 938					args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 939				else
 940					args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
 941			}
 942
 943			if (is_dp)
 944				args.v3.ucLaneNum = dp_lane_count;
 945			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 946				args.v3.ucLaneNum = 8;
 947			else
 948				args.v3.ucLaneNum = 4;
 949
 950			if (dig->linkb)
 951				args.v3.acConfig.ucLinkSel = 1;
 952			if (dig_encoder & 1)
 953				args.v3.acConfig.ucEncoderSel = 1;
 954
 955			/* Select the PLL for the PHY
 956			 * DP PHY should be clocked from external src if there is
 957			 * one.
 958			 */
 959			/* On DCE4, if there is an external clock, it generates the DP ref clock */
 960			if (is_dp && adev->clock.dp_extclk)
 961				args.v3.acConfig.ucRefClkSource = 2; /* external src */
 962			else
 963				args.v3.acConfig.ucRefClkSource = pll_id;
 964
 965			switch (amdgpu_encoder->encoder_id) {
 966			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
 967				args.v3.acConfig.ucTransmitterSel = 0;
 968				break;
 969			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
 970				args.v3.acConfig.ucTransmitterSel = 1;
 971				break;
 972			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
 973				args.v3.acConfig.ucTransmitterSel = 2;
 974				break;
 975			}
 976
 977			if (is_dp)
 978				args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
 979			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
 980				if (dig->coherent_mode)
 981					args.v3.acConfig.fCoherentMode = 1;
 982				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 983					args.v3.acConfig.fDualLinkConnector = 1;
 984			}
 985			break;
 986		case 4:
 987			args.v4.ucAction = action;
 988			if (action == ATOM_TRANSMITTER_ACTION_INIT) {
 989				args.v4.usInitInfo = cpu_to_le16(connector_object_id);
 990			} else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
 991				args.v4.asMode.ucLaneSel = lane_num;
 992				args.v4.asMode.ucLaneSet = lane_set;
 993			} else {
 994				if (is_dp)
 995					args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
 996				else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
 997					args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
 998				else
 999					args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1000			}
1001
1002			if (is_dp)
1003				args.v4.ucLaneNum = dp_lane_count;
1004			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1005				args.v4.ucLaneNum = 8;
1006			else
1007				args.v4.ucLaneNum = 4;
1008
1009			if (dig->linkb)
1010				args.v4.acConfig.ucLinkSel = 1;
1011			if (dig_encoder & 1)
1012				args.v4.acConfig.ucEncoderSel = 1;
1013
 1014			/* Select the PLL for the PHY.
 1015			 * The DP PHY should be clocked from an external source if
 1016			 * one is available.
 1017			 */
1018			/* On DCE5 DCPLL usually generates the DP ref clock */
1019			if (is_dp) {
1020				if (adev->clock.dp_extclk)
1021					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
1022				else
1023					args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
1024			} else
1025				args.v4.acConfig.ucRefClkSource = pll_id;
1026
1027			switch (amdgpu_encoder->encoder_id) {
1028			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1029				args.v4.acConfig.ucTransmitterSel = 0;
1030				break;
1031			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1032				args.v4.acConfig.ucTransmitterSel = 1;
1033				break;
1034			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1035				args.v4.acConfig.ucTransmitterSel = 2;
1036				break;
1037			}
1038
1039			if (is_dp)
1040				args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
1041			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1042				if (dig->coherent_mode)
1043					args.v4.acConfig.fCoherentMode = 1;
1044				if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1045					args.v4.acConfig.fDualLinkConnector = 1;
1046			}
1047			break;
1048		case 5:
1049			args.v5.ucAction = action;
1050			if (is_dp)
1051				args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
1052			else
1053				args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1054
1055			switch (amdgpu_encoder->encoder_id) {
1056			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1057				if (dig->linkb)
1058					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1059				else
1060					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1061				break;
1062			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1063				if (dig->linkb)
1064					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1065				else
1066					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1067				break;
1068			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1069				if (dig->linkb)
1070					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1071				else
1072					args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1073				break;
1074			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1075				args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1076				break;
1077			}
1078			if (is_dp)
1079				args.v5.ucLaneNum = dp_lane_count;
1080			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1081				args.v5.ucLaneNum = 8;
1082			else
1083				args.v5.ucLaneNum = 4;
1084			args.v5.ucConnObjId = connector_object_id;
1085			args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1086
1087			if (is_dp && adev->clock.dp_extclk)
1088				args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
1089			else
1090				args.v5.asConfig.ucPhyClkSrcId = pll_id;
1091
1092			if (is_dp)
1093				args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
1094			else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1095				if (dig->coherent_mode)
1096					args.v5.asConfig.ucCoherentMode = 1;
1097			}
1098			if (hpd_id == AMDGPU_HPD_NONE)
1099				args.v5.asConfig.ucHPDSel = 0;
1100			else
1101				args.v5.asConfig.ucHPDSel = hpd_id + 1;
1102			args.v5.ucDigEncoderSel = 1 << dig_encoder;
1103			args.v5.ucDPLaneSet = lane_set;
1104			break;
1105		case 6:
1106			args.v6.ucAction = action;
1107			if (is_dp)
1108				args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
1109			else
1110				args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
1111
1112			switch (amdgpu_encoder->encoder_id) {
1113			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1114				if (dig->linkb)
1115					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1116				else
1117					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1118				break;
1119			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1120				if (dig->linkb)
1121					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1122				else
1123					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1124				break;
1125			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1126				if (dig->linkb)
1127					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1128				else
1129					args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1130				break;
1131			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1132				args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1133				break;
1134			}
1135			if (is_dp)
1136				args.v6.ucLaneNum = dp_lane_count;
1137			else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1138				args.v6.ucLaneNum = 8;
1139			else
1140				args.v6.ucLaneNum = 4;
1141			args.v6.ucConnObjId = connector_object_id;
1142			if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
1143				args.v6.ucDPLaneSet = lane_set;
1144			else
1145				args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1146
1147			if (hpd_id == AMDGPU_HPD_NONE)
1148				args.v6.ucHPDSel = 0;
1149			else
1150				args.v6.ucHPDSel = hpd_id + 1;
1151			args.v6.ucDigEncoderSel = 1 << dig_encoder;
1152			break;
1153		default:
1154			DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1155			break;
1156		}
1157		break;
1158	default:
1159		DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1160		break;
1161	}
1162
1163	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1164}
1165
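/* Toggle eDP panel power through the UNIPHYTransmitterControl table.
 * Only eDP connectors and the POWER_ON/POWER_OFF actions are handled;
 * after powering on, HPD is polled for up to 300 ms to confirm the
 * panel actually came up, and false is returned if it never did.
 */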
1166bool
1167amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
1168				     int action)
1169{
1170	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1171	struct drm_device *dev = amdgpu_connector->base.dev;
1172	struct amdgpu_device *adev = drm_to_adev(dev);
1173	union dig_transmitter_control args;
1174	int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
1175	uint8_t frev, crev;
1176
1177	if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
1178		goto done;
1179
1180	if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
1181	    (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
1182		goto done;
1183
1184	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1185		goto done;
1186
1187	memset(&args, 0, sizeof(args));
1188
1189	args.v1.ucAction = action;
1190
1191	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1192
1193	/* wait for the panel to power up */
1194	if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
1195		int i;
1196
1197		for (i = 0; i < 300; i++) {
1198			if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
1199				return true;
1200			mdelay(1);
1201		}
1202		return false;
1203	}
1204done:
1205	return true;
1206}
1207
1208union external_encoder_control {
1209	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
1210	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
1211};
1212
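/* Program an external encoder (e.g. a DP bridge) through the
 * ExternalEncoderControl table.  The DP clock, lane count and
 * connector object id are taken from the attached connector when
 * one is present.
 */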
1213static void
1214amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
1215					struct drm_encoder *ext_encoder,
1216					int action)
1217{
1218	struct drm_device *dev = encoder->dev;
1219	struct amdgpu_device *adev = drm_to_adev(dev);
1220	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1221	struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
1222	union external_encoder_control args;
1223	struct drm_connector *connector;
1224	int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
1225	u8 frev, crev;
1226	int dp_clock = 0;
1227	int dp_lane_count = 0;
1228	int connector_object_id = 0;
1229	u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1230
1231	if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1232		connector = amdgpu_get_connector_for_encoder_init(encoder);
1233	else
1234		connector = amdgpu_get_connector_for_encoder(encoder);
1235
1236	if (connector) {
1237		struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1238		struct amdgpu_connector_atom_dig *dig_connector =
1239			amdgpu_connector->con_priv;
1240
1241		dp_clock = dig_connector->dp_clock;
1242		dp_lane_count = dig_connector->dp_lane_count;
1243		connector_object_id =
1244			(amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
1245	}
1246
1247	memset(&args, 0, sizeof(args));
1248
1249	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1250		return;
1251
1252	switch (frev) {
1253	case 1:
1254		/* no params on frev 1 */
1255		break;
1256	case 2:
1257		switch (crev) {
1258		case 1:
1259		case 2:
1260			args.v1.sDigEncoder.ucAction = action;
1261			args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1262			args.v1.sDigEncoder.ucEncoderMode =
1263				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1264
1265			if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
1266				if (dp_clock == 270000)
1267					args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
1268				args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
1269			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1270				args.v1.sDigEncoder.ucLaneNum = 8;
1271			else
1272				args.v1.sDigEncoder.ucLaneNum = 4;
1273			break;
1274		case 3:
1275			args.v3.sExtEncoder.ucAction = action;
1276			if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1277				args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
1278			else
1279				args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1280			args.v3.sExtEncoder.ucEncoderMode =
1281				amdgpu_atombios_encoder_get_encoder_mode(encoder);
1282
1283			if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
1284				if (dp_clock == 270000)
1285					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
1286				else if (dp_clock == 540000)
1287					args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
1288				args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
1289			} else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1290				args.v3.sExtEncoder.ucLaneNum = 8;
1291			else
1292				args.v3.sExtEncoder.ucLaneNum = 4;
1293			switch (ext_enum) {
1294			case GRAPH_OBJECT_ENUM_ID1:
1295				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
1296				break;
1297			case GRAPH_OBJECT_ENUM_ID2:
1298				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
1299				break;
1300			case GRAPH_OBJECT_ENUM_ID3:
1301				args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
1302				break;
1303			}
1304			args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
1305			break;
1306		default:
1307			DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1308			return;
1309		}
1310		break;
1311	default:
1312		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1313		return;
1314	}
1315	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1316}
1317
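/* Enable or disable the full DIG path: DIG encoder setup, panel mode
 * selection, eDP panel power, transmitter enable/disable, DP link
 * training and (for LCDs) backlight handling.  Called from the DPMS
 * handler below.
 */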
1318static void
1319amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
1320{
1321	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1322	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1323	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
1324	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1325	struct amdgpu_connector *amdgpu_connector = NULL;
1326	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;
1327
1328	if (connector) {
1329		amdgpu_connector = to_amdgpu_connector(connector);
1330		amdgpu_dig_connector = amdgpu_connector->con_priv;
1331	}
1332
1333	if (action == ATOM_ENABLE) {
1334		if (!connector)
1335			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
1336		else
1337			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);
1338
1339		/* setup and enable the encoder */
1340		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
1341		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1342						   ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
1343						   dig->panel_mode);
1344		if (ext_encoder)
1345			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1346								EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
1347		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1348		    connector) {
1349			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1350				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1351								     ATOM_TRANSMITTER_ACTION_POWER_ON);
1352				amdgpu_dig_connector->edp_on = true;
1353			}
1354		}
1355		/* enable the transmitter */
1356		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1357						       ATOM_TRANSMITTER_ACTION_ENABLE,
1358						       0, 0);
1359		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1360		    connector) {
1361			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
1362			amdgpu_atombios_dp_link_train(encoder, connector);
1363			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
1364		}
1365		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1366			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
1367		if (ext_encoder)
1368			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
1369	} else {
1370		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1371		    connector)
1372			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
1373							   ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
1374		if (ext_encoder)
1375			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
1376		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
1377			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1378							       ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
1379
1380		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1381		    connector)
1382			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
1383		/* disable the transmitter */
1384		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
1385						       ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
1386		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
1387		    connector) {
1388			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
1389				amdgpu_atombios_encoder_set_edp_panel_power(connector,
1390								     ATOM_TRANSMITTER_ACTION_POWER_OFF);
1391				amdgpu_dig_connector->edp_on = false;
1392			}
1393		}
1394	}
1395}
1396
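/* DPMS entry point: map the DRM power states onto ATOM enable/disable
 * actions for the DIG, DVO and DAC encoders.
 */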
1397void
1398amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
1399{
1400	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1401
1402	DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
1403		  amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
1404		  amdgpu_encoder->active_device);
1405	switch (amdgpu_encoder->encoder_id) {
1406	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1407	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1408	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1409	case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1410		switch (mode) {
1411		case DRM_MODE_DPMS_ON:
1412			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
1413			break;
1414		case DRM_MODE_DPMS_STANDBY:
1415		case DRM_MODE_DPMS_SUSPEND:
1416		case DRM_MODE_DPMS_OFF:
1417			amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
1418			break;
1419		}
1420		break;
1421	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1422		switch (mode) {
1423		case DRM_MODE_DPMS_ON:
1424			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
1425			break;
1426		case DRM_MODE_DPMS_STANDBY:
1427		case DRM_MODE_DPMS_SUSPEND:
1428		case DRM_MODE_DPMS_OFF:
1429			amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
1430			break;
1431		}
1432		break;
1433	case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1434		switch (mode) {
1435		case DRM_MODE_DPMS_ON:
1436			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
1437			break;
1438		case DRM_MODE_DPMS_STANDBY:
1439		case DRM_MODE_DPMS_SUSPEND:
1440		case DRM_MODE_DPMS_OFF:
1441			amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
1442			break;
1443		}
1444		break;
1445	default:
1446		return;
1447	}
1448}
1449
1450union crtc_source_param {
1451	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
1452	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
1453	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
1454};
1455
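/* Tell the VBIOS which CRTC feeds this encoder via the
 * SelectCRTC_Source table; the parameter layout used depends on the
 * table revision reported by the VBIOS.
 */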
1456void
1457amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1458{
1459	struct drm_device *dev = encoder->dev;
1460	struct amdgpu_device *adev = drm_to_adev(dev);
1461	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1462	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1463	union crtc_source_param args;
1464	int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
1465	uint8_t frev, crev;
1466	struct amdgpu_encoder_atom_dig *dig;
1467
1468	memset(&args, 0, sizeof(args));
1469
1470	if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1471		return;
1472
1473	switch (frev) {
1474	case 1:
1475		switch (crev) {
1476		case 1:
1477		default:
1478			args.v1.ucCRTC = amdgpu_crtc->crtc_id;
1479			switch (amdgpu_encoder->encoder_id) {
1480			case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
1481			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
1482				args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
1483				break;
1484			case ENCODER_OBJECT_ID_INTERNAL_LVDS:
1485			case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
1486				if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
1487					args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
1488				else
1489					args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
1490				break;
1491			case ENCODER_OBJECT_ID_INTERNAL_DVO1:
1492			case ENCODER_OBJECT_ID_INTERNAL_DDI:
1493			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1494				args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
1495				break;
1496			case ENCODER_OBJECT_ID_INTERNAL_DAC1:
1497			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1498				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1499					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1500				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1501					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1502				else
1503					args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
1504				break;
1505			case ENCODER_OBJECT_ID_INTERNAL_DAC2:
1506			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1507				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1508					args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1509				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1510					args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1511				else
1512					args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
1513				break;
1514			}
1515			break;
1516		case 2:
1517			args.v2.ucCRTC = amdgpu_crtc->crtc_id;
1518			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1519				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1520
1521				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1522					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1523				else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1524					args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1525				else
1526					args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1527			} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1528				args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1529			} else {
1530				args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1531			}
1532			switch (amdgpu_encoder->encoder_id) {
1533			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1534			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1535			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1536			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1537			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1538				dig = amdgpu_encoder->enc_priv;
1539				switch (dig->dig_encoder) {
1540				case 0:
1541					args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1542					break;
1543				case 1:
1544					args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1545					break;
1546				case 2:
1547					args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1548					break;
1549				case 3:
1550					args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1551					break;
1552				case 4:
1553					args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1554					break;
1555				case 5:
1556					args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1557					break;
1558				case 6:
1559					args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1560					break;
1561				}
1562				break;
1563			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1564				args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1565				break;
1566			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1567				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1568					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1569				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1570					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1571				else
1572					args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1573				break;
1574			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1575				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1576					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1577				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1578					args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1579				else
1580					args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1581				break;
1582			}
1583			break;
1584		case 3:
1585			args.v3.ucCRTC = amdgpu_crtc->crtc_id;
1586			if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1587				struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1588
1589				if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
 1590						args.v3.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
 1591					else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
 1592						args.v3.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
 1593					else
 1594						args.v3.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
 1595				} else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
 1596					args.v3.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
 1597				} else {
 1598					args.v3.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1599			}
1600			args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
1601			switch (amdgpu_encoder->encoder_id) {
1602			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1603			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1604			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1605			case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1606			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1607				dig = amdgpu_encoder->enc_priv;
1608				switch (dig->dig_encoder) {
1609				case 0:
1610					args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1611					break;
1612				case 1:
1613					args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1614					break;
1615				case 2:
1616					args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1617					break;
1618				case 3:
1619					args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1620					break;
1621				case 4:
1622					args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1623					break;
1624				case 5:
1625					args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1626					break;
1627				case 6:
1628					args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1629					break;
1630				}
1631				break;
1632			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1633				args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1634				break;
1635			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1636				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1637					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1638				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1639					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1640				else
1641					args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1642				break;
1643			case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1644				if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1645					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1646				else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1647					args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1648				else
1649					args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1650				break;
1651			}
1652			break;
1653		}
1654		break;
1655	default:
1656		DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1657		return;
1658	}
1659
1660	amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1661}
1662
1663/* This only needs to be called once at startup */
1664void
1665amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
1666{
1667	struct drm_device *dev = adev_to_drm(adev);
1668	struct drm_encoder *encoder;
1669
1670	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1671		struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1672		struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1673
1674		switch (amdgpu_encoder->encoder_id) {
1675		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1676		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1677		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1678		case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1679			amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
1680							       0, 0);
1681			break;
1682		}
1683
1684		if (ext_encoder)
1685			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1686								EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
1687	}
1688}
1689
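/* Trigger ATOM DAC load detection for CRT/TV/CV devices.  Returns
 * false if the encoder has no analog outputs or the command table
 * cannot be parsed; the result is read back from the BIOS scratch
 * registers by the detect functions below.
 */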
1690static bool
1691amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
1692				 struct drm_connector *connector)
1693{
1694	struct drm_device *dev = encoder->dev;
1695	struct amdgpu_device *adev = drm_to_adev(dev);
1696	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1697	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1698
1699	if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
1700				       ATOM_DEVICE_CV_SUPPORT |
1701				       ATOM_DEVICE_CRT_SUPPORT)) {
1702		DAC_LOAD_DETECTION_PS_ALLOCATION args;
1703		int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
1704		uint8_t frev, crev;
1705
1706		memset(&args, 0, sizeof(args));
1707
1708		if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1709			return false;
1710
1711		args.sDacload.ucMisc = 0;
1712
1713		if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
1714		    (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
1715			args.sDacload.ucDacType = ATOM_DAC_A;
1716		else
1717			args.sDacload.ucDacType = ATOM_DAC_B;
1718
1719		if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
1720			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
1721		else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
1722			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
1723		else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1724			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
1725			if (crev >= 3)
1726				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1727		} else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1728			args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
1729			if (crev >= 3)
1730				args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1731		}
1732
1733		amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1734
1735		return true;
 1736	}
 1737	return false;
1738}
1739
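/* Report analog connector status by running DAC load detection and
 * then decoding the result from the BIOS_SCRATCH_0 register.
 */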
1740enum drm_connector_status
1741amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
1742			    struct drm_connector *connector)
1743{
1744	struct drm_device *dev = encoder->dev;
1745	struct amdgpu_device *adev = drm_to_adev(dev);
1746	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1747	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1748	uint32_t bios_0_scratch;
1749
1750	if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
 1751		DRM_DEBUG_KMS("detect returned false\n");
1752		return connector_status_unknown;
1753	}
1754
1755	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1756
1757	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1758	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1759		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1760			return connector_status_connected;
1761	}
1762	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1763		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1764			return connector_status_connected;
1765	}
1766	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1767		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1768			return connector_status_connected;
1769	}
1770	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1771		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1772			return connector_status_connected; /* CTV */
1773		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1774			return connector_status_connected; /* STV */
1775	}
1776	return connector_status_disconnected;
1777}
1778
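/* Analog load detection through an external DP bridge encoder: ask the
 * bridge to perform DAC load detection, then decode BIOS_SCRATCH_0 the
 * same way as the plain DAC case above.
 */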
1779enum drm_connector_status
1780amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
1781			    struct drm_connector *connector)
1782{
1783	struct drm_device *dev = encoder->dev;
1784	struct amdgpu_device *adev = drm_to_adev(dev);
1785	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1786	struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1787	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1788	u32 bios_0_scratch;
1789
1790	if (!ext_encoder)
1791		return connector_status_unknown;
1792
1793	if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
1794		return connector_status_unknown;
1795
1796	/* load detect on the dp bridge */
1797	amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1798						EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
1799
1800	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1801
1802	DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1803	if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1804		if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1805			return connector_status_connected;
1806	}
1807	if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1808		if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1809			return connector_status_connected;
1810	}
1811	if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1812		if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1813			return connector_status_connected;
1814	}
1815	if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1816		if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1817			return connector_status_connected; /* CTV */
1818		else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1819			return connector_status_connected; /* STV */
1820	}
1821	return connector_status_disconnected;
1822}
1823
1824void
1825amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
1826{
1827	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1828
1829	if (ext_encoder)
1830		/* ddc_setup on the dp bridge */
1831		amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1832							EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
1833
1834}
1835
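/* Update the connected, active and accelerator-request bits for each
 * display device in the BIOS_SCRATCH_0/3/6 registers to reflect the
 * current connection state.
 */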
1836void
1837amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
1838				       struct drm_encoder *encoder,
1839				       bool connected)
1840{
1841	struct drm_device *dev = connector->dev;
1842	struct amdgpu_device *adev = drm_to_adev(dev);
1843	struct amdgpu_connector *amdgpu_connector =
1844	    to_amdgpu_connector(connector);
1845	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1846	uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
1847
1848	bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1849	bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
1850	bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
1851
1852	if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
1853	    (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
1854		if (connected) {
1855			DRM_DEBUG_KMS("LCD1 connected\n");
1856			bios_0_scratch |= ATOM_S0_LCD1;
1857			bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
1858			bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
1859		} else {
1860			DRM_DEBUG_KMS("LCD1 disconnected\n");
1861			bios_0_scratch &= ~ATOM_S0_LCD1;
1862			bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
1863			bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
1864		}
1865	}
1866	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
1867	    (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
1868		if (connected) {
1869			DRM_DEBUG_KMS("CRT1 connected\n");
1870			bios_0_scratch |= ATOM_S0_CRT1_COLOR;
1871			bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
1872			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
1873		} else {
1874			DRM_DEBUG_KMS("CRT1 disconnected\n");
1875			bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
1876			bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
1877			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
1878		}
1879	}
1880	if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
1881	    (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
1882		if (connected) {
1883			DRM_DEBUG_KMS("CRT2 connected\n");
1884			bios_0_scratch |= ATOM_S0_CRT2_COLOR;
1885			bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
1886			bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
1887		} else {
1888			DRM_DEBUG_KMS("CRT2 disconnected\n");
1889			bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
1890			bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
1891			bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
1892		}
1893	}
1894	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
1895	    (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
1896		if (connected) {
1897			DRM_DEBUG_KMS("DFP1 connected\n");
1898			bios_0_scratch |= ATOM_S0_DFP1;
1899			bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
1900			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
1901		} else {
1902			DRM_DEBUG_KMS("DFP1 disconnected\n");
1903			bios_0_scratch &= ~ATOM_S0_DFP1;
1904			bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
1905			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
1906		}
1907	}
1908	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
1909	    (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
1910		if (connected) {
1911			DRM_DEBUG_KMS("DFP2 connected\n");
1912			bios_0_scratch |= ATOM_S0_DFP2;
1913			bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
1914			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
1915		} else {
1916			DRM_DEBUG_KMS("DFP2 disconnected\n");
1917			bios_0_scratch &= ~ATOM_S0_DFP2;
1918			bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
1919			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
1920		}
1921	}
1922	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
1923	    (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
1924		if (connected) {
1925			DRM_DEBUG_KMS("DFP3 connected\n");
1926			bios_0_scratch |= ATOM_S0_DFP3;
1927			bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
1928			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
1929		} else {
1930			DRM_DEBUG_KMS("DFP3 disconnected\n");
1931			bios_0_scratch &= ~ATOM_S0_DFP3;
1932			bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
1933			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
1934		}
1935	}
1936	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
1937	    (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
1938		if (connected) {
1939			DRM_DEBUG_KMS("DFP4 connected\n");
1940			bios_0_scratch |= ATOM_S0_DFP4;
1941			bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
1942			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
1943		} else {
1944			DRM_DEBUG_KMS("DFP4 disconnected\n");
1945			bios_0_scratch &= ~ATOM_S0_DFP4;
1946			bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
1947			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
1948		}
1949	}
1950	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
1951	    (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
1952		if (connected) {
1953			DRM_DEBUG_KMS("DFP5 connected\n");
1954			bios_0_scratch |= ATOM_S0_DFP5;
1955			bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
1956			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
1957		} else {
1958			DRM_DEBUG_KMS("DFP5 disconnected\n");
1959			bios_0_scratch &= ~ATOM_S0_DFP5;
1960			bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
1961			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
1962		}
1963	}
1964	if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
1965	    (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
1966		if (connected) {
1967			DRM_DEBUG_KMS("DFP6 connected\n");
1968			bios_0_scratch |= ATOM_S0_DFP6;
1969			bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
1970			bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
1971		} else {
1972			DRM_DEBUG_KMS("DFP6 disconnected\n");
1973			bios_0_scratch &= ~ATOM_S0_DFP6;
1974			bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
1975			bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
1976		}
1977	}
1978
1979	WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
1980	WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
1981	WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
1982}
1983
1984union lvds_info {
1985	struct _ATOM_LVDS_INFO info;
1986	struct _ATOM_LVDS_INFO_V12 info_12;
1987};
1988
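/* Build the LCD native mode and panel parameters from the VBIOS
 * LVDS_Info data table, including any mode patch records appended to
 * it (fake EDID, panel resolution overrides).
 */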
1989struct amdgpu_encoder_atom_dig *
1990amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
1991{
1992	struct drm_device *dev = encoder->base.dev;
1993	struct amdgpu_device *adev = drm_to_adev(dev);
1994	struct amdgpu_mode_info *mode_info = &adev->mode_info;
1995	int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
1996	uint16_t data_offset, misc;
1997	union lvds_info *lvds_info;
1998	uint8_t frev, crev;
1999	struct amdgpu_encoder_atom_dig *lvds = NULL;
2000	int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2001
2002	if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
2003				   &frev, &crev, &data_offset)) {
2004		lvds_info =
2005			(union lvds_info *)(mode_info->atom_context->bios + data_offset);
2006		lvds =
2007		    kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2008
2009		if (!lvds)
2010			return NULL;
2011
2012		lvds->native_mode.clock =
2013		    le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
2014		lvds->native_mode.hdisplay =
2015		    le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
2016		lvds->native_mode.vdisplay =
2017		    le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
2018		lvds->native_mode.htotal = lvds->native_mode.hdisplay +
2019			le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
2020		lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
2021			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
2022		lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
2023			le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
2024		lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
2025			le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
2026		lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
2027			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
2028		lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
2029			le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
2030		lvds->panel_pwr_delay =
2031		    le16_to_cpu(lvds_info->info.usOffDelayInMs);
2032		lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
2033
2034		misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
2035		if (misc & ATOM_VSYNC_POLARITY)
2036			lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
2037		if (misc & ATOM_HSYNC_POLARITY)
2038			lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
2039		if (misc & ATOM_COMPOSITESYNC)
2040			lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
2041		if (misc & ATOM_INTERLACE)
2042			lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
2043		if (misc & ATOM_DOUBLE_CLOCK_MODE)
2044			lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
2045
2046		lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
2047		lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
2048
2049		/* set crtc values */
2050		drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
2051
2052		lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
2053
2054		encoder->native_mode = lvds->native_mode;
2055
2056		if (encoder_enum == 2)
2057			lvds->linkb = true;
2058		else
2059			lvds->linkb = false;
2060
2061		/* parse the lcd record table */
2062		if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
2063			ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
2064			ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
2065			bool bad_record = false;
2066			u8 *record;
2067
2068			if ((frev == 1) && (crev < 2))
2069				/* absolute */
2070				record = (u8 *)(mode_info->atom_context->bios +
2071						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2072			else
2073				/* relative */
2074				record = (u8 *)(mode_info->atom_context->bios +
2075						data_offset +
2076						le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2077			while (*record != ATOM_RECORD_END_TYPE) {
2078				switch (*record) {
2079				case LCD_MODE_PATCH_RECORD_MODE_TYPE:
2080					record += sizeof(ATOM_PATCH_RECORD_MODE);
2081					break;
2082				case LCD_RTS_RECORD_TYPE:
2083					record += sizeof(ATOM_LCD_RTS_RECORD);
2084					break;
2085				case LCD_CAP_RECORD_TYPE:
2086					record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
2087					break;
2088				case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
2089					fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
2090					if (fake_edid_record->ucFakeEDIDLength) {
2091						struct edid *edid;
2092						int edid_size =
2093							max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
2094						edid = kmalloc(edid_size, GFP_KERNEL);
2095						if (edid) {
2096							memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
2097							       fake_edid_record->ucFakeEDIDLength);
2098
2099							if (drm_edid_is_valid(edid)) {
2100								adev->mode_info.bios_hardcoded_edid = edid;
2101								adev->mode_info.bios_hardcoded_edid_size = edid_size;
2102							} else
2103								kfree(edid);
2104						}
2105					}
2106					record += fake_edid_record->ucFakeEDIDLength ?
2107						fake_edid_record->ucFakeEDIDLength + 2 :
2108						sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
2109					break;
2110				case LCD_PANEL_RESOLUTION_RECORD_TYPE:
2111					panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
2112					lvds->native_mode.width_mm = panel_res_record->usHSize;
2113					lvds->native_mode.height_mm = panel_res_record->usVSize;
2114					record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
2115					break;
2116				default:
2117					DRM_ERROR("Bad LCD record %d\n", *record);
2118					bad_record = true;
2119					break;
2120				}
2121				if (bad_record)
2122					break;
2123			}
2124		}
2125	}
2126	return lvds;
2127}
2128
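/* Allocate and initialize the DIG-specific encoder state: coherent
 * mode is enabled by default and link B is selected based on the
 * encoder enumeration id.
 */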
2129struct amdgpu_encoder_atom_dig *
2130amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
2131{
2132	int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2133	struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2134
2135	if (!dig)
2136		return NULL;
2137
2138	/* coherent mode by default */
2139	dig->coherent_mode = true;
2140	dig->dig_encoder = -1;
2141
2142	if (encoder_enum == 2)
2143		dig->linkb = true;
2144	else
2145		dig->linkb = false;
2146
2147	return dig;
2148}
2149