/*
 * Copyright 2010 Advanced Micro Devices, Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Alex Deucher
 */
#include <linux/firmware.h>
#include <linux/platform_device.h>
#include <linux/slab.h>
#include "drmP.h"
#include "radeon.h"
#include "radeon_asic.h"
#include "radeon_drm.h"
#include "evergreend.h"
#include "atom.h"
#include "avivod.h"
#include "evergreen_reg.h"
#include "evergreen_blit_shaders.h"

#define EVERGREEN_PFP_UCODE_SIZE 1120
#define EVERGREEN_PM4_UCODE_SIZE 1376

static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
static void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 

void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
{
	u16 ctl, v;
	int cap, err;

	cap = pci_pcie_cap(rdev->pdev);
	if (!cap)
		return;

	err = pci_read_config_word(rdev->pdev, cap + PCI_EXP_DEVCTL, &ctl);
	if (err)
		return;

	v = (ctl & PCI_EXP_DEVCTL_READRQ) >> 12;
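	/* the READRQ field (bits 14:12) encodes the maximum read request
	 * size as 128 << v bytes; encodings 6 and 7 are reserved by the
	 * PCIe spec, and the fix below programs encoding 2 (512 bytes)
	 */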

	/* if the BIOS or OS sets MAX_READ_REQUEST_SIZE to an invalid value,
	 * fix it to avoid hangs or performance issues
	 */
	if ((v == 0) || (v == 6) || (v == 7)) {
		ctl &= ~PCI_EXP_DEVCTL_READRQ;
		ctl |= (2 << 12);
		pci_write_config_word(rdev->pdev, cap + PCI_EXP_DEVCTL, ctl);
	}
}

void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
{
	/* enable the pflip int */
	radeon_irq_kms_pflip_irq_get(rdev, crtc);
}

void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
{
	/* disable the pflip int */
	radeon_irq_kms_pflip_irq_put(rdev, crtc);
}

u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);

	/* Lock the graphics update lock */
	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* update the scanout addresses */
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
	       upper_32_bits(crtc_base));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
	       (u32)crtc_base);

	/* Wait for update_pending to go high. */
	while (!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING));
	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");

	/* Unlock the lock, so double-buffering can take place inside vblank */
	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);

	/* Return current update_pending status: */
	return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
}

/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
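			/* sign-extend the 9-bit two's-complement reading */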
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;
		} else
			actual_temp = temp & 0xff;

		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}

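/* get temperature in millidegrees */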
int sumo_get_temp(struct radeon_device *rdev)
{
	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
	int actual_temp = temp - 49;

	return actual_temp * 1000;
}

void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if (voltage->type == VOLTAGE_SW) {
		/* 0xff01 is a flag rather than an actual voltage */
		if (voltage->voltage == 0xff01)
			return;
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}
		/* 0xff01 is a flag rather than an actual voltage */
		if (voltage->vddci == 0xff01)
			return;
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}

void evergreen_pm_prepare(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* disable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}

void evergreen_pm_finish(struct radeon_device *rdev)
{
	struct drm_device *ddev = rdev->ddev;
	struct drm_crtc *crtc;
	struct radeon_crtc *radeon_crtc;
	u32 tmp;

	/* enable any active CRTCs */
	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
		radeon_crtc = to_radeon_crtc(crtc);
		if (radeon_crtc->enabled) {
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
		}
	}
}

bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
{
	bool connected = false;

	switch (hpd) {
	case RADEON_HPD_1:
		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_2:
		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_3:
		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_4:
		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_5:
		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	case RADEON_HPD_6:
		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
			connected = true;
		break;
	default:
		break;
	}

	return connected;
}

void evergreen_hpd_set_polarity(struct radeon_device *rdev,
				enum radeon_hpd_id hpd)
{
	u32 tmp;
	bool connected = evergreen_hpd_sense(rdev, hpd);

	switch (hpd) {
	case RADEON_HPD_1:
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_2:
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_3:
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_4:
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_5:
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
		break;
	case RADEON_HPD_6:
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		if (connected)
			tmp &= ~DC_HPDx_INT_POLARITY;
		else
			tmp |= DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
		break;
	default:
		break;
	}
}

void evergreen_hpd_init(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;
	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, tmp);
			rdev->irq.hpd[0] = true;
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, tmp);
			rdev->irq.hpd[1] = true;
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, tmp);
			rdev->irq.hpd[2] = true;
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, tmp);
			rdev->irq.hpd[3] = true;
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, tmp);
			rdev->irq.hpd[4] = true;
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, tmp);
			rdev->irq.hpd[5] = true;
			break;
		default:
			break;
		}
	}
	if (rdev->irq.installed)
		evergreen_irq_set(rdev);
}

void evergreen_hpd_fini(struct radeon_device *rdev)
{
	struct drm_device *dev = rdev->ddev;
	struct drm_connector *connector;

	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
		switch (radeon_connector->hpd.hpd) {
		case RADEON_HPD_1:
			WREG32(DC_HPD1_CONTROL, 0);
			rdev->irq.hpd[0] = false;
			break;
		case RADEON_HPD_2:
			WREG32(DC_HPD2_CONTROL, 0);
			rdev->irq.hpd[1] = false;
			break;
		case RADEON_HPD_3:
			WREG32(DC_HPD3_CONTROL, 0);
			rdev->irq.hpd[2] = false;
			break;
		case RADEON_HPD_4:
			WREG32(DC_HPD4_CONTROL, 0);
			rdev->irq.hpd[3] = false;
			break;
		case RADEON_HPD_5:
			WREG32(DC_HPD5_CONTROL, 0);
			rdev->irq.hpd[4] = false;
			break;
		case RADEON_HPD_6:
			WREG32(DC_HPD6_CONTROL, 0);
			rdev->irq.hpd[5] = false;
			break;
		default:
			break;
		}
	}
}

/* watermark setup */

static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode)
			tmp = 0; /* 1/2 */
		else
			tmp = 2; /* whole */
	} else
		tmp = 0;

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	if (radeon_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}

static u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
{
	u32 tmp = RREG32(MC_SHARED_CHMAP);

	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		return 1;
	case 1:
		return 2;
	case 2:
		return 4;
	case 3:
		return 8;
	}
}

struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;   /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};

static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate DRAM Bandwidth and the part allocated to display. */
	fixed20_12 dram_efficiency; /* 0.7 */
	fixed20_12 yclk, dram_channels, bandwidth;
	fixed20_12 a;

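	/* bandwidth = yclk (MHz) * 4 bytes per channel * channel count *
	 * 0.7 DRAM efficiency; the dfixed_* helpers do the arithmetic in
	 * 20.12 fixed point
	 */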
	a.full = dfixed_const(1000);
	yclk.full = dfixed_const(wm->yclk);
	yclk.full = dfixed_div(yclk, a);
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
	a.full = dfixed_const(10);
	dram_efficiency.full = dfixed_const(7);
	dram_efficiency.full = dfixed_div(dram_efficiency, a);
	bandwidth.full = dfixed_mul(dram_channels, yclk);
	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
{
	/* Calculate DRAM Bandwidth and the part allocated to display. */
	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
	fixed20_12 yclk, dram_channels, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	yclk.full = dfixed_const(wm->yclk);
	yclk.full = dfixed_div(yclk, a);
	dram_channels.full = dfixed_const(wm->dram_channels * 4);
	a.full = dfixed_const(10);
	disp_dram_allocation.full = dfixed_const(3); /* XXX worst-case value 0.3 */
	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
	bandwidth.full = dfixed_mul(dram_channels, yclk);
	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate the display Data return Bandwidth */
	fixed20_12 return_efficiency; /* 0.8 */
	fixed20_12 sclk, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	sclk.full = dfixed_const(wm->sclk);
	sclk.full = dfixed_div(sclk, a);
	a.full = dfixed_const(10);
	return_efficiency.full = dfixed_const(8);
	return_efficiency.full = dfixed_div(return_efficiency, a);
	a.full = dfixed_const(32);
	bandwidth.full = dfixed_mul(a, sclk);
	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate the DMIF Request Bandwidth */
	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
	fixed20_12 disp_clk, bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	disp_clk.full = dfixed_const(wm->disp_clk);
	disp_clk.full = dfixed_div(disp_clk, a);
	a.full = dfixed_const(10);
	disp_clk_request_efficiency.full = dfixed_const(8);
	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
	a.full = dfixed_const(32);
	bandwidth.full = dfixed_mul(a, disp_clk);
	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate the available bandwidth; the display may use it temporarily but not on average. */
	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);

	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
}

static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
{
	/* Calculate the display mode Average Bandwidth
	 * DisplayMode should contain the source and destination dimensions,
	 * timing, etc.
	 */
	fixed20_12 bpp;
	fixed20_12 line_time;
	fixed20_12 src_width;
	fixed20_12 bandwidth;
	fixed20_12 a;

	a.full = dfixed_const(1000);
	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
	line_time.full = dfixed_div(line_time, a);
	bpp.full = dfixed_const(wm->bytes_per_pixel);
	src_width.full = dfixed_const(wm->src_width);
	bandwidth.full = dfixed_mul(src_width, bpp);
	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
	bandwidth.full = dfixed_div(bandwidth, line_time);

	return dfixed_trunc(bandwidth);
}

static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

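	/* the line buffer fills at the lesser of this head's share of the
	 * available bandwidth and the display clock byte rate
	 */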
	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);
}

static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
{
	if (evergreen_average_bandwidth(wm) <=
	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
		return true;
	else
		return false;
}

static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
{
	if (evergreen_average_bandwidth(wm) <=
	    (evergreen_available_bandwidth(wm) / wm->num_heads))
		return true;
	else
		return false;
}

static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
{
	u32 lb_partitions = wm->lb_size / wm->src_width;
	u32 line_time = wm->active_time + wm->blank_time;
	u32 latency_tolerant_lines;
	u32 latency_hiding;
	fixed20_12 a;

	a.full = dfixed_const(1);
	if (wm->vsc.full > a.full)
		latency_tolerant_lines = 1;
	else {
		if (lb_partitions <= (wm->vtaps + 1))
			latency_tolerant_lines = 1;
		else
			latency_tolerant_lines = 2;
	}

	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);

	if (evergreen_latency_watermark(wm) <= latency_hiding)
		return true;
	else
		return false;
}

static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;

		wm.yclk = rdev->pm.current_mclk * 10;
		wm.sclk = rdev->pm.current_sclk * 10;
		wm.disp_clk = mode->clock;
		wm.src_width = mode->crtc_hdisplay;
		wm.active_time = mode->crtc_hdisplay * pixel_period;
		wm.blank_time = line_time - wm.active_time;
		wm.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm.interlaced = true;
		wm.vsc = radeon_crtc->vsc;
		wm.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm.vtaps = 2;
		wm.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm.lb_size = lb_size;
		wm.dram_channels = evergreen_get_number_of_dram_channels(rdev);
		wm.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm), (u32)65535);
		/* set for low clocks */
		/* wm.yclk = low clk; wm.sclk = low clk */
		latency_watermark_b = min(evergreen_latency_watermark(&wm), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm) ||
		    !evergreen_check_latency_hiding(&wm) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

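		/* convert each watermark latency (ns) into a priority mark:
		 * the number of pixels fetched during that latency, scaled
		 * by the horizontal scale ratio and divided by 16
		 */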
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
}

void evergreen_bandwidth_update(struct radeon_device *rdev)
{
	struct drm_display_mode *mode0 = NULL;
	struct drm_display_mode *mode1 = NULL;
	u32 num_heads = 0, lb_size;
	int i;

	radeon_update_display_priority(rdev);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (rdev->mode_info.crtcs[i]->base.enabled)
			num_heads++;
	}
	for (i = 0; i < rdev->num_crtc; i += 2) {
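		/* crtcs are handled in pairs because each pair shares one
		 * line buffer; program each head's watermarks with its
		 * partner's mode in view
		 */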
		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
	}
}

int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	for (i = 0; i < rdev->usec_timeout; i++) {
		/* poll the MC busy bits in SRBM_STATUS */
		tmp = RREG32(SRBM_STATUS) & 0x1F00;
		if (!tmp)
			return 0;
		udelay(1);
	}
	return -1;
}

/*
 * GART
 */
void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	unsigned i;
	u32 tmp;

	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* poll the response type field */
		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
		if (tmp == 2) {
			printk(KERN_WARNING "[drm] evergreen flush TLB failed\n");
			return;
		}
		if (tmp) {
			return;
		}
		udelay(1);
	}
}

int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.table.vram.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
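	/* out-of-range accesses in context 0 are redirected to the dummy
	 * page instead of faulting indefinitely
	 */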
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	rdev->gart.ready = true;
	return 0;
}

void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	if (rdev->gart.table.vram.robj) {
		r = radeon_bo_reserve(rdev->gart.table.vram.robj, false);
		if (likely(r == 0)) {
			radeon_bo_kunmap(rdev->gart.table.vram.robj);
			radeon_bo_unpin(rdev->gart.table.vram.robj);
			radeon_bo_unreserve(rdev->gart.table.vram.robj);
		}
	}
}

void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}

void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}

void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
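	/* save the VGA and CRTC state, then blank all display controllers
	 * so memory-controller reprogramming cannot scan out garbage
	 */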
	save->vga_control[0] = RREG32(D1VGA_CONTROL);
	save->vga_control[1] = RREG32(D2VGA_CONTROL);
	save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
	save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
	save->crtc_control[0] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
	save->crtc_control[1] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
	if (rdev->num_crtc >= 4) {
		save->vga_control[2] = RREG32(EVERGREEN_D3VGA_CONTROL);
		save->vga_control[3] = RREG32(EVERGREEN_D4VGA_CONTROL);
		save->crtc_control[2] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
		save->crtc_control[3] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
	}
	if (rdev->num_crtc >= 6) {
		save->vga_control[4] = RREG32(EVERGREEN_D5VGA_CONTROL);
		save->vga_control[5] = RREG32(EVERGREEN_D6VGA_CONTROL);
		save->crtc_control[4] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
		save->crtc_control[5] = RREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
	}

	/* Stop all video */
	WREG32(VGA_RENDER_CONTROL, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	if (rdev->num_crtc >= 4) {
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	}
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	WREG32(D1VGA_CONTROL, 0);
	WREG32(D2VGA_CONTROL, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(EVERGREEN_D3VGA_CONTROL, 0);
		WREG32(EVERGREEN_D4VGA_CONTROL, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(EVERGREEN_D5VGA_CONTROL, 0);
		WREG32(EVERGREEN_D6VGA_CONTROL, 0);
	}
}

void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);
	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       (u32)rdev->mc.vram_start);

	if (rdev->num_crtc >= 4) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       (u32)rdev->mc.vram_start);

		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       (u32)rdev->mc.vram_start);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       (u32)rdev->mc.vram_start);

		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       (u32)rdev->mc.vram_start);
	}

	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
	WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	/* Unlock host access */
	WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
	mdelay(1);
	/* Restore video state */
	WREG32(D1VGA_CONTROL, save->vga_control[0]);
	WREG32(D2VGA_CONTROL, save->vga_control[1]);
	if (rdev->num_crtc >= 4) {
		WREG32(EVERGREEN_D3VGA_CONTROL, save->vga_control[2]);
		WREG32(EVERGREEN_D4VGA_CONTROL, save->vga_control[3]);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(EVERGREEN_D5VGA_CONTROL, save->vga_control[4]);
		WREG32(EVERGREEN_D6VGA_CONTROL, save->vga_control[5]);
	}
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 1);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 1);
	if (rdev->num_crtc >= 4) {
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 1);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 1);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 1);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 1);
	}
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, save->crtc_control[0]);
	WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, save->crtc_control[1]);
	if (rdev->num_crtc >= 4) {
		WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, save->crtc_control[2]);
		WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, save->crtc_control[3]);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, save->crtc_control[4]);
		WREG32(EVERGREEN_CRTC_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, save->crtc_control[5]);
	}
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(EVERGREEN_CRTC_UPDATE_LOCK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}
	WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
}

void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	/* Lockout access through VGA aperture */
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, 0);
	if (rdev->flags & RADEON_IS_IGP) {
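		/* IGPs carve the framebuffer out of system memory, so the
		 * fusion FB offset window is programmed to track the VRAM
		 * range as well
		 */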
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}

/*
 * CP.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	/* set to DX10/11 mode */
	radeon_ring_write(rdev, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(rdev, 1);
	/* FIXME: implement */
	radeon_ring_write(rdev, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(rdev,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(rdev, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(rdev, ib->length_dw);
}

static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

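	/* the PFP and ME firmware images are stored big-endian; stream them
	 * into the ucode RAMs one dword at a time starting at address 0
	 */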
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}

static int evergreen_cp_start(struct radeon_device *rdev)
{
	int r, i;
	uint32_t cp_me;

	r = radeon_ring_lock(rdev, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(rdev, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(rdev, 0x1);
	radeon_ring_write(rdev, 0x0);
	radeon_ring_write(rdev, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(rdev, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(rdev, 0);
	radeon_ring_write(rdev, 0);
	radeon_ring_unlock_commit(rdev);

	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	r = radeon_ring_lock(rdev, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(rdev, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(rdev, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(rdev, evergreen_default_state[i]);

	radeon_ring_write(rdev, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(rdev, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(rdev, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(rdev, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(rdev, 0xc0026f00);
	radeon_ring_write(rdev, 0x00000000);
	radeon_ring_write(rdev, 0x00000000);
	radeon_ring_write(rdev, 0x00000000);

	/* Clear consts */
	radeon_ring_write(rdev, 0xc0036f00);
	radeon_ring_write(rdev, 0x00000bc4);
	radeon_ring_write(rdev, 0xffffffff);
	radeon_ring_write(rdev, 0xffffffff);
	radeon_ring_write(rdev, 0xffffffff);

	radeon_ring_write(rdev, 0xc0026900);
	radeon_ring_write(rdev, 0x00000316);
	radeon_ring_write(rdev, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(rdev, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */

	radeon_ring_unlock_commit(rdev);

	return 0;
}

int evergreen_cp_resume(struct radeon_device *rdev)
{
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size */
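	/* RB_BUFSZ is log2 of the ring size in 8-byte (qword) units */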
	rb_bufsz = drm_order(rdev->cp.ring_size / 8);
	tmp = (drm_order(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x4);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	rdev->cp.wptr = 0;
	WREG32(CP_RB_WPTR, rdev->cp.wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, rdev->cp.gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	rdev->cp.rptr = RREG32(CP_RB_RPTR);

	evergreen_cp_start(rdev);
	rdev->cp.ready = true;
	r = radeon_ring_test(rdev);
	if (r) {
		rdev->cp.ready = false;
		return r;
	}
	return 0;
}

/*
 * Core functions
 */
static u32 evergreen_get_tile_pipe_to_backend_map(struct radeon_device *rdev,
						  u32 num_tile_pipes,
						  u32 num_backends,
						  u32 backend_disable_mask)
{
	u32 backend_map = 0;
	u32 enabled_backends_mask = 0;
	u32 enabled_backends_count = 0;
	u32 cur_pipe;
	u32 swizzle_pipe[EVERGREEN_MAX_PIPES];
	u32 cur_backend = 0;
	u32 i;
	bool force_no_swizzle;

	if (num_tile_pipes > EVERGREEN_MAX_PIPES)
		num_tile_pipes = EVERGREEN_MAX_PIPES;
	if (num_tile_pipes < 1)
		num_tile_pipes = 1;
	if (num_backends > EVERGREEN_MAX_BACKENDS)
		num_backends = EVERGREEN_MAX_BACKENDS;
	if (num_backends < 1)
		num_backends = 1;

	for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
		if (((backend_disable_mask >> i) & 1) == 0) {
			enabled_backends_mask |= (1 << i);
			++enabled_backends_count;
		}
		if (enabled_backends_count == num_backends)
			break;
	}

	if (enabled_backends_count == 0) {
		enabled_backends_mask = 1;
		enabled_backends_count = 1;
	}

	if (enabled_backends_count != num_backends)
		num_backends = enabled_backends_count;

	memset((uint8_t *)&swizzle_pipe[0], 0, sizeof(u32) * EVERGREEN_MAX_PIPES);
	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_REDWOOD:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_TURKS:
	case CHIP_CAICOS:
		force_no_swizzle = false;
		break;
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_BARTS:
	default:
		force_no_swizzle = true;
		break;
	}
	if (force_no_swizzle) {
		bool last_backend_enabled = false;

		force_no_swizzle = false;
		for (i = 0; i < EVERGREEN_MAX_BACKENDS; ++i) {
			if (((enabled_backends_mask >> i) & 1) == 1) {
				if (last_backend_enabled)
					force_no_swizzle = true;
				last_backend_enabled = true;
			} else
				last_backend_enabled = false;
		}
	}

1518	case 1:
1519	case 3:
1520	case 5:
1521	case 7:
1522		DRM_ERROR("odd number of pipes!\n");
1523		break;
1524	case 2:
1525		swizzle_pipe[0] = 0;
1526		swizzle_pipe[1] = 1;
1527		break;
1528	case 4:
1529		if (force_no_swizzle) {
1530			swizzle_pipe[0] = 0;
1531			swizzle_pipe[1] = 1;
1532			swizzle_pipe[2] = 2;
1533			swizzle_pipe[3] = 3;
1534		} else {
1535			swizzle_pipe[0] = 0;
1536			swizzle_pipe[1] = 2;
1537			swizzle_pipe[2] = 1;
1538			swizzle_pipe[3] = 3;
1539		}
1540		break;
1541	case 6:
1542		if (force_no_swizzle) {
1543			swizzle_pipe[0] = 0;
1544			swizzle_pipe[1] = 1;
1545			swizzle_pipe[2] = 2;
1546			swizzle_pipe[3] = 3;
1547			swizzle_pipe[4] = 4;
1548			swizzle_pipe[5] = 5;
1549		} else {
1550			swizzle_pipe[0] = 0;
1551			swizzle_pipe[1] = 2;
1552			swizzle_pipe[2] = 4;
1553			swizzle_pipe[3] = 1;
1554			swizzle_pipe[4] = 3;
1555			swizzle_pipe[5] = 5;
1556		}
1557		break;
1558	case 8:
1559		if (force_no_swizzle) {
1560			swizzle_pipe[0] = 0;
1561			swizzle_pipe[1] = 1;
1562			swizzle_pipe[2] = 2;
1563			swizzle_pipe[3] = 3;
1564			swizzle_pipe[4] = 4;
1565			swizzle_pipe[5] = 5;
1566			swizzle_pipe[6] = 6;
1567			swizzle_pipe[7] = 7;
1568		} else {
1569			swizzle_pipe[0] = 0;
1570			swizzle_pipe[1] = 2;
1571			swizzle_pipe[2] = 4;
1572			swizzle_pipe[3] = 6;
1573			swizzle_pipe[4] = 1;
1574			swizzle_pipe[5] = 3;
1575			swizzle_pipe[6] = 5;
1576			swizzle_pipe[7] = 7;
1577		}
1578		break;
1579	}
1580
1581	for (cur_pipe = 0; cur_pipe < num_tile_pipes; ++cur_pipe) {
1582		while (((1 << cur_backend) & enabled_backends_mask) == 0)
1583			cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
1584
1585		backend_map |= (((cur_backend & 0xf) << (swizzle_pipe[cur_pipe] * 4)));
1586
1587		cur_backend = (cur_backend + 1) % EVERGREEN_MAX_BACKENDS;
1588	}
1589
1590	return backend_map;
1591}
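/*
 * Sketch (illustrative only): the map built above packs one render
 * backend id per tile pipe, four bits each, so decoding pipe N is a
 * shift and a mask.  The helper below is hypothetical, not driver API.
 */
static inline u32 example_backend_for_pipe(u32 backend_map, u32 pipe)
{
	return (backend_map >> (pipe * 4)) & 0xf; /* one nibble per pipe */
}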
1592
1593static void evergreen_gpu_init(struct radeon_device *rdev)
1594{
1595	u32 cc_rb_backend_disable = 0;
1596	u32 cc_gc_shader_pipe_config;
1597	u32 gb_addr_config = 0;
1598	u32 mc_shared_chmap, mc_arb_ramcfg;
1599	u32 gb_backend_map;
1600	u32 grbm_gfx_index;
1601	u32 sx_debug_1;
1602	u32 smx_dc_ctl0;
1603	u32 sq_config;
1604	u32 sq_lds_resource_mgmt;
1605	u32 sq_gpr_resource_mgmt_1;
1606	u32 sq_gpr_resource_mgmt_2;
1607	u32 sq_gpr_resource_mgmt_3;
1608	u32 sq_thread_resource_mgmt;
1609	u32 sq_thread_resource_mgmt_2;
1610	u32 sq_stack_resource_mgmt_1;
1611	u32 sq_stack_resource_mgmt_2;
1612	u32 sq_stack_resource_mgmt_3;
1613	u32 vgt_cache_invalidation;
1614	u32 hdp_host_path_cntl, tmp;
1615	int i, j, num_shader_engines, ps_thread_count;
1616
1617	switch (rdev->family) {
1618	case CHIP_CYPRESS:
1619	case CHIP_HEMLOCK:
1620		rdev->config.evergreen.num_ses = 2;
1621		rdev->config.evergreen.max_pipes = 4;
1622		rdev->config.evergreen.max_tile_pipes = 8;
1623		rdev->config.evergreen.max_simds = 10;
1624		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
1625		rdev->config.evergreen.max_gprs = 256;
1626		rdev->config.evergreen.max_threads = 248;
1627		rdev->config.evergreen.max_gs_threads = 32;
1628		rdev->config.evergreen.max_stack_entries = 512;
1629		rdev->config.evergreen.sx_num_of_sets = 4;
1630		rdev->config.evergreen.sx_max_export_size = 256;
1631		rdev->config.evergreen.sx_max_export_pos_size = 64;
1632		rdev->config.evergreen.sx_max_export_smx_size = 192;
1633		rdev->config.evergreen.max_hw_contexts = 8;
1634		rdev->config.evergreen.sq_num_cf_insts = 2;
1635
1636		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1637		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1638		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1639		break;
1640	case CHIP_JUNIPER:
1641		rdev->config.evergreen.num_ses = 1;
1642		rdev->config.evergreen.max_pipes = 4;
1643		rdev->config.evergreen.max_tile_pipes = 4;
1644		rdev->config.evergreen.max_simds = 10;
1645		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
1646		rdev->config.evergreen.max_gprs = 256;
1647		rdev->config.evergreen.max_threads = 248;
1648		rdev->config.evergreen.max_gs_threads = 32;
1649		rdev->config.evergreen.max_stack_entries = 512;
1650		rdev->config.evergreen.sx_num_of_sets = 4;
1651		rdev->config.evergreen.sx_max_export_size = 256;
1652		rdev->config.evergreen.sx_max_export_pos_size = 64;
1653		rdev->config.evergreen.sx_max_export_smx_size = 192;
1654		rdev->config.evergreen.max_hw_contexts = 8;
1655		rdev->config.evergreen.sq_num_cf_insts = 2;
1656
1657		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1658		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1659		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1660		break;
1661	case CHIP_REDWOOD:
1662		rdev->config.evergreen.num_ses = 1;
1663		rdev->config.evergreen.max_pipes = 4;
1664		rdev->config.evergreen.max_tile_pipes = 4;
1665		rdev->config.evergreen.max_simds = 5;
1666		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
1667		rdev->config.evergreen.max_gprs = 256;
1668		rdev->config.evergreen.max_threads = 248;
1669		rdev->config.evergreen.max_gs_threads = 32;
1670		rdev->config.evergreen.max_stack_entries = 256;
1671		rdev->config.evergreen.sx_num_of_sets = 4;
1672		rdev->config.evergreen.sx_max_export_size = 256;
1673		rdev->config.evergreen.sx_max_export_pos_size = 64;
1674		rdev->config.evergreen.sx_max_export_smx_size = 192;
1675		rdev->config.evergreen.max_hw_contexts = 8;
1676		rdev->config.evergreen.sq_num_cf_insts = 2;
1677
1678		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1679		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1680		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1681		break;
1682	case CHIP_CEDAR:
1683	default:
1684		rdev->config.evergreen.num_ses = 1;
1685		rdev->config.evergreen.max_pipes = 2;
1686		rdev->config.evergreen.max_tile_pipes = 2;
1687		rdev->config.evergreen.max_simds = 2;
1688		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
1689		rdev->config.evergreen.max_gprs = 256;
1690		rdev->config.evergreen.max_threads = 192;
1691		rdev->config.evergreen.max_gs_threads = 16;
1692		rdev->config.evergreen.max_stack_entries = 256;
1693		rdev->config.evergreen.sx_num_of_sets = 4;
1694		rdev->config.evergreen.sx_max_export_size = 128;
1695		rdev->config.evergreen.sx_max_export_pos_size = 32;
1696		rdev->config.evergreen.sx_max_export_smx_size = 96;
1697		rdev->config.evergreen.max_hw_contexts = 4;
1698		rdev->config.evergreen.sq_num_cf_insts = 1;
1699
1700		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1701		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1702		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1703		break;
1704	case CHIP_PALM:
1705		rdev->config.evergreen.num_ses = 1;
1706		rdev->config.evergreen.max_pipes = 2;
1707		rdev->config.evergreen.max_tile_pipes = 2;
1708		rdev->config.evergreen.max_simds = 2;
1709		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
1710		rdev->config.evergreen.max_gprs = 256;
1711		rdev->config.evergreen.max_threads = 192;
1712		rdev->config.evergreen.max_gs_threads = 16;
1713		rdev->config.evergreen.max_stack_entries = 256;
1714		rdev->config.evergreen.sx_num_of_sets = 4;
1715		rdev->config.evergreen.sx_max_export_size = 128;
1716		rdev->config.evergreen.sx_max_export_pos_size = 32;
1717		rdev->config.evergreen.sx_max_export_smx_size = 96;
1718		rdev->config.evergreen.max_hw_contexts = 4;
1719		rdev->config.evergreen.sq_num_cf_insts = 1;
1720
1721		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1722		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1723		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1724		break;
1725	case CHIP_SUMO:
1726		rdev->config.evergreen.num_ses = 1;
1727		rdev->config.evergreen.max_pipes = 4;
1728		rdev->config.evergreen.max_tile_pipes = 2;
1729		if (rdev->pdev->device == 0x9648)
1730			rdev->config.evergreen.max_simds = 3;
1731		else if ((rdev->pdev->device == 0x9647) ||
1732			 (rdev->pdev->device == 0x964a))
1733			rdev->config.evergreen.max_simds = 4;
1734		else
1735			rdev->config.evergreen.max_simds = 5;
1736		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
1737		rdev->config.evergreen.max_gprs = 256;
1738		rdev->config.evergreen.max_threads = 248;
1739		rdev->config.evergreen.max_gs_threads = 32;
1740		rdev->config.evergreen.max_stack_entries = 256;
1741		rdev->config.evergreen.sx_num_of_sets = 4;
1742		rdev->config.evergreen.sx_max_export_size = 256;
1743		rdev->config.evergreen.sx_max_export_pos_size = 64;
1744		rdev->config.evergreen.sx_max_export_smx_size = 192;
1745		rdev->config.evergreen.max_hw_contexts = 8;
1746		rdev->config.evergreen.sq_num_cf_insts = 2;
1747
1748		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1749		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1750		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1751		break;
1752	case CHIP_SUMO2:
1753		rdev->config.evergreen.num_ses = 1;
1754		rdev->config.evergreen.max_pipes = 4;
1755		rdev->config.evergreen.max_tile_pipes = 4;
1756		rdev->config.evergreen.max_simds = 2;
1757		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
1758		rdev->config.evergreen.max_gprs = 256;
1759		rdev->config.evergreen.max_threads = 248;
1760		rdev->config.evergreen.max_gs_threads = 32;
1761		rdev->config.evergreen.max_stack_entries = 512;
1762		rdev->config.evergreen.sx_num_of_sets = 4;
1763		rdev->config.evergreen.sx_max_export_size = 256;
1764		rdev->config.evergreen.sx_max_export_pos_size = 64;
1765		rdev->config.evergreen.sx_max_export_smx_size = 192;
1766		rdev->config.evergreen.max_hw_contexts = 8;
1767		rdev->config.evergreen.sq_num_cf_insts = 2;
1768
1769		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1770		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1771		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1772		break;
1773	case CHIP_BARTS:
1774		rdev->config.evergreen.num_ses = 2;
1775		rdev->config.evergreen.max_pipes = 4;
1776		rdev->config.evergreen.max_tile_pipes = 8;
1777		rdev->config.evergreen.max_simds = 7;
1778		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
1779		rdev->config.evergreen.max_gprs = 256;
1780		rdev->config.evergreen.max_threads = 248;
1781		rdev->config.evergreen.max_gs_threads = 32;
1782		rdev->config.evergreen.max_stack_entries = 512;
1783		rdev->config.evergreen.sx_num_of_sets = 4;
1784		rdev->config.evergreen.sx_max_export_size = 256;
1785		rdev->config.evergreen.sx_max_export_pos_size = 64;
1786		rdev->config.evergreen.sx_max_export_smx_size = 192;
1787		rdev->config.evergreen.max_hw_contexts = 8;
1788		rdev->config.evergreen.sq_num_cf_insts = 2;
1789
1790		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1791		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1792		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1793		break;
1794	case CHIP_TURKS:
1795		rdev->config.evergreen.num_ses = 1;
1796		rdev->config.evergreen.max_pipes = 4;
1797		rdev->config.evergreen.max_tile_pipes = 4;
1798		rdev->config.evergreen.max_simds = 6;
1799		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
1800		rdev->config.evergreen.max_gprs = 256;
1801		rdev->config.evergreen.max_threads = 248;
1802		rdev->config.evergreen.max_gs_threads = 32;
1803		rdev->config.evergreen.max_stack_entries = 256;
1804		rdev->config.evergreen.sx_num_of_sets = 4;
1805		rdev->config.evergreen.sx_max_export_size = 256;
1806		rdev->config.evergreen.sx_max_export_pos_size = 64;
1807		rdev->config.evergreen.sx_max_export_smx_size = 192;
1808		rdev->config.evergreen.max_hw_contexts = 8;
1809		rdev->config.evergreen.sq_num_cf_insts = 2;
1810
1811		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
1812		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1813		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1814		break;
1815	case CHIP_CAICOS:
1816		rdev->config.evergreen.num_ses = 1;
1817		rdev->config.evergreen.max_pipes = 4;
1818		rdev->config.evergreen.max_tile_pipes = 2;
1819		rdev->config.evergreen.max_simds = 2;
1820		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
1821		rdev->config.evergreen.max_gprs = 256;
1822		rdev->config.evergreen.max_threads = 192;
1823		rdev->config.evergreen.max_gs_threads = 16;
1824		rdev->config.evergreen.max_stack_entries = 256;
1825		rdev->config.evergreen.sx_num_of_sets = 4;
1826		rdev->config.evergreen.sx_max_export_size = 128;
1827		rdev->config.evergreen.sx_max_export_pos_size = 32;
1828		rdev->config.evergreen.sx_max_export_smx_size = 96;
1829		rdev->config.evergreen.max_hw_contexts = 4;
1830		rdev->config.evergreen.sq_num_cf_insts = 1;
1831
1832		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
1833		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
1834		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
1835		break;
1836	}
1837
1838	/* Initialize HDP */
1839	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
1840		WREG32((0x2c14 + j), 0x00000000);
1841		WREG32((0x2c18 + j), 0x00000000);
1842		WREG32((0x2c1c + j), 0x00000000);
1843		WREG32((0x2c20 + j), 0x00000000);
1844		WREG32((0x2c24 + j), 0x00000000);
1845	}
1846
1847	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
1848
1849	evergreen_fix_pci_max_read_req_size(rdev);
1850
1851	cc_gc_shader_pipe_config = RREG32(CC_GC_SHADER_PIPE_CONFIG) & ~2;
1852
1853	cc_gc_shader_pipe_config |=
1854		INACTIVE_QD_PIPES((EVERGREEN_MAX_PIPES_MASK << rdev->config.evergreen.max_pipes)
1855				  & EVERGREEN_MAX_PIPES_MASK);
1856	cc_gc_shader_pipe_config |=
1857		INACTIVE_SIMDS((EVERGREEN_MAX_SIMDS_MASK << rdev->config.evergreen.max_simds)
1858			       & EVERGREEN_MAX_SIMDS_MASK);
1859
1860	cc_rb_backend_disable =
1861		BACKEND_DISABLE((EVERGREEN_MAX_BACKENDS_MASK << rdev->config.evergreen.max_backends)
1862				& EVERGREEN_MAX_BACKENDS_MASK);
1863
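	/*
	 * Worked example (sketch): assuming the backend mask is 8 bits
	 * wide, a part with max_backends = 4 gives
	 * (0xff << 4) & 0xff = 0xf0, i.e. backends 4..7 are flagged as
	 * disabled.  The mask width is inferred from the *_MASK names.
	 */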
1864
1865	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
1866	if (rdev->flags & RADEON_IS_IGP)
1867		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
1868	else
1869		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
1870
1871	switch (rdev->config.evergreen.max_tile_pipes) {
1872	case 1:
1873	default:
1874		gb_addr_config |= NUM_PIPES(0);
1875		break;
1876	case 2:
1877		gb_addr_config |= NUM_PIPES(1);
1878		break;
1879	case 4:
1880		gb_addr_config |= NUM_PIPES(2);
1881		break;
1882	case 8:
1883		gb_addr_config |= NUM_PIPES(3);
1884		break;
1885	}
1886
1887	gb_addr_config |= PIPE_INTERLEAVE_SIZE((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT);
1888	gb_addr_config |= BANK_INTERLEAVE_SIZE(0);
1889	gb_addr_config |= NUM_SHADER_ENGINES(rdev->config.evergreen.num_ses - 1);
1890	gb_addr_config |= SHADER_ENGINE_TILE_SIZE(1);
1891	gb_addr_config |= NUM_GPUS(0); /* Hemlock? */
1892	gb_addr_config |= MULTI_GPU_TILE_SIZE(2);
1893
1894	if (((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT) > 2)
1895		gb_addr_config |= ROW_SIZE(2);
1896	else
1897		gb_addr_config |= ROW_SIZE((mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT);
1898
1899	if (rdev->ddev->pdev->device == 0x689e) {
1900		u32 efuse_straps_4;
1901		u32 efuse_straps_3;
1902		u8 efuse_box_bit_131_124;
1903
1904		WREG32(RCU_IND_INDEX, 0x204);
1905		efuse_straps_4 = RREG32(RCU_IND_DATA);
1906		WREG32(RCU_IND_INDEX, 0x203);
1907		efuse_straps_3 = RREG32(RCU_IND_DATA);
1908		efuse_box_bit_131_124 = (u8)(((efuse_straps_4 & 0xf) << 4) | ((efuse_straps_3 & 0xf0000000) >> 28));
1909
1910		switch (efuse_box_bit_131_124) {
1911		case 0x00:
1912			gb_backend_map = 0x76543210;
1913			break;
1914		case 0x55:
1915			gb_backend_map = 0x77553311;
1916			break;
1917		case 0x56:
1918			gb_backend_map = 0x77553300;
1919			break;
1920		case 0x59:
1921			gb_backend_map = 0x77552211;
1922			break;
1923		case 0x66:
1924			gb_backend_map = 0x77443300;
1925			break;
1926		case 0x99:
1927			gb_backend_map = 0x66552211;
1928			break;
1929		case 0x5a:
1930			gb_backend_map = 0x77552200;
1931			break;
1932		case 0xaa:
1933			gb_backend_map = 0x66442200;
1934			break;
1935		case 0x95:
1936			gb_backend_map = 0x66553311;
1937			break;
1938		default:
1939			DRM_ERROR("bad backend map, using default\n");
1940			gb_backend_map =
1941				evergreen_get_tile_pipe_to_backend_map(rdev,
1942								       rdev->config.evergreen.max_tile_pipes,
1943								       rdev->config.evergreen.max_backends,
1944								       ((EVERGREEN_MAX_BACKENDS_MASK <<
1945								   rdev->config.evergreen.max_backends) &
1946									EVERGREEN_MAX_BACKENDS_MASK));
1947			break;
1948		}
1949	} else if (rdev->ddev->pdev->device == 0x68b9) {
1950		u32 efuse_straps_3;
1951		u8 efuse_box_bit_127_124;
1952
1953		WREG32(RCU_IND_INDEX, 0x203);
1954		efuse_straps_3 = RREG32(RCU_IND_DATA);
1955		efuse_box_bit_127_124 = (u8)((efuse_straps_3 & 0xF0000000) >> 28);
1956
1957		switch (efuse_box_bit_127_124) {
1958		case 0x0:
1959			gb_backend_map = 0x00003210;
1960			break;
1961		case 0x5:
1962		case 0x6:
1963		case 0x9:
1964		case 0xa:
1965			gb_backend_map = 0x00003311;
1966			break;
1967		default:
1968			DRM_ERROR("bad backend map, using default\n");
1969			gb_backend_map =
1970				evergreen_get_tile_pipe_to_backend_map(rdev,
1971								       rdev->config.evergreen.max_tile_pipes,
1972								       rdev->config.evergreen.max_backends,
1973								       ((EVERGREEN_MAX_BACKENDS_MASK <<
1974								   rdev->config.evergreen.max_backends) &
1975									EVERGREEN_MAX_BACKENDS_MASK));
1976			break;
1977		}
1978	} else {
1979		switch (rdev->family) {
1980		case CHIP_CYPRESS:
1981		case CHIP_HEMLOCK:
1982		case CHIP_BARTS:
1983			gb_backend_map = 0x66442200;
1984			break;
1985		case CHIP_JUNIPER:
1986			gb_backend_map = 0x00002200;
1987			break;
1988		default:
1989			gb_backend_map =
1990				evergreen_get_tile_pipe_to_backend_map(rdev,
1991								       rdev->config.evergreen.max_tile_pipes,
1992								       rdev->config.evergreen.max_backends,
1993								       ((EVERGREEN_MAX_BACKENDS_MASK <<
1994									 rdev->config.evergreen.max_backends) &
1995									EVERGREEN_MAX_BACKENDS_MASK));
1996		}
1997	}
1998
1999	/* setup tiling info dword.  gb_addr_config is not adequate since it does
2000	 * not have bank info, so create a custom tiling dword.
2001	 * bits 3:0   num_pipes
2002	 * bits 7:4   num_banks
2003	 * bits 11:8  group_size
2004	 * bits 15:12 row_size
2005	 */
2006	rdev->config.evergreen.tile_config = 0;
2007	switch (rdev->config.evergreen.max_tile_pipes) {
2008	case 1:
2009	default:
2010		rdev->config.evergreen.tile_config |= (0 << 0);
2011		break;
2012	case 2:
2013		rdev->config.evergreen.tile_config |= (1 << 0);
2014		break;
2015	case 4:
2016		rdev->config.evergreen.tile_config |= (2 << 0);
2017		break;
2018	case 8:
2019		rdev->config.evergreen.tile_config |= (3 << 0);
2020		break;
2021	}
2022	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
2023	if (rdev->flags & RADEON_IS_IGP)
2024		rdev->config.evergreen.tile_config |= 1 << 4;
2025	else
2026		rdev->config.evergreen.tile_config |=
2027			((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) << 4;
2028	rdev->config.evergreen.tile_config |=
2029		((mc_arb_ramcfg & BURSTLENGTH_MASK) >> BURSTLENGTH_SHIFT) << 8;
2030	rdev->config.evergreen.tile_config |=
2031		((gb_addr_config & 0x30000000) >> 28) << 12;
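	/*
	 * Decode sketch (not driver code), following the layout comment
	 * above: num_pipes = 1 << (tile_config & 0xf), and per the fusion
	 * note num_banks = 4 << ((tile_config >> 4) & 0xf); group_size and
	 * row_size live in bits 11:8 and 15:12 respectively.
	 */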
2032
2033	rdev->config.evergreen.backend_map = gb_backend_map;
2034	WREG32(GB_BACKEND_MAP, gb_backend_map);
2035	WREG32(GB_ADDR_CONFIG, gb_addr_config);
2036	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
2037	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
2038
2039	num_shader_engines = ((RREG32(GB_ADDR_CONFIG) & NUM_SHADER_ENGINES(3)) >> 12) + 1;
2040	grbm_gfx_index = INSTANCE_BROADCAST_WRITES;
2041
2042	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
2043		u32 rb = cc_rb_backend_disable | (0xf0 << 16);
2044		u32 sp = cc_gc_shader_pipe_config;
2045		u32 gfx = grbm_gfx_index | SE_INDEX(i);
2046
2047		if (i == num_shader_engines) {
2048			rb |= BACKEND_DISABLE(EVERGREEN_MAX_BACKENDS_MASK);
2049			sp |= INACTIVE_SIMDS(EVERGREEN_MAX_SIMDS_MASK);
2050		}
2051
2052		WREG32(GRBM_GFX_INDEX, gfx);
2053		WREG32(RLC_GFX_INDEX, gfx);
2054
2055		WREG32(CC_RB_BACKEND_DISABLE, rb);
2056		WREG32(CC_SYS_RB_BACKEND_DISABLE, rb);
2057		WREG32(GC_USER_RB_BACKEND_DISABLE, rb);
2058		WREG32(CC_GC_SHADER_PIPE_CONFIG, sp);
2059	}
2060
2061	grbm_gfx_index |= SE_BROADCAST_WRITES;
2062	WREG32(GRBM_GFX_INDEX, grbm_gfx_index);
2063	WREG32(RLC_GFX_INDEX, grbm_gfx_index);
2064
2065	WREG32(CGTS_SYS_TCC_DISABLE, 0);
2066	WREG32(CGTS_TCC_DISABLE, 0);
2067	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
2068	WREG32(CGTS_USER_TCC_DISABLE, 0);
2069
2070	/* set HW defaults for 3D engine */
2071	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
2072				     ROQ_IB2_START(0x2b)));
2073
2074	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
2075
2076	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
2077			     SYNC_GRADIENT |
2078			     SYNC_WALKER |
2079			     SYNC_ALIGNER));
2080
2081	sx_debug_1 = RREG32(SX_DEBUG_1);
2082	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
2083	WREG32(SX_DEBUG_1, sx_debug_1);
2084
2085
2086	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
2087	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
2088	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
2089	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
2090
2091	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
2092					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
2093					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
2094
2095	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
2096				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
2097				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
2098
2099	WREG32(VGT_NUM_INSTANCES, 1);
2100	WREG32(SPI_CONFIG_CNTL, 0);
2101	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
2102	WREG32(CP_PERFMON_CNTL, 0);
2103
2104	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
2105				  FETCH_FIFO_HIWATER(0x4) |
2106				  DONE_FIFO_HIWATER(0xe0) |
2107				  ALU_UPDATE_FIFO_HIWATER(0x8)));
2108
2109	sq_config = RREG32(SQ_CONFIG);
2110	sq_config &= ~(PS_PRIO(3) |
2111		       VS_PRIO(3) |
2112		       GS_PRIO(3) |
2113		       ES_PRIO(3));
2114	sq_config |= (VC_ENABLE |
2115		      EXPORT_SRC_C |
2116		      PS_PRIO(0) |
2117		      VS_PRIO(1) |
2118		      GS_PRIO(2) |
2119		      ES_PRIO(3));
2120
2121	switch (rdev->family) {
2122	case CHIP_CEDAR:
2123	case CHIP_PALM:
2124	case CHIP_SUMO:
2125	case CHIP_SUMO2:
2126	case CHIP_CAICOS:
2127		/* no vertex cache */
2128		sq_config &= ~VC_ENABLE;
2129		break;
2130	default:
2131		break;
2132	}
2133
2134	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
2135
2136	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
2137	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
2138	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
2139	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
2140	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
2141	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
2142	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
2143
2144	switch (rdev->family) {
2145	case CHIP_CEDAR:
2146	case CHIP_PALM:
2147	case CHIP_SUMO:
2148	case CHIP_SUMO2:
2149		ps_thread_count = 96;
2150		break;
2151	default:
2152		ps_thread_count = 128;
2153		break;
2154	}
2155
2156	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
2157	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
2158	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
2159	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
2160	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
2161	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
2162
2163	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2164	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2165	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2166	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2167	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2168	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
2169
2170	WREG32(SQ_CONFIG, sq_config);
2171	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
2172	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
2173	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
2174	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
2175	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
2176	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
2177	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
2178	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
2179	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
2180	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
2181
2182	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
2183					  FORCE_EOV_MAX_REZ_CNT(255)));
2184
2185	switch (rdev->family) {
2186	case CHIP_CEDAR:
2187	case CHIP_PALM:
2188	case CHIP_SUMO:
2189	case CHIP_SUMO2:
2190	case CHIP_CAICOS:
2191		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
2192		break;
2193	default:
2194		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
2195		break;
2196	}
2197	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
2198	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
2199
2200	WREG32(VGT_GS_VERTEX_REUSE, 16);
2201	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
2202	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
2203
2204	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
2205	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
2206
2207	WREG32(CB_PERF_CTR0_SEL_0, 0);
2208	WREG32(CB_PERF_CTR0_SEL_1, 0);
2209	WREG32(CB_PERF_CTR1_SEL_0, 0);
2210	WREG32(CB_PERF_CTR1_SEL_1, 0);
2211	WREG32(CB_PERF_CTR2_SEL_0, 0);
2212	WREG32(CB_PERF_CTR2_SEL_1, 0);
2213	WREG32(CB_PERF_CTR3_SEL_0, 0);
2214	WREG32(CB_PERF_CTR3_SEL_1, 0);
2215
2216	/* clear render buffer base addresses */
2217	WREG32(CB_COLOR0_BASE, 0);
2218	WREG32(CB_COLOR1_BASE, 0);
2219	WREG32(CB_COLOR2_BASE, 0);
2220	WREG32(CB_COLOR3_BASE, 0);
2221	WREG32(CB_COLOR4_BASE, 0);
2222	WREG32(CB_COLOR5_BASE, 0);
2223	WREG32(CB_COLOR6_BASE, 0);
2224	WREG32(CB_COLOR7_BASE, 0);
2225	WREG32(CB_COLOR8_BASE, 0);
2226	WREG32(CB_COLOR9_BASE, 0);
2227	WREG32(CB_COLOR10_BASE, 0);
2228	WREG32(CB_COLOR11_BASE, 0);
2229
2230	/* set the shader const cache sizes to 0 */
2231	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
2232		WREG32(i, 0);
2233	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
2234		WREG32(i, 0);
2235
2236	tmp = RREG32(HDP_MISC_CNTL);
2237	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
2238	WREG32(HDP_MISC_CNTL, tmp);
2239
2240	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
2241	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
2242
2243	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
2244
2245	udelay(50);
2246
2247}
2248
2249int evergreen_mc_init(struct radeon_device *rdev)
2250{
2251	u32 tmp;
2252	int chansize, numchan;
2253
2254	/* Get VRAM information */
2255	rdev->mc.vram_is_ddr = true;
2256	if (rdev->flags & RADEON_IS_IGP)
2257		tmp = RREG32(FUS_MC_ARB_RAMCFG);
2258	else
2259		tmp = RREG32(MC_ARB_RAMCFG);
2260	if (tmp & CHANSIZE_OVERRIDE) {
2261		chansize = 16;
2262	} else if (tmp & CHANSIZE_MASK) {
2263		chansize = 64;
2264	} else {
2265		chansize = 32;
2266	}
2267	tmp = RREG32(MC_SHARED_CHMAP);
2268	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
2269	case 0:
2270	default:
2271		numchan = 1;
2272		break;
2273	case 1:
2274		numchan = 2;
2275		break;
2276	case 2:
2277		numchan = 4;
2278		break;
2279	case 3:
2280		numchan = 8;
2281		break;
2282	}
2283	rdev->mc.vram_width = numchan * chansize;
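	/* Worked example (sketch): four populated channels of 64-bit DRAM
	 * give numchan * chansize = 4 * 64, i.e. a 256-bit effective bus,
	 * which is the value the bandwidth accounting below relies on.
	 */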
2284	/* Could aper size report 0? */
2285	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
2286	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
2287	/* Setup GPU memory space */
2288	if (rdev->flags & RADEON_IS_IGP) {
2289		/* size in bytes on fusion */
2290		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
2291		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
2292	} else {
2293		/* size in MB on evergreen */
2294		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
2295		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024 * 1024;
2296	}
2297	rdev->mc.visible_vram_size = rdev->mc.aper_size;
2298	r700_vram_gtt_location(rdev, &rdev->mc);
2299	radeon_update_bandwidth_info(rdev);
2300
2301	return 0;
2302}
2303
2304bool evergreen_gpu_is_lockup(struct radeon_device *rdev)
2305{
2306	u32 srbm_status;
2307	u32 grbm_status;
2308	u32 grbm_status_se0, grbm_status_se1;
2309	struct r100_gpu_lockup *lockup = &rdev->config.evergreen.lockup;
2310	int r;
2311
2312	srbm_status = RREG32(SRBM_STATUS);
2313	grbm_status = RREG32(GRBM_STATUS);
2314	grbm_status_se0 = RREG32(GRBM_STATUS_SE0);
2315	grbm_status_se1 = RREG32(GRBM_STATUS_SE1);
2316	if (!(grbm_status & GUI_ACTIVE)) {
2317		r100_gpu_lockup_update(lockup, &rdev->cp);
2318		return false;
2319	}
2320	/* force CP activities */
2321	r = radeon_ring_lock(rdev, 2);
2322	if (!r) {
2323		/* PACKET2 NOP */
2324		radeon_ring_write(rdev, 0x80000000);
2325		radeon_ring_write(rdev, 0x80000000);
2326		radeon_ring_unlock_commit(rdev);
2327	}
2328	rdev->cp.rptr = RREG32(CP_RB_RPTR);
2329	return r100_gpu_cp_is_lockup(rdev, lockup, &rdev->cp);
2330}
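/*
 * Note (sketch): 0x80000000 written above is a type-2 PM4 packet, a
 * one-dword filler NOP; two of them are enough to give the CP work, so
 * a live GPU advances CP_RB_RPTR and the lockup tracker can see it.
 */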
2331
2332static int evergreen_gpu_soft_reset(struct radeon_device *rdev)
2333{
2334	struct evergreen_mc_save save;
2335	u32 grbm_reset = 0;
2336
2337	if (!(RREG32(GRBM_STATUS) & GUI_ACTIVE))
2338		return 0;
2339
2340	dev_info(rdev->dev, "GPU softreset\n");
2341	dev_info(rdev->dev, "  GRBM_STATUS=0x%08X\n",
2342		RREG32(GRBM_STATUS));
2343	dev_info(rdev->dev, "  GRBM_STATUS_SE0=0x%08X\n",
2344		RREG32(GRBM_STATUS_SE0));
2345	dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
2346		RREG32(GRBM_STATUS_SE1));
2347	dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
2348		RREG32(SRBM_STATUS));
2349	evergreen_mc_stop(rdev, &save);
2350	if (evergreen_mc_wait_for_idle(rdev)) {
2351		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
2352	}
2353	/* Disable CP parsing/prefetching */
2354	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
2355
2356	/* reset all the gfx blocks */
2357	grbm_reset = (SOFT_RESET_CP |
2358		      SOFT_RESET_CB |
2359		      SOFT_RESET_DB |
2360		      SOFT_RESET_PA |
2361		      SOFT_RESET_SC |
2362		      SOFT_RESET_SPI |
2363		      SOFT_RESET_SH |
2364		      SOFT_RESET_SX |
2365		      SOFT_RESET_TC |
2366		      SOFT_RESET_TA |
2367		      SOFT_RESET_VC |
2368		      SOFT_RESET_VGT);
2369
2370	dev_info(rdev->dev, "  GRBM_SOFT_RESET=0x%08X\n", grbm_reset);
2371	WREG32(GRBM_SOFT_RESET, grbm_reset);
2372	(void)RREG32(GRBM_SOFT_RESET);
2373	udelay(50);
2374	WREG32(GRBM_SOFT_RESET, 0);
2375	(void)RREG32(GRBM_SOFT_RESET);
2376	/* Wait a little for things to settle down */
2377	udelay(50);
2378	dev_info(rdev->dev, "  GRBM_STATUS=0x%08X\n",
2379		RREG32(GRBM_STATUS));
2380	dev_info(rdev->dev, "  GRBM_STATUS_SE0=0x%08X\n",
2381		RREG32(GRBM_STATUS_SE0));
2382	dev_info(rdev->dev, "  GRBM_STATUS_SE1=0x%08X\n",
2383		RREG32(GRBM_STATUS_SE1));
2384	dev_info(rdev->dev, "  SRBM_STATUS=0x%08X\n",
2385		RREG32(SRBM_STATUS));
2386	evergreen_mc_resume(rdev, &save);
2387	return 0;
2388}
2389
2390int evergreen_asic_reset(struct radeon_device *rdev)
2391{
2392	return evergreen_gpu_soft_reset(rdev);
2393}
2394
2395/* Interrupts */
2396
2397u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
2398{
2399	switch (crtc) {
2400	case 0:
2401		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC0_REGISTER_OFFSET);
2402	case 1:
2403		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC1_REGISTER_OFFSET);
2404	case 2:
2405		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC2_REGISTER_OFFSET);
2406	case 3:
2407		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC3_REGISTER_OFFSET);
2408	case 4:
2409		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC4_REGISTER_OFFSET);
2410	case 5:
2411		return RREG32(CRTC_STATUS_FRAME_COUNT + EVERGREEN_CRTC5_REGISTER_OFFSET);
2412	default:
2413		return 0;
2414	}
2415}
2416
2417void evergreen_disable_interrupt_state(struct radeon_device *rdev)
2418{
2419	u32 tmp;
2420
2421	WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
2422	WREG32(GRBM_INT_CNTL, 0);
2423	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
2424	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
2425	if (rdev->num_crtc >= 4) {
2426		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
2427		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
2428	}
2429	if (rdev->num_crtc >= 6) {
2430		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
2431		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
2432	}
2433
2434	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
2435	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
2436	if (rdev->num_crtc >= 4) {
2437		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
2438		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
2439	}
2440	if (rdev->num_crtc >= 6) {
2441		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
2442		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
2443	}
2444
2445	WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
2446	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
2447
2448	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2449	WREG32(DC_HPD1_INT_CONTROL, tmp);
2450	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2451	WREG32(DC_HPD2_INT_CONTROL, tmp);
2452	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2453	WREG32(DC_HPD3_INT_CONTROL, tmp);
2454	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2455	WREG32(DC_HPD4_INT_CONTROL, tmp);
2456	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2457	WREG32(DC_HPD5_INT_CONTROL, tmp);
2458	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
2459	WREG32(DC_HPD6_INT_CONTROL, tmp);
2460
2461}
2462
2463int evergreen_irq_set(struct radeon_device *rdev)
2464{
2465	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
2466	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
2467	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
2468	u32 grbm_int_cntl = 0;
2469	u32 grph1 = 0, grph2 = 0, grph3 = 0, grph4 = 0, grph5 = 0, grph6 = 0;
2470
2471	if (!rdev->irq.installed) {
2472		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
2473		return -EINVAL;
2474	}
2475	/* don't enable anything if the ih is disabled */
2476	if (!rdev->ih.enabled) {
2477		r600_disable_interrupts(rdev);
2478		/* force the active interrupt state to all disabled */
2479		evergreen_disable_interrupt_state(rdev);
2480		return 0;
2481	}
2482
2483	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
2484	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
2485	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
2486	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
2487	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
2488	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
2489
2490	if (rdev->irq.sw_int) {
2491		DRM_DEBUG("evergreen_irq_set: sw int\n");
2492		cp_int_cntl |= RB_INT_ENABLE;
2493		cp_int_cntl |= TIME_STAMP_INT_ENABLE;
2494	}
2495	if (rdev->irq.crtc_vblank_int[0] ||
2496	    rdev->irq.pflip[0]) {
2497		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
2498		crtc1 |= VBLANK_INT_MASK;
2499	}
2500	if (rdev->irq.crtc_vblank_int[1] ||
2501	    rdev->irq.pflip[1]) {
2502		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
2503		crtc2 |= VBLANK_INT_MASK;
2504	}
2505	if (rdev->irq.crtc_vblank_int[2] ||
2506	    rdev->irq.pflip[2]) {
2507		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
2508		crtc3 |= VBLANK_INT_MASK;
2509	}
2510	if (rdev->irq.crtc_vblank_int[3] ||
2511	    rdev->irq.pflip[3]) {
2512		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
2513		crtc4 |= VBLANK_INT_MASK;
2514	}
2515	if (rdev->irq.crtc_vblank_int[4] ||
2516	    rdev->irq.pflip[4]) {
2517		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
2518		crtc5 |= VBLANK_INT_MASK;
2519	}
2520	if (rdev->irq.crtc_vblank_int[5] ||
2521	    rdev->irq.pflip[5]) {
2522		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
2523		crtc6 |= VBLANK_INT_MASK;
2524	}
2525	if (rdev->irq.hpd[0]) {
2526		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
2527		hpd1 |= DC_HPDx_INT_EN;
2528	}
2529	if (rdev->irq.hpd[1]) {
2530		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
2531		hpd2 |= DC_HPDx_INT_EN;
2532	}
2533	if (rdev->irq.hpd[2]) {
2534		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
2535		hpd3 |= DC_HPDx_INT_EN;
2536	}
2537	if (rdev->irq.hpd[3]) {
2538		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
2539		hpd4 |= DC_HPDx_INT_EN;
2540	}
2541	if (rdev->irq.hpd[4]) {
2542		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
2543		hpd5 |= DC_HPDx_INT_EN;
2544	}
2545	if (rdev->irq.hpd[5]) {
2546		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
2547		hpd6 |= DC_HPDx_INT_EN;
2548	}
2549	if (rdev->irq.gui_idle) {
2550		DRM_DEBUG("gui idle\n");
2551		grbm_int_cntl |= GUI_IDLE_INT_ENABLE;
2552	}
2553
2554	WREG32(CP_INT_CNTL, cp_int_cntl);
2555	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
2556
2557	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
2558	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
2559	if (rdev->num_crtc >= 4) {
2560		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
2561		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
2562	}
2563	if (rdev->num_crtc >= 6) {
2564		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
2565		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
2566	}
2567
2568	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, grph1);
2569	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, grph2);
2570	if (rdev->num_crtc >= 4) {
2571		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, grph3);
2572		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, grph4);
2573	}
2574	if (rdev->num_crtc >= 6) {
2575		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, grph5);
2576		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, grph6);
2577	}
2578
2579	WREG32(DC_HPD1_INT_CONTROL, hpd1);
2580	WREG32(DC_HPD2_INT_CONTROL, hpd2);
2581	WREG32(DC_HPD3_INT_CONTROL, hpd3);
2582	WREG32(DC_HPD4_INT_CONTROL, hpd4);
2583	WREG32(DC_HPD5_INT_CONTROL, hpd5);
2584	WREG32(DC_HPD6_INT_CONTROL, hpd6);
2585
2586	return 0;
2587}
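/*
 * Usage sketch (illustrative, not driver code): callers record what they
 * want in the rdev->irq bookkeeping and then commit it in one shot, e.g.
 *
 *	rdev->irq.crtc_vblank_int[0] = true;
 *	evergreen_irq_set(rdev);
 *
 * so the register writes above always reflect the full desired state.
 */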
2588
2589static inline void evergreen_irq_ack(struct radeon_device *rdev)
2590{
2591	u32 tmp;
2592
2593	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
2594	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
2595	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
2596	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
2597	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
2598	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
2599	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
2600	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
2601	if (rdev->num_crtc >= 4) {
2602		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
2603		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
2604	}
2605	if (rdev->num_crtc >= 6) {
2606		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
2607		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
2608	}
2609
2610	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
2611		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2612	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
2613		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2614	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
2615		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
2616	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
2617		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
2618	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
2619		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
2620	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
2621		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
2622
2623	if (rdev->num_crtc >= 4) {
2624		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
2625			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2626		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
2627			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2628		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
2629			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
2630		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
2631			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
2632		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
2633			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
2634		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
2635			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
2636	}
2637
2638	if (rdev->num_crtc >= 6) {
2639		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
2640			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2641		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
2642			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
2643		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
2644			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
2645		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
2646			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
2647		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
2648			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
2649		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
2650			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
2651	}
2652
2653	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
2654		tmp = RREG32(DC_HPD1_INT_CONTROL);
2655		tmp |= DC_HPDx_INT_ACK;
2656		WREG32(DC_HPD1_INT_CONTROL, tmp);
2657	}
2658	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
2659		tmp = RREG32(DC_HPD2_INT_CONTROL);
2660		tmp |= DC_HPDx_INT_ACK;
2661		WREG32(DC_HPD2_INT_CONTROL, tmp);
2662	}
2663	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
2664		tmp = RREG32(DC_HPD3_INT_CONTROL);
2665		tmp |= DC_HPDx_INT_ACK;
2666		WREG32(DC_HPD3_INT_CONTROL, tmp);
2667	}
2668	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
2669		tmp = RREG32(DC_HPD4_INT_CONTROL);
2670		tmp |= DC_HPDx_INT_ACK;
2671		WREG32(DC_HPD4_INT_CONTROL, tmp);
2672	}
2673	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
2674		tmp = RREG32(DC_HPD5_INT_CONTROL);
2675		tmp |= DC_HPDx_INT_ACK;
2676		WREG32(DC_HPD5_INT_CONTROL, tmp);
2677	}
2678	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
2679		tmp = RREG32(DC_HPD6_INT_CONTROL);
2680		tmp |= DC_HPDx_INT_ACK;
2681		WREG32(DC_HPD6_INT_CONTROL, tmp);
2682	}
2683}
2684
2685void evergreen_irq_disable(struct radeon_device *rdev)
2686{
2687	r600_disable_interrupts(rdev);
2688	/* Wait and acknowledge irq */
2689	mdelay(1);
2690	evergreen_irq_ack(rdev);
2691	evergreen_disable_interrupt_state(rdev);
2692}
2693
2694void evergreen_irq_suspend(struct radeon_device *rdev)
2695{
2696	evergreen_irq_disable(rdev);
2697	r600_rlc_stop(rdev);
2698}
2699
2700static inline u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
2701{
2702	u32 wptr, tmp;
2703
2704	if (rdev->wb.enabled)
2705		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
2706	else
2707		wptr = RREG32(IH_RB_WPTR);
2708
2709	if (wptr & RB_OVERFLOW) {
2710		/* When a ring buffer overflow happens, start parsing interrupts
2711		 * from the last vector that was not overwritten (wptr + 16).
2712		 * Hopefully this lets us catch up.
2713		 */
2714		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
2715			wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
2716		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
2717		tmp = RREG32(IH_RB_CNTL);
2718		tmp |= IH_WPTR_OVERFLOW_CLEAR;
2719		WREG32(IH_RB_CNTL, tmp);
2720	}
2721	return (wptr & rdev->ih.ptr_mask);
2722}
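/*
 * Worked example (sketch, ring size assumed): with a 64 KiB IH ring,
 * ptr_mask = 0xffff, so (wptr + 16) & 0xffff wraps the byte offset and
 * parsing resumes one 16-byte vector past the overflow point.
 */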
2723
2724int evergreen_irq_process(struct radeon_device *rdev)
2725{
2726	u32 wptr;
2727	u32 rptr;
2728	u32 src_id, src_data;
2729	u32 ring_index;
2730	unsigned long flags;
2731	bool queue_hotplug = false;
2732
2733	if (!rdev->ih.enabled || rdev->shutdown)
2734		return IRQ_NONE;
2735
2736	wptr = evergreen_get_ih_wptr(rdev);
2737	rptr = rdev->ih.rptr;
2738	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
2739
2740	spin_lock_irqsave(&rdev->ih.lock, flags);
2741	if (rptr == wptr) {
2742		spin_unlock_irqrestore(&rdev->ih.lock, flags);
2743		return IRQ_NONE;
2744	}
2745restart_ih:
2746	/* Order reading of wptr vs. reading of IH ring data */
2747	rmb();
2748
2749	/* display interrupts */
2750	evergreen_irq_ack(rdev);
2751
2752	rdev->ih.wptr = wptr;
2753	while (rptr != wptr) {
2754		/* wptr/rptr are in bytes! */
2755		ring_index = rptr / 4;
2756		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
2757		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
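		/*
		 * Layout note (sketch): each IH vector is 16 bytes (four
		 * dwords); only dword 0 (src_id, bits 7:0) and dword 1
		 * (src_data, bits 27:0) are consumed here, which is why
		 * rptr advances by 16 per vector at the bottom of the loop.
		 */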
2758
2759		switch (src_id) {
2760		case 1: /* D1 vblank/vline */
2761			switch (src_data) {
2762			case 0: /* D1 vblank */
2763				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
2764					if (rdev->irq.crtc_vblank_int[0]) {
2765						drm_handle_vblank(rdev->ddev, 0);
2766						rdev->pm.vblank_sync = true;
2767						wake_up(&rdev->irq.vblank_queue);
2768					}
2769					if (rdev->irq.pflip[0])
2770						radeon_crtc_handle_flip(rdev, 0);
2771					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
2772					DRM_DEBUG("IH: D1 vblank\n");
2773				}
2774				break;
2775			case 1: /* D1 vline */
2776				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
2777					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
2778					DRM_DEBUG("IH: D1 vline\n");
2779				}
2780				break;
2781			default:
2782				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2783				break;
2784			}
2785			break;
2786		case 2: /* D2 vblank/vline */
2787			switch (src_data) {
2788			case 0: /* D2 vblank */
2789				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
2790					if (rdev->irq.crtc_vblank_int[1]) {
2791						drm_handle_vblank(rdev->ddev, 1);
2792						rdev->pm.vblank_sync = true;
2793						wake_up(&rdev->irq.vblank_queue);
2794					}
2795					if (rdev->irq.pflip[1])
2796						radeon_crtc_handle_flip(rdev, 1);
2797					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
2798					DRM_DEBUG("IH: D2 vblank\n");
2799				}
2800				break;
2801			case 1: /* D2 vline */
2802				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
2803					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
2804					DRM_DEBUG("IH: D2 vline\n");
2805				}
2806				break;
2807			default:
2808				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2809				break;
2810			}
2811			break;
2812		case 3: /* D3 vblank/vline */
2813			switch (src_data) {
2814			case 0: /* D3 vblank */
2815				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
2816					if (rdev->irq.crtc_vblank_int[2]) {
2817						drm_handle_vblank(rdev->ddev, 2);
2818						rdev->pm.vblank_sync = true;
2819						wake_up(&rdev->irq.vblank_queue);
2820					}
2821					if (rdev->irq.pflip[2])
2822						radeon_crtc_handle_flip(rdev, 2);
2823					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
2824					DRM_DEBUG("IH: D3 vblank\n");
2825				}
2826				break;
2827			case 1: /* D3 vline */
2828				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
2829					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
2830					DRM_DEBUG("IH: D3 vline\n");
2831				}
2832				break;
2833			default:
2834				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2835				break;
2836			}
2837			break;
2838		case 4: /* D4 vblank/vline */
2839			switch (src_data) {
2840			case 0: /* D4 vblank */
2841				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
2842					if (rdev->irq.crtc_vblank_int[3]) {
2843						drm_handle_vblank(rdev->ddev, 3);
2844						rdev->pm.vblank_sync = true;
2845						wake_up(&rdev->irq.vblank_queue);
2846					}
2847					if (rdev->irq.pflip[3])
2848						radeon_crtc_handle_flip(rdev, 3);
2849					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
2850					DRM_DEBUG("IH: D4 vblank\n");
2851				}
2852				break;
2853			case 1: /* D4 vline */
2854				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
2855					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
2856					DRM_DEBUG("IH: D4 vline\n");
2857				}
2858				break;
2859			default:
2860				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2861				break;
2862			}
2863			break;
2864		case 5: /* D5 vblank/vline */
2865			switch (src_data) {
2866			case 0: /* D5 vblank */
2867				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
2868					if (rdev->irq.crtc_vblank_int[4]) {
2869						drm_handle_vblank(rdev->ddev, 4);
2870						rdev->pm.vblank_sync = true;
2871						wake_up(&rdev->irq.vblank_queue);
2872					}
2873					if (rdev->irq.pflip[4])
2874						radeon_crtc_handle_flip(rdev, 4);
2875					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
2876					DRM_DEBUG("IH: D5 vblank\n");
2877				}
2878				break;
2879			case 1: /* D5 vline */
2880				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
2881					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
2882					DRM_DEBUG("IH: D5 vline\n");
2883				}
2884				break;
2885			default:
2886				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2887				break;
2888			}
2889			break;
2890		case 6: /* D6 vblank/vline */
2891			switch (src_data) {
2892			case 0: /* D6 vblank */
2893				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
2894					if (rdev->irq.crtc_vblank_int[5]) {
2895						drm_handle_vblank(rdev->ddev, 5);
2896						rdev->pm.vblank_sync = true;
2897						wake_up(&rdev->irq.vblank_queue);
2898					}
2899					if (rdev->irq.pflip[5])
2900						radeon_crtc_handle_flip(rdev, 5);
2901					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
2902					DRM_DEBUG("IH: D6 vblank\n");
2903				}
2904				break;
2905			case 1: /* D6 vline */
2906				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
2907					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
2908					DRM_DEBUG("IH: D6 vline\n");
2909				}
2910				break;
2911			default:
2912				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2913				break;
2914			}
2915			break;
2916		case 42: /* HPD hotplug */
2917			switch (src_data) {
2918			case 0:
2919				if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
2920					rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
2921					queue_hotplug = true;
2922					DRM_DEBUG("IH: HPD1\n");
2923				}
2924				break;
2925			case 1:
2926				if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
2927					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
2928					queue_hotplug = true;
2929					DRM_DEBUG("IH: HPD2\n");
2930				}
2931				break;
2932			case 2:
2933				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
2934					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
2935					queue_hotplug = true;
2936					DRM_DEBUG("IH: HPD3\n");
2937				}
2938				break;
2939			case 3:
2940				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
2941					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
2942					queue_hotplug = true;
2943					DRM_DEBUG("IH: HPD4\n");
2944				}
2945				break;
2946			case 4:
2947				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
2948					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
2949					queue_hotplug = true;
2950					DRM_DEBUG("IH: HPD5\n");
2951				}
2952				break;
2953			case 5:
2954				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
2955					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
2956					queue_hotplug = true;
2957					DRM_DEBUG("IH: HPD6\n");
2958				}
2959				break;
2960			default:
2961				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2962				break;
2963			}
2964			break;
2965		case 176: /* CP_INT in ring buffer */
2966		case 177: /* CP_INT in IB1 */
2967		case 178: /* CP_INT in IB2 */
2968			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
2969			radeon_fence_process(rdev);
2970			break;
2971		case 181: /* CP EOP event */
2972			DRM_DEBUG("IH: CP EOP\n");
2973			radeon_fence_process(rdev);
2974			break;
2975		case 233: /* GUI IDLE */
2976			DRM_DEBUG("IH: GUI idle\n");
2977			rdev->pm.gui_idle = true;
2978			wake_up(&rdev->irq.idle_queue);
2979			break;
2980		default:
2981			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
2982			break;
2983		}
2984
2985		/* wptr/rptr are in bytes! */
2986		rptr += 16;
2987		rptr &= rdev->ih.ptr_mask;
2988	}
2989	/* make sure wptr hasn't changed while processing */
2990	wptr = evergreen_get_ih_wptr(rdev);
2991	if (wptr != rdev->ih.wptr)
2992		goto restart_ih;
2993	if (queue_hotplug)
2994		schedule_work(&rdev->hotplug_work);
2995	rdev->ih.rptr = rptr;
2996	WREG32(IH_RB_RPTR, rdev->ih.rptr);
2997	spin_unlock_irqrestore(&rdev->ih.lock, flags);
2998	return IRQ_HANDLED;
2999}
3000
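/* Common hw bring-up used by both init and resume: load microcode,
 * program the MC, enable GART (or AGP), then bring up the GPU,
 * blitter, writeback, interrupts and the CP ring.
 */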
3001static int evergreen_startup(struct radeon_device *rdev)
3002{
3003	int r;
3004
3005	/* enable pcie gen2 link */
3006	if (!ASIC_IS_DCE5(rdev))
3007		evergreen_pcie_gen2_enable(rdev);
3008
3009	if (ASIC_IS_DCE5(rdev)) {
3010		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
3011			r = ni_init_microcode(rdev);
3012			if (r) {
3013				DRM_ERROR("Failed to load firmware!\n");
3014				return r;
3015			}
3016		}
3017		r = ni_mc_load_microcode(rdev);
3018		if (r) {
3019			DRM_ERROR("Failed to load MC firmware!\n");
3020			return r;
3021		}
3022	} else {
3023		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
3024			r = r600_init_microcode(rdev);
3025			if (r) {
3026				DRM_ERROR("Failed to load firmware!\n");
3027				return r;
3028			}
3029		}
3030	}
3031
3032	evergreen_mc_program(rdev);
3033	if (rdev->flags & RADEON_IS_AGP) {
3034		evergreen_agp_enable(rdev);
3035	} else {
3036		r = evergreen_pcie_gart_enable(rdev);
3037		if (r)
3038			return r;
3039	}
3040	evergreen_gpu_init(rdev);
3041
3042	r = evergreen_blit_init(rdev);
3043	if (r) {
3044		evergreen_blit_fini(rdev);
3045		rdev->asic->copy = NULL;
3046		dev_warn(rdev->dev, "failed blitter (%d), falling back to memcpy\n", r);
3047	}
3048
3049	/* allocate wb buffer */
3050	r = radeon_wb_init(rdev);
3051	if (r)
3052		return r;
3053
3054	/* Enable IRQ */
3055	r = r600_irq_init(rdev);
3056	if (r) {
3057		DRM_ERROR("radeon: IH init failed (%d).\n", r);
3058		radeon_irq_kms_fini(rdev);
3059		return r;
3060	}
3061	evergreen_irq_set(rdev);
3062
3063	r = radeon_ring_init(rdev, rdev->cp.ring_size);
3064	if (r)
3065		return r;
3066	r = evergreen_cp_load_microcode(rdev);
3067	if (r)
3068		return r;
3069	r = evergreen_cp_resume(rdev);
3070	if (r)
3071		return r;
3072
3073	return 0;
3074}
3075
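/* Resume: reset the asic, re-post it via the ATOM init tables, then
 * run the common startup path and verify the CP with an IB test.
 */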
3076int evergreen_resume(struct radeon_device *rdev)
3077{
3078	int r;
3079
3080	/* reset the asic, the gfx blocks are often in a bad state
3081	 * after the driver is unloaded or after a resume
3082	 */
3083	if (radeon_asic_reset(rdev))
3084		dev_warn(rdev->dev, "GPU reset failed!\n");
3085	/* Do not reset the GPU before posting; on rv770 hw, unlike r500 hw,
3086	 * posting will perform the necessary tasks to bring the GPU back
3087	 * into good shape.
3088	 */
3089	/* post card */
3090	atom_asic_init(rdev->mode_info.atom_context);
3091
3092	r = evergreen_startup(rdev);
3093	if (r) {
3094		DRM_ERROR("evergreen startup failed on resume\n");
3095		return r;
3096	}
3097
3098	r = r600_ib_test(rdev);
3099	if (r) {
3100		DRM_ERROR("radeon: failed testing IB (%d).\n", r);
3101		return r;
3102	}
3103
3104	return r;
3105
3106}
3107
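/* Suspend: stop the CP, quiesce interrupts, writeback and the GART,
 * and unpin the blit shader BO.
 */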
3108int evergreen_suspend(struct radeon_device *rdev)
3109{
3110	int r;
3111
3112	/* FIXME: we should wait for ring to be empty */
3113	r700_cp_stop(rdev);
3114	rdev->cp.ready = false;
3115	evergreen_irq_suspend(rdev);
3116	radeon_wb_disable(rdev);
3117	evergreen_pcie_gart_disable(rdev);
3118
3119	/* unpin shaders bo */
3120	r = radeon_bo_reserve(rdev->r600_blit.shader_obj, false);
3121	if (likely(r == 0)) {
3122		radeon_bo_unpin(rdev->r600_blit.shader_obj);
3123		radeon_bo_unreserve(rdev->r600_blit.shader_obj);
3124	}
3125
3126	return 0;
3127}
3128
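/* GPU-accelerated copy using the blit shaders; the whole operation is
 * serialized by the r600_blit mutex.
 */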
3129int evergreen_copy_blit(struct radeon_device *rdev,
3130			uint64_t src_offset,
3131			uint64_t dst_offset,
3132			unsigned num_gpu_pages,
3133			struct radeon_fence *fence)
3134{
3135	int r;
3136
3137	mutex_lock(&rdev->r600_blit.mutex);
3138	rdev->r600_blit.vb_ib = NULL;
3139	r = evergreen_blit_prepare_copy(rdev, num_gpu_pages * RADEON_GPU_PAGE_SIZE);
3140	if (r) {
3141		if (rdev->r600_blit.vb_ib)
3142			radeon_ib_free(rdev, &rdev->r600_blit.vb_ib);
3143		mutex_unlock(&rdev->r600_blit.mutex);
3144		return r;
3145	}
3146	evergreen_kms_blit_copy(rdev, src_offset, dst_offset, num_gpu_pages * RADEON_GPU_PAGE_SIZE);
3147	evergreen_blit_done_copy(rdev, fence);
3148	mutex_unlock(&rdev->r600_blit.mutex);
3149	return 0;
3150}
3151
3152/* The plan is to move initialization into this function and use
3153 * helper functions so that radeon_device_init does pretty much
3154 * nothing more than call asic-specific functions. This should
3155 * also allow us to remove a bunch of callback functions like
3156 * vram_info.
3157 */
3158int evergreen_init(struct radeon_device *rdev)
3159{
3160	int r;
3161
3162	/* This doesn't do much */
3163	r = radeon_gem_init(rdev);
3164	if (r)
3165		return r;
3166	/* Read BIOS */
3167	if (!radeon_get_bios(rdev)) {
3168		if (ASIC_IS_AVIVO(rdev))
3169			return -EINVAL;
3170	}
3171	/* Must be an ATOMBIOS */
3172	if (!rdev->is_atom_bios) {
3173		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
3174		return -EINVAL;
3175	}
3176	r = radeon_atombios_init(rdev);
3177	if (r)
3178		return r;
3179	/* reset the asic, the gfx blocks are often in a bad state
3180	 * after the driver is unloaded or after a resume
3181	 */
3182	if (radeon_asic_reset(rdev))
3183		dev_warn(rdev->dev, "GPU reset failed!\n");
3184	/* Post card if necessary */
3185	if (!radeon_card_posted(rdev)) {
3186		if (!rdev->bios) {
3187			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
3188			return -EINVAL;
3189		}
3190		DRM_INFO("GPU not posted. posting now...\n");
3191		atom_asic_init(rdev->mode_info.atom_context);
3192	}
3193	/* Initialize scratch registers */
3194	r600_scratch_init(rdev);
3195	/* Initialize surface registers */
3196	radeon_surface_init(rdev);
3197	/* Initialize clocks */
3198	radeon_get_clock_info(rdev->ddev);
3199	/* Fence driver */
3200	r = radeon_fence_driver_init(rdev);
3201	if (r)
3202		return r;
3203	/* initialize AGP */
3204	if (rdev->flags & RADEON_IS_AGP) {
3205		r = radeon_agp_init(rdev);
3206		if (r)
3207			radeon_agp_disable(rdev);
3208	}
3209	/* initialize memory controller */
3210	r = evergreen_mc_init(rdev);
3211	if (r)
3212		return r;
3213	/* Memory manager */
3214	r = radeon_bo_init(rdev);
3215	if (r)
3216		return r;
3217
3218	r = radeon_irq_kms_init(rdev);
3219	if (r)
3220		return r;
3221
3222	rdev->cp.ring_obj = NULL;
3223	r600_ring_init(rdev, 1024 * 1024);
3224
3225	rdev->ih.ring_obj = NULL;
3226	r600_ih_ring_init(rdev, 64 * 1024);
3227
3228	r = r600_pcie_gart_init(rdev);
3229	if (r)
3230		return r;
3231
3232	rdev->accel_working = true;
3233	r = evergreen_startup(rdev);
3234	if (r) {
3235		dev_err(rdev->dev, "disabling GPU acceleration\n");
3236		r700_cp_fini(rdev);
3237		r600_irq_fini(rdev);
3238		radeon_wb_fini(rdev);
3239		radeon_irq_kms_fini(rdev);
3240		evergreen_pcie_gart_fini(rdev);
3241		rdev->accel_working = false;
3242	}
3243	if (rdev->accel_working) {
3244		r = radeon_ib_pool_init(rdev);
3245		if (r) {
3246			DRM_ERROR("radeon: failed initializing IB pool (%d).\n", r);
3247			rdev->accel_working = false;
3248		}
3249		r = r600_ib_test(rdev);
3250		if (r) {
3251			DRM_ERROR("radeon: failed testing IB (%d).\n", r);
3252			rdev->accel_working = false;
3253		}
3254	}
3255	return 0;
3256}
3257
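/* Tear down everything evergreen_init()/evergreen_startup() set up,
 * in roughly reverse order.
 */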
3258void evergreen_fini(struct radeon_device *rdev)
3259{
3260	evergreen_blit_fini(rdev);
3261	r700_cp_fini(rdev);
3262	r600_irq_fini(rdev);
3263	radeon_wb_fini(rdev);
3264	radeon_ib_pool_fini(rdev);
3265	radeon_irq_kms_fini(rdev);
3266	evergreen_pcie_gart_fini(rdev);
3267	radeon_gem_fini(rdev);
3268	radeon_fence_driver_fini(rdev);
3269	radeon_agp_fini(rdev);
3270	radeon_bo_fini(rdev);
3271	radeon_atombios_fini(rdev);
3272	kfree(rdev->bios);
3273	rdev->bios = NULL;
3274}
3275
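/* Switch the PCIE link to gen2 speed when both ends of the link
 * support it; IGP, non-PCIE and X2 boards are skipped.
 */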
3276static void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
3277{
3278	u32 link_width_cntl, speed_cntl;
3279
3280	if (radeon_pcie_gen2 == 0)
3281		return;
3282
3283	if (rdev->flags & RADEON_IS_IGP)
3284		return;
3285
3286	if (!(rdev->flags & RADEON_IS_PCIE))
3287		return;
3288
3289	/* x2 cards have a special sequence */
3290	if (ASIC_IS_X2(rdev))
3291		return;
3292
3293	speed_cntl = RREG32_PCIE_P(PCIE_LC_SPEED_CNTL);
3294	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
3295	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
3296
3297		link_width_cntl = RREG32_PCIE_P(PCIE_LC_LINK_WIDTH_CNTL);
3298		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
3299		WREG32_PCIE_P(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
3300
3301		speed_cntl = RREG32_PCIE_P(PCIE_LC_SPEED_CNTL);
3302		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
3303		WREG32_PCIE_P(PCIE_LC_SPEED_CNTL, speed_cntl);
3304
3305		speed_cntl = RREG32_PCIE_P(PCIE_LC_SPEED_CNTL);
3306		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
3307		WREG32_PCIE_P(PCIE_LC_SPEED_CNTL, speed_cntl);
3308
3309		speed_cntl = RREG32_PCIE_P(PCIE_LC_SPEED_CNTL);
3310		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
3311		WREG32_PCIE_P(PCIE_LC_SPEED_CNTL, speed_cntl);
3312
3313		speed_cntl = RREG32_PCIE_P(PCIE_LC_SPEED_CNTL);
3314		speed_cntl |= LC_GEN2_EN_STRAP;
3315		WREG32_PCIE_P(PCIE_LC_SPEED_CNTL, speed_cntl);
3316
3317	} else {
3318		link_width_cntl = RREG32_PCIE_P(PCIE_LC_LINK_WIDTH_CNTL);
3319		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
3320		if (1)
3321			link_width_cntl |= LC_UPCONFIGURE_DIS;
3322		else
3323			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
3324		WREG32_PCIE_P(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
3325	}
3326}
v3.15
   1/*
   2 * Copyright 2010 Advanced Micro Devices, Inc.
   3 *
   4 * Permission is hereby granted, free of charge, to any person obtaining a
   5 * copy of this software and associated documentation files (the "Software"),
   6 * to deal in the Software without restriction, including without limitation
   7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
   8 * and/or sell copies of the Software, and to permit persons to whom the
   9 * Software is furnished to do so, subject to the following conditions:
  10 *
  11 * The above copyright notice and this permission notice shall be included in
  12 * all copies or substantial portions of the Software.
  13 *
  14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
  17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  20 * OTHER DEALINGS IN THE SOFTWARE.
  21 *
  22 * Authors: Alex Deucher
  23 */
  24#include <linux/firmware.h>
  25#include <linux/platform_device.h>
  26#include <linux/slab.h>
  27#include <drm/drmP.h>
  28#include "radeon.h"
  29#include "radeon_asic.h"
  30#include <drm/radeon_drm.h>
  31#include "evergreend.h"
  32#include "atom.h"
  33#include "avivod.h"
  34#include "evergreen_reg.h"
  35#include "evergreen_blit_shaders.h"
  36#include "radeon_ucode.h"
  37
  38static const u32 crtc_offsets[6] =
  39{
  40	EVERGREEN_CRTC0_REGISTER_OFFSET,
  41	EVERGREEN_CRTC1_REGISTER_OFFSET,
  42	EVERGREEN_CRTC2_REGISTER_OFFSET,
  43	EVERGREEN_CRTC3_REGISTER_OFFSET,
  44	EVERGREEN_CRTC4_REGISTER_OFFSET,
  45	EVERGREEN_CRTC5_REGISTER_OFFSET
  46};
  47
  48#include "clearstate_evergreen.h"
  49
  50static const u32 sumo_rlc_save_restore_register_list[] =
  51{
  52	0x98fc,
  53	0x9830,
  54	0x9834,
  55	0x9838,
  56	0x9870,
  57	0x9874,
  58	0x8a14,
  59	0x8b24,
  60	0x8bcc,
  61	0x8b10,
  62	0x8d00,
  63	0x8d04,
  64	0x8c00,
  65	0x8c04,
  66	0x8c08,
  67	0x8c0c,
  68	0x8d8c,
  69	0x8c20,
  70	0x8c24,
  71	0x8c28,
  72	0x8c18,
  73	0x8c1c,
  74	0x8cf0,
  75	0x8e2c,
  76	0x8e38,
  77	0x8c30,
  78	0x9508,
  79	0x9688,
  80	0x9608,
  81	0x960c,
  82	0x9610,
  83	0x9614,
  84	0x88c4,
  85	0x88d4,
  86	0xa008,
  87	0x900c,
  88	0x9100,
  89	0x913c,
  90	0x98f8,
  91	0x98f4,
  92	0x9b7c,
  93	0x3f8c,
  94	0x8950,
  95	0x8954,
  96	0x8a18,
  97	0x8b28,
  98	0x9144,
  99	0x9148,
 100	0x914c,
 101	0x3f90,
 102	0x3f94,
 103	0x915c,
 104	0x9160,
 105	0x9178,
 106	0x917c,
 107	0x9180,
 108	0x918c,
 109	0x9190,
 110	0x9194,
 111	0x9198,
 112	0x919c,
 113	0x91a8,
 114	0x91ac,
 115	0x91b0,
 116	0x91b4,
 117	0x91b8,
 118	0x91c4,
 119	0x91c8,
 120	0x91cc,
 121	0x91d0,
 122	0x91d4,
 123	0x91e0,
 124	0x91e4,
 125	0x91ec,
 126	0x91f0,
 127	0x91f4,
 128	0x9200,
 129	0x9204,
 130	0x929c,
 131	0x9150,
 132	0x802c,
 133};
 134
 135static void evergreen_gpu_init(struct radeon_device *rdev);
 136void evergreen_fini(struct radeon_device *rdev);
 137void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
 138void evergreen_program_aspm(struct radeon_device *rdev);
 139extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
 140				     int ring, u32 cp_int_cntl);
 141extern void cayman_vm_decode_fault(struct radeon_device *rdev,
 142				   u32 status, u32 addr);
 143void cik_init_cp_pg_table(struct radeon_device *rdev);
 144
 145extern u32 si_get_csb_size(struct radeon_device *rdev);
 146extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
 147extern u32 cik_get_csb_size(struct radeon_device *rdev);
 148extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
 149extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
 150
 151static const u32 evergreen_golden_registers[] =
 152{
 153	0x3f90, 0xffff0000, 0xff000000,
 154	0x9148, 0xffff0000, 0xff000000,
 155	0x3f94, 0xffff0000, 0xff000000,
 156	0x914c, 0xffff0000, 0xff000000,
 157	0x9b7c, 0xffffffff, 0x00000000,
 158	0x8a14, 0xffffffff, 0x00000007,
 159	0x8b10, 0xffffffff, 0x00000000,
 160	0x960c, 0xffffffff, 0x54763210,
 161	0x88c4, 0xffffffff, 0x000000c2,
 162	0x88d4, 0xffffffff, 0x00000010,
 163	0x8974, 0xffffffff, 0x00000000,
 164	0xc78, 0x00000080, 0x00000080,
 165	0x5eb4, 0xffffffff, 0x00000002,
 166	0x5e78, 0xffffffff, 0x001000f0,
 167	0x6104, 0x01000300, 0x00000000,
 168	0x5bc0, 0x00300000, 0x00000000,
 169	0x7030, 0xffffffff, 0x00000011,
 170	0x7c30, 0xffffffff, 0x00000011,
 171	0x10830, 0xffffffff, 0x00000011,
 172	0x11430, 0xffffffff, 0x00000011,
 173	0x12030, 0xffffffff, 0x00000011,
 174	0x12c30, 0xffffffff, 0x00000011,
 175	0xd02c, 0xffffffff, 0x08421000,
 176	0x240c, 0xffffffff, 0x00000380,
 177	0x8b24, 0xffffffff, 0x00ff0fff,
 178	0x28a4c, 0x06000000, 0x06000000,
 179	0x10c, 0x00000001, 0x00000001,
 180	0x8d00, 0xffffffff, 0x100e4848,
 181	0x8d04, 0xffffffff, 0x00164745,
 182	0x8c00, 0xffffffff, 0xe4000003,
 183	0x8c04, 0xffffffff, 0x40600060,
 184	0x8c08, 0xffffffff, 0x001c001c,
 185	0x8cf0, 0xffffffff, 0x08e00620,
 186	0x8c20, 0xffffffff, 0x00800080,
 187	0x8c24, 0xffffffff, 0x00800080,
 188	0x8c18, 0xffffffff, 0x20202078,
 189	0x8c1c, 0xffffffff, 0x00001010,
 190	0x28350, 0xffffffff, 0x00000000,
 191	0xa008, 0xffffffff, 0x00010000,
 192	0x5cc, 0xffffffff, 0x00000001,
 193	0x9508, 0xffffffff, 0x00000002,
 194	0x913c, 0x0000000f, 0x0000000a
 195};
 196
 197static const u32 evergreen_golden_registers2[] =
 198{
 199	0x2f4c, 0xffffffff, 0x00000000,
 200	0x54f4, 0xffffffff, 0x00000000,
 201	0x54f0, 0xffffffff, 0x00000000,
 202	0x5498, 0xffffffff, 0x00000000,
 203	0x549c, 0xffffffff, 0x00000000,
 204	0x5494, 0xffffffff, 0x00000000,
 205	0x53cc, 0xffffffff, 0x00000000,
 206	0x53c8, 0xffffffff, 0x00000000,
 207	0x53c4, 0xffffffff, 0x00000000,
 208	0x53c0, 0xffffffff, 0x00000000,
 209	0x53bc, 0xffffffff, 0x00000000,
 210	0x53b8, 0xffffffff, 0x00000000,
 211	0x53b4, 0xffffffff, 0x00000000,
 212	0x53b0, 0xffffffff, 0x00000000
 213};
 214
 215static const u32 cypress_mgcg_init[] =
 216{
 217	0x802c, 0xffffffff, 0xc0000000,
 218	0x5448, 0xffffffff, 0x00000100,
 219	0x55e4, 0xffffffff, 0x00000100,
 220	0x160c, 0xffffffff, 0x00000100,
 221	0x5644, 0xffffffff, 0x00000100,
 222	0xc164, 0xffffffff, 0x00000100,
 223	0x8a18, 0xffffffff, 0x00000100,
 224	0x897c, 0xffffffff, 0x06000100,
 225	0x8b28, 0xffffffff, 0x00000100,
 226	0x9144, 0xffffffff, 0x00000100,
 227	0x9a60, 0xffffffff, 0x00000100,
 228	0x9868, 0xffffffff, 0x00000100,
 229	0x8d58, 0xffffffff, 0x00000100,
 230	0x9510, 0xffffffff, 0x00000100,
 231	0x949c, 0xffffffff, 0x00000100,
 232	0x9654, 0xffffffff, 0x00000100,
 233	0x9030, 0xffffffff, 0x00000100,
 234	0x9034, 0xffffffff, 0x00000100,
 235	0x9038, 0xffffffff, 0x00000100,
 236	0x903c, 0xffffffff, 0x00000100,
 237	0x9040, 0xffffffff, 0x00000100,
 238	0xa200, 0xffffffff, 0x00000100,
 239	0xa204, 0xffffffff, 0x00000100,
 240	0xa208, 0xffffffff, 0x00000100,
 241	0xa20c, 0xffffffff, 0x00000100,
 242	0x971c, 0xffffffff, 0x00000100,
 243	0x977c, 0xffffffff, 0x00000100,
 244	0x3f80, 0xffffffff, 0x00000100,
 245	0xa210, 0xffffffff, 0x00000100,
 246	0xa214, 0xffffffff, 0x00000100,
 247	0x4d8, 0xffffffff, 0x00000100,
 248	0x9784, 0xffffffff, 0x00000100,
 249	0x9698, 0xffffffff, 0x00000100,
 250	0x4d4, 0xffffffff, 0x00000200,
 251	0x30cc, 0xffffffff, 0x00000100,
 252	0xd0c0, 0xffffffff, 0xff000100,
 253	0x802c, 0xffffffff, 0x40000000,
 254	0x915c, 0xffffffff, 0x00010000,
 255	0x9160, 0xffffffff, 0x00030002,
 256	0x9178, 0xffffffff, 0x00070000,
 257	0x917c, 0xffffffff, 0x00030002,
 258	0x9180, 0xffffffff, 0x00050004,
 259	0x918c, 0xffffffff, 0x00010006,
 260	0x9190, 0xffffffff, 0x00090008,
 261	0x9194, 0xffffffff, 0x00070000,
 262	0x9198, 0xffffffff, 0x00030002,
 263	0x919c, 0xffffffff, 0x00050004,
 264	0x91a8, 0xffffffff, 0x00010006,
 265	0x91ac, 0xffffffff, 0x00090008,
 266	0x91b0, 0xffffffff, 0x00070000,
 267	0x91b4, 0xffffffff, 0x00030002,
 268	0x91b8, 0xffffffff, 0x00050004,
 269	0x91c4, 0xffffffff, 0x00010006,
 270	0x91c8, 0xffffffff, 0x00090008,
 271	0x91cc, 0xffffffff, 0x00070000,
 272	0x91d0, 0xffffffff, 0x00030002,
 273	0x91d4, 0xffffffff, 0x00050004,
 274	0x91e0, 0xffffffff, 0x00010006,
 275	0x91e4, 0xffffffff, 0x00090008,
 276	0x91e8, 0xffffffff, 0x00000000,
 277	0x91ec, 0xffffffff, 0x00070000,
 278	0x91f0, 0xffffffff, 0x00030002,
 279	0x91f4, 0xffffffff, 0x00050004,
 280	0x9200, 0xffffffff, 0x00010006,
 281	0x9204, 0xffffffff, 0x00090008,
 282	0x9208, 0xffffffff, 0x00070000,
 283	0x920c, 0xffffffff, 0x00030002,
 284	0x9210, 0xffffffff, 0x00050004,
 285	0x921c, 0xffffffff, 0x00010006,
 286	0x9220, 0xffffffff, 0x00090008,
 287	0x9224, 0xffffffff, 0x00070000,
 288	0x9228, 0xffffffff, 0x00030002,
 289	0x922c, 0xffffffff, 0x00050004,
 290	0x9238, 0xffffffff, 0x00010006,
 291	0x923c, 0xffffffff, 0x00090008,
 292	0x9240, 0xffffffff, 0x00070000,
 293	0x9244, 0xffffffff, 0x00030002,
 294	0x9248, 0xffffffff, 0x00050004,
 295	0x9254, 0xffffffff, 0x00010006,
 296	0x9258, 0xffffffff, 0x00090008,
 297	0x925c, 0xffffffff, 0x00070000,
 298	0x9260, 0xffffffff, 0x00030002,
 299	0x9264, 0xffffffff, 0x00050004,
 300	0x9270, 0xffffffff, 0x00010006,
 301	0x9274, 0xffffffff, 0x00090008,
 302	0x9278, 0xffffffff, 0x00070000,
 303	0x927c, 0xffffffff, 0x00030002,
 304	0x9280, 0xffffffff, 0x00050004,
 305	0x928c, 0xffffffff, 0x00010006,
 306	0x9290, 0xffffffff, 0x00090008,
 307	0x9294, 0xffffffff, 0x00000000,
 308	0x929c, 0xffffffff, 0x00000001,
 309	0x802c, 0xffffffff, 0x40010000,
 310	0x915c, 0xffffffff, 0x00010000,
 311	0x9160, 0xffffffff, 0x00030002,
 312	0x9178, 0xffffffff, 0x00070000,
 313	0x917c, 0xffffffff, 0x00030002,
 314	0x9180, 0xffffffff, 0x00050004,
 315	0x918c, 0xffffffff, 0x00010006,
 316	0x9190, 0xffffffff, 0x00090008,
 317	0x9194, 0xffffffff, 0x00070000,
 318	0x9198, 0xffffffff, 0x00030002,
 319	0x919c, 0xffffffff, 0x00050004,
 320	0x91a8, 0xffffffff, 0x00010006,
 321	0x91ac, 0xffffffff, 0x00090008,
 322	0x91b0, 0xffffffff, 0x00070000,
 323	0x91b4, 0xffffffff, 0x00030002,
 324	0x91b8, 0xffffffff, 0x00050004,
 325	0x91c4, 0xffffffff, 0x00010006,
 326	0x91c8, 0xffffffff, 0x00090008,
 327	0x91cc, 0xffffffff, 0x00070000,
 328	0x91d0, 0xffffffff, 0x00030002,
 329	0x91d4, 0xffffffff, 0x00050004,
 330	0x91e0, 0xffffffff, 0x00010006,
 331	0x91e4, 0xffffffff, 0x00090008,
 332	0x91e8, 0xffffffff, 0x00000000,
 333	0x91ec, 0xffffffff, 0x00070000,
 334	0x91f0, 0xffffffff, 0x00030002,
 335	0x91f4, 0xffffffff, 0x00050004,
 336	0x9200, 0xffffffff, 0x00010006,
 337	0x9204, 0xffffffff, 0x00090008,
 338	0x9208, 0xffffffff, 0x00070000,
 339	0x920c, 0xffffffff, 0x00030002,
 340	0x9210, 0xffffffff, 0x00050004,
 341	0x921c, 0xffffffff, 0x00010006,
 342	0x9220, 0xffffffff, 0x00090008,
 343	0x9224, 0xffffffff, 0x00070000,
 344	0x9228, 0xffffffff, 0x00030002,
 345	0x922c, 0xffffffff, 0x00050004,
 346	0x9238, 0xffffffff, 0x00010006,
 347	0x923c, 0xffffffff, 0x00090008,
 348	0x9240, 0xffffffff, 0x00070000,
 349	0x9244, 0xffffffff, 0x00030002,
 350	0x9248, 0xffffffff, 0x00050004,
 351	0x9254, 0xffffffff, 0x00010006,
 352	0x9258, 0xffffffff, 0x00090008,
 353	0x925c, 0xffffffff, 0x00070000,
 354	0x9260, 0xffffffff, 0x00030002,
 355	0x9264, 0xffffffff, 0x00050004,
 356	0x9270, 0xffffffff, 0x00010006,
 357	0x9274, 0xffffffff, 0x00090008,
 358	0x9278, 0xffffffff, 0x00070000,
 359	0x927c, 0xffffffff, 0x00030002,
 360	0x9280, 0xffffffff, 0x00050004,
 361	0x928c, 0xffffffff, 0x00010006,
 362	0x9290, 0xffffffff, 0x00090008,
 363	0x9294, 0xffffffff, 0x00000000,
 364	0x929c, 0xffffffff, 0x00000001,
 365	0x802c, 0xffffffff, 0xc0000000
 366};
 367
 368static const u32 redwood_mgcg_init[] =
 369{
 370	0x802c, 0xffffffff, 0xc0000000,
 371	0x5448, 0xffffffff, 0x00000100,
 372	0x55e4, 0xffffffff, 0x00000100,
 373	0x160c, 0xffffffff, 0x00000100,
 374	0x5644, 0xffffffff, 0x00000100,
 375	0xc164, 0xffffffff, 0x00000100,
 376	0x8a18, 0xffffffff, 0x00000100,
 377	0x897c, 0xffffffff, 0x06000100,
 378	0x8b28, 0xffffffff, 0x00000100,
 379	0x9144, 0xffffffff, 0x00000100,
 380	0x9a60, 0xffffffff, 0x00000100,
 381	0x9868, 0xffffffff, 0x00000100,
 382	0x8d58, 0xffffffff, 0x00000100,
 383	0x9510, 0xffffffff, 0x00000100,
 384	0x949c, 0xffffffff, 0x00000100,
 385	0x9654, 0xffffffff, 0x00000100,
 386	0x9030, 0xffffffff, 0x00000100,
 387	0x9034, 0xffffffff, 0x00000100,
 388	0x9038, 0xffffffff, 0x00000100,
 389	0x903c, 0xffffffff, 0x00000100,
 390	0x9040, 0xffffffff, 0x00000100,
 391	0xa200, 0xffffffff, 0x00000100,
 392	0xa204, 0xffffffff, 0x00000100,
 393	0xa208, 0xffffffff, 0x00000100,
 394	0xa20c, 0xffffffff, 0x00000100,
 395	0x971c, 0xffffffff, 0x00000100,
 396	0x977c, 0xffffffff, 0x00000100,
 397	0x3f80, 0xffffffff, 0x00000100,
 398	0xa210, 0xffffffff, 0x00000100,
 399	0xa214, 0xffffffff, 0x00000100,
 400	0x4d8, 0xffffffff, 0x00000100,
 401	0x9784, 0xffffffff, 0x00000100,
 402	0x9698, 0xffffffff, 0x00000100,
 403	0x4d4, 0xffffffff, 0x00000200,
 404	0x30cc, 0xffffffff, 0x00000100,
 405	0xd0c0, 0xffffffff, 0xff000100,
 406	0x802c, 0xffffffff, 0x40000000,
 407	0x915c, 0xffffffff, 0x00010000,
 408	0x9160, 0xffffffff, 0x00030002,
 409	0x9178, 0xffffffff, 0x00070000,
 410	0x917c, 0xffffffff, 0x00030002,
 411	0x9180, 0xffffffff, 0x00050004,
 412	0x918c, 0xffffffff, 0x00010006,
 413	0x9190, 0xffffffff, 0x00090008,
 414	0x9194, 0xffffffff, 0x00070000,
 415	0x9198, 0xffffffff, 0x00030002,
 416	0x919c, 0xffffffff, 0x00050004,
 417	0x91a8, 0xffffffff, 0x00010006,
 418	0x91ac, 0xffffffff, 0x00090008,
 419	0x91b0, 0xffffffff, 0x00070000,
 420	0x91b4, 0xffffffff, 0x00030002,
 421	0x91b8, 0xffffffff, 0x00050004,
 422	0x91c4, 0xffffffff, 0x00010006,
 423	0x91c8, 0xffffffff, 0x00090008,
 424	0x91cc, 0xffffffff, 0x00070000,
 425	0x91d0, 0xffffffff, 0x00030002,
 426	0x91d4, 0xffffffff, 0x00050004,
 427	0x91e0, 0xffffffff, 0x00010006,
 428	0x91e4, 0xffffffff, 0x00090008,
 429	0x91e8, 0xffffffff, 0x00000000,
 430	0x91ec, 0xffffffff, 0x00070000,
 431	0x91f0, 0xffffffff, 0x00030002,
 432	0x91f4, 0xffffffff, 0x00050004,
 433	0x9200, 0xffffffff, 0x00010006,
 434	0x9204, 0xffffffff, 0x00090008,
 435	0x9294, 0xffffffff, 0x00000000,
 436	0x929c, 0xffffffff, 0x00000001,
 437	0x802c, 0xffffffff, 0xc0000000
 438};
 439
 440static const u32 cedar_golden_registers[] =
 441{
 442	0x3f90, 0xffff0000, 0xff000000,
 443	0x9148, 0xffff0000, 0xff000000,
 444	0x3f94, 0xffff0000, 0xff000000,
 445	0x914c, 0xffff0000, 0xff000000,
 446	0x9b7c, 0xffffffff, 0x00000000,
 447	0x8a14, 0xffffffff, 0x00000007,
 448	0x8b10, 0xffffffff, 0x00000000,
 449	0x960c, 0xffffffff, 0x54763210,
 450	0x88c4, 0xffffffff, 0x000000c2,
 451	0x88d4, 0xffffffff, 0x00000000,
 452	0x8974, 0xffffffff, 0x00000000,
 453	0xc78, 0x00000080, 0x00000080,
 454	0x5eb4, 0xffffffff, 0x00000002,
 455	0x5e78, 0xffffffff, 0x001000f0,
 456	0x6104, 0x01000300, 0x00000000,
 457	0x5bc0, 0x00300000, 0x00000000,
 458	0x7030, 0xffffffff, 0x00000011,
 459	0x7c30, 0xffffffff, 0x00000011,
 460	0x10830, 0xffffffff, 0x00000011,
 461	0x11430, 0xffffffff, 0x00000011,
 462	0xd02c, 0xffffffff, 0x08421000,
 463	0x240c, 0xffffffff, 0x00000380,
 464	0x8b24, 0xffffffff, 0x00ff0fff,
 465	0x28a4c, 0x06000000, 0x06000000,
 466	0x10c, 0x00000001, 0x00000001,
 467	0x8d00, 0xffffffff, 0x100e4848,
 468	0x8d04, 0xffffffff, 0x00164745,
 469	0x8c00, 0xffffffff, 0xe4000003,
 470	0x8c04, 0xffffffff, 0x40600060,
 471	0x8c08, 0xffffffff, 0x001c001c,
 472	0x8cf0, 0xffffffff, 0x08e00410,
 473	0x8c20, 0xffffffff, 0x00800080,
 474	0x8c24, 0xffffffff, 0x00800080,
 475	0x8c18, 0xffffffff, 0x20202078,
 476	0x8c1c, 0xffffffff, 0x00001010,
 477	0x28350, 0xffffffff, 0x00000000,
 478	0xa008, 0xffffffff, 0x00010000,
 479	0x5cc, 0xffffffff, 0x00000001,
 480	0x9508, 0xffffffff, 0x00000002
 481};
 482
 483static const u32 cedar_mgcg_init[] =
 484{
 485	0x802c, 0xffffffff, 0xc0000000,
 486	0x5448, 0xffffffff, 0x00000100,
 487	0x55e4, 0xffffffff, 0x00000100,
 488	0x160c, 0xffffffff, 0x00000100,
 489	0x5644, 0xffffffff, 0x00000100,
 490	0xc164, 0xffffffff, 0x00000100,
 491	0x8a18, 0xffffffff, 0x00000100,
 492	0x897c, 0xffffffff, 0x06000100,
 493	0x8b28, 0xffffffff, 0x00000100,
 494	0x9144, 0xffffffff, 0x00000100,
 495	0x9a60, 0xffffffff, 0x00000100,
 496	0x9868, 0xffffffff, 0x00000100,
 497	0x8d58, 0xffffffff, 0x00000100,
 498	0x9510, 0xffffffff, 0x00000100,
 499	0x949c, 0xffffffff, 0x00000100,
 500	0x9654, 0xffffffff, 0x00000100,
 501	0x9030, 0xffffffff, 0x00000100,
 502	0x9034, 0xffffffff, 0x00000100,
 503	0x9038, 0xffffffff, 0x00000100,
 504	0x903c, 0xffffffff, 0x00000100,
 505	0x9040, 0xffffffff, 0x00000100,
 506	0xa200, 0xffffffff, 0x00000100,
 507	0xa204, 0xffffffff, 0x00000100,
 508	0xa208, 0xffffffff, 0x00000100,
 509	0xa20c, 0xffffffff, 0x00000100,
 510	0x971c, 0xffffffff, 0x00000100,
 511	0x977c, 0xffffffff, 0x00000100,
 512	0x3f80, 0xffffffff, 0x00000100,
 513	0xa210, 0xffffffff, 0x00000100,
 514	0xa214, 0xffffffff, 0x00000100,
 515	0x4d8, 0xffffffff, 0x00000100,
 516	0x9784, 0xffffffff, 0x00000100,
 517	0x9698, 0xffffffff, 0x00000100,
 518	0x4d4, 0xffffffff, 0x00000200,
 519	0x30cc, 0xffffffff, 0x00000100,
 520	0xd0c0, 0xffffffff, 0xff000100,
 521	0x802c, 0xffffffff, 0x40000000,
 522	0x915c, 0xffffffff, 0x00010000,
 523	0x9178, 0xffffffff, 0x00050000,
 524	0x917c, 0xffffffff, 0x00030002,
 525	0x918c, 0xffffffff, 0x00010004,
 526	0x9190, 0xffffffff, 0x00070006,
 527	0x9194, 0xffffffff, 0x00050000,
 528	0x9198, 0xffffffff, 0x00030002,
 529	0x91a8, 0xffffffff, 0x00010004,
 530	0x91ac, 0xffffffff, 0x00070006,
 531	0x91e8, 0xffffffff, 0x00000000,
 532	0x9294, 0xffffffff, 0x00000000,
 533	0x929c, 0xffffffff, 0x00000001,
 534	0x802c, 0xffffffff, 0xc0000000
 535};
 536
 537static const u32 juniper_mgcg_init[] =
 538{
 539	0x802c, 0xffffffff, 0xc0000000,
 540	0x5448, 0xffffffff, 0x00000100,
 541	0x55e4, 0xffffffff, 0x00000100,
 542	0x160c, 0xffffffff, 0x00000100,
 543	0x5644, 0xffffffff, 0x00000100,
 544	0xc164, 0xffffffff, 0x00000100,
 545	0x8a18, 0xffffffff, 0x00000100,
 546	0x897c, 0xffffffff, 0x06000100,
 547	0x8b28, 0xffffffff, 0x00000100,
 548	0x9144, 0xffffffff, 0x00000100,
 549	0x9a60, 0xffffffff, 0x00000100,
 550	0x9868, 0xffffffff, 0x00000100,
 551	0x8d58, 0xffffffff, 0x00000100,
 552	0x9510, 0xffffffff, 0x00000100,
 553	0x949c, 0xffffffff, 0x00000100,
 554	0x9654, 0xffffffff, 0x00000100,
 555	0x9030, 0xffffffff, 0x00000100,
 556	0x9034, 0xffffffff, 0x00000100,
 557	0x9038, 0xffffffff, 0x00000100,
 558	0x903c, 0xffffffff, 0x00000100,
 559	0x9040, 0xffffffff, 0x00000100,
 560	0xa200, 0xffffffff, 0x00000100,
 561	0xa204, 0xffffffff, 0x00000100,
 562	0xa208, 0xffffffff, 0x00000100,
 563	0xa20c, 0xffffffff, 0x00000100,
 564	0x971c, 0xffffffff, 0x00000100,
 565	0xd0c0, 0xffffffff, 0xff000100,
 566	0x802c, 0xffffffff, 0x40000000,
 567	0x915c, 0xffffffff, 0x00010000,
 568	0x9160, 0xffffffff, 0x00030002,
 569	0x9178, 0xffffffff, 0x00070000,
 570	0x917c, 0xffffffff, 0x00030002,
 571	0x9180, 0xffffffff, 0x00050004,
 572	0x918c, 0xffffffff, 0x00010006,
 573	0x9190, 0xffffffff, 0x00090008,
 574	0x9194, 0xffffffff, 0x00070000,
 575	0x9198, 0xffffffff, 0x00030002,
 576	0x919c, 0xffffffff, 0x00050004,
 577	0x91a8, 0xffffffff, 0x00010006,
 578	0x91ac, 0xffffffff, 0x00090008,
 579	0x91b0, 0xffffffff, 0x00070000,
 580	0x91b4, 0xffffffff, 0x00030002,
 581	0x91b8, 0xffffffff, 0x00050004,
 582	0x91c4, 0xffffffff, 0x00010006,
 583	0x91c8, 0xffffffff, 0x00090008,
 584	0x91cc, 0xffffffff, 0x00070000,
 585	0x91d0, 0xffffffff, 0x00030002,
 586	0x91d4, 0xffffffff, 0x00050004,
 587	0x91e0, 0xffffffff, 0x00010006,
 588	0x91e4, 0xffffffff, 0x00090008,
 589	0x91e8, 0xffffffff, 0x00000000,
 590	0x91ec, 0xffffffff, 0x00070000,
 591	0x91f0, 0xffffffff, 0x00030002,
 592	0x91f4, 0xffffffff, 0x00050004,
 593	0x9200, 0xffffffff, 0x00010006,
 594	0x9204, 0xffffffff, 0x00090008,
 595	0x9208, 0xffffffff, 0x00070000,
 596	0x920c, 0xffffffff, 0x00030002,
 597	0x9210, 0xffffffff, 0x00050004,
 598	0x921c, 0xffffffff, 0x00010006,
 599	0x9220, 0xffffffff, 0x00090008,
 600	0x9224, 0xffffffff, 0x00070000,
 601	0x9228, 0xffffffff, 0x00030002,
 602	0x922c, 0xffffffff, 0x00050004,
 603	0x9238, 0xffffffff, 0x00010006,
 604	0x923c, 0xffffffff, 0x00090008,
 605	0x9240, 0xffffffff, 0x00070000,
 606	0x9244, 0xffffffff, 0x00030002,
 607	0x9248, 0xffffffff, 0x00050004,
 608	0x9254, 0xffffffff, 0x00010006,
 609	0x9258, 0xffffffff, 0x00090008,
 610	0x925c, 0xffffffff, 0x00070000,
 611	0x9260, 0xffffffff, 0x00030002,
 612	0x9264, 0xffffffff, 0x00050004,
 613	0x9270, 0xffffffff, 0x00010006,
 614	0x9274, 0xffffffff, 0x00090008,
 615	0x9278, 0xffffffff, 0x00070000,
 616	0x927c, 0xffffffff, 0x00030002,
 617	0x9280, 0xffffffff, 0x00050004,
 618	0x928c, 0xffffffff, 0x00010006,
 619	0x9290, 0xffffffff, 0x00090008,
 620	0x9294, 0xffffffff, 0x00000000,
 621	0x929c, 0xffffffff, 0x00000001,
 622	0x802c, 0xffffffff, 0xc0000000,
 623	0x977c, 0xffffffff, 0x00000100,
 624	0x3f80, 0xffffffff, 0x00000100,
 625	0xa210, 0xffffffff, 0x00000100,
 626	0xa214, 0xffffffff, 0x00000100,
 627	0x4d8, 0xffffffff, 0x00000100,
 628	0x9784, 0xffffffff, 0x00000100,
 629	0x9698, 0xffffffff, 0x00000100,
 630	0x4d4, 0xffffffff, 0x00000200,
 631	0x30cc, 0xffffffff, 0x00000100,
 632	0x802c, 0xffffffff, 0xc0000000
 633};
 634
 635static const u32 supersumo_golden_registers[] =
 636{
 637	0x5eb4, 0xffffffff, 0x00000002,
 638	0x5cc, 0xffffffff, 0x00000001,
 639	0x7030, 0xffffffff, 0x00000011,
 640	0x7c30, 0xffffffff, 0x00000011,
 641	0x6104, 0x01000300, 0x00000000,
 642	0x5bc0, 0x00300000, 0x00000000,
 643	0x8c04, 0xffffffff, 0x40600060,
 644	0x8c08, 0xffffffff, 0x001c001c,
 645	0x8c20, 0xffffffff, 0x00800080,
 646	0x8c24, 0xffffffff, 0x00800080,
 647	0x8c18, 0xffffffff, 0x20202078,
 648	0x8c1c, 0xffffffff, 0x00001010,
 649	0x918c, 0xffffffff, 0x00010006,
 650	0x91a8, 0xffffffff, 0x00010006,
 651	0x91c4, 0xffffffff, 0x00010006,
 652	0x91e0, 0xffffffff, 0x00010006,
 653	0x9200, 0xffffffff, 0x00010006,
 654	0x9150, 0xffffffff, 0x6e944040,
 655	0x917c, 0xffffffff, 0x00030002,
 656	0x9180, 0xffffffff, 0x00050004,
 657	0x9198, 0xffffffff, 0x00030002,
 658	0x919c, 0xffffffff, 0x00050004,
 659	0x91b4, 0xffffffff, 0x00030002,
 660	0x91b8, 0xffffffff, 0x00050004,
 661	0x91d0, 0xffffffff, 0x00030002,
 662	0x91d4, 0xffffffff, 0x00050004,
 663	0x91f0, 0xffffffff, 0x00030002,
 664	0x91f4, 0xffffffff, 0x00050004,
 665	0x915c, 0xffffffff, 0x00010000,
 666	0x9160, 0xffffffff, 0x00030002,
 667	0x3f90, 0xffff0000, 0xff000000,
 668	0x9178, 0xffffffff, 0x00070000,
 669	0x9194, 0xffffffff, 0x00070000,
 670	0x91b0, 0xffffffff, 0x00070000,
 671	0x91cc, 0xffffffff, 0x00070000,
 672	0x91ec, 0xffffffff, 0x00070000,
 673	0x9148, 0xffff0000, 0xff000000,
 674	0x9190, 0xffffffff, 0x00090008,
 675	0x91ac, 0xffffffff, 0x00090008,
 676	0x91c8, 0xffffffff, 0x00090008,
 677	0x91e4, 0xffffffff, 0x00090008,
 678	0x9204, 0xffffffff, 0x00090008,
 679	0x3f94, 0xffff0000, 0xff000000,
 680	0x914c, 0xffff0000, 0xff000000,
 681	0x929c, 0xffffffff, 0x00000001,
 682	0x8a18, 0xffffffff, 0x00000100,
 683	0x8b28, 0xffffffff, 0x00000100,
 684	0x9144, 0xffffffff, 0x00000100,
 685	0x5644, 0xffffffff, 0x00000100,
 686	0x9b7c, 0xffffffff, 0x00000000,
 687	0x8030, 0xffffffff, 0x0000100a,
 688	0x8a14, 0xffffffff, 0x00000007,
 689	0x8b24, 0xffffffff, 0x00ff0fff,
 690	0x8b10, 0xffffffff, 0x00000000,
 691	0x28a4c, 0x06000000, 0x06000000,
 692	0x4d8, 0xffffffff, 0x00000100,
 693	0x913c, 0xffff000f, 0x0100000a,
 694	0x960c, 0xffffffff, 0x54763210,
 695	0x88c4, 0xffffffff, 0x000000c2,
 696	0x88d4, 0xffffffff, 0x00000010,
 697	0x8974, 0xffffffff, 0x00000000,
 698	0xc78, 0x00000080, 0x00000080,
 699	0x5e78, 0xffffffff, 0x001000f0,
 700	0xd02c, 0xffffffff, 0x08421000,
 701	0xa008, 0xffffffff, 0x00010000,
 702	0x8d00, 0xffffffff, 0x100e4848,
 703	0x8d04, 0xffffffff, 0x00164745,
 704	0x8c00, 0xffffffff, 0xe4000003,
 705	0x8cf0, 0x1fffffff, 0x08e00620,
 706	0x28350, 0xffffffff, 0x00000000,
 707	0x9508, 0xffffffff, 0x00000002
 708};
 709
 710static const u32 sumo_golden_registers[] =
 711{
 712	0x900c, 0x00ffffff, 0x0017071f,
 713	0x8c18, 0xffffffff, 0x10101060,
 714	0x8c1c, 0xffffffff, 0x00001010,
 715	0x8c30, 0x0000000f, 0x00000005,
 716	0x9688, 0x0000000f, 0x00000007
 717};
 718
 719static const u32 wrestler_golden_registers[] =
 720{
 721	0x5eb4, 0xffffffff, 0x00000002,
 722	0x5cc, 0xffffffff, 0x00000001,
 723	0x7030, 0xffffffff, 0x00000011,
 724	0x7c30, 0xffffffff, 0x00000011,
 725	0x6104, 0x01000300, 0x00000000,
 726	0x5bc0, 0x00300000, 0x00000000,
 727	0x918c, 0xffffffff, 0x00010006,
 728	0x91a8, 0xffffffff, 0x00010006,
 729	0x9150, 0xffffffff, 0x6e944040,
 730	0x917c, 0xffffffff, 0x00030002,
 731	0x9198, 0xffffffff, 0x00030002,
 732	0x915c, 0xffffffff, 0x00010000,
 733	0x3f90, 0xffff0000, 0xff000000,
 734	0x9178, 0xffffffff, 0x00070000,
 735	0x9194, 0xffffffff, 0x00070000,
 736	0x9148, 0xffff0000, 0xff000000,
 737	0x9190, 0xffffffff, 0x00090008,
 738	0x91ac, 0xffffffff, 0x00090008,
 739	0x3f94, 0xffff0000, 0xff000000,
 740	0x914c, 0xffff0000, 0xff000000,
 741	0x929c, 0xffffffff, 0x00000001,
 742	0x8a18, 0xffffffff, 0x00000100,
 743	0x8b28, 0xffffffff, 0x00000100,
 744	0x9144, 0xffffffff, 0x00000100,
 745	0x9b7c, 0xffffffff, 0x00000000,
 746	0x8030, 0xffffffff, 0x0000100a,
 747	0x8a14, 0xffffffff, 0x00000001,
 748	0x8b24, 0xffffffff, 0x00ff0fff,
 749	0x8b10, 0xffffffff, 0x00000000,
 750	0x28a4c, 0x06000000, 0x06000000,
 751	0x4d8, 0xffffffff, 0x00000100,
 752	0x913c, 0xffff000f, 0x0100000a,
 753	0x960c, 0xffffffff, 0x54763210,
 754	0x88c4, 0xffffffff, 0x000000c2,
 755	0x88d4, 0xffffffff, 0x00000010,
 756	0x8974, 0xffffffff, 0x00000000,
 757	0xc78, 0x00000080, 0x00000080,
 758	0x5e78, 0xffffffff, 0x001000f0,
 759	0xd02c, 0xffffffff, 0x08421000,
 760	0xa008, 0xffffffff, 0x00010000,
 761	0x8d00, 0xffffffff, 0x100e4848,
 762	0x8d04, 0xffffffff, 0x00164745,
 763	0x8c00, 0xffffffff, 0xe4000003,
 764	0x8cf0, 0x1fffffff, 0x08e00410,
 765	0x28350, 0xffffffff, 0x00000000,
 766	0x9508, 0xffffffff, 0x00000002,
 767	0x900c, 0xffffffff, 0x0017071f,
 768	0x8c18, 0xffffffff, 0x10101060,
 769	0x8c1c, 0xffffffff, 0x00001010
 770};
 771
 772static const u32 barts_golden_registers[] =
 773{
 774	0x5eb4, 0xffffffff, 0x00000002,
 775	0x5e78, 0x8f311ff1, 0x001000f0,
 776	0x3f90, 0xffff0000, 0xff000000,
 777	0x9148, 0xffff0000, 0xff000000,
 778	0x3f94, 0xffff0000, 0xff000000,
 779	0x914c, 0xffff0000, 0xff000000,
 780	0xc78, 0x00000080, 0x00000080,
 781	0xbd4, 0x70073777, 0x00010001,
 782	0xd02c, 0xbfffff1f, 0x08421000,
 783	0xd0b8, 0x03773777, 0x02011003,
 784	0x5bc0, 0x00200000, 0x50100000,
 785	0x98f8, 0x33773777, 0x02011003,
 786	0x98fc, 0xffffffff, 0x76543210,
 787	0x7030, 0x31000311, 0x00000011,
 788	0x2f48, 0x00000007, 0x02011003,
 789	0x6b28, 0x00000010, 0x00000012,
 790	0x7728, 0x00000010, 0x00000012,
 791	0x10328, 0x00000010, 0x00000012,
 792	0x10f28, 0x00000010, 0x00000012,
 793	0x11b28, 0x00000010, 0x00000012,
 794	0x12728, 0x00000010, 0x00000012,
 795	0x240c, 0x000007ff, 0x00000380,
 796	0x8a14, 0xf000001f, 0x00000007,
 797	0x8b24, 0x3fff3fff, 0x00ff0fff,
 798	0x8b10, 0x0000ff0f, 0x00000000,
 799	0x28a4c, 0x07ffffff, 0x06000000,
 800	0x10c, 0x00000001, 0x00010003,
 801	0xa02c, 0xffffffff, 0x0000009b,
 802	0x913c, 0x0000000f, 0x0100000a,
 803	0x8d00, 0xffff7f7f, 0x100e4848,
 804	0x8d04, 0x00ffffff, 0x00164745,
 805	0x8c00, 0xfffc0003, 0xe4000003,
 806	0x8c04, 0xf8ff00ff, 0x40600060,
 807	0x8c08, 0x00ff00ff, 0x001c001c,
 808	0x8cf0, 0x1fff1fff, 0x08e00620,
 809	0x8c20, 0x0fff0fff, 0x00800080,
 810	0x8c24, 0x0fff0fff, 0x00800080,
 811	0x8c18, 0xffffffff, 0x20202078,
 812	0x8c1c, 0x0000ffff, 0x00001010,
 813	0x28350, 0x00000f01, 0x00000000,
 814	0x9508, 0x3700001f, 0x00000002,
 815	0x960c, 0xffffffff, 0x54763210,
 816	0x88c4, 0x001f3ae3, 0x000000c2,
 817	0x88d4, 0x0000001f, 0x00000010,
 818	0x8974, 0xffffffff, 0x00000000
 819};
 820
 821static const u32 turks_golden_registers[] =
 822{
 823	0x5eb4, 0xffffffff, 0x00000002,
 824	0x5e78, 0x8f311ff1, 0x001000f0,
 825	0x8c8, 0x00003000, 0x00001070,
 826	0x8cc, 0x000fffff, 0x00040035,
 827	0x3f90, 0xffff0000, 0xfff00000,
 828	0x9148, 0xffff0000, 0xfff00000,
 829	0x3f94, 0xffff0000, 0xfff00000,
 830	0x914c, 0xffff0000, 0xfff00000,
 831	0xc78, 0x00000080, 0x00000080,
 832	0xbd4, 0x00073007, 0x00010002,
 833	0xd02c, 0xbfffff1f, 0x08421000,
 834	0xd0b8, 0x03773777, 0x02010002,
 835	0x5bc0, 0x00200000, 0x50100000,
 836	0x98f8, 0x33773777, 0x00010002,
 837	0x98fc, 0xffffffff, 0x33221100,
 838	0x7030, 0x31000311, 0x00000011,
 839	0x2f48, 0x33773777, 0x00010002,
 840	0x6b28, 0x00000010, 0x00000012,
 841	0x7728, 0x00000010, 0x00000012,
 842	0x10328, 0x00000010, 0x00000012,
 843	0x10f28, 0x00000010, 0x00000012,
 844	0x11b28, 0x00000010, 0x00000012,
 845	0x12728, 0x00000010, 0x00000012,
 846	0x240c, 0x000007ff, 0x00000380,
 847	0x8a14, 0xf000001f, 0x00000007,
 848	0x8b24, 0x3fff3fff, 0x00ff0fff,
 849	0x8b10, 0x0000ff0f, 0x00000000,
 850	0x28a4c, 0x07ffffff, 0x06000000,
 851	0x10c, 0x00000001, 0x00010003,
 852	0xa02c, 0xffffffff, 0x0000009b,
 853	0x913c, 0x0000000f, 0x0100000a,
 854	0x8d00, 0xffff7f7f, 0x100e4848,
 855	0x8d04, 0x00ffffff, 0x00164745,
 856	0x8c00, 0xfffc0003, 0xe4000003,
 857	0x8c04, 0xf8ff00ff, 0x40600060,
 858	0x8c08, 0x00ff00ff, 0x001c001c,
 859	0x8cf0, 0x1fff1fff, 0x08e00410,
 860	0x8c20, 0x0fff0fff, 0x00800080,
 861	0x8c24, 0x0fff0fff, 0x00800080,
 862	0x8c18, 0xffffffff, 0x20202078,
 863	0x8c1c, 0x0000ffff, 0x00001010,
 864	0x28350, 0x00000f01, 0x00000000,
 865	0x9508, 0x3700001f, 0x00000002,
 866	0x960c, 0xffffffff, 0x54763210,
 867	0x88c4, 0x001f3ae3, 0x000000c2,
 868	0x88d4, 0x0000001f, 0x00000010,
 869	0x8974, 0xffffffff, 0x00000000
 870};
 871
 872static const u32 caicos_golden_registers[] =
 873{
 874	0x5eb4, 0xffffffff, 0x00000002,
 875	0x5e78, 0x8f311ff1, 0x001000f0,
 876	0x8c8, 0x00003420, 0x00001450,
 877	0x8cc, 0x000fffff, 0x00040035,
 878	0x3f90, 0xffff0000, 0xfffc0000,
 879	0x9148, 0xffff0000, 0xfffc0000,
 880	0x3f94, 0xffff0000, 0xfffc0000,
 881	0x914c, 0xffff0000, 0xfffc0000,
 882	0xc78, 0x00000080, 0x00000080,
 883	0xbd4, 0x00073007, 0x00010001,
 884	0xd02c, 0xbfffff1f, 0x08421000,
 885	0xd0b8, 0x03773777, 0x02010001,
 886	0x5bc0, 0x00200000, 0x50100000,
 887	0x98f8, 0x33773777, 0x02010001,
 888	0x98fc, 0xffffffff, 0x33221100,
 889	0x7030, 0x31000311, 0x00000011,
 890	0x2f48, 0x33773777, 0x02010001,
 891	0x6b28, 0x00000010, 0x00000012,
 892	0x7728, 0x00000010, 0x00000012,
 893	0x10328, 0x00000010, 0x00000012,
 894	0x10f28, 0x00000010, 0x00000012,
 895	0x11b28, 0x00000010, 0x00000012,
 896	0x12728, 0x00000010, 0x00000012,
 897	0x240c, 0x000007ff, 0x00000380,
 898	0x8a14, 0xf000001f, 0x00000001,
 899	0x8b24, 0x3fff3fff, 0x00ff0fff,
 900	0x8b10, 0x0000ff0f, 0x00000000,
 901	0x28a4c, 0x07ffffff, 0x06000000,
 902	0x10c, 0x00000001, 0x00010003,
 903	0xa02c, 0xffffffff, 0x0000009b,
 904	0x913c, 0x0000000f, 0x0100000a,
 905	0x8d00, 0xffff7f7f, 0x100e4848,
 906	0x8d04, 0x00ffffff, 0x00164745,
 907	0x8c00, 0xfffc0003, 0xe4000003,
 908	0x8c04, 0xf8ff00ff, 0x40600060,
 909	0x8c08, 0x00ff00ff, 0x001c001c,
 910	0x8cf0, 0x1fff1fff, 0x08e00410,
 911	0x8c20, 0x0fff0fff, 0x00800080,
 912	0x8c24, 0x0fff0fff, 0x00800080,
 913	0x8c18, 0xffffffff, 0x20202078,
 914	0x8c1c, 0x0000ffff, 0x00001010,
 915	0x28350, 0x00000f01, 0x00000000,
 916	0x9508, 0x3700001f, 0x00000002,
 917	0x960c, 0xffffffff, 0x54763210,
 918	0x88c4, 0x001f3ae3, 0x000000c2,
 919	0x88d4, 0x0000001f, 0x00000010,
 920	0x8974, 0xffffffff, 0x00000000
 921};
 922
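/* Program the per-family "golden" register settings recommended for
 * each asic.
 */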
 923static void evergreen_init_golden_registers(struct radeon_device *rdev)
 924{
 925	switch (rdev->family) {
 926	case CHIP_CYPRESS:
 927	case CHIP_HEMLOCK:
 928		radeon_program_register_sequence(rdev,
 929						 evergreen_golden_registers,
 930						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
 931		radeon_program_register_sequence(rdev,
 932						 evergreen_golden_registers2,
 933						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
 934		radeon_program_register_sequence(rdev,
 935						 cypress_mgcg_init,
 936						 (const u32)ARRAY_SIZE(cypress_mgcg_init));
 937		break;
 938	case CHIP_JUNIPER:
 939		radeon_program_register_sequence(rdev,
 940						 evergreen_golden_registers,
 941						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
 942		radeon_program_register_sequence(rdev,
 943						 evergreen_golden_registers2,
 944						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
 945		radeon_program_register_sequence(rdev,
 946						 juniper_mgcg_init,
 947						 (const u32)ARRAY_SIZE(juniper_mgcg_init));
 948		break;
 949	case CHIP_REDWOOD:
 950		radeon_program_register_sequence(rdev,
 951						 evergreen_golden_registers,
 952						 (const u32)ARRAY_SIZE(evergreen_golden_registers));
 953		radeon_program_register_sequence(rdev,
 954						 evergreen_golden_registers2,
 955						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
 956		radeon_program_register_sequence(rdev,
 957						 redwood_mgcg_init,
 958						 (const u32)ARRAY_SIZE(redwood_mgcg_init));
 959		break;
 960	case CHIP_CEDAR:
 961		radeon_program_register_sequence(rdev,
 962						 cedar_golden_registers,
 963						 (const u32)ARRAY_SIZE(cedar_golden_registers));
 964		radeon_program_register_sequence(rdev,
 965						 evergreen_golden_registers2,
 966						 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
 967		radeon_program_register_sequence(rdev,
 968						 cedar_mgcg_init,
 969						 (const u32)ARRAY_SIZE(cedar_mgcg_init));
 970		break;
 971	case CHIP_PALM:
 972		radeon_program_register_sequence(rdev,
 973						 wrestler_golden_registers,
 974						 (const u32)ARRAY_SIZE(wrestler_golden_registers));
 975		break;
 976	case CHIP_SUMO:
 977		radeon_program_register_sequence(rdev,
 978						 supersumo_golden_registers,
 979						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
 980		break;
 981	case CHIP_SUMO2:
 982		radeon_program_register_sequence(rdev,
 983						 supersumo_golden_registers,
 984						 (const u32)ARRAY_SIZE(supersumo_golden_registers));
 985		radeon_program_register_sequence(rdev,
 986						 sumo_golden_registers,
 987						 (const u32)ARRAY_SIZE(sumo_golden_registers));
 988		break;
 989	case CHIP_BARTS:
 990		radeon_program_register_sequence(rdev,
 991						 barts_golden_registers,
 992						 (const u32)ARRAY_SIZE(barts_golden_registers));
 993		break;
 994	case CHIP_TURKS:
 995		radeon_program_register_sequence(rdev,
 996						 turks_golden_registers,
 997						 (const u32)ARRAY_SIZE(turks_golden_registers));
 998		break;
 999	case CHIP_CAICOS:
1000		radeon_program_register_sequence(rdev,
1001						 caicos_golden_registers,
1002						 (const u32)ARRAY_SIZE(caicos_golden_registers));
1003		break;
1004	default:
1005		break;
1006	}
1007}
1008
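/* Decode bank width/height, macro tile aspect and tile split from the
 * combined tiling flags into their surface register encodings.
 */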
1009void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1010			     unsigned *bankh, unsigned *mtaspect,
1011			     unsigned *tile_split)
1012{
1013	*bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1014	*bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1015	*mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1016	*tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1017	switch (*bankw) {
1018	default:
1019	case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1020	case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1021	case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1022	case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1023	}
1024	switch (*bankh) {
1025	default:
1026	case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1027	case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1028	case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1029	case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1030	}
1031	switch (*mtaspect) {
1032	default:
1033	case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1034	case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1035	case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1036	case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1037	}
1038}
1039
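/* Program one UVD clock (vclk or dclk) through the given divider
 * control register, then poll the status register until the new
 * divider has taken effect.
 */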
1040static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1041			      u32 cntl_reg, u32 status_reg)
1042{
1043	int r, i;
1044	struct atom_clock_dividers dividers;
1045
1046	r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1047					   clock, false, &dividers);
1048	if (r)
1049		return r;
1050
1051	WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1052
1053	for (i = 0; i < 100; i++) {
1054		if (RREG32(status_reg) & DCLK_STATUS)
1055			break;
1056		mdelay(10);
1057	}
1058	if (i == 100)
1059		return -ETIMEDOUT;
1060
1061	return 0;
1062}
1063
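/* Set both UVD clocks and record the programmed frequencies (MHz) in
 * CG_SCRATCH1.
 */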
1064int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1065{
1066	int r = 0;
1067	u32 cg_scratch = RREG32(CG_SCRATCH1);
1068
1069	r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1070	if (r)
1071		goto done;
1072	cg_scratch &= 0xffff0000;
1073	cg_scratch |= vclk / 100; /* MHz */
1074
1075	r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1076	if (r)
1077		goto done;
1078	cg_scratch &= 0x0000ffff;
1079	cg_scratch |= (dclk / 100) << 16; /* MHz */
1080
1081done:
1082	WREG32(CG_SCRATCH1, cg_scratch);
1083
1084	return r;
1085}
1086
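/* Reprogram the UPLL for the requested vclk/dclk: bypass to bclk,
 * compute new dividers, walk the PLL through its reset/settle
 * sequence and switch back to the PLL outputs.
 */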
1087int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1088{
1089	/* start off with something large */
1090	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
1091	int r;
1092
1093	/* bypass vclk and dclk with bclk */
1094	WREG32_P(CG_UPLL_FUNC_CNTL_2,
1095		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
1096		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
1097
1098	/* put PLL in bypass mode */
1099	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);
1100
1101	if (!vclk || !dclk) {
1102		/* keep the bypass mode, put the PLL to sleep */
1103		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
1104		return 0;
1105	}
1106
1107	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
1108					  16384, 0x03FFFFFF, 0, 128, 5,
1109					  &fb_div, &vclk_div, &dclk_div);
1110	if (r)
1111		return r;
1112
1113	/* set VCO_MODE to 1 */
1114	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);
1115
1116	/* toggle UPLL_SLEEP to 1 then back to 0 */
1117	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
1118	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);
1119
1120	/* deassert UPLL_RESET */
1121	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1122
1123	mdelay(1);
1124
1125	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1126	if (r)
1127		return r;
1128
1129	/* assert UPLL_RESET again */
1130	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);
1131
1132	/* disable spread spectrum. */
1133	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);
1134
1135	/* set feedback divider */
1136	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);
1137
1138	/* set ref divider to 0 */
1139	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);
1140
1141	if (fb_div < 307200)
1142		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
1143	else
1144		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);
1145
1146	/* set PDIV_A and PDIV_B */
1147	WREG32_P(CG_UPLL_FUNC_CNTL_2,
1148		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
1149		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));
1150
1151	/* give the PLL some time to settle */
1152	mdelay(15);
1153
1154	/* deassert PLL_RESET */
1155	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);
1156
1157	mdelay(15);
1158
1159	/* switch from bypass mode to normal mode */
1160	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);
1161
1162	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
1163	if (r)
1164		return r;
1165
1166	/* switch VCLK and DCLK selection */
1167	WREG32_P(CG_UPLL_FUNC_CNTL_2,
1168		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
1169		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));
1170
1171	mdelay(100);
1172
1173	return 0;
1174}
1175
1176void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1177{
1178	int readrq;
1179	u16 v;
1180
1181	readrq = pcie_get_readrq(rdev->pdev);
1182	v = ffs(readrq) - 8;
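	/* encoding: 128 bytes -> 0 ... 4096 bytes -> 5; 6 and 7 are reserved */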
1183	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1184	 * to avoid hangs or performance issues
1185	 */
1186	if ((v == 0) || (v == 6) || (v == 7))
1187		pcie_set_readrq(rdev->pdev, 512);
1188}
1189
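/* Program the FMT block for the connector's bit depth, enabling
 * dithering or truncation when reducing to 6 or 8 bpc.
 */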
1190void dce4_program_fmt(struct drm_encoder *encoder)
1191{
1192	struct drm_device *dev = encoder->dev;
1193	struct radeon_device *rdev = dev->dev_private;
1194	struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1195	struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1196	struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1197	int bpc = 0;
1198	u32 tmp = 0;
1199	enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1200
1201	if (connector) {
1202		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1203		bpc = radeon_get_monitor_bpc(connector);
1204		dither = radeon_connector->dither;
1205	}
1206
1207	/* LVDS/eDP FMT is set up by atom */
1208	if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1209		return;
1210
1211	/* not needed for analog */
1212	if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1213	    (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
1214		return;
1215
1216	if (bpc == 0)
1217		return;
1218
1219	switch (bpc) {
1220	case 6:
1221		if (dither == RADEON_FMT_DITHER_ENABLE)
1222			/* XXX sort out optimal dither settings */
1223			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1224				FMT_SPATIAL_DITHER_EN);
1225		else
1226			tmp |= FMT_TRUNCATE_EN;
1227		break;
1228	case 8:
1229		if (dither == RADEON_FMT_DITHER_ENABLE)
1230			/* XXX sort out optimal dither settings */
1231			tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1232				FMT_RGB_RANDOM_ENABLE |
1233				FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1234		else
1235			tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1236		break;
1237	case 10:
1238	default:
1239		/* not needed */
1240		break;
1241	}
1242
1243	WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
1244}
1245
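/* Helpers for the vblank wait below: check the CRTC vblank status and
 * whether the scanout position is still advancing.
 */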
1246static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1247{
1248	if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1249		return true;
1250	else
1251		return false;
1252}
1253
1254static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1255{
1256	u32 pos1, pos2;
1257
1258	pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1259	pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1260
1261	if (pos1 != pos2)
1262		return true;
1263	else
1264		return false;
1265}
1266
1267/**
1268 * dce4_wait_for_vblank - vblank wait asic callback.
1269 *
1270 * @rdev: radeon_device pointer
1271 * @crtc: crtc to wait for vblank on
1272 *
1273 * Wait for vblank on the requested crtc (evergreen+).
1274 */
1275void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1276{
1277	unsigned i = 0;
1278
1279	if (crtc >= rdev->num_crtc)
1280		return;
1281
1282	if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1283		return;
1284
1285	/* depending on when we hit vblank, we may be close to active; if so,
1286	 * wait for another frame.
1287	 */
1288	while (dce4_is_in_vblank(rdev, crtc)) {
1289		if (i++ % 100 == 0) {
1290			if (!dce4_is_counter_moving(rdev, crtc))
1291				break;
1292		}
1293	}
1294
1295	while (!dce4_is_in_vblank(rdev, crtc)) {
1296		if (i++ % 100 == 0) {
1297			if (!dce4_is_counter_moving(rdev, crtc))
1298				break;
1299		}
1300	}
1301}
1302
1303/**
1304 * evergreen_pre_page_flip - pre-pageflip callback.
1305 *
1306 * @rdev: radeon_device pointer
1307 * @crtc: crtc to prepare for pageflip on
1308 *
1309 * Pre-pageflip callback (evergreen+).
1310 * Enables the pageflip irq (vblank irq).
1311 */
1312void evergreen_pre_page_flip(struct radeon_device *rdev, int crtc)
1313{
1314	/* enable the pflip int */
1315	radeon_irq_kms_pflip_irq_get(rdev, crtc);
1316}
1317
1318/**
1319 * evergreen_post_page_flip - post-pageflip callback.
1320 *
1321 * @rdev: radeon_device pointer
1322 * @crtc: crtc to cleanup pageflip on
1323 *
1324 * Post-pageflip callback (evergreen+).
1325 * Disables the pageflip irq (vblank irq).
1326 */
1327void evergreen_post_page_flip(struct radeon_device *rdev, int crtc)
1328{
1329	/* disable the pflip int */
1330	radeon_irq_kms_pflip_irq_put(rdev, crtc);
1331}
1332
1333/**
1334 * evergreen_page_flip - pageflip callback.
1335 *
1336 * @rdev: radeon_device pointer
1337 * @crtc_id: crtc to pageflip on
1338 * @crtc_base: new address of the crtc (GPU MC address)
1339 *
1340 * Does the actual pageflip (evergreen+).
1341 * During vblank we take the crtc lock and wait for the update_pending
1342 * bit to go high, when it does, we release the lock, and allow the
1343 * double buffered update to take place.
1344 * Returns the current update pending status.
1345 */
1346u32 evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1347{
1348	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1349	u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
1350	int i;
1351
1352	/* Lock the graphics update lock */
1353	tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
1354	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1355
1356	/* update the scanout addresses */
1357	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1358	       upper_32_bits(crtc_base));
1359	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1360	       (u32)crtc_base);
1361
1362	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1363	       upper_32_bits(crtc_base));
1364	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1365	       (u32)crtc_base);
1366
1367	/* Wait for update_pending to go high. */
1368	for (i = 0; i < rdev->usec_timeout; i++) {
1369		if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
1370			break;
1371		udelay(1);
1372	}
1373	DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
1374
1375	/* Unlock the lock, so double-buffering can take place inside vblank */
1376	tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
1377	WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1378
1379	/* Return current update_pending status: */
1380	return RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING;
1381}
1382
1383/* get temperature in millidegrees */
1384int evergreen_get_temp(struct radeon_device *rdev)
1385{
1386	u32 temp, toffset;
1387	int actual_temp = 0;
1388
1389	if (rdev->family == CHIP_JUNIPER) {
1390		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
1391			TOFFSET_SHIFT;
1392		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
1393			TS0_ADC_DOUT_SHIFT;
1394
1395		if (toffset & 0x100)
1396			actual_temp = temp / 2 - (0x200 - toffset);
1397		else
1398			actual_temp = temp / 2 + toffset;
1399
1400		actual_temp = actual_temp * 1000;
1401
1402	} else {
1403		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
1404			ASIC_T_SHIFT;
1405
1406		if (temp & 0x400)
1407			actual_temp = -256;
1408		else if (temp & 0x200)
1409			actual_temp = 255;
1410		else if (temp & 0x100) {
1411			actual_temp = temp & 0x1ff;
1412			actual_temp |= ~0x1ff;
1413		} else
1414			actual_temp = temp & 0xff;
1415
1416		actual_temp = (actual_temp * 1000) / 2;
1417	}
1418
1419	return actual_temp;
1420}
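/* Worked example of the decode above (editor's illustration, values
 * assumed): a raw ASIC_T field of 0x062 has none of the 0x400/0x200/0x100
 * flag bits set, so actual_temp = 0x62 = 98; at 0.5 degC per LSB that is
 * (98 * 1000) / 2 = 49000 millidegrees.  A field of 0x1f0 has bit 8 set
 * and is sign-extended to 0x1f0 - 0x200 = -16 before the same scaling,
 * giving -8000 millidegrees.
 */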
1421
1422int sumo_get_temp(struct radeon_device *rdev)
1423{
1424	u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1425	int actual_temp = temp - 49;
1426
1427	return actual_temp * 1000;
1428}
1429
1430/**
1431 * sumo_pm_init_profile - Initialize power profiles callback.
1432 *
1433 * @rdev: radeon_device pointer
1434 *
1435 * Initialize the power states used in profile mode
1436 * (sumo, trinity, SI).
1437 * Used for profile mode only.
1438 */
1439void sumo_pm_init_profile(struct radeon_device *rdev)
1440{
1441	int idx;
1442
1443	/* default */
1444	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1445	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1446	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1447	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1448
1449	/* low,mid sh/mh */
1450	if (rdev->flags & RADEON_IS_MOBILITY)
1451		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1452	else
1453		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1454
1455	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1456	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1457	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1458	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1459
1460	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1461	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1462	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1463	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1464
1465	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1466	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1467	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1468	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1469
1470	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1471	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1472	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1473	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1474
1475	/* high sh/mh */
1476	idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1477	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1478	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1479	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1480	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1481		rdev->pm.power_state[idx].num_clock_modes - 1;
1482
1483	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1484	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1485	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1486	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1487		rdev->pm.power_state[idx].num_clock_modes - 1;
1488}
1489
1490/**
1491 * btc_pm_init_profile - Initialize power profiles callback.
1492 *
1493 * @rdev: radeon_device pointer
1494 *
1495 * Initialize the power states used in profile mode
1496 * (BTC, cayman).
1497 * Used for profile mode only.
1498 */
1499void btc_pm_init_profile(struct radeon_device *rdev)
1500{
1501	int idx;
1502
1503	/* default */
1504	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1505	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1506	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1507	rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1508	/* starting with BTC, there is one state that is used for both
1509	 * MH and SH.  Difference is that we always use the high clock index for
1510	 * mclk.
1511	 */
1512	if (rdev->flags & RADEON_IS_MOBILITY)
1513		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1514	else
1515		idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1516	/* low sh */
1517	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1518	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1519	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1520	rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1521	/* mid sh */
1522	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1523	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1524	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1525	rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1526	/* high sh */
1527	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1528	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1529	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1530	rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1531	/* low mh */
1532	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1533	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1534	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1535	rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1536	/* mid mh */
1537	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1538	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1539	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1540	rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1541	/* high mh */
1542	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1543	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1544	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1545	rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1546}
1547
1548/**
1549 * evergreen_pm_misc - set additional pm hw parameters callback.
1550 *
1551 * @rdev: radeon_device pointer
1552 *
1553 * Set non-clock parameters associated with a power state
1554 * (voltage, etc.) (evergreen+).
1555 */
1556void evergreen_pm_misc(struct radeon_device *rdev)
1557{
1558	int req_ps_idx = rdev->pm.requested_power_state_index;
1559	int req_cm_idx = rdev->pm.requested_clock_mode_index;
1560	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1561	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1562
1563	if (voltage->type == VOLTAGE_SW) {
1564		/* 0xff0x are flags rather than an actual voltage */
1565		if ((voltage->voltage & 0xff00) == 0xff00)
1566			return;
1567		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1568			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1569			rdev->pm.current_vddc = voltage->voltage;
1570			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1571		}
1572
1573		/* starting with BTC, there is one state that is used for both
1574		 * MH and SH.  Difference is that we always use the high clock index for
1575		 * mclk and vddci.
1576		 */
1577		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1578		    (rdev->family >= CHIP_BARTS) &&
1579		    rdev->pm.active_crtc_count &&
1580		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1581		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1582			voltage = &rdev->pm.power_state[req_ps_idx].
1583				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1584
1585		/* 0xff0x are flags rather than an actual voltage */
1586		if ((voltage->vddci & 0xff00) == 0xff00)
1587			return;
1588		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1589			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1590			rdev->pm.current_vddci = voltage->vddci;
1591			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1592		}
1593	}
1594}
1595
1596/**
1597 * evergreen_pm_prepare - pre-power state change callback.
1598 *
1599 * @rdev: radeon_device pointer
1600 *
1601 * Prepare for a power state change (evergreen+).
1602 */
1603void evergreen_pm_prepare(struct radeon_device *rdev)
1604{
1605	struct drm_device *ddev = rdev->ddev;
1606	struct drm_crtc *crtc;
1607	struct radeon_crtc *radeon_crtc;
1608	u32 tmp;
1609
1610	/* disable any active CRTCs */
1611	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1612		radeon_crtc = to_radeon_crtc(crtc);
1613		if (radeon_crtc->enabled) {
1614			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1615			tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1616			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1617		}
1618	}
1619}
1620
1621/**
1622 * evergreen_pm_finish - post-power state change callback.
1623 *
1624 * @rdev: radeon_device pointer
1625 *
1626 * Clean up after a power state change (evergreen+).
1627 */
1628void evergreen_pm_finish(struct radeon_device *rdev)
1629{
1630	struct drm_device *ddev = rdev->ddev;
1631	struct drm_crtc *crtc;
1632	struct radeon_crtc *radeon_crtc;
1633	u32 tmp;
1634
1635	/* enable any active CRTCs */
1636	list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1637		radeon_crtc = to_radeon_crtc(crtc);
1638		if (radeon_crtc->enabled) {
1639			tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1640			tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1641			WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1642		}
1643	}
1644}
1645
1646/**
1647 * evergreen_hpd_sense - hpd sense callback.
1648 *
1649 * @rdev: radeon_device pointer
1650 * @hpd: hpd (hotplug detect) pin
1651 *
1652 * Checks if a digital monitor is connected (evergreen+).
1653 * Returns true if connected, false if not connected.
1654 */
1655bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1656{
1657	bool connected = false;
1658
1659	switch (hpd) {
1660	case RADEON_HPD_1:
1661		if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1662			connected = true;
1663		break;
1664	case RADEON_HPD_2:
1665		if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1666			connected = true;
1667		break;
1668	case RADEON_HPD_3:
1669		if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1670			connected = true;
1671		break;
1672	case RADEON_HPD_4:
1673		if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1674			connected = true;
1675		break;
1676	case RADEON_HPD_5:
1677		if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1678			connected = true;
1679		break;
1680	case RADEON_HPD_6:
1681		if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1682			connected = true;
1683		break;
1684	default:
1685		break;
1686	}
1687
1688	return connected;
1689}
1690
1691/**
1692 * evergreen_hpd_set_polarity - hpd set polarity callback.
1693 *
1694 * @rdev: radeon_device pointer
1695 * @hpd: hpd (hotplug detect) pin
1696 *
1697 * Set the polarity of the hpd pin (evergreen+).
1698 */
1699void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1700				enum radeon_hpd_id hpd)
1701{
1702	u32 tmp;
1703	bool connected = evergreen_hpd_sense(rdev, hpd);
1704
1705	switch (hpd) {
1706	case RADEON_HPD_1:
1707		tmp = RREG32(DC_HPD1_INT_CONTROL);
1708		if (connected)
1709			tmp &= ~DC_HPDx_INT_POLARITY;
1710		else
1711			tmp |= DC_HPDx_INT_POLARITY;
1712		WREG32(DC_HPD1_INT_CONTROL, tmp);
1713		break;
1714	case RADEON_HPD_2:
1715		tmp = RREG32(DC_HPD2_INT_CONTROL);
1716		if (connected)
1717			tmp &= ~DC_HPDx_INT_POLARITY;
1718		else
1719			tmp |= DC_HPDx_INT_POLARITY;
1720		WREG32(DC_HPD2_INT_CONTROL, tmp);
1721		break;
1722	case RADEON_HPD_3:
1723		tmp = RREG32(DC_HPD3_INT_CONTROL);
1724		if (connected)
1725			tmp &= ~DC_HPDx_INT_POLARITY;
1726		else
1727			tmp |= DC_HPDx_INT_POLARITY;
1728		WREG32(DC_HPD3_INT_CONTROL, tmp);
1729		break;
1730	case RADEON_HPD_4:
1731		tmp = RREG32(DC_HPD4_INT_CONTROL);
1732		if (connected)
1733			tmp &= ~DC_HPDx_INT_POLARITY;
1734		else
1735			tmp |= DC_HPDx_INT_POLARITY;
1736		WREG32(DC_HPD4_INT_CONTROL, tmp);
1737		break;
1738	case RADEON_HPD_5:
1739		tmp = RREG32(DC_HPD5_INT_CONTROL);
1740		if (connected)
1741			tmp &= ~DC_HPDx_INT_POLARITY;
1742		else
1743			tmp |= DC_HPDx_INT_POLARITY;
1744		WREG32(DC_HPD5_INT_CONTROL, tmp);
1745			break;
1746	case RADEON_HPD_6:
1747		tmp = RREG32(DC_HPD6_INT_CONTROL);
1748		if (connected)
1749			tmp &= ~DC_HPDx_INT_POLARITY;
1750		else
1751			tmp |= DC_HPDx_INT_POLARITY;
1752		WREG32(DC_HPD6_INT_CONTROL, tmp);
1753		break;
1754	default:
1755		break;
1756	}
1757}
1758
1759/**
1760 * evergreen_hpd_init - hpd setup callback.
1761 *
1762 * @rdev: radeon_device pointer
1763 *
1764 * Setup the hpd pins used by the card (evergreen+).
1765 * Enable the pin, set the polarity, and enable the hpd interrupts.
1766 */
1767void evergreen_hpd_init(struct radeon_device *rdev)
1768{
1769	struct drm_device *dev = rdev->ddev;
1770	struct drm_connector *connector;
1771	unsigned enabled = 0;
1772	u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1773		DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1774
1775	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1776		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1777
1778		if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1779		    connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1780			/* don't try to enable hpd on eDP or LVDS to avoid breaking the
1781			 * aux dp channel on imac; this helps (but does not completely fix)
1782			 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1783			 * and also avoids interrupt storms during dpms.
1784			 */
1785			continue;
1786		}
1787		switch (radeon_connector->hpd.hpd) {
1788		case RADEON_HPD_1:
1789			WREG32(DC_HPD1_CONTROL, tmp);
1790			break;
1791		case RADEON_HPD_2:
1792			WREG32(DC_HPD2_CONTROL, tmp);
1793			break;
1794		case RADEON_HPD_3:
1795			WREG32(DC_HPD3_CONTROL, tmp);
1796			break;
1797		case RADEON_HPD_4:
1798			WREG32(DC_HPD4_CONTROL, tmp);
1799			break;
1800		case RADEON_HPD_5:
1801			WREG32(DC_HPD5_CONTROL, tmp);
1802			break;
1803		case RADEON_HPD_6:
1804			WREG32(DC_HPD6_CONTROL, tmp);
1805			break;
1806		default:
1807			break;
1808		}
1809		radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1810		enabled |= 1 << radeon_connector->hpd.hpd;
1811	}
1812	radeon_irq_kms_enable_hpd(rdev, enabled);
1813}
1814
1815/**
1816 * evergreen_hpd_fini - hpd tear down callback.
1817 *
1818 * @rdev: radeon_device pointer
1819 *
1820 * Tear down the hpd pins used by the card (evergreen+).
1821 * Disable the hpd interrupts.
1822 */
1823void evergreen_hpd_fini(struct radeon_device *rdev)
1824{
1825	struct drm_device *dev = rdev->ddev;
1826	struct drm_connector *connector;
1827	unsigned disabled = 0;
1828
1829	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1830		struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1831		switch (radeon_connector->hpd.hpd) {
1832		case RADEON_HPD_1:
1833			WREG32(DC_HPD1_CONTROL, 0);
1834			break;
1835		case RADEON_HPD_2:
1836			WREG32(DC_HPD2_CONTROL, 0);
1837			break;
1838		case RADEON_HPD_3:
1839			WREG32(DC_HPD3_CONTROL, 0);
1840			break;
1841		case RADEON_HPD_4:
1842			WREG32(DC_HPD4_CONTROL, 0);
1843			break;
1844		case RADEON_HPD_5:
1845			WREG32(DC_HPD5_CONTROL, 0);
1846			break;
1847		case RADEON_HPD_6:
1848			WREG32(DC_HPD6_CONTROL, 0);
1849			break;
1850		default:
1851			break;
1852		}
1853		disabled |= 1 << radeon_connector->hpd.hpd;
1854	}
1855	radeon_irq_kms_disable_hpd(rdev, disabled);
1856}
1857
1858/* watermark setup */
1859
1860static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
1861					struct radeon_crtc *radeon_crtc,
1862					struct drm_display_mode *mode,
1863					struct drm_display_mode *other_mode)
1864{
1865	u32 tmp, buffer_alloc, i;
1866	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1867	/*
1868	 * Line Buffer Setup
1869	 * There are 3 line buffers, each one shared by 2 display controllers.
1870	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1871	 * the display controllers.  The partitioning is done via one of four
1872	 * preset allocations specified in bits 2:0:
1873	 * first display controller
1874	 *  0 - first half of lb (3840 * 2)
1875	 *  1 - first 3/4 of lb (5760 * 2)
1876	 *  2 - whole lb (7680 * 2), other crtc must be disabled
1877	 *  3 - first 1/4 of lb (1920 * 2)
1878	 * second display controller
1879	 *  4 - second half of lb (3840 * 2)
1880	 *  5 - second 3/4 of lb (5760 * 2)
1881	 *  6 - whole lb (7680 * 2), other crtc must be disabled
1882	 *  7 - last 1/4 of lb (1920 * 2)
1883	 */
1884	/* this can get tricky if we have two large displays on a paired group
1885	 * of crtcs.  Ideally for multiple large displays we'd assign them to
1886	 * non-linked crtcs for maximum line buffer allocation.
1887	 */
1888	if (radeon_crtc->base.enabled && mode) {
1889		if (other_mode) {
1890			tmp = 0; /* 1/2 */
1891			buffer_alloc = 1;
1892		} else {
1893			tmp = 2; /* whole */
1894			buffer_alloc = 2;
1895		}
1896	} else {
1897		tmp = 0;
1898		buffer_alloc = 0;
1899	}
1900
1901	/* second controller of the pair uses second half of the lb */
1902	if (radeon_crtc->crtc_id % 2)
1903		tmp += 4;
1904	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);
1905
1906	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
1907		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1908		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1909		for (i = 0; i < rdev->usec_timeout; i++) {
1910			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1911			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
1912				break;
1913			udelay(1);
1914		}
1915	}
1916
1917	if (radeon_crtc->base.enabled && mode) {
1918		switch (tmp) {
1919		case 0:
1920		case 4:
1921		default:
1922			if (ASIC_IS_DCE5(rdev))
1923				return 4096 * 2;
1924			else
1925				return 3840 * 2;
1926		case 1:
1927		case 5:
1928			if (ASIC_IS_DCE5(rdev))
1929				return 6144 * 2;
1930			else
1931				return 5760 * 2;
1932		case 2:
1933		case 6:
1934			if (ASIC_IS_DCE5(rdev))
1935				return 8192 * 2;
1936			else
1937				return 7680 * 2;
1938		case 3:
1939		case 7:
1940			if (ASIC_IS_DCE5(rdev))
1941				return 2048 * 2;
1942			else
1943				return 1920 * 2;
1944		}
1945	}
1946
1947	/* controller not enabled, so no lb used */
1948	return 0;
1949}
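/* Example of the mapping above (derived from this function, values
 * illustrative): the second crtc of a pair (crtc_id % 2 == 1) sharing its
 * line buffer with an active partner gets tmp = 0 + 4, i.e. the second
 * half of the lb, and the function returns 3840 * 2 (4096 * 2 on DCE5).
 */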
1950
1951u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1952{
1953	u32 tmp = RREG32(MC_SHARED_CHMAP);
1954
1955	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1956	case 0:
1957	default:
1958		return 1;
1959	case 1:
1960		return 2;
1961	case 2:
1962		return 4;
1963	case 3:
1964		return 8;
1965	}
1966}
1967
1968struct evergreen_wm_params {
1969	u32 dram_channels; /* number of dram channels */
1970	u32 yclk;          /* bandwidth per dram data pin in kHz */
1971	u32 sclk;          /* engine clock in kHz */
1972	u32 disp_clk;      /* display clock in kHz */
1973	u32 src_width;     /* viewport width */
1974	u32 active_time;   /* active display time in ns */
1975	u32 blank_time;    /* blank time in ns */
1976	bool interlaced;    /* mode is interlaced */
1977	fixed20_12 vsc;    /* vertical scale ratio */
1978	u32 num_heads;     /* number of active crtcs */
1979	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
1980	u32 lb_size;       /* line buffer allocated to pipe */
1981	u32 vtaps;         /* vertical scaler taps */
1982};
1983
1984static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1985{
1986	/* Calculate DRAM Bandwidth and the part allocated to display. */
1987	fixed20_12 dram_efficiency; /* 0.7 */
1988	fixed20_12 yclk, dram_channels, bandwidth;
1989	fixed20_12 a;
1990
1991	a.full = dfixed_const(1000);
1992	yclk.full = dfixed_const(wm->yclk);
1993	yclk.full = dfixed_div(yclk, a);
1994	dram_channels.full = dfixed_const(wm->dram_channels * 4);
1995	a.full = dfixed_const(10);
1996	dram_efficiency.full = dfixed_const(7);
1997	dram_efficiency.full = dfixed_div(dram_efficiency, a);
1998	bandwidth.full = dfixed_mul(dram_channels, yclk);
1999	bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2000
2001	return dfixed_trunc(bandwidth);
2002}
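/* Rough numeric sketch of the fixed-point math above (values assumed for
 * illustration): yclk = 1000000 kHz and dram_channels = 2 gives
 * (1000000 / 1000) * (2 * 4) * 0.7 = 5600, i.e. ~5.6 GB/s of raw DRAM
 * bandwidth after the 0.7 efficiency factor.
 */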
2003
2004static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2005{
2006	/* Calculate DRAM Bandwidth and the part allocated to display. */
2007	fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2008	fixed20_12 yclk, dram_channels, bandwidth;
2009	fixed20_12 a;
2010
2011	a.full = dfixed_const(1000);
2012	yclk.full = dfixed_const(wm->yclk);
2013	yclk.full = dfixed_div(yclk, a);
2014	dram_channels.full = dfixed_const(wm->dram_channels * 4);
2015	a.full = dfixed_const(10);
2016	disp_dram_allocation.full = dfixed_const(3); /* XXX worst case value 0.3 */
2017	disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2018	bandwidth.full = dfixed_mul(dram_channels, yclk);
2019	bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2020
2021	return dfixed_trunc(bandwidth);
2022}
2023
2024static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2025{
2026	/* Calculate the display Data return Bandwidth */
2027	fixed20_12 return_efficiency; /* 0.8 */
2028	fixed20_12 sclk, bandwidth;
2029	fixed20_12 a;
2030
2031	a.full = dfixed_const(1000);
2032	sclk.full = dfixed_const(wm->sclk);
2033	sclk.full = dfixed_div(sclk, a);
2034	a.full = dfixed_const(10);
2035	return_efficiency.full = dfixed_const(8);
2036	return_efficiency.full = dfixed_div(return_efficiency, a);
2037	a.full = dfixed_const(32);
2038	bandwidth.full = dfixed_mul(a, sclk);
2039	bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2040
2041	return dfixed_trunc(bandwidth);
2042}
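/* Same shape as the DRAM case (illustrative numbers): sclk = 600000 kHz
 * gives (600000 / 1000) * 32 * 0.8 = 15360, i.e. ~15.4 GB/s of data
 * return bandwidth after the 0.8 efficiency factor.
 */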
2043
2044static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2045{
2046	/* Calculate the DMIF Request Bandwidth */
2047	fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2048	fixed20_12 disp_clk, bandwidth;
2049	fixed20_12 a;
2050
2051	a.full = dfixed_const(1000);
2052	disp_clk.full = dfixed_const(wm->disp_clk);
2053	disp_clk.full = dfixed_div(disp_clk, a);
2054	a.full = dfixed_const(10);
2055	disp_clk_request_efficiency.full = dfixed_const(8);
2056	disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2057	a.full = dfixed_const(32);
2058	bandwidth.full = dfixed_mul(a, disp_clk);
2059	bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2060
2061	return dfixed_trunc(bandwidth);
2062}
2063
2064static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2065{
2066	/* Calculate the Available bandwidth. Display can use this temporarily but not on average. */
2067	u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2068	u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2069	u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2070
2071	return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2072}
2073
2074static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2075{
2076	/* Calculate the display mode Average Bandwidth
2077	 * DisplayMode should contain the source and destination dimensions,
2078	 * timing, etc.
2079	 */
2080	fixed20_12 bpp;
2081	fixed20_12 line_time;
2082	fixed20_12 src_width;
2083	fixed20_12 bandwidth;
2084	fixed20_12 a;
2085
2086	a.full = dfixed_const(1000);
2087	line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2088	line_time.full = dfixed_div(line_time, a);
2089	bpp.full = dfixed_const(wm->bytes_per_pixel);
2090	src_width.full = dfixed_const(wm->src_width);
2091	bandwidth.full = dfixed_mul(src_width, bpp);
2092	bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2093	bandwidth.full = dfixed_div(bandwidth, line_time);
2094
2095	return dfixed_trunc(bandwidth);
2096}
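/* Illustrative example for the formula above (values assumed): a
 * 1920-wide source at 4 bytes per pixel, vsc = 1 and a 16000 ns line time
 * averages (1920 * 4 * 1) / 16 = 480 MB/s.
 */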
2097
2098static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
2099{
2100	/* First calculate the latency in ns */
2101	u32 mc_latency = 2000; /* 2000 ns. */
2102	u32 available_bandwidth = evergreen_available_bandwidth(wm);
2103	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
2104	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
2105	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
2106	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
2107		(wm->num_heads * cursor_line_pair_return_time);
2108	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
2109	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
2110	fixed20_12 a, b, c;
2111
2112	if (wm->num_heads == 0)
2113		return 0;
2114
2115	a.full = dfixed_const(2);
2116	b.full = dfixed_const(1);
2117	if ((wm->vsc.full > a.full) ||
2118	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
2119	    (wm->vtaps >= 5) ||
2120	    ((wm->vsc.full >= a.full) && wm->interlaced))
2121		max_src_lines_per_dst_line = 4;
2122	else
2123		max_src_lines_per_dst_line = 2;
2124
2125	a.full = dfixed_const(available_bandwidth);
2126	b.full = dfixed_const(wm->num_heads);
2127	a.full = dfixed_div(a, b);
2128
2129	b.full = dfixed_const(1000);
2130	c.full = dfixed_const(wm->disp_clk);
2131	b.full = dfixed_div(c, b);
2132	c.full = dfixed_const(wm->bytes_per_pixel);
2133	b.full = dfixed_mul(b, c);
2134
2135	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));
2136
2137	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
2138	b.full = dfixed_const(1000);
2139	c.full = dfixed_const(lb_fill_bw);
2140	b.full = dfixed_div(c, b);
2141	a.full = dfixed_div(a, b);
2142	line_fill_time = dfixed_trunc(a);
2143
2144	if (line_fill_time < wm->active_time)
2145		return latency;
2146	else
2147		return latency + (line_fill_time - wm->active_time);
2148
2149}
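/* Summary of the computation above: latency = mc_latency + dc_latency +
 * (num_heads + 1) * worst_chunk_return_time + num_heads *
 * cursor_line_pair_return_time, extended by (line_fill_time -
 * active_time) whenever the line buffer cannot be refilled within the
 * active display period.
 */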
2150
2151static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2152{
2153	if (evergreen_average_bandwidth(wm) <=
2154	    (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2155		return true;
2156	else
2157		return false;
2158}
2159
2160static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2161{
2162	if (evergreen_average_bandwidth(wm) <=
2163	    (evergreen_available_bandwidth(wm) / wm->num_heads))
2164		return true;
2165	else
2166		return false;
2167}
2168
2169static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2170{
2171	u32 lb_partitions = wm->lb_size / wm->src_width;
2172	u32 line_time = wm->active_time + wm->blank_time;
2173	u32 latency_tolerant_lines;
2174	u32 latency_hiding;
2175	fixed20_12 a;
2176
2177	a.full = dfixed_const(1);
2178	if (wm->vsc.full > a.full)
2179		latency_tolerant_lines = 1;
2180	else {
2181		if (lb_partitions <= (wm->vtaps + 1))
2182			latency_tolerant_lines = 1;
2183		else
2184			latency_tolerant_lines = 2;
2185	}
2186
2187	latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2188
2189	if (evergreen_latency_watermark(wm) <= latency_hiding)
2190		return true;
2191	else
2192		return false;
2193}
2194
2195static void evergreen_program_watermarks(struct radeon_device *rdev,
2196					 struct radeon_crtc *radeon_crtc,
2197					 u32 lb_size, u32 num_heads)
2198{
2199	struct drm_display_mode *mode = &radeon_crtc->base.mode;
2200	struct evergreen_wm_params wm_low, wm_high;
2201	u32 dram_channels;
2202	u32 pixel_period;
2203	u32 line_time = 0;
2204	u32 latency_watermark_a = 0, latency_watermark_b = 0;
2205	u32 priority_a_mark = 0, priority_b_mark = 0;
2206	u32 priority_a_cnt = PRIORITY_OFF;
2207	u32 priority_b_cnt = PRIORITY_OFF;
2208	u32 pipe_offset = radeon_crtc->crtc_id * 16;
2209	u32 tmp, arb_control3;
2210	fixed20_12 a, b, c;
2211
2212	if (radeon_crtc->base.enabled && num_heads && mode) {
2213		pixel_period = 1000000 / (u32)mode->clock;
2214		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2215		priority_a_cnt = 0;
2216		priority_b_cnt = 0;
2217		dram_channels = evergreen_get_number_of_dram_channels(rdev);
2218
2219		/* watermark for high clocks */
2220		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2221			wm_high.yclk =
2222				radeon_dpm_get_mclk(rdev, false) * 10;
2223			wm_high.sclk =
2224				radeon_dpm_get_sclk(rdev, false) * 10;
2225		} else {
2226			wm_high.yclk = rdev->pm.current_mclk * 10;
2227			wm_high.sclk = rdev->pm.current_sclk * 10;
2228		}
2229
2230		wm_high.disp_clk = mode->clock;
2231		wm_high.src_width = mode->crtc_hdisplay;
2232		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2233		wm_high.blank_time = line_time - wm_high.active_time;
2234		wm_high.interlaced = false;
2235		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2236			wm_high.interlaced = true;
2237		wm_high.vsc = radeon_crtc->vsc;
2238		wm_high.vtaps = 1;
2239		if (radeon_crtc->rmx_type != RMX_OFF)
2240			wm_high.vtaps = 2;
2241		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2242		wm_high.lb_size = lb_size;
2243		wm_high.dram_channels = dram_channels;
2244		wm_high.num_heads = num_heads;
2245
2246		/* watermark for low clocks */
2247		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2248			wm_low.yclk =
2249				radeon_dpm_get_mclk(rdev, true) * 10;
2250			wm_low.sclk =
2251				radeon_dpm_get_sclk(rdev, true) * 10;
2252		} else {
2253			wm_low.yclk = rdev->pm.current_mclk * 10;
2254			wm_low.sclk = rdev->pm.current_sclk * 10;
2255		}
2256
2257		wm_low.disp_clk = mode->clock;
2258		wm_low.src_width = mode->crtc_hdisplay;
2259		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2260		wm_low.blank_time = line_time - wm_low.active_time;
2261		wm_low.interlaced = false;
2262		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2263			wm_low.interlaced = true;
2264		wm_low.vsc = radeon_crtc->vsc;
2265		wm_low.vtaps = 1;
2266		if (radeon_crtc->rmx_type != RMX_OFF)
2267			wm_low.vtaps = 2;
2268		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2269		wm_low.lb_size = lb_size;
2270		wm_low.dram_channels = dram_channels;
2271		wm_low.num_heads = num_heads;
2272
2273		/* set for high clocks */
2274		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2275		/* set for low clocks */
2276		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2277
2278		/* possibly force display priority to high */
2279		/* should really do this at mode validation time... */
2280		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2281		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2282		    !evergreen_check_latency_hiding(&wm_high) ||
2283		    (rdev->disp_priority == 2)) {
2284			DRM_DEBUG_KMS("force priority a to high\n");
2285			priority_a_cnt |= PRIORITY_ALWAYS_ON;
2286		}
2287		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2288		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2289		    !evergreen_check_latency_hiding(&wm_low) ||
2290		    (rdev->disp_priority == 2)) {
2291			DRM_DEBUG_KMS("force priority b to high\n");
2292			priority_b_cnt |= PRIORITY_ALWAYS_ON;
2293		}
2294
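		/* Convert the latency watermark (ns) into a priority mark in
		 * units of 16 pixels: mark ~= latency * pixclk(MHz) * hsc /
		 * 1000 / 16.  Illustrative numbers: 10000 ns at a 148500 kHz
		 * pixel clock with hsc = 1 gives 10000 * 148.5 / 1000 / 16
		 * ~= 92.
		 */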
2295		a.full = dfixed_const(1000);
2296		b.full = dfixed_const(mode->clock);
2297		b.full = dfixed_div(b, a);
2298		c.full = dfixed_const(latency_watermark_a);
2299		c.full = dfixed_mul(c, b);
2300		c.full = dfixed_mul(c, radeon_crtc->hsc);
2301		c.full = dfixed_div(c, a);
2302		a.full = dfixed_const(16);
2303		c.full = dfixed_div(c, a);
2304		priority_a_mark = dfixed_trunc(c);
2305		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2306
2307		a.full = dfixed_const(1000);
2308		b.full = dfixed_const(mode->clock);
2309		b.full = dfixed_div(b, a);
2310		c.full = dfixed_const(latency_watermark_b);
2311		c.full = dfixed_mul(c, b);
2312		c.full = dfixed_mul(c, radeon_crtc->hsc);
2313		c.full = dfixed_div(c, a);
2314		a.full = dfixed_const(16);
2315		c.full = dfixed_div(c, a);
2316		priority_b_mark = dfixed_trunc(c);
2317		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2318	}
2319
2320	/* select wm A */
2321	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2322	tmp = arb_control3;
2323	tmp &= ~LATENCY_WATERMARK_MASK(3);
2324	tmp |= LATENCY_WATERMARK_MASK(1);
2325	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2326	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2327	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2328		LATENCY_HIGH_WATERMARK(line_time)));
2329	/* select wm B */
2330	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2331	tmp &= ~LATENCY_WATERMARK_MASK(3);
2332	tmp |= LATENCY_WATERMARK_MASK(2);
2333	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2334	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2335	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2336		LATENCY_HIGH_WATERMARK(line_time)));
2337	/* restore original selection */
2338	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2339
2340	/* write the priority marks */
2341	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2342	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2343
2344	/* save values for DPM */
2345	radeon_crtc->line_time = line_time;
2346	radeon_crtc->wm_high = latency_watermark_a;
2347	radeon_crtc->wm_low = latency_watermark_b;
2348}
2349
2350/**
2351 * evergreen_bandwidth_update - update display watermarks callback.
2352 *
2353 * @rdev: radeon_device pointer
2354 *
2355 * Update the display watermarks based on the requested mode(s)
2356 * (evergreen+).
2357 */
2358void evergreen_bandwidth_update(struct radeon_device *rdev)
2359{
2360	struct drm_display_mode *mode0 = NULL;
2361	struct drm_display_mode *mode1 = NULL;
2362	u32 num_heads = 0, lb_size;
2363	int i;
2364
2365	radeon_update_display_priority(rdev);
2366
2367	for (i = 0; i < rdev->num_crtc; i++) {
2368		if (rdev->mode_info.crtcs[i]->base.enabled)
2369			num_heads++;
2370	}
2371	for (i = 0; i < rdev->num_crtc; i += 2) {
2372		mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2373		mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2374		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2375		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2376		lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2377		evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2378	}
2379}
2380
2381/**
2382 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2383 *
2384 * @rdev: radeon_device pointer
2385 *
2386 * Wait for the MC (memory controller) to be idle.
2387 * (evergreen+).
2388 * Returns 0 if the MC is idle, -1 if not.
2389 */
2390int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2391{
2392	unsigned i;
2393	u32 tmp;
2394
2395	for (i = 0; i < rdev->usec_timeout; i++) {
2396		/* poll the MC busy bits in SRBM_STATUS */
2397		tmp = RREG32(SRBM_STATUS) & 0x1F00;
2398		if (!tmp)
2399			return 0;
2400		udelay(1);
2401	}
2402	return -1;
2403}
2404
2405/*
2406 * GART
2407 */
2408void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2409{
2410	unsigned i;
2411	u32 tmp;
2412
2413	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2414
2415	WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2416	for (i = 0; i < rdev->usec_timeout; i++) {
2417		/* poll the VM context0 request response */
2418		tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2419		tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2420		if (tmp == 2) {
2421			printk(KERN_WARNING "[drm] evergreen flush TLB failed\n");
2422			return;
2423		}
2424		if (tmp) {
2425			return;
2426		}
2427		udelay(1);
2428	}
2429}
2430
2431static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
2432{
2433	u32 tmp;
2434	int r;
2435
2436	if (rdev->gart.robj == NULL) {
2437		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
2438		return -EINVAL;
2439	}
2440	r = radeon_gart_table_vram_pin(rdev);
2441	if (r)
2442		return r;
2443	radeon_gart_restore(rdev);
2444	/* Setup L2 cache */
2445	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2446				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2447				EFFECTIVE_L2_QUEUE_SIZE(7));
2448	WREG32(VM_L2_CNTL2, 0);
2449	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2450	/* Setup TLB control */
2451	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2452		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2453		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2454		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2455	if (rdev->flags & RADEON_IS_IGP) {
2456		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
2457		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
2458		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
2459	} else {
2460		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2461		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2462		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2463		if ((rdev->family == CHIP_JUNIPER) ||
2464		    (rdev->family == CHIP_CYPRESS) ||
2465		    (rdev->family == CHIP_HEMLOCK) ||
2466		    (rdev->family == CHIP_BARTS))
2467			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
2468	}
2469	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2470	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2471	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2472	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2473	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
2474	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
2475	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
2476	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
2477				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
2478	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
2479			(u32)(rdev->dummy_page.addr >> 12));
2480	WREG32(VM_CONTEXT1_CNTL, 0);
2481
2482	evergreen_pcie_gart_tlb_flush(rdev);
2483	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
2484		 (unsigned)(rdev->mc.gtt_size >> 20),
2485		 (unsigned long long)rdev->gart.table_addr);
2486	rdev->gart.ready = true;
2487	return 0;
2488}
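/* Note on the setup above: only VM context 0 is enabled and pointed at
 * the GART page table; accesses that fault the [gtt_start, gtt_end)
 * range are redirected to the dummy page via the protection fault
 * default address rather than going out on the bus.
 */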
2489
2490static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
2491{
2492	u32 tmp;
2493
2494	/* Disable all tables */
2495	WREG32(VM_CONTEXT0_CNTL, 0);
2496	WREG32(VM_CONTEXT1_CNTL, 0);
2497
2498	/* Setup L2 cache */
2499	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
2500				EFFECTIVE_L2_QUEUE_SIZE(7));
2501	WREG32(VM_L2_CNTL2, 0);
2502	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2503	/* Setup TLB control */
2504	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2505	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2506	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2507	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2508	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2509	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2510	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2511	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2512	radeon_gart_table_vram_unpin(rdev);
2513}
2514
2515static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
2516{
2517	evergreen_pcie_gart_disable(rdev);
2518	radeon_gart_table_vram_free(rdev);
2519	radeon_gart_fini(rdev);
2520}
2521
2522
2523static void evergreen_agp_enable(struct radeon_device *rdev)
2524{
2525	u32 tmp;
2526
2527	/* Setup L2 cache */
2528	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
2529				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
2530				EFFECTIVE_L2_QUEUE_SIZE(7));
2531	WREG32(VM_L2_CNTL2, 0);
2532	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
2533	/* Setup TLB control */
2534	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
2535		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
2536		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
2537		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
2538	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
2539	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
2540	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
2541	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
2542	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
2543	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
2544	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
2545	WREG32(VM_CONTEXT0_CNTL, 0);
2546	WREG32(VM_CONTEXT1_CNTL, 0);
2547}
2548
2549void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
2550{
2551	u32 crtc_enabled, tmp, frame_count, blackout;
2552	int i, j;
2553
2554	if (!ASIC_IS_NODCE(rdev)) {
2555		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
2556		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);
2557
2558		/* disable VGA render */
2559		WREG32(VGA_RENDER_CONTROL, 0);
2560	}
2561	/* blank the display controllers */
2562	for (i = 0; i < rdev->num_crtc; i++) {
2563		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
2564		if (crtc_enabled) {
2565			save->crtc_enabled[i] = true;
2566			if (ASIC_IS_DCE6(rdev)) {
2567				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2568				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
2569					radeon_wait_for_vblank(rdev, i);
2570					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2571					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2572					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2573				}
2574			} else {
2575				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2576				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
2577					radeon_wait_for_vblank(rdev, i);
2578					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2579					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2580					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2581					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2582				}
2583			}
2584			/* wait for the next frame */
2585			frame_count = radeon_get_vblank_counter(rdev, i);
2586			for (j = 0; j < rdev->usec_timeout; j++) {
2587				if (radeon_get_vblank_counter(rdev, i) != frame_count)
2588					break;
2589				udelay(1);
2590			}
2591
2592			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
2593			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2594			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2595			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
2596			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2597			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2598			save->crtc_enabled[i] = false;
2599			/* ***** */
2600		} else {
2601			save->crtc_enabled[i] = false;
2602		}
2603	}
2604
2605	radeon_mc_wait_for_idle(rdev);
2606
2607	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
2608	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
2609		/* Block CPU access */
2610		WREG32(BIF_FB_EN, 0);
2611		/* blackout the MC */
2612		blackout &= ~BLACKOUT_MODE_MASK;
2613		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
2614	}
2615	/* wait for the MC to settle */
2616	udelay(100);
2617
2618	/* lock double buffered regs */
2619	for (i = 0; i < rdev->num_crtc; i++) {
2620		if (save->crtc_enabled[i]) {
2621			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2622			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
2623				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
2624				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2625			}
2626			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2627			if (!(tmp & 1)) {
2628				tmp |= 1;
2629				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2630			}
2631		}
2632	}
2633}
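/* The sequence above, in short: blank every active crtc and wait one
 * frame, stop the crtc masters, block CPU framebuffer access through
 * BIF_FB_EN, put the MC into blackout mode, then lock the
 * double-buffered surface registers so evergreen_mc_resume() can
 * reprogram the scanout addresses atomically.
 */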
2634
2635void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
2636{
2637	u32 tmp, frame_count;
2638	int i, j;
2639
2640	/* update crtc base addresses */
2641	for (i = 0; i < rdev->num_crtc; i++) {
2642		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2643		       upper_32_bits(rdev->mc.vram_start));
2644		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
2645		       upper_32_bits(rdev->mc.vram_start));
2646		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
2647		       (u32)rdev->mc.vram_start);
2648		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
2649		       (u32)rdev->mc.vram_start);
2650	}
2651
2652	if (!ASIC_IS_NODCE(rdev)) {
2653		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
2654		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
2655	}
2656
2657	/* unlock regs and wait for update */
2658	for (i = 0; i < rdev->num_crtc; i++) {
2659		if (save->crtc_enabled[i]) {
2660			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
2661			if ((tmp & 0x3) != 0) {
2662				tmp &= ~0x3;
2663				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
2664			}
2665			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2666			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
2667				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
2668				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
2669			}
2670			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
2671			if (tmp & 1) {
2672				tmp &= ~1;
2673				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
2674			}
2675			for (j = 0; j < rdev->usec_timeout; j++) {
2676				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
2677				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
2678					break;
2679				udelay(1);
2680			}
2681		}
2682	}
2683
2684	/* unblackout the MC */
2685	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
2686	tmp &= ~BLACKOUT_MODE_MASK;
2687	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
2688	/* allow CPU access */
2689	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);
2690
2691	for (i = 0; i < rdev->num_crtc; i++) {
2692		if (save->crtc_enabled[i]) {
2693			if (ASIC_IS_DCE6(rdev)) {
2694				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
2695				tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
2696				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2697				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
2698				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2699			} else {
2700				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
2701				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
2702				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
2703				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
2704				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
2705			}
2706			/* wait for the next frame */
2707			frame_count = radeon_get_vblank_counter(rdev, i);
2708			for (j = 0; j < rdev->usec_timeout; j++) {
2709				if (radeon_get_vblank_counter(rdev, i) != frame_count)
2710					break;
2711				udelay(1);
2712			}
2713		}
2714	}
2715	if (!ASIC_IS_NODCE(rdev)) {
2716		/* Unlock vga access */
2717		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
2718		mdelay(1);
2719		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
2720	}
2721}
2722
2723void evergreen_mc_program(struct radeon_device *rdev)
2724{
2725	struct evergreen_mc_save save;
2726	u32 tmp;
2727	int i, j;
2728
2729	/* Initialize HDP */
2730	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
2731		WREG32((0x2c14 + j), 0x00000000);
2732		WREG32((0x2c18 + j), 0x00000000);
2733		WREG32((0x2c1c + j), 0x00000000);
2734		WREG32((0x2c20 + j), 0x00000000);
2735		WREG32((0x2c24 + j), 0x00000000);
2736	}
2737	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);
2738
2739	evergreen_mc_stop(rdev, &save);
2740	if (evergreen_mc_wait_for_idle(rdev)) {
2741		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
2742	}
2743	/* Lockout access through VGA aperture*/
2744	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
2745	/* Update configuration */
2746	if (rdev->flags & RADEON_IS_AGP) {
2747		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
2748			/* VRAM before AGP */
2749			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2750				rdev->mc.vram_start >> 12);
2751			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2752				rdev->mc.gtt_end >> 12);
2753		} else {
2754			/* VRAM after AGP */
2755			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2756				rdev->mc.gtt_start >> 12);
2757			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2758				rdev->mc.vram_end >> 12);
2759		}
2760	} else {
2761		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
2762			rdev->mc.vram_start >> 12);
2763		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
2764			rdev->mc.vram_end >> 12);
2765	}
2766	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
2767	/* llano/ontario only */
2768	if ((rdev->family == CHIP_PALM) ||
2769	    (rdev->family == CHIP_SUMO) ||
2770	    (rdev->family == CHIP_SUMO2)) {
2771		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
2772		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
2773		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
2774		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
2775	}
2776	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
2777	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
2778	WREG32(MC_VM_FB_LOCATION, tmp);
2779	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
2780	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
2781	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
2782	if (rdev->flags & RADEON_IS_AGP) {
2783		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
2784		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
2785		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
2786	} else {
2787		WREG32(MC_VM_AGP_BASE, 0);
2788		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
2789		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
2790	}
2791	if (evergreen_mc_wait_for_idle(rdev)) {
2792		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
2793	}
2794	evergreen_mc_resume(rdev, &save);
2795	/* we need to own VRAM, so turn off the VGA renderer here
2796	 * to stop it overwriting our objects */
2797	rv515_vga_render_disable(rdev);
2798}
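/* MC_VM_FB_LOCATION (built above) packs the VRAM range in 16 MB units
 * (the >> 24 shifts): end in bits 31:16, start in bits 15:0.
 */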
2799
2800/*
2801 * CP.
2802 */
2803void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
2804{
2805	struct radeon_ring *ring = &rdev->ring[ib->ring];
2806	u32 next_rptr;
2807
2808	/* set to DX10/11 mode */
2809	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
2810	radeon_ring_write(ring, 1);
2811
2812	if (ring->rptr_save_reg) {
2813		next_rptr = ring->wptr + 3 + 4; /* 3-dword reg write + 4-dword IB packet */
2814		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
2815		radeon_ring_write(ring, ((ring->rptr_save_reg - 
2816					  PACKET3_SET_CONFIG_REG_START) >> 2));
2817		radeon_ring_write(ring, next_rptr);
2818	} else if (rdev->wb.enabled) {
2819		next_rptr = ring->wptr + 5 + 4; /* 5-dword mem write + 4-dword IB packet */
2820		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
2821		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
2822		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
2823		radeon_ring_write(ring, next_rptr);
2824		radeon_ring_write(ring, 0);
2825	}
2826
2827	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
2828	radeon_ring_write(ring,
2829#ifdef __BIG_ENDIAN
2830			  (2 << 0) |
2831#endif
2832			  (ib->gpu_addr & 0xFFFFFFFC));
2833	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
2834	radeon_ring_write(ring, ib->length_dw);
2835}
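/* Layout note for the IB packet above: 4 dwords total - header, address
 * bits 31:2 (low dword, with the endian swap mode in bits 1:0 on
 * big-endian), address bits 39:32, and the length in dwords.
 */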
2836
2837
2838static int evergreen_cp_load_microcode(struct radeon_device *rdev)
2839{
2840	const __be32 *fw_data;
2841	int i;
2842
2843	if (!rdev->me_fw || !rdev->pfp_fw)
2844		return -EINVAL;
2845
2846	r700_cp_stop(rdev);
2847	WREG32(CP_RB_CNTL,
2848#ifdef __BIG_ENDIAN
2849	       BUF_SWAP_32BIT |
2850#endif
2851	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));
2852
2853	fw_data = (const __be32 *)rdev->pfp_fw->data;
2854	WREG32(CP_PFP_UCODE_ADDR, 0);
2855	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
2856		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
2857	WREG32(CP_PFP_UCODE_ADDR, 0);
2858
2859	fw_data = (const __be32 *)rdev->me_fw->data;
2860	WREG32(CP_ME_RAM_WADDR, 0);
2861	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
2862		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
2863
2864	WREG32(CP_PFP_UCODE_ADDR, 0);
2865	WREG32(CP_ME_RAM_WADDR, 0);
2866	WREG32(CP_ME_RAM_RADDR, 0);
2867	return 0;
2868}
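/* Both blobs above are streamed one big-endian dword at a time through
 * auto-incrementing data ports; resetting the address registers to 0
 * before and after makes the writes start at offset 0 and the CP fetch
 * from the beginning.  The loop bounds come from the
 * EVERGREEN_*_UCODE_SIZE dword counts.
 */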
2869
2870static int evergreen_cp_start(struct radeon_device *rdev)
2871{
2872	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2873	int r, i;
2874	uint32_t cp_me;
2875
2876	r = radeon_ring_lock(rdev, ring, 7);
2877	if (r) {
2878		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2879		return r;
2880	}
2881	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
2882	radeon_ring_write(ring, 0x1);
2883	radeon_ring_write(ring, 0x0);
2884	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
2885	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
2886	radeon_ring_write(ring, 0);
2887	radeon_ring_write(ring, 0);
2888	radeon_ring_unlock_commit(rdev, ring);
2889
2890	cp_me = 0xff;
2891	WREG32(CP_ME_CNTL, cp_me);
2892
2893	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
2894	if (r) {
2895		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
2896		return r;
2897	}
2898
2899	/* setup clear context state */
2900	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2901	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
2902
2903	for (i = 0; i < evergreen_default_size; i++)
2904		radeon_ring_write(ring, evergreen_default_state[i]);
2905
2906	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
2907	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
2908
2909	/* set clear context state */
2910	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
2911	radeon_ring_write(ring, 0);
2912
2913	/* SQ_VTX_BASE_VTX_LOC */
2914	radeon_ring_write(ring, 0xc0026f00);
2915	radeon_ring_write(ring, 0x00000000);
2916	radeon_ring_write(ring, 0x00000000);
2917	radeon_ring_write(ring, 0x00000000);
2918
2919	/* Clear consts */
2920	radeon_ring_write(ring, 0xc0036f00);
2921	radeon_ring_write(ring, 0x00000bc4);
2922	radeon_ring_write(ring, 0xffffffff);
2923	radeon_ring_write(ring, 0xffffffff);
2924	radeon_ring_write(ring, 0xffffffff);
2925
2926	radeon_ring_write(ring, 0xc0026900);
2927	radeon_ring_write(ring, 0x00000316);
2928	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
2929	radeon_ring_write(ring, 0x00000010); /*  */
2930
2931	radeon_ring_unlock_commit(rdev, ring);
2932
2933	return 0;
2934}
2935
2936static int evergreen_cp_resume(struct radeon_device *rdev)
2937{
2938	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
2939	u32 tmp;
2940	u32 rb_bufsz;
2941	int r;
2942
2943	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
2944	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
2945				 SOFT_RESET_PA |
2946				 SOFT_RESET_SH |
2947				 SOFT_RESET_VGT |
2948				 SOFT_RESET_SPI |
2949				 SOFT_RESET_SX));
2950	RREG32(GRBM_SOFT_RESET);
2951	mdelay(15);
2952	WREG32(GRBM_SOFT_RESET, 0);
2953	RREG32(GRBM_SOFT_RESET);
2954
2955	/* Set ring buffer size */
2956	rb_bufsz = order_base_2(ring->ring_size / 8);
2957	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
2958#ifdef __BIG_ENDIAN
2959	tmp |= BUF_SWAP_32BIT;
2960#endif
2961	WREG32(CP_RB_CNTL, tmp);
2962	WREG32(CP_SEM_WAIT_TIMER, 0x0);
2963	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);
2964
2965	/* Set the write pointer delay */
2966	WREG32(CP_RB_WPTR_DELAY, 0);
2967
2968	/* Initialize the ring buffer's read and write pointers */
2969	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
2970	WREG32(CP_RB_RPTR_WR, 0);
2971	ring->wptr = 0;
2972	WREG32(CP_RB_WPTR, ring->wptr);
2973
2974	/* set the wb address whether it's enabled or not */
2975	WREG32(CP_RB_RPTR_ADDR,
2976	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
2977	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
2978	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);
2979
2980	if (rdev->wb.enabled)
2981		WREG32(SCRATCH_UMSK, 0xff);
2982	else {
2983		tmp |= RB_NO_UPDATE;
2984		WREG32(SCRATCH_UMSK, 0);
2985	}
2986
2987	mdelay(1);
2988	WREG32(CP_RB_CNTL, tmp);
2989
2990	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
2991	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));
2992
 
 
2993	evergreen_cp_start(rdev);
2994	ring->ready = true;
2995	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
2996	if (r) {
2997		ring->ready = false;
2998		return r;
2999	}
3000	return 0;
3001}
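/* Encoding note (inferred from the computation above): RB_BUFSZ takes
 * the ring size as a log2 value in 8-byte units (ring_size / 8), and
 * RB_BLKSZ likewise encodes the fetch block size as log2 of
 * RADEON_GPU_PAGE_SIZE / 8.
 */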
3002
3003/*
3004 * Core functions
3005 */
3006static void evergreen_gpu_init(struct radeon_device *rdev)
3007{
3008	u32 gb_addr_config;
3009	u32 mc_shared_chmap, mc_arb_ramcfg;
3010	u32 sx_debug_1;
3011	u32 smx_dc_ctl0;
3012	u32 sq_config;
3013	u32 sq_lds_resource_mgmt;
3014	u32 sq_gpr_resource_mgmt_1;
3015	u32 sq_gpr_resource_mgmt_2;
3016	u32 sq_gpr_resource_mgmt_3;
3017	u32 sq_thread_resource_mgmt;
3018	u32 sq_thread_resource_mgmt_2;
3019	u32 sq_stack_resource_mgmt_1;
3020	u32 sq_stack_resource_mgmt_2;
3021	u32 sq_stack_resource_mgmt_3;
3022	u32 vgt_cache_invalidation;
3023	u32 hdp_host_path_cntl, tmp;
3024	u32 disabled_rb_mask;
3025	int i, j, num_shader_engines, ps_thread_count;
3026
3027	switch (rdev->family) {
3028	case CHIP_CYPRESS:
3029	case CHIP_HEMLOCK:
3030		rdev->config.evergreen.num_ses = 2;
3031		rdev->config.evergreen.max_pipes = 4;
3032		rdev->config.evergreen.max_tile_pipes = 8;
3033		rdev->config.evergreen.max_simds = 10;
3034		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3035		rdev->config.evergreen.max_gprs = 256;
3036		rdev->config.evergreen.max_threads = 248;
3037		rdev->config.evergreen.max_gs_threads = 32;
3038		rdev->config.evergreen.max_stack_entries = 512;
3039		rdev->config.evergreen.sx_num_of_sets = 4;
3040		rdev->config.evergreen.sx_max_export_size = 256;
3041		rdev->config.evergreen.sx_max_export_pos_size = 64;
3042		rdev->config.evergreen.sx_max_export_smx_size = 192;
3043		rdev->config.evergreen.max_hw_contexts = 8;
3044		rdev->config.evergreen.sq_num_cf_insts = 2;
3045
3046		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3047		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3048		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3049		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3050		break;
3051	case CHIP_JUNIPER:
3052		rdev->config.evergreen.num_ses = 1;
3053		rdev->config.evergreen.max_pipes = 4;
3054		rdev->config.evergreen.max_tile_pipes = 4;
3055		rdev->config.evergreen.max_simds = 10;
3056		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3057		rdev->config.evergreen.max_gprs = 256;
3058		rdev->config.evergreen.max_threads = 248;
3059		rdev->config.evergreen.max_gs_threads = 32;
3060		rdev->config.evergreen.max_stack_entries = 512;
3061		rdev->config.evergreen.sx_num_of_sets = 4;
3062		rdev->config.evergreen.sx_max_export_size = 256;
3063		rdev->config.evergreen.sx_max_export_pos_size = 64;
3064		rdev->config.evergreen.sx_max_export_smx_size = 192;
3065		rdev->config.evergreen.max_hw_contexts = 8;
3066		rdev->config.evergreen.sq_num_cf_insts = 2;
3067
3068		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3069		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3070		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3071		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3072		break;
3073	case CHIP_REDWOOD:
3074		rdev->config.evergreen.num_ses = 1;
3075		rdev->config.evergreen.max_pipes = 4;
3076		rdev->config.evergreen.max_tile_pipes = 4;
3077		rdev->config.evergreen.max_simds = 5;
3078		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3079		rdev->config.evergreen.max_gprs = 256;
3080		rdev->config.evergreen.max_threads = 248;
3081		rdev->config.evergreen.max_gs_threads = 32;
3082		rdev->config.evergreen.max_stack_entries = 256;
3083		rdev->config.evergreen.sx_num_of_sets = 4;
3084		rdev->config.evergreen.sx_max_export_size = 256;
3085		rdev->config.evergreen.sx_max_export_pos_size = 64;
3086		rdev->config.evergreen.sx_max_export_smx_size = 192;
3087		rdev->config.evergreen.max_hw_contexts = 8;
3088		rdev->config.evergreen.sq_num_cf_insts = 2;
3089
3090		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3091		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3092		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3093		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3094		break;
3095	case CHIP_CEDAR:
3096	default:
3097		rdev->config.evergreen.num_ses = 1;
3098		rdev->config.evergreen.max_pipes = 2;
3099		rdev->config.evergreen.max_tile_pipes = 2;
3100		rdev->config.evergreen.max_simds = 2;
3101		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3102		rdev->config.evergreen.max_gprs = 256;
3103		rdev->config.evergreen.max_threads = 192;
3104		rdev->config.evergreen.max_gs_threads = 16;
3105		rdev->config.evergreen.max_stack_entries = 256;
3106		rdev->config.evergreen.sx_num_of_sets = 4;
3107		rdev->config.evergreen.sx_max_export_size = 128;
3108		rdev->config.evergreen.sx_max_export_pos_size = 32;
3109		rdev->config.evergreen.sx_max_export_smx_size = 96;
3110		rdev->config.evergreen.max_hw_contexts = 4;
3111		rdev->config.evergreen.sq_num_cf_insts = 1;
3112
3113		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3114		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3115		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3116		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3117		break;
3118	case CHIP_PALM:
3119		rdev->config.evergreen.num_ses = 1;
3120		rdev->config.evergreen.max_pipes = 2;
3121		rdev->config.evergreen.max_tile_pipes = 2;
3122		rdev->config.evergreen.max_simds = 2;
3123		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3124		rdev->config.evergreen.max_gprs = 256;
3125		rdev->config.evergreen.max_threads = 192;
3126		rdev->config.evergreen.max_gs_threads = 16;
3127		rdev->config.evergreen.max_stack_entries = 256;
3128		rdev->config.evergreen.sx_num_of_sets = 4;
3129		rdev->config.evergreen.sx_max_export_size = 128;
3130		rdev->config.evergreen.sx_max_export_pos_size = 32;
3131		rdev->config.evergreen.sx_max_export_smx_size = 96;
3132		rdev->config.evergreen.max_hw_contexts = 4;
3133		rdev->config.evergreen.sq_num_cf_insts = 1;
3134
3135		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3136		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3137		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3138		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3139		break;
3140	case CHIP_SUMO:
3141		rdev->config.evergreen.num_ses = 1;
3142		rdev->config.evergreen.max_pipes = 4;
3143		rdev->config.evergreen.max_tile_pipes = 4;
3144		if (rdev->pdev->device == 0x9648)
3145			rdev->config.evergreen.max_simds = 3;
3146		else if ((rdev->pdev->device == 0x9647) ||
3147			 (rdev->pdev->device == 0x964a))
3148			rdev->config.evergreen.max_simds = 4;
3149		else
3150			rdev->config.evergreen.max_simds = 5;
3151		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3152		rdev->config.evergreen.max_gprs = 256;
3153		rdev->config.evergreen.max_threads = 248;
3154		rdev->config.evergreen.max_gs_threads = 32;
3155		rdev->config.evergreen.max_stack_entries = 256;
3156		rdev->config.evergreen.sx_num_of_sets = 4;
3157		rdev->config.evergreen.sx_max_export_size = 256;
3158		rdev->config.evergreen.sx_max_export_pos_size = 64;
3159		rdev->config.evergreen.sx_max_export_smx_size = 192;
3160		rdev->config.evergreen.max_hw_contexts = 8;
3161		rdev->config.evergreen.sq_num_cf_insts = 2;
3162
3163		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3164		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3165		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3166		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3167		break;
3168	case CHIP_SUMO2:
3169		rdev->config.evergreen.num_ses = 1;
3170		rdev->config.evergreen.max_pipes = 4;
3171		rdev->config.evergreen.max_tile_pipes = 4;
3172		rdev->config.evergreen.max_simds = 2;
3173		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3174		rdev->config.evergreen.max_gprs = 256;
3175		rdev->config.evergreen.max_threads = 248;
3176		rdev->config.evergreen.max_gs_threads = 32;
3177		rdev->config.evergreen.max_stack_entries = 512;
3178		rdev->config.evergreen.sx_num_of_sets = 4;
3179		rdev->config.evergreen.sx_max_export_size = 256;
3180		rdev->config.evergreen.sx_max_export_pos_size = 64;
3181		rdev->config.evergreen.sx_max_export_smx_size = 192;
3182		rdev->config.evergreen.max_hw_contexts = 4;
3183		rdev->config.evergreen.sq_num_cf_insts = 2;
3184
3185		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3186		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3187		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3188		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3189		break;
3190	case CHIP_BARTS:
3191		rdev->config.evergreen.num_ses = 2;
3192		rdev->config.evergreen.max_pipes = 4;
3193		rdev->config.evergreen.max_tile_pipes = 8;
3194		rdev->config.evergreen.max_simds = 7;
3195		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3196		rdev->config.evergreen.max_gprs = 256;
3197		rdev->config.evergreen.max_threads = 248;
3198		rdev->config.evergreen.max_gs_threads = 32;
3199		rdev->config.evergreen.max_stack_entries = 512;
3200		rdev->config.evergreen.sx_num_of_sets = 4;
3201		rdev->config.evergreen.sx_max_export_size = 256;
3202		rdev->config.evergreen.sx_max_export_pos_size = 64;
3203		rdev->config.evergreen.sx_max_export_smx_size = 192;
3204		rdev->config.evergreen.max_hw_contexts = 8;
3205		rdev->config.evergreen.sq_num_cf_insts = 2;
3206
3207		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3208		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3209		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3210		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3211		break;
3212	case CHIP_TURKS:
3213		rdev->config.evergreen.num_ses = 1;
3214		rdev->config.evergreen.max_pipes = 4;
3215		rdev->config.evergreen.max_tile_pipes = 4;
3216		rdev->config.evergreen.max_simds = 6;
3217		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3218		rdev->config.evergreen.max_gprs = 256;
3219		rdev->config.evergreen.max_threads = 248;
3220		rdev->config.evergreen.max_gs_threads = 32;
3221		rdev->config.evergreen.max_stack_entries = 256;
3222		rdev->config.evergreen.sx_num_of_sets = 4;
3223		rdev->config.evergreen.sx_max_export_size = 256;
3224		rdev->config.evergreen.sx_max_export_pos_size = 64;
3225		rdev->config.evergreen.sx_max_export_smx_size = 192;
3226		rdev->config.evergreen.max_hw_contexts = 8;
3227		rdev->config.evergreen.sq_num_cf_insts = 2;
3228
3229		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3230		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3231		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3232		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3233		break;
3234	case CHIP_CAICOS:
3235		rdev->config.evergreen.num_ses = 1;
3236		rdev->config.evergreen.max_pipes = 2;
3237		rdev->config.evergreen.max_tile_pipes = 2;
3238		rdev->config.evergreen.max_simds = 2;
3239		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3240		rdev->config.evergreen.max_gprs = 256;
3241		rdev->config.evergreen.max_threads = 192;
3242		rdev->config.evergreen.max_gs_threads = 16;
3243		rdev->config.evergreen.max_stack_entries = 256;
3244		rdev->config.evergreen.sx_num_of_sets = 4;
3245		rdev->config.evergreen.sx_max_export_size = 128;
3246		rdev->config.evergreen.sx_max_export_pos_size = 32;
3247		rdev->config.evergreen.sx_max_export_smx_size = 96;
3248		rdev->config.evergreen.max_hw_contexts = 4;
3249		rdev->config.evergreen.sq_num_cf_insts = 1;
3250
3251		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3252		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3253		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3254		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3255		break;
3256	}
3257
3258	/* Initialize HDP */
3259	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3260		WREG32((0x2c14 + j), 0x00000000);
3261		WREG32((0x2c18 + j), 0x00000000);
3262		WREG32((0x2c1c + j), 0x00000000);
3263		WREG32((0x2c20 + j), 0x00000000);
3264		WREG32((0x2c24 + j), 0x00000000);
3265	}
3266
3267	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3268
3269	evergreen_fix_pci_max_read_req_size(rdev);
3270
3271	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3272	if ((rdev->family == CHIP_PALM) ||
3273	    (rdev->family == CHIP_SUMO) ||
3274	    (rdev->family == CHIP_SUMO2))
3275		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3276	else
3277		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3278
3279	/* setup tiling info dword.  gb_addr_config is not adequate since it does
3280	 * not have bank info, so create a custom tiling dword.
3281	 * bits 3:0   num_pipes
3282	 * bits 7:4   num_banks
3283	 * bits 11:8  group_size
3284	 * bits 15:12 row_size
3285	 */
3286	rdev->config.evergreen.tile_config = 0;
3287	switch (rdev->config.evergreen.max_tile_pipes) {
3288	case 1:
3289	default:
3290		rdev->config.evergreen.tile_config |= (0 << 0);
3291		break;
3292	case 2:
3293		rdev->config.evergreen.tile_config |= (1 << 0);
3294		break;
3295	case 4:
3296		rdev->config.evergreen.tile_config |= (2 << 0);
3297		break;
3298	case 8:
3299		rdev->config.evergreen.tile_config |= (3 << 0);
3300		break;
3301	}
3302	/* num banks is 8 on all fusion ASICs; field encoding: 0 = 4, 1 = 8, 2 = 16 */
3303	if (rdev->flags & RADEON_IS_IGP)
3304		rdev->config.evergreen.tile_config |= 1 << 4;
3305	else {
3306		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3307		case 0: /* four banks */
3308			rdev->config.evergreen.tile_config |= 0 << 4;
3309			break;
3310		case 1: /* eight banks */
3311			rdev->config.evergreen.tile_config |= 1 << 4;
3312			break;
3313		case 2: /* sixteen banks */
3314		default:
3315			rdev->config.evergreen.tile_config |= 2 << 4;
3316			break;
3317		}
3318	}
3319	rdev->config.evergreen.tile_config |= 0 << 8;
3320	rdev->config.evergreen.tile_config |=
3321		((gb_addr_config & 0x30000000) >> 28) << 12;
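	/* e.g. an 8-pipe, 8-bank discrete board with a row_size field of 0
	 * packs as (3 << 0) | (1 << 4) | (0 << 8) | (0 << 12) = 0x13 */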
3322
3323	num_shader_engines = ((gb_addr_config & NUM_SHADER_ENGINES(3)) >> 12) + 1;
3324
3325	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3326		u32 efuse_straps_4;
3327		u32 efuse_straps_3;
3328
3329		efuse_straps_4 = RREG32_RCU(0x204);
3330		efuse_straps_3 = RREG32_RCU(0x203);
3331		tmp = (((efuse_straps_4 & 0xf) << 4) |
3332		      ((efuse_straps_3 & 0xf0000000) >> 28));
3333	} else {
3334		tmp = 0;
3335		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3336			u32 rb_disable_bitmap;
3337
3338			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3339			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3340			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3341			tmp <<= 4;
3342			tmp |= rb_disable_bitmap;
3343		}
3344	}
3345	/* the enabled RBs are just the ones not disabled */
3346	disabled_rb_mask = tmp;
3347	tmp = 0;
3348	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3349		tmp |= (1 << i);
3350	/* if all the backends are disabled, fix it up here */
3351	if ((disabled_rb_mask & tmp) == tmp) {
3352		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3353			disabled_rb_mask &= ~(1 << i);
3354	}
3355
3356	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3357	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3358
3359	WREG32(GB_ADDR_CONFIG, gb_addr_config);
3360	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3361	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3362	WREG32(DMA_TILING_CONFIG, gb_addr_config);
3363	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3364	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3365	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3366
3367	if ((rdev->config.evergreen.max_backends == 1) &&
3368	    (rdev->flags & RADEON_IS_IGP)) {
3369		if ((disabled_rb_mask & 3) == 1) {
3370			/* RB0 disabled, RB1 enabled */
3371			tmp = 0x11111111;
3372		} else {
3373			/* RB1 disabled, RB0 enabled */
3374			tmp = 0x00000000;
3375		}
3376	} else {
3377		tmp = gb_addr_config & NUM_PIPES_MASK;
3378		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3379						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3380	}
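	/* each nibble of GB_BACKEND_MAP routes one pipe to a render backend,
	 * so 0x11111111 above points every pipe at RB1 (editor's reading of
	 * the remap encoding) */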
3381	WREG32(GB_BACKEND_MAP, tmp);
3382
3383	WREG32(CGTS_SYS_TCC_DISABLE, 0);
3384	WREG32(CGTS_TCC_DISABLE, 0);
3385	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3386	WREG32(CGTS_USER_TCC_DISABLE, 0);
3387
3388	/* set HW defaults for 3D engine */
3389	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3390				     ROQ_IB2_START(0x2b)));
3391
3392	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3393
3394	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3395			     SYNC_GRADIENT |
3396			     SYNC_WALKER |
3397			     SYNC_ALIGNER));
3398
3399	sx_debug_1 = RREG32(SX_DEBUG_1);
3400	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3401	WREG32(SX_DEBUG_1, sx_debug_1);
3402
3403
3404	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3405	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3406	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3407	WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3408
3409	if (rdev->family <= CHIP_SUMO2)
3410		WREG32(SMX_SAR_CTL0, 0x00010000);
3411
3412	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3413					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3414					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3415
3416	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3417				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3418				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3419
3420	WREG32(VGT_NUM_INSTANCES, 1);
3421	WREG32(SPI_CONFIG_CNTL, 0);
3422	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3423	WREG32(CP_PERFMON_CNTL, 0);
3424
3425	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3426				  FETCH_FIFO_HIWATER(0x4) |
3427				  DONE_FIFO_HIWATER(0xe0) |
3428				  ALU_UPDATE_FIFO_HIWATER(0x8)));
3429
3430	sq_config = RREG32(SQ_CONFIG);
3431	sq_config &= ~(PS_PRIO(3) |
3432		       VS_PRIO(3) |
3433		       GS_PRIO(3) |
3434		       ES_PRIO(3));
3435	sq_config |= (VC_ENABLE |
3436		      EXPORT_SRC_C |
3437		      PS_PRIO(0) |
3438		      VS_PRIO(1) |
3439		      GS_PRIO(2) |
3440		      ES_PRIO(3));
3441
3442	switch (rdev->family) {
3443	case CHIP_CEDAR:
3444	case CHIP_PALM:
3445	case CHIP_SUMO:
3446	case CHIP_SUMO2:
3447	case CHIP_CAICOS:
3448		/* no vertex cache */
3449		sq_config &= ~VC_ENABLE;
3450		break;
3451	default:
3452		break;
3453	}
3454
3455	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3456
3457	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3458	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3459	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3460	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3461	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3462	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3463	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
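	/* with max_gprs = 256 and 4 * 2 clause-temp GPRs held back, the split
	 * above works out to 248 * 12 / 32 = 93 PS, 46 VS, 31 GS/ES and
	 * 23 HS/LS GPRs */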
3464
3465	switch (rdev->family) {
3466	case CHIP_CEDAR:
3467	case CHIP_PALM:
3468	case CHIP_SUMO:
3469	case CHIP_SUMO2:
3470		ps_thread_count = 96;
3471		break;
3472	default:
3473		ps_thread_count = 128;
3474		break;
3475	}
3476
3477	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3478	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3479	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3480	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3481	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3482	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
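	/* e.g. 248 threads with ps_thread_count = 128 leaves 120, which split
	 * six ways and rounded down to a multiple of 8 gives 16 threads for
	 * each of VS/GS/ES/HS/LS */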
3483
3484	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3485	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3486	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3487	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3488	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3489	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3490
3491	WREG32(SQ_CONFIG, sq_config);
3492	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3493	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3494	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3495	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3496	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3497	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3498	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3499	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3500	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3501	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3502
3503	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3504					  FORCE_EOV_MAX_REZ_CNT(255)));
3505
3506	switch (rdev->family) {
3507	case CHIP_CEDAR:
3508	case CHIP_PALM:
3509	case CHIP_SUMO:
3510	case CHIP_SUMO2:
3511	case CHIP_CAICOS:
3512		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3513		break;
3514	default:
3515		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3516		break;
3517	}
3518	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3519	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3520
3521	WREG32(VGT_GS_VERTEX_REUSE, 16);
3522	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3523	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3524
3525	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3526	WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3527
3528	WREG32(CB_PERF_CTR0_SEL_0, 0);
3529	WREG32(CB_PERF_CTR0_SEL_1, 0);
3530	WREG32(CB_PERF_CTR1_SEL_0, 0);
3531	WREG32(CB_PERF_CTR1_SEL_1, 0);
3532	WREG32(CB_PERF_CTR2_SEL_0, 0);
3533	WREG32(CB_PERF_CTR2_SEL_1, 0);
3534	WREG32(CB_PERF_CTR3_SEL_0, 0);
3535	WREG32(CB_PERF_CTR3_SEL_1, 0);
3536
3537	/* clear render buffer base addresses */
3538	WREG32(CB_COLOR0_BASE, 0);
3539	WREG32(CB_COLOR1_BASE, 0);
3540	WREG32(CB_COLOR2_BASE, 0);
3541	WREG32(CB_COLOR3_BASE, 0);
3542	WREG32(CB_COLOR4_BASE, 0);
3543	WREG32(CB_COLOR5_BASE, 0);
3544	WREG32(CB_COLOR6_BASE, 0);
3545	WREG32(CB_COLOR7_BASE, 0);
3546	WREG32(CB_COLOR8_BASE, 0);
3547	WREG32(CB_COLOR9_BASE, 0);
3548	WREG32(CB_COLOR10_BASE, 0);
3549	WREG32(CB_COLOR11_BASE, 0);
3550
3551	/* set the shader const cache sizes to 0 */
3552	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3553		WREG32(i, 0);
3554	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3555		WREG32(i, 0);
3556
3557	tmp = RREG32(HDP_MISC_CNTL);
3558	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3559	WREG32(HDP_MISC_CNTL, tmp);
3560
3561	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3562	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3563
3564	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3565
3566	udelay(50);
3567
3568}
3569
3570int evergreen_mc_init(struct radeon_device *rdev)
3571{
3572	u32 tmp;
3573	int chansize, numchan;
3574
3575	/* Get VRAM information */
3576	rdev->mc.vram_is_ddr = true;
3577	if ((rdev->family == CHIP_PALM) ||
3578	    (rdev->family == CHIP_SUMO) ||
3579	    (rdev->family == CHIP_SUMO2))
3580		tmp = RREG32(FUS_MC_ARB_RAMCFG);
3581	else
3582		tmp = RREG32(MC_ARB_RAMCFG);
3583	if (tmp & CHANSIZE_OVERRIDE) {
3584		chansize = 16;
3585	} else if (tmp & CHANSIZE_MASK) {
3586		chansize = 64;
3587	} else {
3588		chansize = 32;
3589	}
3590	tmp = RREG32(MC_SHARED_CHMAP);
3591	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3592	case 0:
3593	default:
3594		numchan = 1;
3595		break;
3596	case 1:
3597		numchan = 2;
3598		break;
3599	case 2:
3600		numchan = 4;
3601		break;
3602	case 3:
3603		numchan = 8;
3604		break;
3605	}
3606	rdev->mc.vram_width = numchan * chansize;
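	/* e.g. two 64-bit channels report an effective 128-bit memory bus */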
3607	/* Could aper size report 0? */
3608	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3609	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3610	/* Setup GPU memory space */
3611	if ((rdev->family == CHIP_PALM) ||
3612	    (rdev->family == CHIP_SUMO) ||
3613	    (rdev->family == CHIP_SUMO2)) {
3614		/* size in bytes on fusion */
3615		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3616		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3617	} else {
3618		/* size in MB on evergreen/cayman/tn */
3619		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3620		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3621	}
3622	rdev->mc.visible_vram_size = rdev->mc.aper_size;
3623	r700_vram_gtt_location(rdev, &rdev->mc);
3624	radeon_update_bandwidth_info(rdev);
3625
3626	return 0;
3627}
3628
3629void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
3630{
3631	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
3632		RREG32(GRBM_STATUS));
3633	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
3634		RREG32(GRBM_STATUS_SE0));
3635	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
3636		RREG32(GRBM_STATUS_SE1));
3637	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
3638		RREG32(SRBM_STATUS));
3639	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
3640		RREG32(SRBM_STATUS2));
3641	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
3642		RREG32(CP_STALLED_STAT1));
3643	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
3644		RREG32(CP_STALLED_STAT2));
3645	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
3646		RREG32(CP_BUSY_STAT));
3647	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
3648		RREG32(CP_STAT));
3649	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
3650		RREG32(DMA_STATUS_REG));
3651	if (rdev->family >= CHIP_CAYMAN) {
3652		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
3653			 RREG32(DMA_STATUS_REG + 0x800));
3654	}
3655}
3656
3657bool evergreen_is_display_hung(struct radeon_device *rdev)
3658{
3659	u32 crtc_hung = 0;
3660	u32 crtc_status[6];
3661	u32 i, j, tmp;
3662
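	/* sample the HV counter of every enabled CRTC, then re-read it up to
	 * ten times at 100 us intervals; a counter that never advances means
	 * that display path is hung */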
3663	for (i = 0; i < rdev->num_crtc; i++) {
3664		if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3665			crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3666			crtc_hung |= (1 << i);
3667		}
3668	}
3669
3670	for (j = 0; j < 10; j++) {
3671		for (i = 0; i < rdev->num_crtc; i++) {
3672			if (crtc_hung & (1 << i)) {
3673				tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3674				if (tmp != crtc_status[i])
3675					crtc_hung &= ~(1 << i);
3676			}
3677		}
3678		if (crtc_hung == 0)
3679			return false;
3680		udelay(100);
3681	}
3682
3683	return true;
3684}
3685
3686u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
3687{
3688	u32 reset_mask = 0;
3689	u32 tmp;
3690
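	/* translate busy/pending bits from the various status registers into
	 * a RADEON_RESET_* mask telling the caller which blocks need a soft
	 * reset */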
3691	/* GRBM_STATUS */
3692	tmp = RREG32(GRBM_STATUS);
3693	if (tmp & (PA_BUSY | SC_BUSY |
3694		   SH_BUSY | SX_BUSY |
3695		   TA_BUSY | VGT_BUSY |
3696		   DB_BUSY | CB_BUSY |
3697		   SPI_BUSY | VGT_BUSY_NO_DMA))
3698		reset_mask |= RADEON_RESET_GFX;
3699
3700	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
3701		   CP_BUSY | CP_COHERENCY_BUSY))
3702		reset_mask |= RADEON_RESET_CP;
3703
3704	if (tmp & GRBM_EE_BUSY)
3705		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;
3706
3707	/* DMA_STATUS_REG */
3708	tmp = RREG32(DMA_STATUS_REG);
3709	if (!(tmp & DMA_IDLE))
3710		reset_mask |= RADEON_RESET_DMA;
3711
3712	/* SRBM_STATUS2 */
3713	tmp = RREG32(SRBM_STATUS2);
3714	if (tmp & DMA_BUSY)
3715		reset_mask |= RADEON_RESET_DMA;
3716
3717	/* SRBM_STATUS */
3718	tmp = RREG32(SRBM_STATUS);
3719	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
3720		reset_mask |= RADEON_RESET_RLC;
3721
3722	if (tmp & IH_BUSY)
3723		reset_mask |= RADEON_RESET_IH;
3724
3725	if (tmp & SEM_BUSY)
3726		reset_mask |= RADEON_RESET_SEM;
3727
3728	if (tmp & GRBM_RQ_PENDING)
3729		reset_mask |= RADEON_RESET_GRBM;
3730
3731	if (tmp & VMC_BUSY)
3732		reset_mask |= RADEON_RESET_VMC;
3733
3734	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
3735		   MCC_BUSY | MCD_BUSY))
3736		reset_mask |= RADEON_RESET_MC;
3737
3738	if (evergreen_is_display_hung(rdev))
3739		reset_mask |= RADEON_RESET_DISPLAY;
3740
3741	/* VM_L2_STATUS */
3742	tmp = RREG32(VM_L2_STATUS);
3743	if (tmp & L2_BUSY)
3744		reset_mask |= RADEON_RESET_VMC;
3745
3746	/* Skip MC reset as it's most likely not hung, just busy */
3747	if (reset_mask & RADEON_RESET_MC) {
3748		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
3749		reset_mask &= ~RADEON_RESET_MC;
3750	}
3751
3752	return reset_mask;
3753}
3754
3755static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
3756{
3757	struct evergreen_mc_save save;
3758	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
3759	u32 tmp;
3760
3761	if (reset_mask == 0)
3762		return;
3763
3764	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);
3765
3766	evergreen_print_gpu_status_regs(rdev);
3767
3768	/* Disable CP parsing/prefetching */
3769	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
3770
3771	if (reset_mask & RADEON_RESET_DMA) {
3772		/* Disable DMA */
3773		tmp = RREG32(DMA_RB_CNTL);
3774		tmp &= ~DMA_RB_ENABLE;
3775		WREG32(DMA_RB_CNTL, tmp);
3776	}
3777
3778	udelay(50);
3779
3780	evergreen_mc_stop(rdev, &save);
3781	if (evergreen_mc_wait_for_idle(rdev)) {
3782		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
3783	}
3784
3785	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
3786		grbm_soft_reset |= SOFT_RESET_DB |
3787			SOFT_RESET_CB |
3788			SOFT_RESET_PA |
3789			SOFT_RESET_SC |
3790			SOFT_RESET_SPI |
3791			SOFT_RESET_SX |
3792			SOFT_RESET_SH |
3793			SOFT_RESET_TC |
3794			SOFT_RESET_TA |
3795			SOFT_RESET_VC |
3796			SOFT_RESET_VGT;
3797	}
3798
3799	if (reset_mask & RADEON_RESET_CP) {
3800		grbm_soft_reset |= SOFT_RESET_CP |
3801			SOFT_RESET_VGT;
3802
3803		srbm_soft_reset |= SOFT_RESET_GRBM;
3804	}
3805
3806	if (reset_mask & RADEON_RESET_DMA)
3807		srbm_soft_reset |= SOFT_RESET_DMA;
3808
3809	if (reset_mask & RADEON_RESET_DISPLAY)
3810		srbm_soft_reset |= SOFT_RESET_DC;
3811
3812	if (reset_mask & RADEON_RESET_RLC)
3813		srbm_soft_reset |= SOFT_RESET_RLC;
3814
3815	if (reset_mask & RADEON_RESET_SEM)
3816		srbm_soft_reset |= SOFT_RESET_SEM;
3817
3818	if (reset_mask & RADEON_RESET_IH)
3819		srbm_soft_reset |= SOFT_RESET_IH;
3820
3821	if (reset_mask & RADEON_RESET_GRBM)
3822		srbm_soft_reset |= SOFT_RESET_GRBM;
3823
3824	if (reset_mask & RADEON_RESET_VMC)
3825		srbm_soft_reset |= SOFT_RESET_VMC;
3826
3827	if (!(rdev->flags & RADEON_IS_IGP)) {
3828		if (reset_mask & RADEON_RESET_MC)
3829			srbm_soft_reset |= SOFT_RESET_MC;
3830	}
3831
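	/* pulse each reset: set the bits, read back to post the write, hold
	 * for ~50 us, then clear the bits and read back again */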
3832	if (grbm_soft_reset) {
3833		tmp = RREG32(GRBM_SOFT_RESET);
3834		tmp |= grbm_soft_reset;
3835		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
3836		WREG32(GRBM_SOFT_RESET, tmp);
3837		tmp = RREG32(GRBM_SOFT_RESET);
3838
3839		udelay(50);
3840
3841		tmp &= ~grbm_soft_reset;
3842		WREG32(GRBM_SOFT_RESET, tmp);
3843		tmp = RREG32(GRBM_SOFT_RESET);
3844	}
3845
3846	if (srbm_soft_reset) {
3847		tmp = RREG32(SRBM_SOFT_RESET);
3848		tmp |= srbm_soft_reset;
3849		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
3850		WREG32(SRBM_SOFT_RESET, tmp);
3851		tmp = RREG32(SRBM_SOFT_RESET);
3852
3853		udelay(50);
3854
3855		tmp &= ~srbm_soft_reset;
3856		WREG32(SRBM_SOFT_RESET, tmp);
3857		tmp = RREG32(SRBM_SOFT_RESET);
3858	}
3859
3860	/* Wait a little for things to settle down */
3861	udelay(50);
3862
3863	evergreen_mc_resume(rdev, &save);
3864	udelay(50);
3865
3866	evergreen_print_gpu_status_regs(rdev);
3867}
3868
3869void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
3870{
3871	struct evergreen_mc_save save;
3872	u32 tmp, i;
3873
3874	dev_info(rdev->dev, "GPU pci config reset\n");
3875
3876	/* disable dpm? */
3877
3878	/* Disable CP parsing/prefetching */
3879	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
3880	udelay(50);
3881	/* Disable DMA */
3882	tmp = RREG32(DMA_RB_CNTL);
3883	tmp &= ~DMA_RB_ENABLE;
3884	WREG32(DMA_RB_CNTL, tmp);
3885	/* XXX other engines? */
3886
3887	/* halt the rlc */
3888	r600_rlc_stop(rdev);
3889
3890	udelay(50);
3891
3892	/* set mclk/sclk to bypass */
3893	rv770_set_clk_bypass_mode(rdev);
3894	/* disable BM */
3895	pci_clear_master(rdev->pdev);
3896	/* disable mem access */
3897	evergreen_mc_stop(rdev, &save);
3898	if (evergreen_mc_wait_for_idle(rdev)) {
3899		dev_warn(rdev->dev, "Wait for MC idle timed out!\n");
3900	}
3901	/* reset */
3902	radeon_pci_config_reset(rdev);
3903	/* wait for asic to come out of reset */
3904	for (i = 0; i < rdev->usec_timeout; i++) {
3905		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
3906			break;
3907		udelay(1);
3908	}
3909}
3910
3911int evergreen_asic_reset(struct radeon_device *rdev)
3912{
3913	u32 reset_mask;
3914
3915	reset_mask = evergreen_gpu_check_soft_reset(rdev);
3916
3917	if (reset_mask)
3918		r600_set_bios_scratch_engine_hung(rdev, true);
3919
3920	/* try soft reset */
3921	evergreen_gpu_soft_reset(rdev, reset_mask);
3922
3923	reset_mask = evergreen_gpu_check_soft_reset(rdev);
3924
3925	/* try pci config reset */
3926	if (reset_mask && radeon_hard_reset)
3927		evergreen_gpu_pci_config_reset(rdev);
3928
3929	reset_mask = evergreen_gpu_check_soft_reset(rdev);
3930
3931	if (!reset_mask)
3932		r600_set_bios_scratch_engine_hung(rdev, false);
3933
3934	return 0;
3935}
3936
3937/**
3938 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3939 *
3940 * @rdev: radeon_device pointer
3941 * @ring: radeon_ring structure holding ring information
3942 *
3943 * Check if the GFX engine is locked up.
3944 * Returns true if the engine appears to be locked up, false if not.
3945 */
3946bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3947{
3948	u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3949
3950	if (!(reset_mask & (RADEON_RESET_GFX |
3951			    RADEON_RESET_COMPUTE |
3952			    RADEON_RESET_CP))) {
3953		radeon_ring_lockup_update(rdev, ring);
3954		return false;
3955	}
3956	return radeon_ring_test_lockup(rdev, ring);
3957}
3958
3959/*
3960 * RLC
3961 */
3962#define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
3963#define RLC_CLEAR_STATE_END_MARKER          0x00000001
3964
3965void sumo_rlc_fini(struct radeon_device *rdev)
3966{
3967	int r;
3968
3969	/* save restore block */
3970	if (rdev->rlc.save_restore_obj) {
3971		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
3972		if (unlikely(r != 0))
3973			dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
3974		radeon_bo_unpin(rdev->rlc.save_restore_obj);
3975		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3976
3977		radeon_bo_unref(&rdev->rlc.save_restore_obj);
3978		rdev->rlc.save_restore_obj = NULL;
3979	}
3980
3981	/* clear state block */
3982	if (rdev->rlc.clear_state_obj) {
3983		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
3984		if (unlikely(r != 0))
3985			dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
3986		radeon_bo_unpin(rdev->rlc.clear_state_obj);
3987		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
3988
3989		radeon_bo_unref(&rdev->rlc.clear_state_obj);
3990		rdev->rlc.clear_state_obj = NULL;
3991	}
3992
3993	/* cp table block */
3994	if (rdev->rlc.cp_table_obj) {
3995		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
3996		if (unlikely(r != 0))
3997			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
3998		radeon_bo_unpin(rdev->rlc.cp_table_obj);
3999		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4000
4001		radeon_bo_unref(&rdev->rlc.cp_table_obj);
4002		rdev->rlc.cp_table_obj = NULL;
4003	}
4004}
4005
4006#define CP_ME_TABLE_SIZE    96
4007
4008int sumo_rlc_init(struct radeon_device *rdev)
4009{
4010	const u32 *src_ptr;
4011	volatile u32 *dst_ptr;
4012	u32 dws, data, i, j, k, reg_num;
4013	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
4014	u64 reg_list_mc_addr;
4015	const struct cs_section_def *cs_data;
4016	int r;
4017
4018	src_ptr = rdev->rlc.reg_list;
4019	dws = rdev->rlc.reg_list_size;
4020	if (rdev->family >= CHIP_BONAIRE) {
4021		dws += (5 * 16) + 48 + 48 + 64;
4022	}
4023	cs_data = rdev->rlc.cs_data;
4024
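	/* each RLC block below follows the usual radeon BO pattern: create in
	 * VRAM, reserve + pin for a GPU address, kmap to fill the contents,
	 * then kunmap and unreserve (sumo_rlc_fini tears the rest down) */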
4025	if (src_ptr) {
4026		/* save restore block */
4027		if (rdev->rlc.save_restore_obj == NULL) {
4028			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4029					     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.save_restore_obj);
4030			if (r) {
4031				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
4032				return r;
4033			}
4034		}
4035
4036		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4037		if (unlikely(r != 0)) {
4038			sumo_rlc_fini(rdev);
4039			return r;
4040		}
4041		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
4042				  &rdev->rlc.save_restore_gpu_addr);
4043		if (r) {
4044			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4045			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
4046			sumo_rlc_fini(rdev);
4047			return r;
4048		}
4049
4050		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
4051		if (r) {
4052			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
4053			sumo_rlc_fini(rdev);
4054			return r;
4055		}
4056		/* write the sr buffer */
4057		dst_ptr = rdev->rlc.sr_ptr;
4058		if (rdev->family >= CHIP_TAHITI) {
4059			/* SI */
4060			for (i = 0; i < rdev->rlc.reg_list_size; i++)
4061				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
4062		} else {
4063			/* ON/LN/TN */
4064			/* format:
4065			 * dw0: (reg2 << 16) | reg1
4066			 * dw1: reg1 save space
4067			 * dw2: reg2 save space
4068			 */
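			/* e.g. register byte offsets 0x8000 and 0x8004 pack
			 * as dw0 = (0x2001 << 16) | 0x2000 (offsets >> 2),
			 * followed by their two save slots */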
4069			for (i = 0; i < dws; i++) {
4070				data = src_ptr[i] >> 2;
4071				i++;
4072				if (i < dws)
4073					data |= (src_ptr[i] >> 2) << 16;
4074				j = (((i - 1) * 3) / 2);
4075				dst_ptr[j] = cpu_to_le32(data);
4076			}
4077			j = ((i * 3) / 2);
4078			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
4079		}
4080		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
4081		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4082	}
4083
4084	if (cs_data) {
4085		/* clear state block */
4086		if (rdev->family >= CHIP_BONAIRE) {
4087			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
4088		} else if (rdev->family >= CHIP_TAHITI) {
4089			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
4090			dws = rdev->rlc.clear_state_size + (256 / 4);
4091		} else {
4092			reg_list_num = 0;
4093			dws = 0;
4094			for (i = 0; cs_data[i].section != NULL; i++) {
4095				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4096					reg_list_num++;
4097					dws += cs_data[i].section[j].reg_count;
4098				}
4099			}
4100			reg_list_blk_index = (3 * reg_list_num + 2);
4101			dws += reg_list_blk_index;
4102			rdev->rlc.clear_state_size = dws;
4103		}
4104
4105		if (rdev->rlc.clear_state_obj == NULL) {
4106			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
4107					     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.clear_state_obj);
4108			if (r) {
4109				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
4110				sumo_rlc_fini(rdev);
4111				return r;
4112			}
4113		}
4114		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4115		if (unlikely(r != 0)) {
4116			sumo_rlc_fini(rdev);
4117			return r;
4118		}
4119		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
4120				  &rdev->rlc.clear_state_gpu_addr);
4121		if (r) {
4122			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4123			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
4124			sumo_rlc_fini(rdev);
4125			return r;
4126		}
4127
4128		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
4129		if (r) {
4130			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
4131			sumo_rlc_fini(rdev);
4132			return r;
4133		}
4134		/* set up the cs buffer */
4135		dst_ptr = rdev->rlc.cs_ptr;
4136		if (rdev->family >= CHIP_BONAIRE) {
4137			cik_get_csb_buffer(rdev, dst_ptr);
4138		} else if (rdev->family >= CHIP_TAHITI) {
4139			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
4140			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
4141			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
4142			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
4143			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
4144		} else {
4145			reg_list_hdr_blk_index = 0;
4146			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
4147			data = upper_32_bits(reg_list_mc_addr);
4148			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4149			reg_list_hdr_blk_index++;
4150			for (i = 0; cs_data[i].section != NULL; i++) {
4151				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
4152					reg_num = cs_data[i].section[j].reg_count;
4153					data = reg_list_mc_addr & 0xffffffff;
4154					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4155					reg_list_hdr_blk_index++;
4156
4157					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
4158					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4159					reg_list_hdr_blk_index++;
4160
4161					data = 0x08000000 | (reg_num * 4);
4162					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
4163					reg_list_hdr_blk_index++;
4164
4165					for (k = 0; k < reg_num; k++) {
4166						data = cs_data[i].section[j].extent[k];
4167						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
4168					}
4169					reg_list_mc_addr += reg_num * 4;
4170					reg_list_blk_index += reg_num;
4171				}
4172			}
4173			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
4174		}
4175		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
4176		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4177	}
4178
4179	if (rdev->rlc.cp_table_size) {
4180		if (rdev->rlc.cp_table_obj == NULL) {
4181			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size, PAGE_SIZE, true,
4182					     RADEON_GEM_DOMAIN_VRAM, NULL, &rdev->rlc.cp_table_obj);
4183			if (r) {
4184				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
4185				sumo_rlc_fini(rdev);
4186				return r;
4187			}
4188		}
4189
4190		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4191		if (unlikely(r != 0)) {
4192			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4193			sumo_rlc_fini(rdev);
4194			return r;
4195		}
4196		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
4197				  &rdev->rlc.cp_table_gpu_addr);
4198		if (r) {
4199			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4200			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
4201			sumo_rlc_fini(rdev);
4202			return r;
4203		}
4204		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
4205		if (r) {
4206			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
4207			sumo_rlc_fini(rdev);
4208			return r;
4209		}
4210
4211		cik_init_cp_pg_table(rdev);
4212
4213		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
4214		radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4215
4216	}
4217
4218	return 0;
4219}
4220
4221static void evergreen_rlc_start(struct radeon_device *rdev)
4222{
4223	u32 mask = RLC_ENABLE;
4224
4225	if (rdev->flags & RADEON_IS_IGP) {
4226		mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4227	}
4228
4229	WREG32(RLC_CNTL, mask);
4230}
4231
4232int evergreen_rlc_resume(struct radeon_device *rdev)
4233{
4234	u32 i;
4235	const __be32 *fw_data;
4236
4237	if (!rdev->rlc_fw)
4238		return -EINVAL;
4239
4240	r600_rlc_stop(rdev);
4241
4242	WREG32(RLC_HB_CNTL, 0);
4243
4244	if (rdev->flags & RADEON_IS_IGP) {
4245		if (rdev->family == CHIP_ARUBA) {
4246			u32 always_on_bitmap =
4247				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4248			/* find out the number of active simds */
4249			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4250			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4251			tmp = hweight32(~tmp);
4252			if (tmp == rdev->config.cayman.max_simds_per_se) {
4253				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4254				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4255				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4256				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4257				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4258			}
4259		} else {
4260			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4261			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4262		}
4263		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4264		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4265	} else {
4266		WREG32(RLC_HB_BASE, 0);
4267		WREG32(RLC_HB_RPTR, 0);
4268		WREG32(RLC_HB_WPTR, 0);
4269		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4270		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4271	}
4272	WREG32(RLC_MC_CNTL, 0);
4273	WREG32(RLC_UCODE_CNTL, 0);
4274
4275	fw_data = (const __be32 *)rdev->rlc_fw->data;
4276	if (rdev->family >= CHIP_ARUBA) {
4277		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4278			WREG32(RLC_UCODE_ADDR, i);
4279			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4280		}
4281	} else if (rdev->family >= CHIP_CAYMAN) {
4282		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4283			WREG32(RLC_UCODE_ADDR, i);
4284			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4285		}
4286	} else {
4287		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4288			WREG32(RLC_UCODE_ADDR, i);
4289			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4290		}
4291	}
4292	WREG32(RLC_UCODE_ADDR, 0);
4293
4294	evergreen_rlc_start(rdev);
4295
4296	return 0;
4297}
4298
4299/* Interrupts */
4300
4301u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4302{
4303	if (crtc >= rdev->num_crtc)
4304		return 0;
4305	else
4306		return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4307}
4308
4309void evergreen_disable_interrupt_state(struct radeon_device *rdev)
4310{
4311	u32 tmp;
4312
4313	if (rdev->family >= CHIP_CAYMAN) {
4314		cayman_cp_int_cntl_setup(rdev, 0,
4315					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4316		cayman_cp_int_cntl_setup(rdev, 1, 0);
4317		cayman_cp_int_cntl_setup(rdev, 2, 0);
4318		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4319		WREG32(CAYMAN_DMA1_CNTL, tmp);
4320	} else
4321		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
4322	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4323	WREG32(DMA_CNTL, tmp);
4324	WREG32(GRBM_INT_CNTL, 0);
4325	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4326	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4327	if (rdev->num_crtc >= 4) {
4328		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4329		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4330	}
4331	if (rdev->num_crtc >= 6) {
4332		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4333		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4334	}
4335
4336	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
4337	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
4338	if (rdev->num_crtc >= 4) {
4339		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
4340		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
4341	}
4342	if (rdev->num_crtc >= 6) {
4343		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
4344		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
4345	}
4346
4347	/* only one DAC on DCE5 */
4348	if (!ASIC_IS_DCE5(rdev))
4349		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
4350	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);
4351
4352	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4353	WREG32(DC_HPD1_INT_CONTROL, tmp);
4354	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4355	WREG32(DC_HPD2_INT_CONTROL, tmp);
4356	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4357	WREG32(DC_HPD3_INT_CONTROL, tmp);
4358	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4359	WREG32(DC_HPD4_INT_CONTROL, tmp);
4360	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4361	WREG32(DC_HPD5_INT_CONTROL, tmp);
4362	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
4363	WREG32(DC_HPD6_INT_CONTROL, tmp);
4364
4365}
4366
4367int evergreen_irq_set(struct radeon_device *rdev)
4368{
4369	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4370	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4371	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4372	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4373	u32 grbm_int_cntl = 0;
4374	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4375	u32 dma_cntl, dma_cntl1 = 0;
4376	u32 thermal_int = 0;
4377
4378	if (!rdev->irq.installed) {
4379		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4380		return -EINVAL;
4381	}
4382	/* don't enable anything if the ih is disabled */
4383	if (!rdev->ih.enabled) {
4384		r600_disable_interrupts(rdev);
4385		/* force the active interrupt state to all disabled */
4386		evergreen_disable_interrupt_state(rdev);
4387		return 0;
4388	}
4389
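	/* read every interrupt control register with its enable bits masked
	 * off here; the requested enables are ORed back in below and all the
	 * registers are written out in one pass at the end */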
4390	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
4391	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
4392	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
4393	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
4394	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
4395	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
4396	if (rdev->family == CHIP_ARUBA)
4397		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4398			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4399	else
4400		thermal_int = RREG32(CG_THERMAL_INT) &
4401			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4402
4403	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4404	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4405	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4406	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4407	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4408	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4409
4410	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4411
4412	if (rdev->family >= CHIP_CAYMAN) {
4413		/* enable CP interrupts on all rings */
4414		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4415			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4416			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4417		}
4418		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4419			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4420			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4421		}
4422		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4423			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4424			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4425		}
4426	} else {
4427		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4428			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4429			cp_int_cntl |= RB_INT_ENABLE;
4430			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4431		}
4432	}
4433
4434	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4435		DRM_DEBUG("evergreen_irq_set: sw int dma\n");
4436		dma_cntl |= TRAP_ENABLE;
4437	}
4438
4439	if (rdev->family >= CHIP_CAYMAN) {
4440		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4441		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4442			DRM_DEBUG("evergreen_irq_set: sw int dma1\n");
4443			dma_cntl1 |= TRAP_ENABLE;
4444		}
4445	}
4446
4447	if (rdev->irq.dpm_thermal) {
4448		DRM_DEBUG("dpm thermal\n");
4449		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4450	}
4451
4452	if (rdev->irq.crtc_vblank_int[0] ||
4453	    atomic_read(&rdev->irq.pflip[0])) {
4454		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4455		crtc1 |= VBLANK_INT_MASK;
4456	}
4457	if (rdev->irq.crtc_vblank_int[1] ||
4458	    atomic_read(&rdev->irq.pflip[1])) {
4459		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4460		crtc2 |= VBLANK_INT_MASK;
4461	}
4462	if (rdev->irq.crtc_vblank_int[2] ||
4463	    atomic_read(&rdev->irq.pflip[2])) {
4464		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4465		crtc3 |= VBLANK_INT_MASK;
4466	}
4467	if (rdev->irq.crtc_vblank_int[3] ||
4468	    atomic_read(&rdev->irq.pflip[3])) {
4469		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4470		crtc4 |= VBLANK_INT_MASK;
4471	}
4472	if (rdev->irq.crtc_vblank_int[4] ||
4473	    atomic_read(&rdev->irq.pflip[4])) {
4474		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4475		crtc5 |= VBLANK_INT_MASK;
4476	}
4477	if (rdev->irq.crtc_vblank_int[5] ||
4478	    atomic_read(&rdev->irq.pflip[5])) {
4479		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4480		crtc6 |= VBLANK_INT_MASK;
4481	}
4482	if (rdev->irq.hpd[0]) {
4483		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4484		hpd1 |= DC_HPDx_INT_EN;
4485	}
4486	if (rdev->irq.hpd[1]) {
4487		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4488		hpd2 |= DC_HPDx_INT_EN;
4489	}
4490	if (rdev->irq.hpd[2]) {
4491		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4492		hpd3 |= DC_HPDx_INT_EN;
4493	}
4494	if (rdev->irq.hpd[3]) {
4495		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4496		hpd4 |= DC_HPDx_INT_EN;
4497	}
4498	if (rdev->irq.hpd[4]) {
4499		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4500		hpd5 |= DC_HPDx_INT_EN;
4501	}
4502	if (rdev->irq.hpd[5]) {
4503		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4504		hpd6 |= DC_HPDx_INT_EN;
4505	}
4506	if (rdev->irq.afmt[0]) {
4507		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4508		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4509	}
4510	if (rdev->irq.afmt[1]) {
4511		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4512		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4513	}
4514	if (rdev->irq.afmt[2]) {
4515		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4516		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4517	}
4518	if (rdev->irq.afmt[3]) {
4519		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4520		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4521	}
4522	if (rdev->irq.afmt[4]) {
4523		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4524		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4525	}
4526	if (rdev->irq.afmt[5]) {
4527		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4528		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4529	}
4530
4531	if (rdev->family >= CHIP_CAYMAN) {
4532		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4533		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4534		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4535	} else
4536		WREG32(CP_INT_CNTL, cp_int_cntl);
4537
4538	WREG32(DMA_CNTL, dma_cntl);
4539
4540	if (rdev->family >= CHIP_CAYMAN)
4541		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4542
4543	WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4544
4545	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4546	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4547	if (rdev->num_crtc >= 4) {
4548		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4549		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4550	}
4551	if (rdev->num_crtc >= 6) {
4552		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4553		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4554	}
4555
4556	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
4557	       GRPH_PFLIP_INT_MASK);
4558	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
4559	       GRPH_PFLIP_INT_MASK);
4560	if (rdev->num_crtc >= 4) {
4561		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
4562		       GRPH_PFLIP_INT_MASK);
4563		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
4564		       GRPH_PFLIP_INT_MASK);
4565	}
4566	if (rdev->num_crtc >= 6) {
4567		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
4568		       GRPH_PFLIP_INT_MASK);
4569		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
4570		       GRPH_PFLIP_INT_MASK);
4571	}
4572
4573	WREG32(DC_HPD1_INT_CONTROL, hpd1);
4574	WREG32(DC_HPD2_INT_CONTROL, hpd2);
4575	WREG32(DC_HPD3_INT_CONTROL, hpd3);
4576	WREG32(DC_HPD4_INT_CONTROL, hpd4);
4577	WREG32(DC_HPD5_INT_CONTROL, hpd5);
4578	WREG32(DC_HPD6_INT_CONTROL, hpd6);
4579	if (rdev->family == CHIP_ARUBA)
4580		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4581	else
4582		WREG32(CG_THERMAL_INT, thermal_int);
4583
4584	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4585	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4586	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4587	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4588	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4589	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4590
4591	return 0;
4592}
4593
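/**
 * evergreen_irq_ack - ack the display interrupt sources
 *
 * @rdev: radeon_device pointer
 *
 * Latch the display interrupt status registers into
 * rdev->irq.stat_regs and ack any pageflip, vblank, vline,
 * hpd, or AFMT/HDMI interrupts that have fired (evergreen+).
 */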
4594static void evergreen_irq_ack(struct radeon_device *rdev)
4595{
4596	u32 tmp;
4597
4598	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4599	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4600	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4601	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4602	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4603	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4604	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4605	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4606	if (rdev->num_crtc >= 4) {
4607		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4608		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4609	}
4610	if (rdev->num_crtc >= 6) {
4611		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4612		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4613	}
4614
4615	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4616	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4617	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4618	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4619	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4620	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4621
4622	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4623		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4624	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4625		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4626	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4627		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4628	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4629		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4630	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4631		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4632	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4633		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4634
4635	if (rdev->num_crtc >= 4) {
4636		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4637			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4638		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4639			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4640		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4641			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4642		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4643			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4644		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4645			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4646		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4647			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4648	}
4649
4650	if (rdev->num_crtc >= 6) {
4651		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4652			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4653		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4654			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4655		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4656			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4657		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4658			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4659		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4660			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4661		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4662			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4663	}
4664
4665	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4666		tmp = RREG32(DC_HPD1_INT_CONTROL);
4667		tmp |= DC_HPDx_INT_ACK;
4668		WREG32(DC_HPD1_INT_CONTROL, tmp);
4669	}
4670	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4671		tmp = RREG32(DC_HPD2_INT_CONTROL);
4672		tmp |= DC_HPDx_INT_ACK;
4673		WREG32(DC_HPD2_INT_CONTROL, tmp);
4674	}
4675	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4676		tmp = RREG32(DC_HPD3_INT_CONTROL);
4677		tmp |= DC_HPDx_INT_ACK;
4678		WREG32(DC_HPD3_INT_CONTROL, tmp);
4679	}
4680	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4681		tmp = RREG32(DC_HPD4_INT_CONTROL);
4682		tmp |= DC_HPDx_INT_ACK;
4683		WREG32(DC_HPD4_INT_CONTROL, tmp);
4684	}
4685	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4686		tmp = RREG32(DC_HPD5_INT_CONTROL);
4687		tmp |= DC_HPDx_INT_ACK;
4688		WREG32(DC_HPD5_INT_CONTROL, tmp);
4689	}
4690	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4691		tmp = RREG32(DC_HPD6_INT_CONTROL);
4692		tmp |= DC_HPDx_INT_ACK;
4693		WREG32(DC_HPD6_INT_CONTROL, tmp);
4694	}
4695	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4696		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4697		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4698		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4699	}
4700	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4701		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4702		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4703		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4704	}
4705	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4706		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4707		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4708		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4709	}
4710	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4711		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4712		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4713		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4714	}
4715	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4716		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4717		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4718		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4719	}
4720	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4721		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4722		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4723		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4724	}
4725}
4726
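/**
 * evergreen_irq_disable - disable interrupts
 *
 * @rdev: radeon_device pointer
 *
 * Disable interrupts on the hw, then ack any pending sources and
 * clear the interrupt enable state (evergreen+).
 */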
4727static void evergreen_irq_disable(struct radeon_device *rdev)
4728{
4729	r600_disable_interrupts(rdev);
4730	/* Wait and acknowledge irq */
4731	mdelay(1);
4732	evergreen_irq_ack(rdev);
4733	evergreen_disable_interrupt_state(rdev);
4734}
4735
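/**
 * evergreen_irq_suspend - disable interrupts for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Disable interrupts and stop the RLC (evergreen+).
 * Used for suspend.
 */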
4736void evergreen_irq_suspend(struct radeon_device *rdev)
4737{
4738	evergreen_irq_disable(rdev);
4739	r600_rlc_stop(rdev);
4740}
4741
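/**
 * evergreen_get_ih_wptr - get the IH ring buffer wptr
 *
 * @rdev: radeon_device pointer
 *
 * Get the IH ring buffer wptr from either the register
 * or the writeback page (evergreen+).  Also check for ring
 * buffer overflow and, if it happened, bump the rptr past the
 * overwritten vectors so processing can catch up.
 * Returns the value of the wptr.
 */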
4742static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4743{
4744	u32 wptr, tmp;
4745
4746	if (rdev->wb.enabled)
4747		wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4748	else
4749		wptr = RREG32(IH_RB_WPTR);
4750
4751	if (wptr & RB_OVERFLOW) {
4752		/* When a ring buffer overflow happens, start parsing interrupts
4753		 * from the last vector that was not overwritten (wptr + 16).
4754		 * Hopefully this should allow us to catch up.
4755		 */
4756		dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
4757			wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
4758		rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4759		tmp = RREG32(IH_RB_CNTL);
4760		tmp |= IH_WPTR_OVERFLOW_CLEAR;
4761		WREG32(IH_RB_CNTL, tmp);
4762	}
4763	return (wptr & rdev->ih.ptr_mask);
4764}
4765
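/**
 * evergreen_irq_process - interrupt handler
 *
 * @rdev: radeon_device pointer
 *
 * Interrupt handler (evergreen+).  Walk the IH ring, ack the
 * display interrupts, and decode and dispatch each vector:
 * vblank/vline, pageflip, hotplug, HDMI, VM faults, CP and DMA
 * fences, and thermal events.  Deferred work (hotplug, audio,
 * thermal) is scheduled once the ring has been drained.
 * Returns irq process return code.
 */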
4766int evergreen_irq_process(struct radeon_device *rdev)
4767{
4768	u32 wptr;
4769	u32 rptr;
4770	u32 src_id, src_data;
4771	u32 ring_index;
4772	bool queue_hotplug = false;
4773	bool queue_hdmi = false;
4774	bool queue_thermal = false;
4775	u32 status, addr;
4776
4777	if (!rdev->ih.enabled || rdev->shutdown)
4778		return IRQ_NONE;
4779
4780	wptr = evergreen_get_ih_wptr(rdev);
4781
4782restart_ih:
4783	/* is somebody else already processing irqs? */
4784	if (atomic_xchg(&rdev->ih.lock, 1))
4785		return IRQ_NONE;
4786
4787	rptr = rdev->ih.rptr;
4788	DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4789
4790	/* Order reading of wptr vs. reading of IH ring data */
4791	rmb();
4792
4793	/* display interrupts */
4794	evergreen_irq_ack(rdev);
4795
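	/* each IH vector is 128 bits (16 bytes); only the first two
	 * dwords are used here: src_id in bits 7:0 of dword 0 and
	 * src_data in bits 27:0 of dword 1
	 */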
4796	while (rptr != wptr) {
4797		/* wptr/rptr are in bytes! */
4798		ring_index = rptr / 4;
4799		src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4800		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4801
4802		switch (src_id) {
4803		case 1: /* D1 vblank/vline */
4804			switch (src_data) {
4805			case 0: /* D1 vblank */
4806				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4807					if (rdev->irq.crtc_vblank_int[0]) {
4808						drm_handle_vblank(rdev->ddev, 0);
4809						rdev->pm.vblank_sync = true;
4810						wake_up(&rdev->irq.vblank_queue);
4811					}
4812					if (atomic_read(&rdev->irq.pflip[0]))
4813						radeon_crtc_handle_flip(rdev, 0);
4814					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4815					DRM_DEBUG("IH: D1 vblank\n");
4816				}
4817				break;
4818			case 1: /* D1 vline */
4819				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4820					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4821					DRM_DEBUG("IH: D1 vline\n");
4822				}
4823				break;
4824			default:
4825				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4826				break;
4827			}
4828			break;
4829		case 2: /* D2 vblank/vline */
4830			switch (src_data) {
4831			case 0: /* D2 vblank */
4832				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4833					if (rdev->irq.crtc_vblank_int[1]) {
4834						drm_handle_vblank(rdev->ddev, 1);
4835						rdev->pm.vblank_sync = true;
4836						wake_up(&rdev->irq.vblank_queue);
4837					}
4838					if (atomic_read(&rdev->irq.pflip[1]))
4839						radeon_crtc_handle_flip(rdev, 1);
4840					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4841					DRM_DEBUG("IH: D2 vblank\n");
4842				}
4843				break;
4844			case 1: /* D2 vline */
4845				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4846					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4847					DRM_DEBUG("IH: D2 vline\n");
4848				}
4849				break;
4850			default:
4851				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4852				break;
4853			}
4854			break;
4855		case 3: /* D3 vblank/vline */
4856			switch (src_data) {
4857			case 0: /* D3 vblank */
4858				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4859					if (rdev->irq.crtc_vblank_int[2]) {
4860						drm_handle_vblank(rdev->ddev, 2);
4861						rdev->pm.vblank_sync = true;
4862						wake_up(&rdev->irq.vblank_queue);
4863					}
4864					if (atomic_read(&rdev->irq.pflip[2]))
4865						radeon_crtc_handle_flip(rdev, 2);
4866					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4867					DRM_DEBUG("IH: D3 vblank\n");
4868				}
4869				break;
4870			case 1: /* D3 vline */
4871				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4872					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4873					DRM_DEBUG("IH: D3 vline\n");
4874				}
4875				break;
4876			default:
4877				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4878				break;
4879			}
4880			break;
4881		case 4: /* D4 vblank/vline */
4882			switch (src_data) {
4883			case 0: /* D4 vblank */
4884				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4885					if (rdev->irq.crtc_vblank_int[3]) {
4886						drm_handle_vblank(rdev->ddev, 3);
4887						rdev->pm.vblank_sync = true;
4888						wake_up(&rdev->irq.vblank_queue);
4889					}
4890					if (atomic_read(&rdev->irq.pflip[3]))
4891						radeon_crtc_handle_flip(rdev, 3);
4892					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4893					DRM_DEBUG("IH: D4 vblank\n");
4894				}
4895				break;
4896			case 1: /* D4 vline */
4897				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4898					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4899					DRM_DEBUG("IH: D4 vline\n");
4900				}
4901				break;
4902			default:
4903				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4904				break;
4905			}
4906			break;
4907		case 5: /* D5 vblank/vline */
4908			switch (src_data) {
4909			case 0: /* D5 vblank */
4910				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4911					if (rdev->irq.crtc_vblank_int[4]) {
4912						drm_handle_vblank(rdev->ddev, 4);
4913						rdev->pm.vblank_sync = true;
4914						wake_up(&rdev->irq.vblank_queue);
4915					}
4916					if (atomic_read(&rdev->irq.pflip[4]))
4917						radeon_crtc_handle_flip(rdev, 4);
4918					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4919					DRM_DEBUG("IH: D5 vblank\n");
4920				}
4921				break;
4922			case 1: /* D5 vline */
4923				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4924					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4925					DRM_DEBUG("IH: D5 vline\n");
4926				}
4927				break;
4928			default:
4929				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4930				break;
4931			}
4932			break;
4933		case 6: /* D6 vblank/vline */
4934			switch (src_data) {
4935			case 0: /* D6 vblank */
4936				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4937					if (rdev->irq.crtc_vblank_int[5]) {
4938						drm_handle_vblank(rdev->ddev, 5);
4939						rdev->pm.vblank_sync = true;
4940						wake_up(&rdev->irq.vblank_queue);
4941					}
4942					if (atomic_read(&rdev->irq.pflip[5]))
4943						radeon_crtc_handle_flip(rdev, 5);
4944					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4945					DRM_DEBUG("IH: D6 vblank\n");
4946				}
4947				break;
4948			case 1: /* D6 vline */
4949				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4950					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4951					DRM_DEBUG("IH: D6 vline\n");
4952				}
4953				break;
4954			default:
4955				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4956				break;
4957			}
4958			break;
4959		case 8: /* D1 page flip */
4960		case 10: /* D2 page flip */
4961		case 12: /* D3 page flip */
4962		case 14: /* D4 page flip */
4963		case 16: /* D5 page flip */
4964		case 18: /* D6 page flip */
4965			DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
4966			radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
4967			break;
4968		case 42: /* HPD hotplug */
4969			switch (src_data) {
4970			case 0:
4971				if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4972					rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4973					queue_hotplug = true;
4974					DRM_DEBUG("IH: HPD1\n");
4975				}
4976				break;
4977			case 1:
4978				if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4979					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4980					queue_hotplug = true;
4981					DRM_DEBUG("IH: HPD2\n");
4982				}
4983				break;
4984			case 2:
4985				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4986					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4987					queue_hotplug = true;
4988					DRM_DEBUG("IH: HPD3\n");
4989				}
4990				break;
4991			case 3:
4992				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4993					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
4994					queue_hotplug = true;
4995					DRM_DEBUG("IH: HPD4\n");
4996				}
4997				break;
4998			case 4:
4999				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
5000					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5001					queue_hotplug = true;
5002					DRM_DEBUG("IH: HPD5\n");
5003				}
5004				break;
5005			case 5:
5006				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
5007					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5008					queue_hotplug = true;
5009					DRM_DEBUG("IH: HPD6\n");
5010				}
5011				break;
5012			default:
5013				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5014				break;
5015			}
5016			break;
5017		case 44: /* hdmi */
5018			switch (src_data) {
5019			case 0:
5020				if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
5021					rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5022					queue_hdmi = true;
5023					DRM_DEBUG("IH: HDMI0\n");
5024				}
5025				break;
5026			case 1:
5027				if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
5028					rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5029					queue_hdmi = true;
5030					DRM_DEBUG("IH: HDMI1\n");
5031				}
5032				break;
5033			case 2:
5034				if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
5035					rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5036					queue_hdmi = true;
5037					DRM_DEBUG("IH: HDMI2\n");
5038				}
5039				break;
5040			case 3:
5041				if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
5042					rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5043					queue_hdmi = true;
5044					DRM_DEBUG("IH: HDMI3\n");
5045				}
5046				break;
5047			case 4:
5048				if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
5049					rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5050					queue_hdmi = true;
5051					DRM_DEBUG("IH: HDMI4\n");
5052				}
5053				break;
5054			case 5:
5055				if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
5056					rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5057					queue_hdmi = true;
5058					DRM_DEBUG("IH: HDMI5\n");
5059				}
5060				break;
5061			default:
5062				DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5063				break;
5064			}
			break;
5065		case 124: /* UVD */
5066			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5067			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5068			break;
5069		case 146:
5070		case 147:
5071			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5072			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5073			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5074			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5075				addr);
5076			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5077				status);
5078			cayman_vm_decode_fault(rdev, status, addr);
5079			/* reset addr and status */
5080			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5081			break;
5082		case 176: /* CP_INT in ring buffer */
5083		case 177: /* CP_INT in IB1 */
5084		case 178: /* CP_INT in IB2 */
5085			DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5086			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5087			break;
5088		case 181: /* CP EOP event */
5089			DRM_DEBUG("IH: CP EOP\n");
5090			if (rdev->family >= CHIP_CAYMAN) {
5091				switch (src_data) {
5092				case 0:
5093					radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5094					break;
5095				case 1:
5096					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5097					break;
5098				case 2:
5099					radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5100					break;
5101				}
5102			} else
5103				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5104			break;
5105		case 224: /* DMA trap event */
5106			DRM_DEBUG("IH: DMA trap\n");
5107			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5108			break;
5109		case 230: /* thermal low to high */
5110			DRM_DEBUG("IH: thermal low to high\n");
5111			rdev->pm.dpm.thermal.high_to_low = false;
5112			queue_thermal = true;
5113			break;
5114		case 231: /* thermal high to low */
5115			DRM_DEBUG("IH: thermal high to low\n");
5116			rdev->pm.dpm.thermal.high_to_low = true;
5117			queue_thermal = true;
5118			break;
5119		case 233: /* GUI IDLE */
5120			DRM_DEBUG("IH: GUI idle\n");
5121			break;
5122		case 244: /* DMA trap event */
5123			if (rdev->family >= CHIP_CAYMAN) {
5124				DRM_DEBUG("IH: DMA1 trap\n");
5125				radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5126			}
5127			break;
5128		default:
5129			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5130			break;
5131		}
5132
5133		/* wptr/rptr are in bytes! */
5134		rptr += 16;
5135		rptr &= rdev->ih.ptr_mask;
5136	}
5137	if (queue_hotplug)
5138		schedule_work(&rdev->hotplug_work);
5139	if (queue_hdmi)
5140		schedule_work(&rdev->audio_work);
5141	if (queue_thermal && rdev->pm.dpm_enabled)
5142		schedule_work(&rdev->pm.dpm.thermal.work);
5143	rdev->ih.rptr = rptr;
5144	WREG32(IH_RB_RPTR, rdev->ih.rptr);
5145	atomic_set(&rdev->ih.lock, 0);
5146
5147	/* make sure wptr hasn't changed while processing */
5148	wptr = evergreen_get_ih_wptr(rdev);
5149	if (wptr != rptr)
5150		goto restart_ih;
5151
5152	return IRQ_HANDLED;
5153}
5154
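/**
 * evergreen_startup - program the asic to a functional state
 *
 * @rdev: radeon_device pointer
 *
 * Program the asic to a functional state (evergreen+): bring up
 * the MC, GART, and GPU core, load the microcode, and initialize
 * the rings, interrupt controller, IB pool, and audio.  Called
 * by evergreen_init() and evergreen_resume().
 * Returns 0 for success, error for failure.
 */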
5155static int evergreen_startup(struct radeon_device *rdev)
5156{
5157	struct radeon_ring *ring;
5158	int r;
5159
5160	/* enable pcie gen2 link */
5161	evergreen_pcie_gen2_enable(rdev);
5162	/* enable aspm */
5163	evergreen_program_aspm(rdev);
5164
5165	/* scratch needs to be initialized before MC */
5166	r = r600_vram_scratch_init(rdev);
5167	if (r)
5168		return r;
5169
5170	evergreen_mc_program(rdev);
5171
5172	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
5173		r = ni_mc_load_microcode(rdev);
5174		if (r) {
5175			DRM_ERROR("Failed to load MC firmware!\n");
5176			return r;
5177		}
5178	}
5179
5180	if (rdev->flags & RADEON_IS_AGP) {
5181		evergreen_agp_enable(rdev);
5182	} else {
5183		r = evergreen_pcie_gart_enable(rdev);
5184		if (r)
5185			return r;
5186	}
5187	evergreen_gpu_init(rdev);
5188
5189	/* allocate rlc buffers */
5190	if (rdev->flags & RADEON_IS_IGP) {
5191		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
5192		rdev->rlc.reg_list_size =
5193			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
5194		rdev->rlc.cs_data = evergreen_cs_data;
5195		r = sumo_rlc_init(rdev);
5196		if (r) {
5197			DRM_ERROR("Failed to init rlc BOs!\n");
5198			return r;
5199		}
5200	}
5201
5202	/* allocate wb buffer */
5203	r = radeon_wb_init(rdev);
5204	if (r)
5205		return r;
5206
5207	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
5208	if (r) {
5209		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
5210		return r;
5211	}
5212
5213	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
5214	if (r) {
5215		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
5216		return r;
5217	}
5218
5219	r = uvd_v2_2_resume(rdev);
5220	if (!r) {
5221		r = radeon_fence_driver_start_ring(rdev,
5222						   R600_RING_TYPE_UVD_INDEX);
5223		if (r)
5224			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
5225	}
5226
5227	if (r)
5228		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5229
5230	/* Enable IRQ */
5231	if (!rdev->irq.installed) {
5232		r = radeon_irq_kms_init(rdev);
5233		if (r)
5234			return r;
5235	}
5236
5237	r = r600_irq_init(rdev);
5238	if (r) {
5239		DRM_ERROR("radeon: IH init failed (%d).\n", r);
5240		radeon_irq_kms_fini(rdev);
5241		return r;
5242	}
5243	evergreen_irq_set(rdev);
5244
5245	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
5246	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
5247			     RADEON_CP_PACKET2);
5248	if (r)
5249		return r;
5250
5251	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
5252	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
5253			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
5254	if (r)
5255		return r;
5256
5257	r = evergreen_cp_load_microcode(rdev);
5258	if (r)
5259		return r;
5260	r = evergreen_cp_resume(rdev);
5261	if (r)
5262		return r;
5263	r = r600_dma_resume(rdev);
5264	if (r)
5265		return r;
5266
5267	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5268	if (ring->ring_size) {
5269		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
5270				     RADEON_CP_PACKET2);
5271		if (!r)
5272			r = uvd_v1_0_init(rdev);
5273
5274		if (r)
5275			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
5276	}
5277
5278	r = radeon_ib_pool_init(rdev);
5279	if (r) {
5280		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
5281		return r;
5282	}
5283
5284	r = r600_audio_init(rdev);
5285	if (r) {
5286		DRM_ERROR("radeon: audio init failed\n");
5287		return r;
5288	}
5289
5290	return 0;
5291}
5292
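/**
 * evergreen_resume - resume the asic
 *
 * @rdev: radeon_device pointer
 *
 * Bring the asic back from suspend: reset it, re-post the card,
 * restore the golden registers, and restart acceleration via
 * evergreen_startup().
 * Returns 0 for success, error for failure.
 */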
5293int evergreen_resume(struct radeon_device *rdev)
5294{
5295	int r;
5296
5297	/* reset the asic; the gfx blocks are often in a bad state
5298	 * after the driver is unloaded or after a resume
5299	 */
5300	if (radeon_asic_reset(rdev))
5301		dev_warn(rdev->dev, "GPU reset failed!\n");
5302	/* Do not reset the GPU before posting; on rv770 hw, unlike r500 hw,
5303	 * posting will perform the tasks necessary to bring the GPU back
5304	 * into good shape.
5305	 */
5306	/* post card */
5307	atom_asic_init(rdev->mode_info.atom_context);
5308
5309	/* init golden registers */
5310	evergreen_init_golden_registers(rdev);
5311
5312	if (rdev->pm.pm_method == PM_METHOD_DPM)
5313		radeon_pm_resume(rdev);
5314
5315	rdev->accel_working = true;
5316	r = evergreen_startup(rdev);
5317	if (r) {
5318		DRM_ERROR("evergreen startup failed on resume\n");
5319		rdev->accel_working = false;
5320		return r;
5321	}
5322
5323	return r;
5324
5325}
5326
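/**
 * evergreen_suspend - suspend the asic
 *
 * @rdev: radeon_device pointer
 *
 * Quiesce the hw for suspend: stop the audio, UVD, CP, and DMA
 * engines, then disable interrupts, writeback, and the GART.
 * Returns 0 for success.
 */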
5327int evergreen_suspend(struct radeon_device *rdev)
5328{
5329	radeon_pm_suspend(rdev);
5330	r600_audio_fini(rdev);
5331	uvd_v1_0_fini(rdev);
5332	radeon_uvd_suspend(rdev);
5333	r700_cp_stop(rdev);
5334	r600_dma_stop(rdev);
5335	evergreen_irq_suspend(rdev);
5336	radeon_wb_disable(rdev);
5337	evergreen_pcie_gart_disable(rdev);
5338
5339	return 0;
5340}
5341
5342/* The plan is to move initialization into this function and use
5343 * helper functions so that radeon_device_init does pretty much
5344 * nothing more than call the asic-specific functions.  This
5345 * should also allow us to remove a bunch of callbacks such
5346 * as vram_info.
5347 */
5348int evergreen_init(struct radeon_device *rdev)
5349{
5350	int r;
5351
5352	/* Read BIOS */
5353	if (!radeon_get_bios(rdev)) {
5354		if (ASIC_IS_AVIVO(rdev))
5355			return -EINVAL;
5356	}
5357	/* Must be an ATOMBIOS */
5358	if (!rdev->is_atom_bios) {
5359		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
5360		return -EINVAL;
5361	}
5362	r = radeon_atombios_init(rdev);
5363	if (r)
5364		return r;
5365	/* reset the asic; the gfx blocks are often in a bad state
5366	 * after the driver is unloaded or after a resume
5367	 */
5368	if (radeon_asic_reset(rdev))
5369		dev_warn(rdev->dev, "GPU reset failed!\n");
5370	/* Post card if necessary */
5371	if (!radeon_card_posted(rdev)) {
5372		if (!rdev->bios) {
5373			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
5374			return -EINVAL;
5375		}
5376		DRM_INFO("GPU not posted. posting now...\n");
5377		atom_asic_init(rdev->mode_info.atom_context);
5378	}
5379	/* init golden registers */
5380	evergreen_init_golden_registers(rdev);
5381	/* Initialize scratch registers */
5382	r600_scratch_init(rdev);
5383	/* Initialize surface registers */
5384	radeon_surface_init(rdev);
5385	/* Initialize clocks */
5386	radeon_get_clock_info(rdev->ddev);
5387	/* Fence driver */
5388	r = radeon_fence_driver_init(rdev);
5389	if (r)
5390		return r;
5391	/* initialize AGP */
5392	if (rdev->flags & RADEON_IS_AGP) {
5393		r = radeon_agp_init(rdev);
5394		if (r)
5395			radeon_agp_disable(rdev);
5396	}
5397	/* initialize memory controller */
5398	r = evergreen_mc_init(rdev);
5399	if (r)
5400		return r;
5401	/* Memory manager */
5402	r = radeon_bo_init(rdev);
5403	if (r)
5404		return r;
5405
5406	if (ASIC_IS_DCE5(rdev)) {
5407		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
5408			r = ni_init_microcode(rdev);
5409			if (r) {
5410				DRM_ERROR("Failed to load firmware!\n");
5411				return r;
5412			}
5413		}
5414	} else {
5415		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
5416			r = r600_init_microcode(rdev);
5417			if (r) {
5418				DRM_ERROR("Failed to load firmware!\n");
5419				return r;
5420			}
5421		}
5422	}
5423
5424	/* Initialize power management */
5425	radeon_pm_init(rdev);
5426
5427	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
5428	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);
5429
5430	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
5431	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);
5432
5433	r = radeon_uvd_init(rdev);
5434	if (!r) {
5435		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5436		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
5437			       4096);
5438	}
5439
5440	rdev->ih.ring_obj = NULL;
5441	r600_ih_ring_init(rdev, 64 * 1024);
5442
5443	r = r600_pcie_gart_init(rdev);
5444	if (r)
5445		return r;
5446
5447	rdev->accel_working = true;
5448	r = evergreen_startup(rdev);
5449	if (r) {
5450		dev_err(rdev->dev, "disabling GPU acceleration\n");
5451		r700_cp_fini(rdev);
5452		r600_dma_fini(rdev);
5453		r600_irq_fini(rdev);
5454		if (rdev->flags & RADEON_IS_IGP)
5455			sumo_rlc_fini(rdev);
5456		radeon_wb_fini(rdev);
5457		radeon_ib_pool_fini(rdev);
5458		radeon_irq_kms_fini(rdev);
5459		evergreen_pcie_gart_fini(rdev);
5460		rdev->accel_working = false;
5461	}
5462
5463	/* Don't start up if the MC ucode is missing on BTC parts.
5464	 * The default clocks and voltages before the MC ucode
5465	 * is loaded are not sufficient for advanced operations.
5466	 */
5467	if (ASIC_IS_DCE5(rdev)) {
5468		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
5469			DRM_ERROR("radeon: MC ucode required for NI+.\n");
5470			return -EINVAL;
5471		}
5472	}
5473
5474	return 0;
5475}
5476
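/**
 * evergreen_fini - tear down the asic driver state
 *
 * @rdev: radeon_device pointer
 *
 * Tear down everything brought up by evergreen_init() in roughly
 * the reverse order, then free the BIOS image.
 */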
5477void evergreen_fini(struct radeon_device *rdev)
5478{
5479	radeon_pm_fini(rdev);
5480	r600_audio_fini(rdev);
5481	r700_cp_fini(rdev);
5482	r600_dma_fini(rdev);
5483	r600_irq_fini(rdev);
5484	if (rdev->flags & RADEON_IS_IGP)
5485		sumo_rlc_fini(rdev);
5486	radeon_wb_fini(rdev);
5487	radeon_ib_pool_fini(rdev);
5488	radeon_irq_kms_fini(rdev);
5489	uvd_v1_0_fini(rdev);
5490	radeon_uvd_fini(rdev);
5491	evergreen_pcie_gart_fini(rdev);
5492	r600_vram_scratch_fini(rdev);
5493	radeon_gem_fini(rdev);
5494	radeon_fence_driver_fini(rdev);
5495	radeon_agp_fini(rdev);
5496	radeon_bo_fini(rdev);
5497	radeon_atombios_fini(rdev);
5498	kfree(rdev->bios);
5499	rdev->bios = NULL;
5500}
5501
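/**
 * evergreen_pcie_gen2_enable - enable pcie gen2 link speeds
 *
 * @rdev: radeon_device pointer
 *
 * Enable pcie gen2 speeds on the link if the asic is a discrete
 * pcie part, the bus supports 5.0 (or 8.0) GT/s, and the other
 * side of the link advertises gen2 support.  Can be disabled
 * with the radeon.pcie_gen2=0 module parameter.
 */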
5502void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5503{
5504	u32 link_width_cntl, speed_cntl;
5505
5506	if (radeon_pcie_gen2 == 0)
5507		return;
5508
5509	if (rdev->flags & RADEON_IS_IGP)
5510		return;
5511
5512	if (!(rdev->flags & RADEON_IS_PCIE))
5513		return;
5514
5515	/* x2 cards have a special sequence */
5516	if (ASIC_IS_X2(rdev))
5517		return;
5518
5519	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5520		(rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
5521		return;
5522
5523	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5524	if (speed_cntl & LC_CURRENT_DATA_RATE) {
5525		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5526		return;
5527	}
5528
5529	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5530
5531	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5532	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5533
5534		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5535		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5536		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5537
5538		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5539		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5540		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5541
5542		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5543		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5544		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5545
5546		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5547		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5548		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5549
5550		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5551		speed_cntl |= LC_GEN2_EN_STRAP;
5552		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5553
5554	} else {
5555		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5556		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5557		if (1)
5558			link_width_cntl |= LC_UPCONFIGURE_DIS;
5559		else
5560			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5561		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5562	}
5563}
5564
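/**
 * evergreen_program_aspm - program ASPM settings
 *
 * @rdev: radeon_device pointer
 *
 * Program the L0s/L1 link power states and the PLL power-down
 * behavior in L1 for the pcie link, with per-family tweaks.
 * Skipped entirely when the radeon.aspm module parameter is 0
 * or the asic is not pcie.
 */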
5565void evergreen_program_aspm(struct radeon_device *rdev)
5566{
5567	u32 data, orig;
5568	u32 pcie_lc_cntl, pcie_lc_cntl_old;
5569	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
5570	/* fusion_platform = true
5571	 * if the system is a fusion system
5572	 * (APU or DGPU in a fusion system).
5573	 * todo: check if the system is a fusion platform.
5574	 */
5575	bool fusion_platform = false;
5576
5577	if (radeon_aspm == 0)
5578		return;
5579
5580	if (!(rdev->flags & RADEON_IS_PCIE))
5581		return;
5582
5583	switch (rdev->family) {
5584	case CHIP_CYPRESS:
5585	case CHIP_HEMLOCK:
5586	case CHIP_JUNIPER:
5587	case CHIP_REDWOOD:
5588	case CHIP_CEDAR:
5589	case CHIP_SUMO:
5590	case CHIP_SUMO2:
5591	case CHIP_PALM:
5592	case CHIP_ARUBA:
5593		disable_l0s = true;
5594		break;
5595	default:
5596		disable_l0s = false;
5597		break;
5598	}
5599
5600	if (rdev->flags & RADEON_IS_IGP)
5601		fusion_platform = true; /* XXX also dGPUs in a fusion system */
5602
5603	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
5604	if (fusion_platform)
5605		data &= ~MULTI_PIF;
5606	else
5607		data |= MULTI_PIF;
5608	if (data != orig)
5609		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);
5610
5611	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
5612	if (fusion_platform)
5613		data &= ~MULTI_PIF;
5614	else
5615		data |= MULTI_PIF;
5616	if (data != orig)
5617		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);
5618
5619	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
5620	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
5621	if (!disable_l0s) {
5622		if (rdev->family >= CHIP_BARTS)
5623			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
5624		else
5625			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
5626	}
5627
5628	if (!disable_l1) {
5629		if (rdev->family >= CHIP_BARTS)
5630			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
5631		else
5632			pcie_lc_cntl |= LC_L1_INACTIVITY(8);
5633
5634		if (!disable_plloff_in_l1) {
5635			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
5636			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
5637			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
5638			if (data != orig)
5639				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
5640
5641			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
5642			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
5643			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
5644			if (data != orig)
5645				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
5646
5647			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
5648			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
5649			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
5650			if (data != orig)
5651				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
5652
5653			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
5654			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
5655			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
5656			if (data != orig)
5657				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
5658
5659			if (rdev->family >= CHIP_BARTS) {
5660				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
5661				data &= ~PLL_RAMP_UP_TIME_0_MASK;
5662				data |= PLL_RAMP_UP_TIME_0(4);
5663				if (data != orig)
5664					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);
5665
5666				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
5667				data &= ~PLL_RAMP_UP_TIME_1_MASK;
5668				data |= PLL_RAMP_UP_TIME_1(4);
5669				if (data != orig)
5670					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);
5671
5672				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
5673				data &= ~PLL_RAMP_UP_TIME_0_MASK;
5674				data |= PLL_RAMP_UP_TIME_0(4);
5675				if (data != orig)
5676					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);
5677
5678				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
5679				data &= ~PLL_RAMP_UP_TIME_1_MASK;
5680				data |= PLL_RAMP_UP_TIME_1(4);
5681				if (data != orig)
5682					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
5683			}
5684
5685			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5686			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
5687			data |= LC_DYN_LANES_PWR_STATE(3);
5688			if (data != orig)
5689				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);
5690
5691			if (rdev->family >= CHIP_BARTS) {
5692				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
5693				data &= ~LS2_EXIT_TIME_MASK;
5694				data |= LS2_EXIT_TIME(1);
5695				if (data != orig)
5696					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);
5697
5698				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
5699				data &= ~LS2_EXIT_TIME_MASK;
5700				data |= LS2_EXIT_TIME(1);
5701				if (data != orig)
5702					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
5703			}
5704		}
5705	}
5706
5707	/* evergreen parts only */
5708	if (rdev->family < CHIP_BARTS)
5709		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;
5710
5711	if (pcie_lc_cntl != pcie_lc_cntl_old)
5712		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
5713}