// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 */

#include <linux/string_helpers.h>

#include "i915_drv.h"
#include "i915_reg.h"
#include "intel_dram.h"
#include "intel_mchbar_regs.h"
#include "intel_pcode.h"
#include "vlv_sideband.h"

struct dram_dimm_info {
	u16 size;
	u8 width, ranks;
};

struct dram_channel_info {
	struct dram_dimm_info dimm_l, dimm_s;
	u8 ranks;
	bool is_16gb_dimm;
};

#define DRAM_TYPE_STR(type) [INTEL_DRAM_ ## type] = #type

static const char *intel_dram_type_str(enum intel_dram_type type)
{
	static const char * const str[] = {
		DRAM_TYPE_STR(UNKNOWN),
		DRAM_TYPE_STR(DDR3),
		DRAM_TYPE_STR(DDR4),
		DRAM_TYPE_STR(LPDDR3),
		DRAM_TYPE_STR(LPDDR4),
	};

	if (type >= ARRAY_SIZE(str))
		type = INTEL_DRAM_UNKNOWN;

	return str[type];
}

#undef DRAM_TYPE_STR

static bool pnv_is_ddr3(struct drm_i915_private *i915)
{
	return intel_uncore_read(&i915->uncore, CSHRDDR3CTL) & CSHRDDR3CTL_DDR3;
}

static unsigned int pnv_mem_freq(struct drm_i915_private *dev_priv)
{
	u32 tmp;

	tmp = intel_uncore_read(&dev_priv->uncore, CLKCFG);

	switch (tmp & CLKCFG_MEM_MASK) {
	case CLKCFG_MEM_533:
		return 533333;
	case CLKCFG_MEM_667:
		return 666667;
	case CLKCFG_MEM_800:
		return 800000;
	}

	return 0;
}

static unsigned int ilk_mem_freq(struct drm_i915_private *dev_priv)
{
	u16 ddrpll;

	ddrpll = intel_uncore_read16(&dev_priv->uncore, DDRMPLL1);
	switch (ddrpll & 0xff) {
	case 0xc:
		return 800000;
	case 0x10:
		return 1066667;
	case 0x14:
		return 1333333;
	case 0x18:
		return 1600000;
	default:
		drm_dbg(&dev_priv->drm, "unknown memory frequency 0x%02x\n",
			ddrpll & 0xff);
		return 0;
	}
}

static unsigned int chv_mem_freq(struct drm_i915_private *i915)
{
	u32 val;

	vlv_iosf_sb_get(i915, BIT(VLV_IOSF_SB_CCK));
	val = vlv_cck_read(i915, CCK_FUSE_REG);
	vlv_iosf_sb_put(i915, BIT(VLV_IOSF_SB_CCK));

	switch ((val >> 2) & 0x7) {
	case 3:
		return 2000000;
	default:
		return 1600000;
	}
}

static unsigned int vlv_mem_freq(struct drm_i915_private *i915)
{
	u32 val;

	vlv_iosf_sb_get(i915, BIT(VLV_IOSF_SB_PUNIT));
	val = vlv_punit_read(i915, PUNIT_REG_GPU_FREQ_STS);
	vlv_iosf_sb_put(i915, BIT(VLV_IOSF_SB_PUNIT));

	switch ((val >> 6) & 3) {
	case 0:
	case 1:
		return 800000;
	case 2:
		return 1066667;
	case 3:
		return 1333333;
	}

	return 0;
}

static void detect_mem_freq(struct drm_i915_private *i915)
{
	if (IS_PINEVIEW(i915))
		i915->mem_freq = pnv_mem_freq(i915);
	else if (GRAPHICS_VER(i915) == 5)
		i915->mem_freq = ilk_mem_freq(i915);
	else if (IS_CHERRYVIEW(i915))
		i915->mem_freq = chv_mem_freq(i915);
	else if (IS_VALLEYVIEW(i915))
		i915->mem_freq = vlv_mem_freq(i915);

	if (IS_PINEVIEW(i915))
		i915->is_ddr3 = pnv_is_ddr3(i915);

	if (i915->mem_freq)
		drm_dbg(&i915->drm, "DDR speed: %d kHz\n", i915->mem_freq);
}

unsigned int i9xx_fsb_freq(struct drm_i915_private *i915)
{
	u32 fsb;

	/*
	 * Note that this only reads the state of the FSB
	 * straps, not the actual FSB frequency. Some BIOSen
	 * let you configure each independently. Ideally we'd
	 * read out the actual FSB frequency but sadly we
	 * don't know which registers have that information,
	 * and all the relevant docs have gone to bit heaven :(
	 */
	fsb = intel_uncore_read(&i915->uncore, CLKCFG) & CLKCFG_FSB_MASK;

	if (IS_PINEVIEW(i915) || IS_MOBILE(i915)) {
		switch (fsb) {
		case CLKCFG_FSB_400:
			return 400000;
		case CLKCFG_FSB_533:
			return 533333;
		case CLKCFG_FSB_667:
			return 666667;
		case CLKCFG_FSB_800:
			return 800000;
		case CLKCFG_FSB_1067:
			return 1066667;
		case CLKCFG_FSB_1333:
			return 1333333;
		default:
			MISSING_CASE(fsb);
			return 1333333;
		}
	} else {
		switch (fsb) {
		case CLKCFG_FSB_400_ALT:
			return 400000;
		case CLKCFG_FSB_533:
			return 533333;
		case CLKCFG_FSB_667:
			return 666667;
		case CLKCFG_FSB_800:
			return 800000;
		case CLKCFG_FSB_1067_ALT:
			return 1066667;
		case CLKCFG_FSB_1333_ALT:
			return 1333333;
		case CLKCFG_FSB_1600_ALT:
			return 1600000;
		default:
			MISSING_CASE(fsb);
			return 1333333;
		}
	}
}

static unsigned int ilk_fsb_freq(struct drm_i915_private *dev_priv)
{
	u16 fsb;

	fsb = intel_uncore_read16(&dev_priv->uncore, CSIPLL0) & 0x3ff;

	switch (fsb) {
	case 0x00c:
		return 3200000;
	case 0x00e:
		return 3733333;
	case 0x010:
		return 4266667;
	case 0x012:
		return 4800000;
	case 0x014:
		return 5333333;
	case 0x016:
		return 5866667;
	case 0x018:
		return 6400000;
	default:
		drm_dbg(&dev_priv->drm, "unknown fsb frequency 0x%04x\n", fsb);
		return 0;
	}
}

static void detect_fsb_freq(struct drm_i915_private *i915)
{
	if (GRAPHICS_VER(i915) == 5)
		i915->fsb_freq = ilk_fsb_freq(i915);
	else if (GRAPHICS_VER(i915) == 3 || GRAPHICS_VER(i915) == 4)
		i915->fsb_freq = i9xx_fsb_freq(i915);

	if (i915->fsb_freq)
		drm_dbg(&i915->drm, "FSB frequency: %d kHz\n", i915->fsb_freq);
}

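/*
 * A DIMM rank presents a 64-bit data bus, so the device count works out
 * to ranks * 64 / device_width; e.g. two ranks of x8 devices give
 * 2 * 64 / 8 = 16 devices. The "?: 1" avoids a division by zero for an
 * unpopulated DIMM slot.
 */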
static int intel_dimm_num_devices(const struct dram_dimm_info *dimm)
{
	return dimm->ranks * 64 / (dimm->width ?: 1);
}

/* Returns total Gb for the whole DIMM */
static int skl_get_dimm_size(u16 val)
{
	return (val & SKL_DRAM_SIZE_MASK) * 8;
}

static int skl_get_dimm_width(u16 val)
{
	if (skl_get_dimm_size(val) == 0)
		return 0;

	switch (val & SKL_DRAM_WIDTH_MASK) {
	case SKL_DRAM_WIDTH_X8:
	case SKL_DRAM_WIDTH_X16:
	case SKL_DRAM_WIDTH_X32:
		val = (val & SKL_DRAM_WIDTH_MASK) >> SKL_DRAM_WIDTH_SHIFT;
		return 8 << val;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int skl_get_dimm_ranks(u16 val)
{
	if (skl_get_dimm_size(val) == 0)
		return 0;

	val = (val & SKL_DRAM_RANK_MASK) >> SKL_DRAM_RANK_SHIFT;

	return val + 1;
}

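/*
 * Given that both helpers return total Gb, the SKL size field above is
 * apparently in whole GB (hence * 8), while the ICL field below appears
 * to be in units of half a GB, hence the extra / 2.
 */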
/* Returns total Gb for the whole DIMM */
static int icl_get_dimm_size(u16 val)
{
	return (val & ICL_DRAM_SIZE_MASK) * 8 / 2;
}

static int icl_get_dimm_width(u16 val)
{
	if (icl_get_dimm_size(val) == 0)
		return 0;

	switch (val & ICL_DRAM_WIDTH_MASK) {
	case ICL_DRAM_WIDTH_X8:
	case ICL_DRAM_WIDTH_X16:
	case ICL_DRAM_WIDTH_X32:
		val = (val & ICL_DRAM_WIDTH_MASK) >> ICL_DRAM_WIDTH_SHIFT;
		return 8 << val;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int icl_get_dimm_ranks(u16 val)
{
	if (icl_get_dimm_size(val) == 0)
		return 0;

	val = (val & ICL_DRAM_RANK_MASK) >> ICL_DRAM_RANK_SHIFT;

	return val + 1;
}

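/*
 * Example: a dual-rank DIMM built from x8, 16Gb devices has
 * 2 * 64 / 8 = 16 devices and a total size of 16 * 16 = 256 Gb,
 * so 256 / 16 == 16 and the DIMM is flagged as using 16Gb devices.
 */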
static bool
skl_is_16gb_dimm(const struct dram_dimm_info *dimm)
{
	/* Convert total Gb to Gb per DRAM device */
	return dimm->size / (intel_dimm_num_devices(dimm) ?: 1) == 16;
}

static void
skl_dram_get_dimm_info(struct drm_i915_private *i915,
		       struct dram_dimm_info *dimm,
		       int channel, char dimm_name, u16 val)
{
	if (GRAPHICS_VER(i915) >= 11) {
		dimm->size = icl_get_dimm_size(val);
		dimm->width = icl_get_dimm_width(val);
		dimm->ranks = icl_get_dimm_ranks(val);
	} else {
		dimm->size = skl_get_dimm_size(val);
		dimm->width = skl_get_dimm_width(val);
		dimm->ranks = skl_get_dimm_ranks(val);
	}

	drm_dbg_kms(&i915->drm,
		    "CH%u DIMM %c size: %u Gb, width: X%u, ranks: %u, 16Gb DIMMs: %s\n",
		    channel, dimm_name, dimm->size, dimm->width, dimm->ranks,
		    str_yes_no(skl_is_16gb_dimm(dimm)));
}

static int
skl_dram_get_channel_info(struct drm_i915_private *i915,
			  struct dram_channel_info *ch,
			  int channel, u32 val)
{
	skl_dram_get_dimm_info(i915, &ch->dimm_l,
			       channel, 'L', val & 0xffff);
	skl_dram_get_dimm_info(i915, &ch->dimm_s,
			       channel, 'S', val >> 16);

	if (ch->dimm_l.size == 0 && ch->dimm_s.size == 0) {
		drm_dbg_kms(&i915->drm, "CH%u not populated\n", channel);
		return -EINVAL;
	}

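	/*
	 * Treat the channel as dual-rank if either DIMM is dual-rank, or
	 * if both slots are populated with single-rank DIMMs.
	 */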
	if (ch->dimm_l.ranks == 2 || ch->dimm_s.ranks == 2)
		ch->ranks = 2;
	else if (ch->dimm_l.ranks == 1 && ch->dimm_s.ranks == 1)
		ch->ranks = 2;
	else
		ch->ranks = 1;

	ch->is_16gb_dimm = skl_is_16gb_dimm(&ch->dimm_l) ||
		skl_is_16gb_dimm(&ch->dimm_s);

	drm_dbg_kms(&i915->drm, "CH%u ranks: %u, 16Gb DIMMs: %s\n",
		    channel, ch->ranks, str_yes_no(ch->is_16gb_dimm));

	return 0;
}

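/*
 * The two channels are considered symmetric when their configurations
 * match exactly and, within a channel, either the S slot is empty or
 * the L and S DIMMs are identical.
 */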
static bool
intel_is_dram_symmetric(const struct dram_channel_info *ch0,
			const struct dram_channel_info *ch1)
{
	return !memcmp(ch0, ch1, sizeof(*ch0)) &&
		(ch0->dimm_s.size == 0 ||
		 !memcmp(&ch0->dimm_l, &ch0->dimm_s, sizeof(ch0->dimm_l)));
}

static int
skl_dram_get_channels_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	struct dram_channel_info ch0 = {}, ch1 = {};
	u32 val;
	int ret;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH0_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch0, 0, val);
	if (ret == 0)
		dram_info->num_channels++;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_DIMM_CH1_0_0_0_MCHBAR_MCMAIN);
	ret = skl_dram_get_channel_info(i915, &ch1, 1, val);
	if (ret == 0)
		dram_info->num_channels++;

	if (dram_info->num_channels == 0) {
		drm_info(&i915->drm, "Number of memory channels is zero\n");
		return -EINVAL;
	}

	if (ch0.ranks == 0 && ch1.ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory rank information\n");
		return -EINVAL;
	}

	dram_info->wm_lv_0_adjust_needed = ch0.is_16gb_dimm || ch1.is_16gb_dimm;

	dram_info->symmetric_memory = intel_is_dram_symmetric(&ch0, &ch1);

	drm_dbg_kms(&i915->drm, "Memory configuration is symmetric? %s\n",
		    str_yes_no(dram_info->symmetric_memory));

	return 0;
}

static enum intel_dram_type
skl_get_dram_type(struct drm_i915_private *i915)
{
	u32 val;

	val = intel_uncore_read(&i915->uncore,
				SKL_MAD_INTER_CHANNEL_0_0_0_MCHBAR_MCMAIN);

	switch (val & SKL_DRAM_DDR_TYPE_MASK) {
	case SKL_DRAM_DDR_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case SKL_DRAM_DDR_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case SKL_DRAM_DDR_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case SKL_DRAM_DDR_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}

static int
skl_get_dram_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	int ret;

	dram_info->type = skl_get_dram_type(i915);
	drm_dbg_kms(&i915->drm, "DRAM type: %s\n",
		    intel_dram_type_str(dram_info->type));

	ret = skl_dram_get_channels_info(i915);
	if (ret)
		return ret;

	return 0;
}

/* Returns Gb per DRAM device */
static int bxt_get_dimm_size(u32 val)
{
	switch (val & BXT_DRAM_SIZE_MASK) {
	case BXT_DRAM_SIZE_4GBIT:
		return 4;
	case BXT_DRAM_SIZE_6GBIT:
		return 6;
	case BXT_DRAM_SIZE_8GBIT:
		return 8;
	case BXT_DRAM_SIZE_12GBIT:
		return 12;
	case BXT_DRAM_SIZE_16GBIT:
		return 16;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static int bxt_get_dimm_width(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return 0;

	val = (val & BXT_DRAM_WIDTH_MASK) >> BXT_DRAM_WIDTH_SHIFT;

	return 8 << val;
}

static int bxt_get_dimm_ranks(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return 0;

	switch (val & BXT_DRAM_RANK_MASK) {
	case BXT_DRAM_RANK_SINGLE:
		return 1;
	case BXT_DRAM_RANK_DUAL:
		return 2;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

static enum intel_dram_type bxt_get_dimm_type(u32 val)
{
	if (!bxt_get_dimm_size(val))
		return INTEL_DRAM_UNKNOWN;

	switch (val & BXT_DRAM_TYPE_MASK) {
	case BXT_DRAM_TYPE_DDR3:
		return INTEL_DRAM_DDR3;
	case BXT_DRAM_TYPE_LPDDR3:
		return INTEL_DRAM_LPDDR3;
	case BXT_DRAM_TYPE_DDR4:
		return INTEL_DRAM_DDR4;
	case BXT_DRAM_TYPE_LPDDR4:
		return INTEL_DRAM_LPDDR4;
	default:
		MISSING_CASE(val);
		return INTEL_DRAM_UNKNOWN;
	}
}

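/*
 * Example: a dual-rank arrangement of x16, 8Gb devices gives
 * 2 * 64 / 16 = 8 devices, so dimm->size = 8 Gb * 8 = 64 Gb total.
 */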
static void bxt_get_dimm_info(struct dram_dimm_info *dimm, u32 val)
{
	dimm->width = bxt_get_dimm_width(val);
	dimm->ranks = bxt_get_dimm_ranks(val);

	/*
	 * Size in register is Gb per DRAM device. Convert to total
	 * Gb to match the way we report this for non-LP platforms.
	 */
	dimm->size = bxt_get_dimm_size(val) * intel_dimm_num_devices(dimm);
}

static int bxt_get_dram_info(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	u32 val;
	u8 valid_ranks = 0;
	int i;

	/*
	 * Now read each DUNIT8/9/10/11 to check the rank of each DIMM.
	 */
	for (i = BXT_D_CR_DRP0_DUNIT_START; i <= BXT_D_CR_DRP0_DUNIT_END; i++) {
		struct dram_dimm_info dimm;
		enum intel_dram_type type;

		val = intel_uncore_read(&i915->uncore, BXT_D_CR_DRP0_DUNIT(i));
		if (val == 0xFFFFFFFF)
			continue;

		dram_info->num_channels++;

		bxt_get_dimm_info(&dimm, val);
		type = bxt_get_dimm_type(val);

		drm_WARN_ON(&i915->drm, type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != INTEL_DRAM_UNKNOWN &&
			    dram_info->type != type);

		drm_dbg_kms(&i915->drm,
			    "CH%u DIMM size: %u Gb, width: X%u, ranks: %u, type: %s\n",
			    i - BXT_D_CR_DRP0_DUNIT_START,
			    dimm.size, dimm.width, dimm.ranks,
			    intel_dram_type_str(type));

		if (valid_ranks == 0)
			valid_ranks = dimm.ranks;

		if (type != INTEL_DRAM_UNKNOWN)
			dram_info->type = type;
	}

	if (dram_info->type == INTEL_DRAM_UNKNOWN || valid_ranks == 0) {
		drm_info(&i915->drm, "couldn't get memory information\n");
		return -EINVAL;
	}

	return 0;
}

static int icl_pcode_read_mem_global_info(struct drm_i915_private *dev_priv)
{
	struct dram_info *dram_info = &dev_priv->dram_info;
	u32 val = 0;
	int ret;

	ret = snb_pcode_read(&dev_priv->uncore, ICL_PCODE_MEM_SUBSYSYSTEM_INFO |
			     ICL_PCODE_MEM_SS_READ_GLOBAL_INFO, &val, NULL);
	if (ret)
		return ret;

	if (GRAPHICS_VER(dev_priv) == 12) {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR5;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR5;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		case 4:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 5:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	} else {
		switch (val & 0xf) {
		case 0:
			dram_info->type = INTEL_DRAM_DDR4;
			break;
		case 1:
			dram_info->type = INTEL_DRAM_DDR3;
			break;
		case 2:
			dram_info->type = INTEL_DRAM_LPDDR3;
			break;
		case 3:
			dram_info->type = INTEL_DRAM_LPDDR4;
			break;
		default:
			MISSING_CASE(val & 0xf);
			return -EINVAL;
		}
	}

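	/*
	 * Remaining fields of the global info dword: bits 7:4 hold the
	 * number of populated channels, bits 11:8 the number of QGV
	 * points, and bits 13:12 the number of PSF GV points.
	 */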
	dram_info->num_channels = (val & 0xf0) >> 4;
	dram_info->num_qgv_points = (val & 0xf00) >> 8;
	dram_info->num_psf_gv_points = (val & 0x3000) >> 12;

	return 0;
}

static int gen11_get_dram_info(struct drm_i915_private *i915)
{
	int ret = skl_get_dram_info(i915);

	if (ret)
		return ret;

	return icl_pcode_read_mem_global_info(i915);
}

static int gen12_get_dram_info(struct drm_i915_private *i915)
{
	i915->dram_info.wm_lv_0_adjust_needed = false;

	return icl_pcode_read_mem_global_info(i915);
}

static int xelpdp_get_dram_info(struct drm_i915_private *i915)
{
	u32 val = intel_uncore_read(&i915->uncore, MTL_MEM_SS_INFO_GLOBAL);
	struct dram_info *dram_info = &i915->dram_info;

	switch (REG_FIELD_GET(MTL_DDR_TYPE_MASK, val)) {
	case 0:
		dram_info->type = INTEL_DRAM_DDR4;
		break;
	case 1:
		dram_info->type = INTEL_DRAM_DDR5;
		break;
	case 2:
		dram_info->type = INTEL_DRAM_LPDDR5;
		break;
	case 3:
		dram_info->type = INTEL_DRAM_LPDDR4;
		break;
	case 4:
		dram_info->type = INTEL_DRAM_DDR3;
		break;
	case 5:
		dram_info->type = INTEL_DRAM_LPDDR3;
		break;
	case 8:
		drm_WARN_ON(&i915->drm, !IS_DGFX(i915));
		dram_info->type = INTEL_DRAM_GDDR;
		break;
	default:
		MISSING_CASE(val);
		return -EINVAL;
	}

	dram_info->num_channels = REG_FIELD_GET(MTL_N_OF_POPULATED_CH_MASK, val);
	dram_info->num_qgv_points = REG_FIELD_GET(MTL_N_OF_ENABLED_QGV_POINTS_MASK, val);
	/* PSF GV points not supported in D14+ */

	return 0;
}

void intel_dram_detect(struct drm_i915_private *i915)
{
	struct dram_info *dram_info = &i915->dram_info;
	int ret;

	detect_fsb_freq(i915);
	detect_mem_freq(i915);

	if (GRAPHICS_VER(i915) < 9 || IS_DG2(i915) || !HAS_DISPLAY(i915))
		return;

	/*
	 * Assume level 0 watermark latency adjustment is needed until proven
	 * otherwise; this w/a is not needed by bxt/glk.
	 */
	dram_info->wm_lv_0_adjust_needed = !IS_BROXTON(i915) && !IS_GEMINILAKE(i915);

	if (DISPLAY_VER(i915) >= 14)
		ret = xelpdp_get_dram_info(i915);
	else if (GRAPHICS_VER(i915) >= 12)
		ret = gen12_get_dram_info(i915);
	else if (GRAPHICS_VER(i915) >= 11)
		ret = gen11_get_dram_info(i915);
	else if (IS_BROXTON(i915) || IS_GEMINILAKE(i915))
		ret = bxt_get_dram_info(i915);
	else
		ret = skl_get_dram_info(i915);
	if (ret)
		return;

	drm_dbg_kms(&i915->drm, "Num qgv points %u\n", dram_info->num_qgv_points);

	drm_dbg_kms(&i915->drm, "DRAM channels: %u\n", dram_info->num_channels);

	drm_dbg_kms(&i915->drm, "Watermark level 0 adjustment needed: %s\n",
		    str_yes_no(dram_info->wm_lv_0_adjust_needed));
}

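/*
 * The eDRAM capability register encodes bank, way and set counts; the
 * product of the three decoded values is reported directly as the eDRAM
 * size in MB (e.g. 8 banks * 8 ways * 1 set = 64MB).
 */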
static u32 gen9_edram_size_mb(struct drm_i915_private *i915, u32 cap)
{
	static const u8 ways[8] = { 4, 8, 12, 16, 16, 16, 16, 16 };
	static const u8 sets[4] = { 1, 1, 2, 2 };

	return EDRAM_NUM_BANKS(cap) *
		ways[EDRAM_WAYS_IDX(cap)] *
		sets[EDRAM_SETS_IDX(cap)];
}

void intel_dram_edram_detect(struct drm_i915_private *i915)
{
	u32 edram_cap = 0;

	if (!(IS_HASWELL(i915) || IS_BROADWELL(i915) || GRAPHICS_VER(i915) >= 9))
		return;

	edram_cap = intel_uncore_read_fw(&i915->uncore, HSW_EDRAM_CAP);

	/* NB: We can't write IDICR yet because we don't have gt funcs set up */

	if (!(edram_cap & EDRAM_ENABLED))
		return;

	/*
	 * The capability bits needed for the size calculation are not present
	 * before gen9, so always report 128MB.
	 */
	if (GRAPHICS_VER(i915) < 9)
		i915->edram_size_mb = 128;
	else
		i915->edram_size_mb = gen9_edram_size_mb(i915, edram_cap);

	drm_info(&i915->drm, "Found %uMB of eDRAM\n", i915->edram_size_mb);
}