v6.13.7
  1// SPDX-License-Identifier: MIT
  2/*
  3 * Copyright © 2020 Intel Corporation
  4 *
  5 */
  6
  7#include "i915_drv.h"
  8#include "i915_reg.h"
  9#include "intel_de.h"
 10#include "intel_display_types.h"
 11#include "intel_vrr.h"
 12#include "intel_vrr_regs.h"
 13#include "intel_dp.h"
 14
 15#define FIXED_POINT_PRECISION		100
 16#define CMRR_PRECISION_TOLERANCE	10
 17
 18bool intel_vrr_is_capable(struct intel_connector *connector)
 19{
 20	struct intel_display *display = to_intel_display(connector);
 21	const struct drm_display_info *info = &connector->base.display_info;
 22	struct intel_dp *intel_dp;
 23
 24	/*
  25	 * A DP sink is capable of VRR video timings if the
  26	 * Ignore MSA bit is set in its DPCD.
  27	 * The EDID monitor range should also be at least 10 Hz for a reasonable
 28	 * Adaptive Sync or Variable Refresh Rate end user experience.
 29	 */
 30	switch (connector->base.connector_type) {
 31	case DRM_MODE_CONNECTOR_eDP:
 32		if (!connector->panel.vbt.vrr)
 33			return false;
 34		fallthrough;
 35	case DRM_MODE_CONNECTOR_DisplayPort:
 36		intel_dp = intel_attached_dp(connector);
 37
 38		if (!drm_dp_sink_can_do_video_without_timing_msa(intel_dp->dpcd))
 39			return false;
 40
 41		break;
 42	default:
 43		return false;
 44	}
 45
 46	return HAS_VRR(display) &&
 47		info->monitor_range.max_vfreq - info->monitor_range.min_vfreq > 10;
 48}
 49
 50bool intel_vrr_is_in_range(struct intel_connector *connector, int vrefresh)
 51{
 52	const struct drm_display_info *info = &connector->base.display_info;
 53
 54	return intel_vrr_is_capable(connector) &&
 55		vrefresh >= info->monitor_range.min_vfreq &&
 56		vrefresh <= info->monitor_range.max_vfreq;
 57}
 58
 59bool intel_vrr_possible(const struct intel_crtc_state *crtc_state)
 60{
 61	return crtc_state->vrr.flipline;
 62}
 63
 64void
 65intel_vrr_check_modeset(struct intel_atomic_state *state)
 66{
 67	int i;
 68	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
 69	struct intel_crtc *crtc;
 70
 71	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
 72					    new_crtc_state, i) {
 73		if (new_crtc_state->uapi.vrr_enabled !=
 74		    old_crtc_state->uapi.vrr_enabled)
 75			new_crtc_state->uapi.mode_changed = true;
 76	}
 77}
 78
 79/*
 80 * Without VRR, registers get latched at:
 81 *  vblank_start
 82 *
 83 * With VRR, the earliest point registers can get latched is:
 84 *  intel_vrr_vmin_vblank_start(), which, if we want to maintain
 85 *  the correct min vtotal, is >= vblank_start+1
 86 *
 87 * The latest point registers can get latched is the vmax decision boundary:
 88 *  intel_vrr_vmax_vblank_start()
 89 *
 90 * Between those two points the vblank exit starts (and hence registers get
 91 * latched) ASAP after a push is sent.
 92 *
 93 * framestart_delay is programmable from 1 to 4.
 94 */
 95static int intel_vrr_vblank_exit_length(const struct intel_crtc_state *crtc_state)
 96{
 97	struct intel_display *display = to_intel_display(crtc_state);
 98
 99	if (DISPLAY_VER(display) >= 13)
100		return crtc_state->vrr.guardband;
101	else
102		/* The hw imposes the extra scanline before frame start */
103		return crtc_state->vrr.pipeline_full + crtc_state->framestart_delay + 1;
104}
105
106int intel_vrr_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
107{
108	/* Min vblank is actually determined by flipline, which is always >= vmin+1 */
109	return crtc_state->vrr.vmin + 1 - intel_vrr_vblank_exit_length(crtc_state);
110}
111
112int intel_vrr_vmax_vblank_start(const struct intel_crtc_state *crtc_state)
113{
114	return crtc_state->vrr.vmax - intel_vrr_vblank_exit_length(crtc_state);
115}
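
/*
 * Illustration with hypothetical numbers (not from any real panel): a
 * 2560x1440 mode with vrr.vmin = 1480, vrr.vmax = 1849 and a 41 scanline
 * vblank exit length gives a vmin vblank start of 1480 + 1 - 41 = 1440
 * and a vmax decision boundary of 1849 - 41 = 1808.
 */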
116
117static bool
118is_cmrr_frac_required(struct intel_crtc_state *crtc_state)
119{
120	struct intel_display *display = to_intel_display(crtc_state);
121	int calculated_refresh_k, actual_refresh_k, pixel_clock_per_line;
122	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
123
124	if (!HAS_CMRR(display))
125		return false;
126
127	actual_refresh_k =
128		drm_mode_vrefresh(adjusted_mode) * FIXED_POINT_PRECISION;
129	pixel_clock_per_line =
130		adjusted_mode->crtc_clock * 1000 / adjusted_mode->crtc_htotal;
131	calculated_refresh_k =
132		pixel_clock_per_line * FIXED_POINT_PRECISION / adjusted_mode->crtc_vtotal;
133
134	if ((actual_refresh_k - calculated_refresh_k) < CMRR_PRECISION_TOLERANCE)
135		return false;
136
137	return true;
138}
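
/*
 * Example with hypothetical numbers: a mode with crtc_clock = 241500 kHz,
 * crtc_htotal = 2720, crtc_vtotal = 1481 and a nominal 60 Hz vrefresh gives
 * actual_refresh_k = 6000 and
 * calculated_refresh_k = (241500000 / 2720) * 100 / 1481 = 5994; the
 * 0.06 Hz mismatch is within the 0.1 Hz tolerance, so no fractional
 * CMRR compensation is needed.
 */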
139
140static unsigned int
141cmrr_get_vtotal(struct intel_crtc_state *crtc_state, bool video_mode_required)
142{
143	int multiplier_m = 1, multiplier_n = 1, vtotal, desired_refresh_rate;
144	u64 adjusted_pixel_rate;
145	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
146
147	desired_refresh_rate = drm_mode_vrefresh(adjusted_mode);
148
149	if (video_mode_required) {
150		multiplier_m = 1001;
151		multiplier_n = 1000;
152	}
153
154	crtc_state->cmrr.cmrr_n = mul_u32_u32(desired_refresh_rate * adjusted_mode->crtc_htotal,
155					      multiplier_n);
156	vtotal = DIV_ROUND_UP_ULL(mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_n),
157				  crtc_state->cmrr.cmrr_n);
158	adjusted_pixel_rate = mul_u32_u32(adjusted_mode->crtc_clock * 1000, multiplier_m);
159	crtc_state->cmrr.cmrr_m = do_div(adjusted_pixel_rate, crtc_state->cmrr.cmrr_n);
160
161	return vtotal;
162}
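
/*
 * Sketch, same hypothetical mode as above (crtc_clock = 241500 kHz,
 * crtc_htotal = 2720, 60 Hz) with video_mode_required == false:
 * cmrr_n = 60 * 2720 = 163200, vtotal = DIV_ROUND_UP(241500000, 163200) = 1480,
 * cmrr_m = 241500000 % 163200 = 127200.
 */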
163
164void
165intel_vrr_compute_config(struct intel_crtc_state *crtc_state,
166			 struct drm_connector_state *conn_state)
167{
168	struct intel_display *display = to_intel_display(crtc_state);
169	struct intel_connector *connector =
170		to_intel_connector(conn_state->connector);
171	struct intel_dp *intel_dp = intel_attached_dp(connector);
172	bool is_edp = intel_dp_is_edp(intel_dp);
173	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
174	const struct drm_display_info *info = &connector->base.display_info;
175	int vmin, vmax;
176
177	/*
178	 * FIXME all joined pipes share the same transcoder.
179	 * Need to account for that during VRR toggle/push/etc.
180	 */
181	if (crtc_state->joiner_pipes)
182		return;
183
184	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
185		return;
186
187	crtc_state->vrr.in_range =
188		intel_vrr_is_in_range(connector, drm_mode_vrefresh(adjusted_mode));
189	if (!crtc_state->vrr.in_range)
190		return;
191
192	if (HAS_LRR(display))
193		crtc_state->update_lrr = true;
194
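	/*
	 * Illustration with hypothetical numbers: crtc_clock = 241500 kHz,
	 * crtc_htotal = 2720 and a 48-120 Hz monitor range give
	 * vmin = DIV_ROUND_UP(241500000, 2720 * 120) = 740 and
	 * vmax = 241500000 / (2720 * 48) = 1849, before the clamp to
	 * crtc_vtotal below.
	 */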
195	vmin = DIV_ROUND_UP(adjusted_mode->crtc_clock * 1000,
196			    adjusted_mode->crtc_htotal * info->monitor_range.max_vfreq);
197	vmax = adjusted_mode->crtc_clock * 1000 /
198		(adjusted_mode->crtc_htotal * info->monitor_range.min_vfreq);
199
200	vmin = max_t(int, vmin, adjusted_mode->crtc_vtotal);
201	vmax = max_t(int, vmax, adjusted_mode->crtc_vtotal);
202
203	if (vmin >= vmax)
204		return;
205
206	/*
207	 * flipline determines the min vblank length the hardware will
208	 * generate, and flipline>=vmin+1, hence we reduce vmin by one
209	 * to make sure we can get the actual min vblank length.
210	 */
211	crtc_state->vrr.vmin = vmin - 1;
212	crtc_state->vrr.vmax = vmax;
213
214	crtc_state->vrr.flipline = crtc_state->vrr.vmin + 1;
215
216	/*
217	 * When the panel is VRR capable and userspace has
218	 * not enabled adaptive sync mode, Fixed Average
219	 * Vtotal mode should be enabled.
220	 */
221	if (crtc_state->uapi.vrr_enabled) {
222		crtc_state->vrr.enable = true;
223		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
224	} else if (is_cmrr_frac_required(crtc_state) && is_edp) {
225		crtc_state->vrr.enable = true;
226		crtc_state->cmrr.enable = true;
227		/*
228		 * TODO: Compute precise target refresh rate to determine
229		 * if video_mode_required should be true. Currently set to
230		 * false due to uncertainty about the precise target
231		 * refresh Rate.
232	 * refresh rate.
233		crtc_state->vrr.vmax = cmrr_get_vtotal(crtc_state, false);
234		crtc_state->vrr.vmin = crtc_state->vrr.vmax;
235		crtc_state->vrr.flipline = crtc_state->vrr.vmin;
236		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
237	}
238
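	/*
	 * For the adaptive sync SDP, the VRR vsync start/end values below
	 * are programmed as offsets back from crtc_vtotal, hence the
	 * subtraction.
	 */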
239	if (intel_dp->as_sdp_supported && crtc_state->vrr.enable) {
240		crtc_state->vrr.vsync_start =
241			(crtc_state->hw.adjusted_mode.crtc_vtotal -
242			 crtc_state->hw.adjusted_mode.vsync_start);
243		crtc_state->vrr.vsync_end =
244			(crtc_state->hw.adjusted_mode.crtc_vtotal -
245			 crtc_state->hw.adjusted_mode.vsync_end);
246	}
247}
248
249void intel_vrr_compute_config_late(struct intel_crtc_state *crtc_state)
250{
251	struct intel_display *display = to_intel_display(crtc_state);
252	const struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
253
254	if (!intel_vrr_possible(crtc_state))
255		return;
256
257	if (DISPLAY_VER(display) >= 13) {
258		crtc_state->vrr.guardband =
259			crtc_state->vrr.vmin + 1 - adjusted_mode->crtc_vblank_start;
260	} else {
261		crtc_state->vrr.pipeline_full =
262			min(255, crtc_state->vrr.vmin - adjusted_mode->crtc_vblank_start -
263			    crtc_state->framestart_delay - 1);
264	}
265}
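
/*
 * Continuing the hypothetical example: with vrr.vmin = 1480 and
 * crtc_vblank_start = 1440, display version 13+ gets a guardband of
 * 1480 + 1 - 1440 = 41 scanlines, while older platforms would program
 * pipeline_full = min(255, 1480 - 1440 - framestart_delay - 1) instead.
 */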
266
267static u32 trans_vrr_ctl(const struct intel_crtc_state *crtc_state)
268{
269	struct intel_display *display = to_intel_display(crtc_state);
270
271	if (DISPLAY_VER(display) >= 13)
272		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
273			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
274	else
275		return VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
276			VRR_CTL_PIPELINE_FULL(crtc_state->vrr.pipeline_full) |
277			VRR_CTL_PIPELINE_FULL_OVERRIDE;
278}
279
280void intel_vrr_set_transcoder_timings(const struct intel_crtc_state *crtc_state)
281{
282	struct intel_display *display = to_intel_display(crtc_state);
283	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
284
285	/*
286	 * This bit seems to have two meanings depending on the platform:
287	 * TGL: generate VRR "safe window" for DSB vblank waits
288	 * ADL/DG2: make TRANS_SET_CONTEXT_LATENCY effective with VRR
289	 */
290	if (IS_DISPLAY_VER(display, 12, 13))
291		intel_de_rmw(display, CHICKEN_TRANS(cpu_transcoder),
292			     0, PIPE_VBLANK_WITH_DELAY);
293
294	if (!intel_vrr_possible(crtc_state)) {
295		intel_de_write(display,
296			       TRANS_VRR_CTL(display, cpu_transcoder), 0);
297		return;
298	}
299
300	if (crtc_state->cmrr.enable) {
301		intel_de_write(display, TRANS_CMRR_M_HI(display, cpu_transcoder),
302			       upper_32_bits(crtc_state->cmrr.cmrr_m));
303		intel_de_write(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
304			       lower_32_bits(crtc_state->cmrr.cmrr_m));
305		intel_de_write(display, TRANS_CMRR_N_HI(display, cpu_transcoder),
306			       upper_32_bits(crtc_state->cmrr.cmrr_n));
307		intel_de_write(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
308			       lower_32_bits(crtc_state->cmrr.cmrr_n));
309	}
310
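	/* VMIN/VMAX/FLIPLINE are programmed as the value minus one */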
311	intel_de_write(display, TRANS_VRR_VMIN(display, cpu_transcoder),
312		       crtc_state->vrr.vmin - 1);
313	intel_de_write(display, TRANS_VRR_VMAX(display, cpu_transcoder),
314		       crtc_state->vrr.vmax - 1);
315	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
316		       trans_vrr_ctl(crtc_state));
317	intel_de_write(display, TRANS_VRR_FLIPLINE(display, cpu_transcoder),
318		       crtc_state->vrr.flipline - 1);
319}
320
321void intel_vrr_send_push(const struct intel_crtc_state *crtc_state)
322{
323	struct intel_display *display = to_intel_display(crtc_state);
324	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
325
326	if (!crtc_state->vrr.enable)
327		return;
328
329	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder),
330		       TRANS_PUSH_EN | TRANS_PUSH_SEND);
331}
332
333bool intel_vrr_is_push_sent(const struct intel_crtc_state *crtc_state)
334{
335	struct intel_display *display = to_intel_display(crtc_state);
336	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
337
338	if (!crtc_state->vrr.enable)
339		return false;
340
341	return intel_de_read(display, TRANS_PUSH(display, cpu_transcoder)) & TRANS_PUSH_SEND;
342}
343
344void intel_vrr_enable(const struct intel_crtc_state *crtc_state)
345{
346	struct intel_display *display = to_intel_display(crtc_state);
347	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
348
349	if (!crtc_state->vrr.enable)
350		return;
351
352	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder),
353		       TRANS_PUSH_EN);
354
355	if (HAS_AS_SDP(display))
356		intel_de_write(display,
357			       TRANS_VRR_VSYNC(display, cpu_transcoder),
358			       VRR_VSYNC_END(crtc_state->vrr.vsync_end) |
359			       VRR_VSYNC_START(crtc_state->vrr.vsync_start));
360
361	if (crtc_state->cmrr.enable) {
362		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
363			       VRR_CTL_VRR_ENABLE | VRR_CTL_CMRR_ENABLE |
364			       trans_vrr_ctl(crtc_state));
365	} else {
366		intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
367			       VRR_CTL_VRR_ENABLE | trans_vrr_ctl(crtc_state));
368	}
369}
370
371void intel_vrr_disable(const struct intel_crtc_state *old_crtc_state)
372{
373	struct intel_display *display = to_intel_display(old_crtc_state);
374	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;
375
376	if (!old_crtc_state->vrr.enable)
377		return;
378
379	intel_de_write(display, TRANS_VRR_CTL(display, cpu_transcoder),
380		       trans_vrr_ctl(old_crtc_state));
381	intel_de_wait_for_clear(display,
382				TRANS_VRR_STATUS(display, cpu_transcoder),
383				VRR_STATUS_VRR_EN_LIVE, 1000);
384	intel_de_write(display, TRANS_PUSH(display, cpu_transcoder), 0);
385
386	if (HAS_AS_SDP(display))
387		intel_de_write(display,
388			       TRANS_VRR_VSYNC(display, cpu_transcoder), 0);
389}
390
391void intel_vrr_get_config(struct intel_crtc_state *crtc_state)
392{
393	struct intel_display *display = to_intel_display(crtc_state);
394	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
395	u32 trans_vrr_ctl, trans_vrr_vsync;
396
397	trans_vrr_ctl = intel_de_read(display,
398				      TRANS_VRR_CTL(display, cpu_transcoder));
399
400	crtc_state->vrr.enable = trans_vrr_ctl & VRR_CTL_VRR_ENABLE;
401	if (HAS_CMRR(display))
402		crtc_state->cmrr.enable = (trans_vrr_ctl & VRR_CTL_CMRR_ENABLE);
403
404	if (crtc_state->cmrr.enable) {
405		crtc_state->cmrr.cmrr_n =
406			intel_de_read64_2x32(display, TRANS_CMRR_N_LO(display, cpu_transcoder),
407					     TRANS_CMRR_N_HI(display, cpu_transcoder));
408		crtc_state->cmrr.cmrr_m =
409			intel_de_read64_2x32(display, TRANS_CMRR_M_LO(display, cpu_transcoder),
410					     TRANS_CMRR_M_HI(display, cpu_transcoder));
411	}
412
413	if (DISPLAY_VER(display) >= 13)
414		crtc_state->vrr.guardband =
415			REG_FIELD_GET(XELPD_VRR_CTL_VRR_GUARDBAND_MASK, trans_vrr_ctl);
416	else
417		if (trans_vrr_ctl & VRR_CTL_PIPELINE_FULL_OVERRIDE)
418			crtc_state->vrr.pipeline_full =
419				REG_FIELD_GET(VRR_CTL_PIPELINE_FULL_MASK, trans_vrr_ctl);
420
421	if (trans_vrr_ctl & VRR_CTL_FLIP_LINE_EN) {
422		crtc_state->vrr.flipline = intel_de_read(display,
423							 TRANS_VRR_FLIPLINE(display, cpu_transcoder)) + 1;
424		crtc_state->vrr.vmax = intel_de_read(display,
425						     TRANS_VRR_VMAX(display, cpu_transcoder)) + 1;
426		crtc_state->vrr.vmin = intel_de_read(display,
427						     TRANS_VRR_VMIN(display, cpu_transcoder)) + 1;
428	}
429
430	if (crtc_state->vrr.enable) {
431		crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
432
433		if (HAS_AS_SDP(display)) {
434			trans_vrr_vsync =
435				intel_de_read(display,
436					      TRANS_VRR_VSYNC(display, cpu_transcoder));
437			crtc_state->vrr.vsync_start =
438				REG_FIELD_GET(VRR_VSYNC_START_MASK, trans_vrr_vsync);
439			crtc_state->vrr.vsync_end =
440				REG_FIELD_GET(VRR_VSYNC_END_MASK, trans_vrr_vsync);
441		}
442	}
443}
v6.2
  1// SPDX-License-Identifier: MIT
  2/*
  3 * Copyright © 2020 Intel Corporation
  4 *
  5 */
  6
  7#include "i915_drv.h"
  8#include "i915_reg.h"
  9#include "intel_de.h"
 10#include "intel_display_types.h"
 11#include "intel_vrr.h"
 12
 13bool intel_vrr_is_capable(struct intel_connector *connector)
 14{
 15	const struct drm_display_info *info = &connector->base.display_info;
 16	struct drm_i915_private *i915 = to_i915(connector->base.dev);
 17	struct intel_dp *intel_dp;
 18
 19	/*
 20	 * A DP sink is capable of VRR video timings if the
 21	 * Ignore MSA bit is set in its DPCD.
 22	 * The EDID monitor range should also be at least 10 Hz for a reasonable
 23	 * Adaptive Sync or Variable Refresh Rate end user experience.
 24	 */
 25	switch (connector->base.connector_type) {
 26	case DRM_MODE_CONNECTOR_eDP:
 27		if (!connector->panel.vbt.vrr)
 28			return false;
 29		fallthrough;
 30	case DRM_MODE_CONNECTOR_DisplayPort:
 31		intel_dp = intel_attached_dp(connector);
 32
 33		if (!drm_dp_sink_can_do_video_without_timing_msa(intel_dp->dpcd))
 34			return false;
 35
 36		break;
 37	default:
 38		return false;
 39	}
 40
 41	return HAS_VRR(i915) &&
 42		info->monitor_range.max_vfreq - info->monitor_range.min_vfreq > 10;
 43}
 44
 45void
 46intel_vrr_check_modeset(struct intel_atomic_state *state)
 47{
 48	int i;
 49	struct intel_crtc_state *old_crtc_state, *new_crtc_state;
 50	struct intel_crtc *crtc;
 51
 52	for_each_oldnew_intel_crtc_in_state(state, crtc, old_crtc_state,
 53					    new_crtc_state, i) {
 54		if (new_crtc_state->uapi.vrr_enabled !=
 55		    old_crtc_state->uapi.vrr_enabled)
 56			new_crtc_state->uapi.mode_changed = true;
 57	}
 58}
 59
 60/*
 61 * Without VRR, registers get latched at:
 62 *  vblank_start
 63 *
 64 * With VRR, the earliest point registers can get latched is:
 65 *  intel_vrr_vmin_vblank_start(), which, if we want to maintain
 66 *  the correct min vtotal, is >= vblank_start+1
 67 *
 68 * The latest point registers can get latched is the vmax decision boundary:
 69 *  intel_vrr_vmax_vblank_start()
 70 *
 71 * Between those two points the vblank exit starts (and hence registers get
 72 * latched) ASAP after a push is sent.
 73 *
 74 * framestart_delay is programmable from 1 to 4.
 75 */
 76static int intel_vrr_vblank_exit_length(const struct intel_crtc_state *crtc_state)
 77{
 78	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
 79	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
 80
 81	/* The hw imposes the extra scanline before frame start */
 82	if (DISPLAY_VER(i915) >= 13)
 83		return crtc_state->vrr.guardband + crtc_state->framestart_delay + 1;
 84	else
 85		return crtc_state->vrr.pipeline_full + crtc_state->framestart_delay + 1;
 86}
 87
 88int intel_vrr_vmin_vblank_start(const struct intel_crtc_state *crtc_state)
 89{
 90	/* Min vblank is actually determined by flipline, which is always >= vmin+1 */
 91	return crtc_state->vrr.vmin + 1 - intel_vrr_vblank_exit_length(crtc_state);
 92}
 93
 94int intel_vrr_vmax_vblank_start(const struct intel_crtc_state *crtc_state)
 95{
 96	return crtc_state->vrr.vmax - intel_vrr_vblank_exit_length(crtc_state);
 97}
 98
 99void
100intel_vrr_compute_config(struct intel_crtc_state *crtc_state,
101			 struct drm_connector_state *conn_state)
102{
103	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
104	struct drm_i915_private *i915 = to_i915(crtc->base.dev);
105	struct intel_connector *connector =
106		to_intel_connector(conn_state->connector);
107	struct drm_display_mode *adjusted_mode = &crtc_state->hw.adjusted_mode;
108	const struct drm_display_info *info = &connector->base.display_info;
109	int vmin, vmax;
110
111	if (!intel_vrr_is_capable(connector))
112		return;
113
114	if (adjusted_mode->flags & DRM_MODE_FLAG_INTERLACE)
115		return;
116
117	if (!crtc_state->uapi.vrr_enabled)
118		return;
119
120	vmin = DIV_ROUND_UP(adjusted_mode->crtc_clock * 1000,
121			    adjusted_mode->crtc_htotal * info->monitor_range.max_vfreq);
122	vmax = adjusted_mode->crtc_clock * 1000 /
123		(adjusted_mode->crtc_htotal * info->monitor_range.min_vfreq);
124
125	vmin = max_t(int, vmin, adjusted_mode->crtc_vtotal);
126	vmax = max_t(int, vmax, adjusted_mode->crtc_vtotal);
127
128	if (vmin >= vmax)
129		return;
130
131	/*
132	 * flipline determines the min vblank length the hardware will
133	 * generate, and flipline>=vmin+1, hence we reduce vmin by one
134	 * to make sure we can get the actual min vblank length.
135	 */
136	crtc_state->vrr.vmin = vmin - 1;
137	crtc_state->vrr.vmax = vmax;
138	crtc_state->vrr.enable = true;
139
140	crtc_state->vrr.flipline = crtc_state->vrr.vmin + 1;
141
142	/*
143	 * For XE_LPD+ we use the guardband, as the pipeline full
144	 * override is deprecated.
145	 */
146	if (DISPLAY_VER(i915) >= 13) {
147		/*
148		 * FIXME: Subtract Window2 delay from below value.
149		 *
150		 * Window2 specifies time required to program DSB (Window2) in
151		 * number of scan lines. Assuming 0 for no DSB.
152		 */
153		crtc_state->vrr.guardband =
154			crtc_state->vrr.vmin - adjusted_mode->crtc_vdisplay;
155	} else {
156		/*
157		 * FIXME: s/4/framestart_delay/ to get consistent
158		 * earliest/latest points for register latching regardless
159		 * of the framestart_delay used?
160		 *
161		 * FIXME: this really needs the extra scanline to provide consistent
162		 * behaviour for all framestart_delay values. Otherwise with
163		 * framestart_delay==4 we will end up extending the min vblank by
164		 * one extra line.
165		 */
166		crtc_state->vrr.pipeline_full =
167			min(255, crtc_state->vrr.vmin - adjusted_mode->crtc_vdisplay - 4 - 1);
168	}
169
170	crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
171}
172
173void intel_vrr_enable(struct intel_encoder *encoder,
174		      const struct intel_crtc_state *crtc_state)
175{
176	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
177	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
178	u32 trans_vrr_ctl;
179
180	if (!crtc_state->vrr.enable)
181		return;
182
183	if (DISPLAY_VER(dev_priv) >= 13)
184		trans_vrr_ctl = VRR_CTL_VRR_ENABLE |
185			VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
186			XELPD_VRR_CTL_VRR_GUARDBAND(crtc_state->vrr.guardband);
187	else
188		trans_vrr_ctl = VRR_CTL_VRR_ENABLE |
189			VRR_CTL_IGN_MAX_SHIFT | VRR_CTL_FLIP_LINE_EN |
190			VRR_CTL_PIPELINE_FULL(crtc_state->vrr.pipeline_full) |
191			VRR_CTL_PIPELINE_FULL_OVERRIDE;
192
193	intel_de_write(dev_priv, TRANS_VRR_VMIN(cpu_transcoder), crtc_state->vrr.vmin - 1);
194	intel_de_write(dev_priv, TRANS_VRR_VMAX(cpu_transcoder), crtc_state->vrr.vmax - 1);
195	intel_de_write(dev_priv, TRANS_VRR_CTL(cpu_transcoder), trans_vrr_ctl);
196	intel_de_write(dev_priv, TRANS_VRR_FLIPLINE(cpu_transcoder), crtc_state->vrr.flipline - 1);
197	intel_de_write(dev_priv, TRANS_PUSH(cpu_transcoder), TRANS_PUSH_EN);
198}
199
200void intel_vrr_send_push(const struct intel_crtc_state *crtc_state)
201{
202	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
203	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
204	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
205
206	if (!crtc_state->vrr.enable)
207		return;
208
209	intel_de_write(dev_priv, TRANS_PUSH(cpu_transcoder),
210		       TRANS_PUSH_EN | TRANS_PUSH_SEND);
211}
212
213bool intel_vrr_is_push_sent(const struct intel_crtc_state *crtc_state)
214{
215	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
216	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
217	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
218
219	if (!crtc_state->vrr.enable)
220		return false;
221
222	return intel_de_read(dev_priv, TRANS_PUSH(cpu_transcoder)) & TRANS_PUSH_SEND;
223}
224
225void intel_vrr_disable(const struct intel_crtc_state *old_crtc_state)
226{
227	struct intel_crtc *crtc = to_intel_crtc(old_crtc_state->uapi.crtc);
228	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
229	enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;
230
231	if (!old_crtc_state->vrr.enable)
232		return;
233
234	intel_de_write(dev_priv, TRANS_VRR_CTL(cpu_transcoder), 0);
235	intel_de_write(dev_priv, TRANS_PUSH(cpu_transcoder), 0);
236}
237
238void intel_vrr_get_config(struct intel_crtc *crtc,
239			  struct intel_crtc_state *crtc_state)
240{
241	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
242	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
243	u32 trans_vrr_ctl;
244
245	trans_vrr_ctl = intel_de_read(dev_priv, TRANS_VRR_CTL(cpu_transcoder));
246	crtc_state->vrr.enable = trans_vrr_ctl & VRR_CTL_VRR_ENABLE;
247	if (!crtc_state->vrr.enable)
248		return;
249
250	if (DISPLAY_VER(dev_priv) >= 13)
251		crtc_state->vrr.guardband =
252			REG_FIELD_GET(XELPD_VRR_CTL_VRR_GUARDBAND_MASK, trans_vrr_ctl);
253	else
254		if (trans_vrr_ctl & VRR_CTL_PIPELINE_FULL_OVERRIDE)
255			crtc_state->vrr.pipeline_full =
256				REG_FIELD_GET(VRR_CTL_PIPELINE_FULL_MASK, trans_vrr_ctl);
257	if (trans_vrr_ctl & VRR_CTL_FLIP_LINE_EN)
258		crtc_state->vrr.flipline = intel_de_read(dev_priv, TRANS_VRR_FLIPLINE(cpu_transcoder)) + 1;
259	crtc_state->vrr.vmax = intel_de_read(dev_priv, TRANS_VRR_VMAX(cpu_transcoder)) + 1;
260	crtc_state->vrr.vmin = intel_de_read(dev_priv, TRANS_VRR_VMIN(cpu_transcoder)) + 1;
261
262	crtc_state->mode_flags |= I915_MODE_FLAG_VRR;
263}