v5.14.15
/* SPDX-License-Identifier: MIT */
/*
 * Copyright © 2019 Intel Corporation
 */

#ifndef __INTEL_DE_H__
#define __INTEL_DE_H__

#include "i915_drv.h"
#include "i915_reg.h"
#include "i915_trace.h"
#include "intel_uncore.h"

static inline u32
intel_de_read(struct drm_i915_private *i915, i915_reg_t reg)
{
	return intel_uncore_read(&i915->uncore, reg);
}

static inline void
intel_de_posting_read(struct drm_i915_private *i915, i915_reg_t reg)
{
	intel_uncore_posting_read(&i915->uncore, reg);
}

static inline void
intel_de_write(struct drm_i915_private *i915, i915_reg_t reg, u32 val)
{
	intel_uncore_write(&i915->uncore, reg, val);
}

static inline void
intel_de_rmw(struct drm_i915_private *i915, i915_reg_t reg, u32 clear, u32 set)
{
	intel_uncore_rmw(&i915->uncore, reg, clear, set);
}

static inline int
intel_de_wait_for_register(struct drm_i915_private *i915, i915_reg_t reg,
			   u32 mask, u32 value, unsigned int timeout)
{
	return intel_wait_for_register(&i915->uncore, reg, mask, value, timeout);
}

static inline int
intel_de_wait_for_set(struct drm_i915_private *i915, i915_reg_t reg,
		      u32 mask, unsigned int timeout)
{
	return intel_de_wait_for_register(i915, reg, mask, mask, timeout);
}

static inline int
intel_de_wait_for_clear(struct drm_i915_private *i915, i915_reg_t reg,
			u32 mask, unsigned int timeout)
{
	return intel_de_wait_for_register(i915, reg, mask, 0, timeout);
}

/*
 * Unlocked mmio-accessors, think carefully before using these.
 *
 * Certain architectures will die if the same cacheline is concurrently accessed
 * by different clients (e.g. on Ivybridge). Access to registers should
 * therefore generally be serialised, by either the dev_priv->uncore.lock or
 * a more localised lock guarding all access to that bank of registers.
 */
static inline u32
intel_de_read_fw(struct drm_i915_private *i915, i915_reg_t reg)
{
	u32 val;

	val = intel_uncore_read_fw(&i915->uncore, reg);
	trace_i915_reg_rw(false, reg, val, sizeof(val), true);

	return val;
}

static inline void
intel_de_write_fw(struct drm_i915_private *i915, i915_reg_t reg, u32 val)
{
	trace_i915_reg_rw(true, reg, val, sizeof(val), true);
	intel_uncore_write_fw(&i915->uncore, reg, val);
}

#endif /* __INTEL_DE_H__ */
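
The "unlocked mmio-accessors" comment above expects callers to provide their own serialisation, for example via the dev_priv->uncore.lock it mentions. A minimal sketch of that pattern against the v5.14.15 API shown above; the helper name and the register parameters are hypothetical placeholders, everything else is the existing driver API:

#include "intel_de.h"

/* Hypothetical illustration: serialise raw (_fw) accesses ourselves. */
static void latch_two_regs(struct drm_i915_private *i915,
			   i915_reg_t reg_a, i915_reg_t reg_b, u32 val)
{
	unsigned long flags;

	/* The _fw accessors skip the uncore lock, so take it here. */
	spin_lock_irqsave(&i915->uncore.lock, flags);

	intel_de_write_fw(i915, reg_a, val);
	intel_de_write_fw(i915, reg_b, val);

	/* Read back through the same raw path to post the writes. */
	intel_de_read_fw(i915, reg_b);

	spin_unlock_irqrestore(&i915->uncore.lock, flags);
}
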
v6.13.7
/* SPDX-License-Identifier: MIT */
/*
 * Copyright © 2019 Intel Corporation
 */

#ifndef __INTEL_DE_H__
#define __INTEL_DE_H__

#include "i915_drv.h"
#include "i915_trace.h"
#include "intel_dsb.h"
#include "intel_uncore.h"

static inline struct intel_uncore *__to_uncore(struct intel_display *display)
{
	return &to_i915(display->drm)->uncore;
}

static inline u32
__intel_de_read(struct intel_display *display, i915_reg_t reg)
{
	u32 val;

	intel_dmc_wl_get(display, reg);

	val = intel_uncore_read(__to_uncore(display), reg);

	intel_dmc_wl_put(display, reg);

	return val;
}
#define intel_de_read(p,...) __intel_de_read(__to_intel_display(p), __VA_ARGS__)

static inline u8
intel_de_read8(struct intel_display *display, i915_reg_t reg)
{
	u8 val;

	intel_dmc_wl_get(display, reg);

	val = intel_uncore_read8(__to_uncore(display), reg);

	intel_dmc_wl_put(display, reg);

	return val;
}

static inline u64
intel_de_read64_2x32(struct intel_display *display,
		     i915_reg_t lower_reg, i915_reg_t upper_reg)
{
	u64 val;

	intel_dmc_wl_get(display, lower_reg);
	intel_dmc_wl_get(display, upper_reg);

	val = intel_uncore_read64_2x32(__to_uncore(display), lower_reg,
				       upper_reg);

	intel_dmc_wl_put(display, upper_reg);
	intel_dmc_wl_put(display, lower_reg);

	return val;
}

static inline void
__intel_de_posting_read(struct intel_display *display, i915_reg_t reg)
{
	intel_dmc_wl_get(display, reg);

	intel_uncore_posting_read(__to_uncore(display), reg);

	intel_dmc_wl_put(display, reg);
}
#define intel_de_posting_read(p,...) __intel_de_posting_read(__to_intel_display(p), __VA_ARGS__)

static inline void
__intel_de_write(struct intel_display *display, i915_reg_t reg, u32 val)
{
	intel_dmc_wl_get(display, reg);

	intel_uncore_write(__to_uncore(display), reg, val);

	intel_dmc_wl_put(display, reg);
}
#define intel_de_write(p,...) __intel_de_write(__to_intel_display(p), __VA_ARGS__)

static inline u32
__intel_de_rmw_nowl(struct intel_display *display, i915_reg_t reg,
		    u32 clear, u32 set)
{
	return intel_uncore_rmw(__to_uncore(display), reg, clear, set);
}

static inline u32
__intel_de_rmw(struct intel_display *display, i915_reg_t reg, u32 clear,
	       u32 set)
{
	u32 val;

	intel_dmc_wl_get(display, reg);

	val = __intel_de_rmw_nowl(display, reg, clear, set);

	intel_dmc_wl_put(display, reg);

	return val;
}
#define intel_de_rmw(p,...) __intel_de_rmw(__to_intel_display(p), __VA_ARGS__)

static inline int
__intel_de_wait_for_register_nowl(struct intel_display *display,
				  i915_reg_t reg,
				  u32 mask, u32 value, unsigned int timeout)
{
	return intel_wait_for_register(__to_uncore(display), reg, mask,
				       value, timeout);
}

static inline int
intel_de_wait(struct intel_display *display, i915_reg_t reg,
	      u32 mask, u32 value, unsigned int timeout)
{
	int ret;

	intel_dmc_wl_get(display, reg);

	ret = __intel_de_wait_for_register_nowl(display, reg, mask, value,
						timeout);

	intel_dmc_wl_put(display, reg);

	return ret;
}

static inline int
intel_de_wait_fw(struct intel_display *display, i915_reg_t reg,
		 u32 mask, u32 value, unsigned int timeout)
{
	int ret;

	intel_dmc_wl_get(display, reg);

	ret = intel_wait_for_register_fw(__to_uncore(display), reg, mask,
					 value, timeout);

	intel_dmc_wl_put(display, reg);

	return ret;
}

static inline int
intel_de_wait_custom(struct intel_display *display, i915_reg_t reg,
		     u32 mask, u32 value,
		     unsigned int fast_timeout_us,
		     unsigned int slow_timeout_ms, u32 *out_value)
{
	int ret;

	intel_dmc_wl_get(display, reg);

	ret = __intel_wait_for_register(__to_uncore(display), reg, mask,
					value,
					fast_timeout_us, slow_timeout_ms, out_value);

	intel_dmc_wl_put(display, reg);

	return ret;
}

static inline int
__intel_de_wait_for_set(struct intel_display *display, i915_reg_t reg,
			u32 mask, unsigned int timeout)
{
	return intel_de_wait(display, reg, mask, mask, timeout);
}
#define intel_de_wait_for_set(p,...) __intel_de_wait_for_set(__to_intel_display(p), __VA_ARGS__)

static inline int
__intel_de_wait_for_clear(struct intel_display *display, i915_reg_t reg,
			  u32 mask, unsigned int timeout)
{
	return intel_de_wait(display, reg, mask, 0, timeout);
}
#define intel_de_wait_for_clear(p,...) __intel_de_wait_for_clear(__to_intel_display(p), __VA_ARGS__)

/*
 * Unlocked mmio-accessors, think carefully before using these.
 *
 * Certain architectures will die if the same cacheline is concurrently accessed
 * by different clients (e.g. on Ivybridge). Access to registers should
 * therefore generally be serialised, by either the dev_priv->uncore.lock or
 * a more localised lock guarding all access to that bank of registers.
 */
static inline u32
__intel_de_read_fw(struct intel_display *display, i915_reg_t reg)
{
	u32 val;

	val = intel_uncore_read_fw(__to_uncore(display), reg);
	trace_i915_reg_rw(false, reg, val, sizeof(val), true);

	return val;
}
#define intel_de_read_fw(p,...) __intel_de_read_fw(__to_intel_display(p), __VA_ARGS__)

static inline void
__intel_de_write_fw(struct intel_display *display, i915_reg_t reg, u32 val)
{
	trace_i915_reg_rw(true, reg, val, sizeof(val), true);
	intel_uncore_write_fw(__to_uncore(display), reg, val);
}
#define intel_de_write_fw(p,...) __intel_de_write_fw(__to_intel_display(p), __VA_ARGS__)

static inline u32
intel_de_read_notrace(struct intel_display *display, i915_reg_t reg)
{
	return intel_uncore_read_notrace(__to_uncore(display), reg);
}

static inline void
intel_de_write_notrace(struct intel_display *display, i915_reg_t reg, u32 val)
{
	intel_uncore_write_notrace(__to_uncore(display), reg, val);
}

static __always_inline void
intel_de_write_dsb(struct intel_display *display, struct intel_dsb *dsb,
		   i915_reg_t reg, u32 val)
{
	if (dsb)
		intel_dsb_reg_write(dsb, reg, val);
	else
		intel_de_write_fw(display, reg, val);
}

#endif /* __INTEL_DE_H__ */
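
In the v6.13.7 version every accessor takes a struct intel_display * (the macro wrappers convert other pointer types via __to_intel_display()) and brackets the access with intel_dmc_wl_get()/intel_dmc_wl_put(), so callers no longer manage the DMC wakelock themselves, and intel_de_write_dsb() either queues the write on a DSB or falls back to an immediate unlocked write. A minimal caller sketch; the helper name, control register and enable bit are hypothetical placeholders, the accessors are the ones defined above:

#include "intel_de.h"

/* Hypothetical illustration of the display-based accessors and the DSB path. */
static void enable_hypothetical_block(struct intel_display *display,
				      struct intel_dsb *dsb,
				      i915_reg_t ctl_reg, u32 enable_bit)
{
	/*
	 * intel_de_write_dsb() queues the write on the DSB when one is being
	 * built, otherwise it performs an immediate unlocked write.
	 */
	intel_de_write_dsb(display, dsb, ctl_reg, enable_bit);

	if (dsb)
		return;

	/* Immediate path: poll for the bit; the timeout is in milliseconds. */
	if (intel_de_wait_for_set(display, ctl_reg, enable_bit, 10))
		drm_err(display->drm, "0x%x: enable bit did not stick\n",
			i915_mmio_reg_offset(ctl_reg));
}
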