Linux Audio

Check our new training course

Loading...
v6.2
  1/*
  2 * SPDX-License-Identifier: MIT
  3 *
  4 * Copyright © 2008-2018 Intel Corporation
  5 */
  6
  7#ifndef _I915_GPU_ERROR_H_
  8#define _I915_GPU_ERROR_H_
  9
 10#include <linux/atomic.h>
 11#include <linux/kref.h>
 12#include <linux/ktime.h>
 13#include <linux/sched.h>
 14
 15#include <drm/drm_mm.h>
 16
 17#include "gt/intel_engine.h"
 18#include "gt/intel_gt_types.h"
 19#include "gt/uc/intel_uc_fw.h"
 20
 21#include "intel_device_info.h"
 22
 23#include "i915_gem.h"
 24#include "i915_gem_gtt.h"
 25#include "i915_params.h"
 26#include "i915_scheduler.h"
 27
 28struct drm_i915_private;
 29struct i915_vma_compress;
 30struct intel_engine_capture_vma;
 31struct intel_overlay_error_state;
 
 32
 33struct i915_vma_coredump {
 34	struct i915_vma_coredump *next;
 35
 36	char name[20];
 37
 38	u64 gtt_offset;
 39	u64 gtt_size;
 40	u32 gtt_page_sizes;
 41
 
 
 42	int unused;
 43	struct list_head page_list;
 44};
 45
 46struct i915_request_coredump {
 47	unsigned long flags;
 48	pid_t pid;
 49	u32 context;
 50	u32 seqno;
 51	u32 head;
 52	u32 tail;
 53	struct i915_sched_attr sched_attr;
 54};
 55
 56struct __guc_capture_parsed_output;
 57
 58struct intel_engine_coredump {
 59	const struct intel_engine_cs *engine;
 60
 61	bool hung;
 62	bool simulated;
 63	u32 reset_count;
 64
 65	/* position of active request inside the ring */
 66	u32 rq_head, rq_post, rq_tail;
 67
 68	/* Register state */
 69	u32 ccid;
 70	u32 start;
 71	u32 tail;
 72	u32 head;
 73	u32 ctl;
 74	u32 mode;
 75	u32 hws;
 76	u32 ipeir;
 77	u32 ipehr;
 78	u32 esr;
 79	u32 bbstate;
 80	u32 instpm;
 81	u32 instps;
 82	u64 bbaddr;
 83	u64 acthd;
 84	u32 fault_reg;
 85	u64 faddr;
 86	u32 rc_psmi; /* sleep state */
 87	u32 nopid;
 88	u32 excc;
 89	u32 cmd_cctl;
 90	u32 cscmdop;
 91	u32 ctx_sr_ctl;
 92	u32 dma_faddr_hi;
 93	u32 dma_faddr_lo;
 94	struct intel_instdone instdone;
 95
 96	/* GuC matched capture-lists info */
 97	struct intel_guc_state_capture *capture;
 98	struct __guc_capture_parsed_output *guc_capture_node;
 99
100	struct i915_gem_context_coredump {
101		char comm[TASK_COMM_LEN];
102
103		u64 total_runtime;
104		u64 avg_runtime;
105
106		pid_t pid;
107		int active;
108		int guilty;
109		struct i915_sched_attr sched_attr;
110	} context;
111
112	struct i915_vma_coredump *vma;
113
114	struct i915_request_coredump execlist[EXECLIST_MAX_PORTS];
115	unsigned int num_ports;
116
117	struct {
118		u32 gfx_mode;
119		union {
120			u64 pdp[4];
121			u32 pp_dir_base;
122		};
123	} vm_info;
124
125	struct intel_engine_coredump *next;
126};
127
128struct intel_ctb_coredump {
129	u32 raw_head, head;
130	u32 raw_tail, tail;
131	u32 raw_status;
132	u32 desc_offset;
133	u32 cmds_offset;
134	u32 size;
135};
136
137struct intel_gt_coredump {
138	const struct intel_gt *_gt;
139	bool awake;
140	bool simulated;
141
142	struct intel_gt_info info;
143
144	/* Generic register state */
145	u32 eir;
146	u32 pgtbl_er;
147	u32 ier;
148	u32 gtier[6], ngtier;
 
149	u32 forcewake;
150	u32 error; /* gen6+ */
151	u32 err_int; /* gen7 */
152	u32 fault_data0; /* gen8, gen9 */
153	u32 fault_data1; /* gen8, gen9 */
154	u32 done_reg;
155	u32 gac_eco;
156	u32 gam_ecochk;
157	u32 gab_ctl;
158	u32 gfx_mode;
159	u32 gtt_cache;
160	u32 aux_err; /* gen12 */
 
161	u32 gam_done; /* gen12 */
162	u32 clock_frequency;
163	u32 clock_period_ns;
164
165	/* Display related */
166	u32 derrmr;
167	u32 sfc_done[I915_MAX_SFC]; /* gen12 */
168
169	u32 nfence;
170	u64 fence[I915_MAX_NUM_FENCES];
171
172	struct intel_engine_coredump *engine;
173
174	struct intel_uc_coredump {
175		struct intel_uc_fw guc_fw;
176		struct intel_uc_fw huc_fw;
177		struct guc_info {
178			struct intel_ctb_coredump ctb[2];
179			struct i915_vma_coredump *vma_ctb;
180			struct i915_vma_coredump *vma_log;
181			u32 timestamp;
182			u16 last_fence;
183			bool is_guc_capture;
184		} guc;
185	} *uc;
186
187	struct intel_gt_coredump *next;
188};
189
190struct i915_gpu_coredump {
191	struct kref ref;
192	ktime_t time;
193	ktime_t boottime;
194	ktime_t uptime;
195	unsigned long capture;
196
197	struct drm_i915_private *i915;
198
199	struct intel_gt_coredump *gt;
200
201	char error_msg[128];
202	bool simulated;
203	bool wakelock;
204	bool suspended;
205	int iommu;
206	u32 reset_count;
207	u32 suspend_count;
208
209	struct intel_device_info device_info;
210	struct intel_runtime_info runtime_info;
211	struct intel_driver_caps driver_caps;
212	struct i915_params params;
213
214	struct intel_overlay_error_state *overlay;
 
215
216	struct scatterlist *sgl, *fit;
217};
218
219struct i915_gpu_error {
220	/* For reset and error_state handling. */
221	spinlock_t lock;
222	/* Protected by the above dev->gpu_error.lock. */
223	struct i915_gpu_coredump *first_error;
224
225	atomic_t pending_fb_pin;
226
227	/** Number of times the device has been reset (global) */
228	atomic_t reset_count;
229
230	/** Number of times an engine has been reset */
231	atomic_t reset_engine_count[I915_NUM_ENGINES];
232};
233
234struct drm_i915_error_state_buf {
235	struct drm_i915_private *i915;
236	struct scatterlist *sgl, *cur, *end;
237
238	char *buf;
239	size_t bytes;
240	size_t size;
241	loff_t iter;
242
243	int err;
244};
245
246static inline u32 i915_reset_count(struct i915_gpu_error *error)
247{
248	return atomic_read(&error->reset_count);
249}
250
251static inline u32 i915_reset_engine_count(struct i915_gpu_error *error,
252					  const struct intel_engine_cs *engine)
253{
254	return atomic_read(&error->reset_engine_count[engine->uabi_class]);
255}
256
257#define CORE_DUMP_FLAG_NONE           0x0
258#define CORE_DUMP_FLAG_IS_GUC_CAPTURE BIT(0)
259
260#if IS_ENABLED(CONFIG_DRM_I915_CAPTURE_ERROR)
261
262__printf(2, 3)
263void i915_error_printf(struct drm_i915_error_state_buf *e, const char *f, ...);
264void intel_gpu_error_print_vma(struct drm_i915_error_state_buf *m,
265			       const struct intel_engine_cs *engine,
266			       const struct i915_vma_coredump *vma);
267struct i915_vma_coredump *
268intel_gpu_error_find_batch(const struct intel_engine_coredump *ee);
269
270struct i915_gpu_coredump *i915_gpu_coredump(struct intel_gt *gt,
271					    intel_engine_mask_t engine_mask, u32 dump_flags);
272void i915_capture_error_state(struct intel_gt *gt,
273			      intel_engine_mask_t engine_mask, u32 dump_flags);
274
275struct i915_gpu_coredump *
276i915_gpu_coredump_alloc(struct drm_i915_private *i915, gfp_t gfp);
277
278struct intel_gt_coredump *
279intel_gt_coredump_alloc(struct intel_gt *gt, gfp_t gfp, u32 dump_flags);
280
281struct intel_engine_coredump *
282intel_engine_coredump_alloc(struct intel_engine_cs *engine, gfp_t gfp, u32 dump_flags);
283
284struct intel_engine_capture_vma *
285intel_engine_coredump_add_request(struct intel_engine_coredump *ee,
286				  struct i915_request *rq,
287				  gfp_t gfp);
288
289void intel_engine_coredump_add_vma(struct intel_engine_coredump *ee,
290				   struct intel_engine_capture_vma *capture,
291				   struct i915_vma_compress *compress);
292
293struct i915_vma_compress *
294i915_vma_capture_prepare(struct intel_gt_coredump *gt);
295
296void i915_vma_capture_finish(struct intel_gt_coredump *gt,
297			     struct i915_vma_compress *compress);
298
299void i915_error_state_store(struct i915_gpu_coredump *error);
300
301static inline struct i915_gpu_coredump *
302i915_gpu_coredump_get(struct i915_gpu_coredump *gpu)
303{
304	kref_get(&gpu->ref);
305	return gpu;
306}
307
308ssize_t
309i915_gpu_coredump_copy_to_buffer(struct i915_gpu_coredump *error,
310				 char *buf, loff_t offset, size_t count);
311
312void __i915_gpu_coredump_free(struct kref *kref);
313static inline void i915_gpu_coredump_put(struct i915_gpu_coredump *gpu)
314{
315	if (gpu)
316		kref_put(&gpu->ref, __i915_gpu_coredump_free);
317}
318
319struct i915_gpu_coredump *i915_first_error_state(struct drm_i915_private *i915);
320void i915_reset_error_state(struct drm_i915_private *i915);
321void i915_disable_error_state(struct drm_i915_private *i915, int err);
322
323#else
324
325__printf(2, 3)
326static inline void
327i915_error_printf(struct drm_i915_error_state_buf *e, const char *f, ...)
328{
329}
330
331static inline void
332i915_capture_error_state(struct intel_gt *gt, intel_engine_mask_t engine_mask, u32 dump_flags)
333{
334}
335
336static inline struct i915_gpu_coredump *
337i915_gpu_coredump_alloc(struct drm_i915_private *i915, gfp_t gfp)
338{
339	return NULL;
340}
341
342static inline struct intel_gt_coredump *
343intel_gt_coredump_alloc(struct intel_gt *gt, gfp_t gfp, u32 dump_flags)
344{
345	return NULL;
346}
347
348static inline struct intel_engine_coredump *
349intel_engine_coredump_alloc(struct intel_engine_cs *engine, gfp_t gfp, u32 dump_flags)
350{
351	return NULL;
352}
353
354static inline struct intel_engine_capture_vma *
355intel_engine_coredump_add_request(struct intel_engine_coredump *ee,
356				  struct i915_request *rq,
357				  gfp_t gfp)
358{
359	return NULL;
360}
361
362static inline void
363intel_engine_coredump_add_vma(struct intel_engine_coredump *ee,
364			      struct intel_engine_capture_vma *capture,
365			      struct i915_vma_compress *compress)
366{
367}
368
369static inline struct i915_vma_compress *
370i915_vma_capture_prepare(struct intel_gt_coredump *gt)
371{
372	return NULL;
373}
374
375static inline void
376i915_vma_capture_finish(struct intel_gt_coredump *gt,
377			struct i915_vma_compress *compress)
378{
379}
380
381static inline void
382i915_error_state_store(struct i915_gpu_coredump *error)
383{
384}
385
386static inline void i915_gpu_coredump_put(struct i915_gpu_coredump *gpu)
387{
388}
389
390static inline struct i915_gpu_coredump *
391i915_first_error_state(struct drm_i915_private *i915)
392{
393	return ERR_PTR(-ENODEV);
394}
395
396static inline void i915_reset_error_state(struct drm_i915_private *i915)
397{
398}
399
400static inline void i915_disable_error_state(struct drm_i915_private *i915,
401					    int err)
402{
403}
404
405#endif /* IS_ENABLED(CONFIG_DRM_I915_CAPTURE_ERROR) */
406
407#endif /* _I915_GPU_ERROR_H_ */
v5.9
  1/*
  2 * SPDX-License-Identifier: MIT
  3 *
  4 * Copyright © 2008-2018 Intel Corporation
  5 */
  6
  7#ifndef _I915_GPU_ERROR_H_
  8#define _I915_GPU_ERROR_H_
  9
 10#include <linux/atomic.h>
 11#include <linux/kref.h>
 12#include <linux/ktime.h>
 13#include <linux/sched.h>
 14
 15#include <drm/drm_mm.h>
 16
 17#include "gt/intel_engine.h"
 18#include "gt/intel_gt_types.h"
 19#include "gt/uc/intel_uc_fw.h"
 20
 21#include "intel_device_info.h"
 22
 23#include "i915_gem.h"
 24#include "i915_gem_gtt.h"
 25#include "i915_params.h"
 26#include "i915_scheduler.h"
 27
 28struct drm_i915_private;
 29struct i915_vma_compress;
 30struct intel_engine_capture_vma;
 31struct intel_overlay_error_state;
 32struct intel_display_error_state;
 33
 34struct i915_vma_coredump {
 35	struct i915_vma_coredump *next;
 36
 37	char name[20];
 38
 39	u64 gtt_offset;
 40	u64 gtt_size;
 41	u32 gtt_page_sizes;
 42
 43	int num_pages;
 44	int page_count;
 45	int unused;
 46	u32 *pages[];
 47};
 48
 49struct i915_request_coredump {
 50	unsigned long flags;
 51	pid_t pid;
 52	u32 context;
 53	u32 seqno;
 54	u32 head;
 55	u32 tail;
 56	struct i915_sched_attr sched_attr;
 57};
 58
 
 
 59struct intel_engine_coredump {
 60	const struct intel_engine_cs *engine;
 61
 
 62	bool simulated;
 63	u32 reset_count;
 64
 65	/* position of active request inside the ring */
 66	u32 rq_head, rq_post, rq_tail;
 67
 68	/* Register state */
 69	u32 ccid;
 70	u32 start;
 71	u32 tail;
 72	u32 head;
 73	u32 ctl;
 74	u32 mode;
 75	u32 hws;
 76	u32 ipeir;
 77	u32 ipehr;
 78	u32 esr;
 79	u32 bbstate;
 80	u32 instpm;
 81	u32 instps;
 82	u64 bbaddr;
 83	u64 acthd;
 84	u32 fault_reg;
 85	u64 faddr;
 86	u32 rc_psmi; /* sleep state */
 
 
 
 
 
 
 
 87	struct intel_instdone instdone;
 88
 
 
 
 
 89	struct i915_gem_context_coredump {
 90		char comm[TASK_COMM_LEN];
 91
 92		u64 total_runtime;
 93		u32 avg_runtime;
 94
 95		pid_t pid;
 96		int active;
 97		int guilty;
 98		struct i915_sched_attr sched_attr;
 99	} context;
100
101	struct i915_vma_coredump *vma;
102
103	struct i915_request_coredump execlist[EXECLIST_MAX_PORTS];
104	unsigned int num_ports;
105
106	struct {
107		u32 gfx_mode;
108		union {
109			u64 pdp[4];
110			u32 pp_dir_base;
111		};
112	} vm_info;
113
114	struct intel_engine_coredump *next;
115};
116
 
 
 
 
 
 
 
 
 
117struct intel_gt_coredump {
118	const struct intel_gt *_gt;
119	bool awake;
120	bool simulated;
121
122	struct intel_gt_info info;
123
124	/* Generic register state */
125	u32 eir;
126	u32 pgtbl_er;
127	u32 ier;
128	u32 gtier[6], ngtier;
129	u32 derrmr;
130	u32 forcewake;
131	u32 error; /* gen6+ */
132	u32 err_int; /* gen7 */
133	u32 fault_data0; /* gen8, gen9 */
134	u32 fault_data1; /* gen8, gen9 */
135	u32 done_reg;
136	u32 gac_eco;
137	u32 gam_ecochk;
138	u32 gab_ctl;
139	u32 gfx_mode;
140	u32 gtt_cache;
141	u32 aux_err; /* gen12 */
142	u32 sfc_done[GEN12_SFC_DONE_MAX]; /* gen12 */
143	u32 gam_done; /* gen12 */
 
 
 
 
 
 
144
145	u32 nfence;
146	u64 fence[I915_MAX_NUM_FENCES];
147
148	struct intel_engine_coredump *engine;
149
150	struct intel_uc_coredump {
151		struct intel_uc_fw guc_fw;
152		struct intel_uc_fw huc_fw;
153		struct i915_vma_coredump *guc_log;
 
 
 
 
 
 
 
154	} *uc;
155
156	struct intel_gt_coredump *next;
157};
158
159struct i915_gpu_coredump {
160	struct kref ref;
161	ktime_t time;
162	ktime_t boottime;
163	ktime_t uptime;
164	unsigned long capture;
165
166	struct drm_i915_private *i915;
167
168	struct intel_gt_coredump *gt;
169
170	char error_msg[128];
171	bool simulated;
172	bool wakelock;
173	bool suspended;
174	int iommu;
175	u32 reset_count;
176	u32 suspend_count;
177
178	struct intel_device_info device_info;
179	struct intel_runtime_info runtime_info;
180	struct intel_driver_caps driver_caps;
181	struct i915_params params;
182
183	struct intel_overlay_error_state *overlay;
184	struct intel_display_error_state *display;
185
186	struct scatterlist *sgl, *fit;
187};
188
189struct i915_gpu_error {
190	/* For reset and error_state handling. */
191	spinlock_t lock;
192	/* Protected by the above dev->gpu_error.lock. */
193	struct i915_gpu_coredump *first_error;
194
195	atomic_t pending_fb_pin;
196
197	/** Number of times the device has been reset (global) */
198	atomic_t reset_count;
199
200	/** Number of times an engine has been reset */
201	atomic_t reset_engine_count[I915_NUM_ENGINES];
202};
203
204struct drm_i915_error_state_buf {
205	struct drm_i915_private *i915;
206	struct scatterlist *sgl, *cur, *end;
207
208	char *buf;
209	size_t bytes;
210	size_t size;
211	loff_t iter;
212
213	int err;
214};
215
 
 
 
 
 
 
 
 
 
 
 
 
 
 
216#if IS_ENABLED(CONFIG_DRM_I915_CAPTURE_ERROR)
217
218__printf(2, 3)
219void i915_error_printf(struct drm_i915_error_state_buf *e, const char *f, ...);
220
221struct i915_gpu_coredump *i915_gpu_coredump(struct drm_i915_private *i915);
222void i915_capture_error_state(struct drm_i915_private *i915);
 
 
 
 
 
 
 
223
224struct i915_gpu_coredump *
225i915_gpu_coredump_alloc(struct drm_i915_private *i915, gfp_t gfp);
226
227struct intel_gt_coredump *
228intel_gt_coredump_alloc(struct intel_gt *gt, gfp_t gfp);
229
230struct intel_engine_coredump *
231intel_engine_coredump_alloc(struct intel_engine_cs *engine, gfp_t gfp);
232
233struct intel_engine_capture_vma *
234intel_engine_coredump_add_request(struct intel_engine_coredump *ee,
235				  struct i915_request *rq,
236				  gfp_t gfp);
237
238void intel_engine_coredump_add_vma(struct intel_engine_coredump *ee,
239				   struct intel_engine_capture_vma *capture,
240				   struct i915_vma_compress *compress);
241
242struct i915_vma_compress *
243i915_vma_capture_prepare(struct intel_gt_coredump *gt);
244
245void i915_vma_capture_finish(struct intel_gt_coredump *gt,
246			     struct i915_vma_compress *compress);
247
248void i915_error_state_store(struct i915_gpu_coredump *error);
249
250static inline struct i915_gpu_coredump *
251i915_gpu_coredump_get(struct i915_gpu_coredump *gpu)
252{
253	kref_get(&gpu->ref);
254	return gpu;
255}
256
257ssize_t
258i915_gpu_coredump_copy_to_buffer(struct i915_gpu_coredump *error,
259				 char *buf, loff_t offset, size_t count);
260
261void __i915_gpu_coredump_free(struct kref *kref);
262static inline void i915_gpu_coredump_put(struct i915_gpu_coredump *gpu)
263{
264	if (gpu)
265		kref_put(&gpu->ref, __i915_gpu_coredump_free);
266}
267
268struct i915_gpu_coredump *i915_first_error_state(struct drm_i915_private *i915);
269void i915_reset_error_state(struct drm_i915_private *i915);
270void i915_disable_error_state(struct drm_i915_private *i915, int err);
271
272#else
273
274static inline void i915_capture_error_state(struct drm_i915_private *i915)
 
 
 
 
 
 
 
275{
276}
277
278static inline struct i915_gpu_coredump *
279i915_gpu_coredump_alloc(struct drm_i915_private *i915, gfp_t gfp)
280{
281	return NULL;
282}
283
284static inline struct intel_gt_coredump *
285intel_gt_coredump_alloc(struct intel_gt *gt, gfp_t gfp)
286{
287	return NULL;
288}
289
290static inline struct intel_engine_coredump *
291intel_engine_coredump_alloc(struct intel_engine_cs *engine, gfp_t gfp)
292{
293	return NULL;
294}
295
296static inline struct intel_engine_capture_vma *
297intel_engine_coredump_add_request(struct intel_engine_coredump *ee,
298				  struct i915_request *rq,
299				  gfp_t gfp)
300{
301	return NULL;
302}
303
304static inline void
305intel_engine_coredump_add_vma(struct intel_engine_coredump *ee,
306			      struct intel_engine_capture_vma *capture,
307			      struct i915_vma_compress *compress)
308{
309}
310
311static inline struct i915_vma_compress *
312i915_vma_capture_prepare(struct intel_gt_coredump *gt)
313{
314	return NULL;
315}
316
317static inline void
318i915_vma_capture_finish(struct intel_gt_coredump *gt,
319			struct i915_vma_compress *compress)
320{
321}
322
323static inline void
324i915_error_state_store(struct i915_gpu_coredump *error)
325{
326}
327
328static inline void i915_gpu_coredump_put(struct i915_gpu_coredump *gpu)
329{
330}
331
332static inline struct i915_gpu_coredump *
333i915_first_error_state(struct drm_i915_private *i915)
334{
335	return ERR_PTR(-ENODEV);
336}
337
338static inline void i915_reset_error_state(struct drm_i915_private *i915)
339{
340}
341
342static inline void i915_disable_error_state(struct drm_i915_private *i915,
343					    int err)
344{
345}
346
347#endif /* IS_ENABLED(CONFIG_DRM_I915_CAPTURE_ERROR) */
348
349#endif /* _I915_GPU_ERROR_H_ */