v4.6
 
#ifndef __NOUVEAU_BO_H__
#define __NOUVEAU_BO_H__

#include <drm/drm_gem.h>

struct nouveau_channel;
struct nouveau_fence;
struct nvkm_vma;

struct nouveau_bo {
	struct ttm_buffer_object bo;
	struct ttm_placement placement;
	u32 valid_domains;
	struct ttm_place placements[3];
	struct ttm_place busy_placements[3];
	bool force_coherent;
	struct ttm_bo_kmap_obj kmap;
	struct list_head head;

	/* protected by ttm_bo_reserve() */
	struct drm_file *reserved_by;
	struct list_head entry;
	int pbbo_index;
	bool validate_mapped;

	struct list_head vma_list;
	unsigned page_shift;

	u32 tile_mode;
	u32 tile_flags;
	struct nouveau_drm_tile *tile;

	/* Only valid if allocated via nouveau_gem_new() and iff you hold a
	 * gem reference to it! For debugging, use gem.filp != NULL to test
	 * whether it is valid. */
	struct drm_gem_object gem;

	/* protected by the ttm reservation lock */
	int pin_refcnt;

	struct ttm_bo_kmap_obj dma_buf_vmap;
};

static inline struct nouveau_bo *
nouveau_bo(struct ttm_buffer_object *bo)
{
	return container_of(bo, struct nouveau_bo, bo);
}

static inline int
nouveau_bo_ref(struct nouveau_bo *ref, struct nouveau_bo **pnvbo)
{
	struct nouveau_bo *prev;

	if (!pnvbo)
		return -EINVAL;
	prev = *pnvbo;

	*pnvbo = ref ? nouveau_bo(ttm_bo_reference(&ref->bo)) : NULL;
	if (prev) {
		struct ttm_buffer_object *bo = &prev->bo;

		ttm_bo_unref(&bo);
	}

	return 0;
}

extern struct ttm_bo_driver nouveau_bo_driver;

void nouveau_bo_move_init(struct nouveau_drm *);
int  nouveau_bo_new(struct drm_device *, int size, int align, u32 flags,
		    u32 tile_mode, u32 tile_flags, struct sg_table *sg,
		    struct reservation_object *robj,
		    struct nouveau_bo **);
int  nouveau_bo_pin(struct nouveau_bo *, u32 flags, bool contig);
int  nouveau_bo_unpin(struct nouveau_bo *);
int  nouveau_bo_map(struct nouveau_bo *);
void nouveau_bo_unmap(struct nouveau_bo *);
void nouveau_bo_placement_set(struct nouveau_bo *, u32 type, u32 busy);
void nouveau_bo_wr16(struct nouveau_bo *, unsigned index, u16 val);
u32  nouveau_bo_rd32(struct nouveau_bo *, unsigned index);
void nouveau_bo_wr32(struct nouveau_bo *, unsigned index, u32 val);
void nouveau_bo_fence(struct nouveau_bo *, struct nouveau_fence *, bool exclusive);
int  nouveau_bo_validate(struct nouveau_bo *, bool interruptible,
			 bool no_wait_gpu);
void nouveau_bo_sync_for_device(struct nouveau_bo *nvbo);
void nouveau_bo_sync_for_cpu(struct nouveau_bo *nvbo);

struct nvkm_vma *
nouveau_bo_vma_find(struct nouveau_bo *, struct nvkm_vm *);

int  nouveau_bo_vma_add(struct nouveau_bo *, struct nvkm_vm *,
			struct nvkm_vma *);
void nouveau_bo_vma_del(struct nouveau_bo *, struct nvkm_vma *);

/* TODO: submit equivalent to TTM generic API upstream? */
static inline void __iomem *
nvbo_kmap_obj_iovirtual(struct nouveau_bo *nvbo)
{
	bool is_iomem;
	void __iomem *ioptr = (void __force __iomem *)ttm_kmap_obj_virtual(
						&nvbo->kmap, &is_iomem);
	WARN_ON_ONCE(ioptr && !is_iomem);
	return ioptr;
}

#endif
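
For context, a minimal usage sketch of the v4.6 interface above: allocate a buffer object, pin it into VRAM, CPU-map it, and tear it down again. This is not code from the kernel tree; example_bo_setup() is a hypothetical caller, and the TTM_PL_FLAG_VRAM placement flag, size, and alignment values are assumptions chosen for illustration.

static int
example_bo_setup(struct drm_device *dev, struct nouveau_bo **pnvbo)
{
	struct nouveau_bo *nvbo = NULL;
	int ret;

	/* Hedged sketch, not part of this header. Create a 4 KiB buffer
	 * object placeable in VRAM (flag, size and alignment are assumed). */
	ret = nouveau_bo_new(dev, 4096, 0, TTM_PL_FLAG_VRAM,
			     0, 0, NULL, NULL, &nvbo);
	if (ret)
		return ret;

	/* Pin it so it cannot be evicted, then map it for CPU access. */
	ret = nouveau_bo_pin(nvbo, TTM_PL_FLAG_VRAM, false);
	if (ret)
		goto err_unref;

	ret = nouveau_bo_map(nvbo);
	if (ret)
		goto err_unpin;

	nouveau_bo_wr32(nvbo, 0, 0);	/* write dword index 0 */
	*pnvbo = nvbo;
	return 0;

err_unpin:
	nouveau_bo_unpin(nvbo);
err_unref:
	nouveau_bo_ref(NULL, &nvbo);	/* drops the reference taken above */
	return ret;
}

Teardown mirrors the setup order: nouveau_bo_unmap(), nouveau_bo_unpin(), then nouveau_bo_ref(NULL, &nvbo) to release the reference.
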
v6.8
/* SPDX-License-Identifier: MIT */
#ifndef __NOUVEAU_BO_H__
#define __NOUVEAU_BO_H__
#include <drm/drm_gem.h>
#include <drm/ttm/ttm_bo.h>
#include <drm/ttm/ttm_placement.h>

struct nouveau_channel;
struct nouveau_cli;
struct nouveau_drm;
struct nouveau_fence;

struct nouveau_bo {
	struct ttm_buffer_object bo;
	struct ttm_placement placement;
	u32 valid_domains;
	struct ttm_place placements[3];
	struct ttm_place busy_placements[3];
	bool force_coherent;
	struct ttm_bo_kmap_obj kmap;
	struct list_head head;
	struct list_head io_reserve_lru;

	/* protected by ttm_bo_reserve() */
	struct drm_file *reserved_by;
	struct list_head entry;
	int pbbo_index;
	bool validate_mapped;

	/* Root GEM object we derive the dma_resv of in case this BO is not
	 * shared between VMs.
	 */
	struct drm_gem_object *r_obj;
	bool no_share;

	/* GPU address space is independent of CPU word size */
	uint64_t offset;

	struct list_head vma_list;

	unsigned contig:1;
	unsigned page:5;
	unsigned kind:8;
	unsigned comp:3;
	unsigned zeta:3;
	unsigned mode;

	struct nouveau_drm_tile *tile;
};

static inline struct nouveau_bo *
nouveau_bo(struct ttm_buffer_object *bo)
{
	return container_of(bo, struct nouveau_bo, bo);
}

static inline int
nouveau_bo_ref(struct nouveau_bo *ref, struct nouveau_bo **pnvbo)
{
	struct nouveau_bo *prev;

	if (!pnvbo)
		return -EINVAL;
	prev = *pnvbo;

	if (ref) {
		ttm_bo_get(&ref->bo);
		*pnvbo = nouveau_bo(&ref->bo);
	} else {
		*pnvbo = NULL;
	}
	if (prev)
		ttm_bo_put(&prev->bo);

	return 0;
}

extern struct ttm_device_funcs nouveau_bo_driver;

void nouveau_bo_move_init(struct nouveau_drm *);
struct nouveau_bo *nouveau_bo_alloc(struct nouveau_cli *, u64 *size, int *align,
				    u32 domain, u32 tile_mode, u32 tile_flags, bool internal);
int  nouveau_bo_init(struct nouveau_bo *, u64 size, int align, u32 domain,
		     struct sg_table *sg, struct dma_resv *robj);
int  nouveau_bo_new(struct nouveau_cli *, u64 size, int align, u32 domain,
		    u32 tile_mode, u32 tile_flags, struct sg_table *sg,
		    struct dma_resv *robj,
		    struct nouveau_bo **);
int  nouveau_bo_pin(struct nouveau_bo *, u32 flags, bool contig);
int  nouveau_bo_unpin(struct nouveau_bo *);
int  nouveau_bo_map(struct nouveau_bo *);
void nouveau_bo_unmap(struct nouveau_bo *);
void nouveau_bo_placement_set(struct nouveau_bo *, u32 type, u32 busy);
void nouveau_bo_wr16(struct nouveau_bo *, unsigned index, u16 val);
u32  nouveau_bo_rd32(struct nouveau_bo *, unsigned index);
void nouveau_bo_wr32(struct nouveau_bo *, unsigned index, u32 val);
vm_fault_t nouveau_ttm_fault_reserve_notify(struct ttm_buffer_object *bo);
void nouveau_bo_fence(struct nouveau_bo *, struct nouveau_fence *, bool exclusive);
int  nouveau_bo_validate(struct nouveau_bo *, bool interruptible,
			 bool no_wait_gpu);
void nouveau_bo_sync_for_device(struct nouveau_bo *nvbo);
void nouveau_bo_sync_for_cpu(struct nouveau_bo *nvbo);
void nouveau_bo_add_io_reserve_lru(struct ttm_buffer_object *bo);
void nouveau_bo_del_io_reserve_lru(struct ttm_buffer_object *bo);

/* TODO: submit equivalent to TTM generic API upstream? */
static inline void __iomem *
nvbo_kmap_obj_iovirtual(struct nouveau_bo *nvbo)
{
	bool is_iomem;
	void __iomem *ioptr = (void __force __iomem *)ttm_kmap_obj_virtual(
						&nvbo->kmap, &is_iomem);
	WARN_ON_ONCE(ioptr && !is_iomem);
	return ioptr;
}

static inline void
nouveau_bo_unmap_unpin_unref(struct nouveau_bo **pnvbo)
{
	if (*pnvbo) {
		nouveau_bo_unmap(*pnvbo);
		nouveau_bo_unpin(*pnvbo);
		nouveau_bo_ref(NULL, pnvbo);
	}
}

static inline int
nouveau_bo_new_pin_map(struct nouveau_cli *cli, u64 size, int align, u32 domain,
		       struct nouveau_bo **pnvbo)
{
	int ret = nouveau_bo_new(cli, size, align, domain,
				 0, 0, NULL, NULL, pnvbo);
	if (ret == 0) {
		ret = nouveau_bo_pin(*pnvbo, domain, true);
		if (ret == 0) {
			ret = nouveau_bo_map(*pnvbo);
			if (ret == 0)
				return ret;
			nouveau_bo_unpin(*pnvbo);
		}
		nouveau_bo_ref(NULL, pnvbo);
	}
	return ret;
}

int nv04_bo_move_init(struct nouveau_channel *, u32);
int nv04_bo_move_m2mf(struct nouveau_channel *, struct ttm_buffer_object *,
		      struct ttm_resource *, struct ttm_resource *);

int nv50_bo_move_init(struct nouveau_channel *, u32);
int nv50_bo_move_m2mf(struct nouveau_channel *, struct ttm_buffer_object *,
		      struct ttm_resource *, struct ttm_resource *);

int nv84_bo_move_exec(struct nouveau_channel *, struct ttm_buffer_object *,
		      struct ttm_resource *, struct ttm_resource *);

int nva3_bo_move_copy(struct nouveau_channel *, struct ttm_buffer_object *,
		      struct ttm_resource *, struct ttm_resource *);

int nvc0_bo_move_init(struct nouveau_channel *, u32);
int nvc0_bo_move_m2mf(struct nouveau_channel *, struct ttm_buffer_object *,
		      struct ttm_resource *, struct ttm_resource *);

int nvc0_bo_move_copy(struct nouveau_channel *, struct ttm_buffer_object *,
		      struct ttm_resource *, struct ttm_resource *);

int nve0_bo_move_init(struct nouveau_channel *, u32);
int nve0_bo_move_copy(struct nouveau_channel *, struct ttm_buffer_object *,
		      struct ttm_resource *, struct ttm_resource *);

#define NVBO_WR32_(b,o,dr,f) nouveau_bo_wr32((b), (o)/4 + (dr), (f))
#define NVBO_RD32_(b,o,dr)   nouveau_bo_rd32((b), (o)/4 + (dr))
#define NVBO_RD32(A...) DRF_RD(NVBO_RD32_,                  ##A)
#define NVBO_RV32(A...) DRF_RV(NVBO_RD32_,                  ##A)
#define NVBO_TV32(A...) DRF_TV(NVBO_RD32_,                  ##A)
#define NVBO_TD32(A...) DRF_TD(NVBO_RD32_,                  ##A)
#define NVBO_WR32(A...) DRF_WR(            NVBO_WR32_,      ##A)
#define NVBO_WV32(A...) DRF_WV(            NVBO_WR32_,      ##A)
#define NVBO_WD32(A...) DRF_WD(            NVBO_WR32_,      ##A)
#define NVBO_MR32(A...) DRF_MR(NVBO_RD32_, NVBO_WR32_, u32, ##A)
#define NVBO_MV32(A...) DRF_MV(NVBO_RD32_, NVBO_WR32_, u32, ##A)
#define NVBO_MD32(A...) DRF_MD(NVBO_RD32_, NVBO_WR32_, u32, ##A)
#endif
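
For the v6.8 interface, the header itself already bundles the common pattern into nouveau_bo_new_pin_map() and nouveau_bo_unmap_unpin_unref(). Below is a minimal sketch of how a driver-internal buffer might use them, assuming NOUVEAU_GEM_DOMAIN_VRAM from uapi/drm/nouveau_drm.h as the target domain; example_scratch_init(), example_scratch_fini(), and the size/alignment values are hypothetical and not part of this header.

static int
example_scratch_init(struct nouveau_cli *cli, struct nouveau_bo **pnvbo)
{
	int ret;

	/* Hedged sketch: allocate, pin contiguously in the requested domain
	 * and CPU-map in one call; on failure the helper has already
	 * released everything it took. */
	ret = nouveau_bo_new_pin_map(cli, 0x1000, 0x100,
				     NOUVEAU_GEM_DOMAIN_VRAM, pnvbo);
	if (ret)
		return ret;

	nouveau_bo_wr32(*pnvbo, 0, 0);	/* write dword index 0 */
	return 0;
}

static void
example_scratch_fini(struct nouveau_bo **pnvbo)
{
	/* Unmap, unpin and drop the reference; *pnvbo ends up NULL. */
	nouveau_bo_unmap_unpin_unref(pnvbo);
}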