1848b8605Smrg#ifndef __NOUVEAU_BUFFER_H__
2848b8605Smrg#define __NOUVEAU_BUFFER_H__
3848b8605Smrg
4848b8605Smrg#include "util/u_range.h"
5848b8605Smrg#include "util/u_transfer.h"
6b8e80941Smrg#include "util/list.h"
7848b8605Smrg
8848b8605Smrgstruct pipe_resource;
9848b8605Smrgstruct nouveau_context;
10848b8605Smrgstruct nouveau_bo;
11848b8605Smrg
12848b8605Smrg/* DIRTY: buffer was (or will be after the next flush) written to by GPU and
13848b8605Smrg *  resource->data has not been updated to reflect modified VRAM contents
14848b8605Smrg *
15848b8605Smrg * USER_MEMORY: resource->data is a pointer to client memory and may change
16848b8605Smrg *  between GL calls
17848b8605Smrg */
18848b8605Smrg#define NOUVEAU_BUFFER_STATUS_GPU_READING (1 << 0)
19848b8605Smrg#define NOUVEAU_BUFFER_STATUS_GPU_WRITING (1 << 1)
20848b8605Smrg#define NOUVEAU_BUFFER_STATUS_DIRTY       (1 << 2)
21848b8605Smrg#define NOUVEAU_BUFFER_STATUS_USER_MEMORY (1 << 7)
22848b8605Smrg
23848b8605Smrg#define NOUVEAU_BUFFER_STATUS_REALLOC_MASK NOUVEAU_BUFFER_STATUS_USER_MEMORY
24848b8605Smrg
25848b8605Smrg/* Resources, if mapped into the GPU's address space, are guaranteed to
26848b8605Smrg * have constant virtual addresses (nv50+).
27848b8605Smrg *
28848b8605Smrg * The address of a resource will lie within the nouveau_bo referenced,
29848b8605Smrg * and this bo should be added to the memory manager's validation list.
30848b8605Smrg */
struct nv04_resource {
   struct pipe_resource base;
   const struct u_resource_vtbl *vtbl;

   uint64_t address; /* virtual address (nv50+) */

   uint8_t *data; /* resource's contents, if domain == 0, or cached */
   struct nouveau_bo *bo;
   uint32_t offset; /* offset into the data/bo */

   uint8_t status; /* bitmask of NOUVEAU_BUFFER_STATUS_* flags above */
   uint8_t domain; /* memory domain; 0 means not mapped into the GPU's
                    * address space (see nouveau_resource_mapped_by_gpu) */

   uint16_t cb_bindings[6]; /* per-shader per-slot bindings */

   /* fences guarding GPU access; fence_wr presumably tracks the last
    * write (cf. STATUS_GPU_WRITING) — confirm against fence code */
   struct nouveau_fence *fence;
   struct nouveau_fence *fence_wr;

   /* NOTE(review): looks like suballocator bookkeeping for bo;
    * NULL when the bo was allocated directly — verify in buffer code */
   struct nouveau_mm_allocation *mm;

   /* buffer range that has been initialized */
   struct util_range valid_buffer_range;
};
54848b8605Smrg
55848b8605Smrgvoid
56848b8605Smrgnouveau_buffer_release_gpu_storage(struct nv04_resource *);
57848b8605Smrg
58848b8605Smrgvoid
59848b8605Smrgnouveau_copy_buffer(struct nouveau_context *,
60848b8605Smrg                    struct nv04_resource *dst, unsigned dst_pos,
61848b8605Smrg                    struct nv04_resource *src, unsigned src_pos, unsigned size);
62848b8605Smrg
63b8e80941Smrgbool
64848b8605Smrgnouveau_buffer_migrate(struct nouveau_context *,
65848b8605Smrg                       struct nv04_resource *, unsigned domain);
66848b8605Smrg
67848b8605Smrgvoid *
68848b8605Smrgnouveau_resource_map_offset(struct nouveau_context *, struct nv04_resource *,
69848b8605Smrg                            uint32_t offset, uint32_t flags);
70848b8605Smrg
/* Counterpart to nouveau_resource_map_offset; intentionally a no-op
 * (there is no per-map state to release), kept so callers can pair
 * map/unmap symmetrically.
 */
static inline void
nouveau_resource_unmap(struct nv04_resource *res)
{
   /* no-op */
}
76848b8605Smrg
/* Down-cast a gallium pipe_resource to the nouveau wrapper; valid
 * because nv04_resource embeds pipe_resource as its first member.
 */
static inline struct nv04_resource *
nv04_resource(struct pipe_resource *resource)
{
   struct nv04_resource *buf = (struct nv04_resource *)resource;
   return buf;
}
82848b8605Smrg
83848b8605Smrg/* is resource mapped into the GPU's address space (i.e. VRAM or GART) ? */
84b8e80941Smrgstatic inline bool
85848b8605Smrgnouveau_resource_mapped_by_gpu(struct pipe_resource *resource)
86848b8605Smrg{
87848b8605Smrg   return nv04_resource(resource)->domain != 0;
88848b8605Smrg}
89848b8605Smrg
90848b8605Smrgstruct pipe_resource *
91848b8605Smrgnouveau_buffer_create(struct pipe_screen *pscreen,
92848b8605Smrg                      const struct pipe_resource *templ);
93848b8605Smrg
94848b8605Smrgstruct pipe_resource *
95848b8605Smrgnouveau_user_buffer_create(struct pipe_screen *screen, void *ptr,
96848b8605Smrg                           unsigned bytes, unsigned usage);
97848b8605Smrg
98b8e80941Smrgbool
99848b8605Smrgnouveau_user_buffer_upload(struct nouveau_context *, struct nv04_resource *,
100848b8605Smrg                           unsigned base, unsigned size);
101848b8605Smrg
102b8e80941Smrgvoid
103b8e80941Smrgnouveau_buffer_invalidate(struct pipe_context *pipe,
104b8e80941Smrg                          struct pipe_resource *resource);
105b8e80941Smrg
106848b8605Smrg/* Copy data to a scratch buffer and return address & bo the data resides in.
107848b8605Smrg * Returns 0 on failure.
108848b8605Smrg */
109848b8605Smrguint64_t
110848b8605Smrgnouveau_scratch_data(struct nouveau_context *,
111848b8605Smrg                     const void *data, unsigned base, unsigned size,
112848b8605Smrg                     struct nouveau_bo **);
113848b8605Smrg
114848b8605Smrg#endif
115