1848b8605Smrg/**************************************************************************
2b8e80941Smrg *
3848b8605Smrg * Copyright 2007 VMware, Inc.
4848b8605Smrg * All Rights Reserved.
5b8e80941Smrg *
6848b8605Smrg * Permission is hereby granted, free of charge, to any person obtaining a
7848b8605Smrg * copy of this software and associated documentation files (the
8848b8605Smrg * "Software"), to deal in the Software without restriction, including
9848b8605Smrg * without limitation the rights to use, copy, modify, merge, publish,
10848b8605Smrg * distribute, sub license, and/or sell copies of the Software, and to
11848b8605Smrg * permit persons to whom the Software is furnished to do so, subject to
12848b8605Smrg * the following conditions:
13b8e80941Smrg *
14848b8605Smrg * The above copyright notice and this permission notice (including the
15848b8605Smrg * next paragraph) shall be included in all copies or substantial portions
16848b8605Smrg * of the Software.
17b8e80941Smrg *
18848b8605Smrg * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
19848b8605Smrg * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
20848b8605Smrg * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
21848b8605Smrg * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
22848b8605Smrg * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
23848b8605Smrg * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
24848b8605Smrg * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
25b8e80941Smrg *
26848b8605Smrg **************************************************************************/
27848b8605Smrg
28848b8605Smrg#ifndef U_INLINES_H
29848b8605Smrg#define U_INLINES_H
30848b8605Smrg
31848b8605Smrg#include "pipe/p_context.h"
32848b8605Smrg#include "pipe/p_defines.h"
33848b8605Smrg#include "pipe/p_shader_tokens.h"
34848b8605Smrg#include "pipe/p_state.h"
35848b8605Smrg#include "pipe/p_screen.h"
36848b8605Smrg#include "util/u_debug.h"
37848b8605Smrg#include "util/u_debug_describe.h"
38848b8605Smrg#include "util/u_debug_refcnt.h"
39848b8605Smrg#include "util/u_atomic.h"
40848b8605Smrg#include "util/u_box.h"
41848b8605Smrg#include "util/u_math.h"
42848b8605Smrg
43848b8605Smrg
44848b8605Smrg#ifdef __cplusplus
45848b8605Smrgextern "C" {
46848b8605Smrg#endif
47848b8605Smrg
48848b8605Smrg
49848b8605Smrg/*
50848b8605Smrg * Reference counting helper functions.
51848b8605Smrg */
52848b8605Smrg
53848b8605Smrg
/**
 * Initialize a reference counter to the given value.
 * Typically called with count = 1 when the owning object is created.
 */
static inline void
pipe_reference_init(struct pipe_reference *dst, unsigned count)
{
   p_atomic_set(&dst->count, count);
}
59848b8605Smrg
60b8e80941Smrgstatic inline boolean
61b8e80941Smrgpipe_is_referenced(struct pipe_reference *src)
62848b8605Smrg{
63b8e80941Smrg   return p_atomic_read(&src->count) != 0;
64848b8605Smrg}
65848b8605Smrg
/**
 * Update reference counting.
 * The old thing pointed to, if any, will be unreferenced.
 * Both 'dst' and 'src' may be NULL.
 * \param get_desc  callback used by the refcount-debugging machinery to
 *                  describe the object in log output
 * \return TRUE if the object's refcount hits zero and should be destroyed.
 */
static inline boolean
pipe_reference_described(struct pipe_reference *dst,
                         struct pipe_reference *src,
                         debug_reference_descriptor get_desc)
{
   if (dst != src) {
      /* bump the src.count first */
      if (src) {
         MAYBE_UNUSED int count = p_atomic_inc_return(&src->count);
         assert(count != 1); /* src had to be referenced */
         debug_reference(src, get_desc, 1);
      }

      if (dst) {
         int count = p_atomic_dec_return(&dst->count);
         assert(count != -1); /* dst had to be referenced */
         debug_reference(dst, get_desc, -1);
         /* last reference dropped: tell the caller to destroy dst */
         if (!count)
            return true;
      }
   }

   return false;
}
96848b8605Smrg
97b8e80941Smrgstatic inline boolean
98b8e80941Smrgpipe_reference(struct pipe_reference *dst, struct pipe_reference *src)
99848b8605Smrg{
100b8e80941Smrg   return pipe_reference_described(dst, src,
101b8e80941Smrg                                   (debug_reference_descriptor)
102b8e80941Smrg                                   debug_describe_reference);
103848b8605Smrg}
104848b8605Smrg
105b8e80941Smrgstatic inline void
106b8e80941Smrgpipe_surface_reference(struct pipe_surface **dst, struct pipe_surface *src)
107848b8605Smrg{
108b8e80941Smrg   struct pipe_surface *old_dst = *dst;
109848b8605Smrg
110b8e80941Smrg   if (pipe_reference_described(&old_dst->reference, &src->reference,
111b8e80941Smrg                                (debug_reference_descriptor)
112b8e80941Smrg                                debug_describe_surface))
113b8e80941Smrg      old_dst->context->surface_destroy(old_dst->context, old_dst);
114b8e80941Smrg   *dst = src;
115848b8605Smrg}
116848b8605Smrg
117848b8605Smrg/**
118848b8605Smrg * Similar to pipe_surface_reference() but always set the pointer to NULL
119848b8605Smrg * and pass in an explicit context.  The explicit context avoids the problem
120848b8605Smrg * of using a deleted context's surface_destroy() method when freeing a surface
121848b8605Smrg * that's shared by multiple contexts.
122848b8605Smrg */
123b8e80941Smrgstatic inline void
124848b8605Smrgpipe_surface_release(struct pipe_context *pipe, struct pipe_surface **ptr)
125848b8605Smrg{
126b8e80941Smrg   struct pipe_surface *old = *ptr;
127b8e80941Smrg
128b8e80941Smrg   if (pipe_reference_described(&old->reference, NULL,
129b8e80941Smrg                                (debug_reference_descriptor)
130b8e80941Smrg                                debug_describe_surface))
131b8e80941Smrg      pipe->surface_destroy(pipe, old);
132848b8605Smrg   *ptr = NULL;
133848b8605Smrg}
134848b8605Smrg
135848b8605Smrg
136b8e80941Smrgstatic inline void
137b8e80941Smrgpipe_resource_reference(struct pipe_resource **dst, struct pipe_resource *src)
138848b8605Smrg{
139b8e80941Smrg   struct pipe_resource *old_dst = *dst;
140848b8605Smrg
141b8e80941Smrg   if (pipe_reference_described(&old_dst->reference, &src->reference,
142b8e80941Smrg                                (debug_reference_descriptor)
143b8e80941Smrg                                debug_describe_resource)) {
144b8e80941Smrg      /* Avoid recursion, which would prevent inlining this function */
145b8e80941Smrg      do {
146b8e80941Smrg         struct pipe_resource *next = old_dst->next;
147848b8605Smrg
148b8e80941Smrg         old_dst->screen->resource_destroy(old_dst->screen, old_dst);
149b8e80941Smrg         old_dst = next;
150b8e80941Smrg      } while (pipe_reference_described(&old_dst->reference, NULL,
151b8e80941Smrg                                        (debug_reference_descriptor)
152b8e80941Smrg                                        debug_describe_resource));
153b8e80941Smrg   }
154b8e80941Smrg   *dst = src;
155848b8605Smrg}
156848b8605Smrg
/**
 * Same as pipe_surface_release, but used when pipe_context doesn't exist
 * anymore: the surface is torn down directly (drop the texture reference,
 * then free the struct) instead of calling context->surface_destroy().
 */
static inline void
pipe_surface_release_no_context(struct pipe_surface **ptr)
{
   struct pipe_surface *surf = *ptr;

   /* NOTE(review): assumes *ptr is non-NULL; &surf->reference on a NULL
    * surf would be undefined behavior — confirm callers guarantee this.
    */
   if (pipe_reference_described(&surf->reference, NULL,
                                (debug_reference_descriptor)
                                debug_describe_surface)) {
      /* trivially destroy pipe_surface */
      pipe_resource_reference(&surf->texture, NULL);
      free(surf);
   }
   *ptr = NULL;
}
175848b8605Smrg
176b8e80941Smrg/**
177b8e80941Smrg * Set *dst to \p src with proper reference counting.
178b8e80941Smrg *
179b8e80941Smrg * The caller must guarantee that \p src and *dst were created in
180b8e80941Smrg * the same context (if they exist), and that this must be the current context.
181b8e80941Smrg */
182b8e80941Smrgstatic inline void
183b8e80941Smrgpipe_sampler_view_reference(struct pipe_sampler_view **dst,
184b8e80941Smrg                            struct pipe_sampler_view *src)
185b8e80941Smrg{
186b8e80941Smrg   struct pipe_sampler_view *old_dst = *dst;
187b8e80941Smrg
188b8e80941Smrg   if (pipe_reference_described(&old_dst->reference, &src->reference,
189b8e80941Smrg                                (debug_reference_descriptor)
190b8e80941Smrg                                debug_describe_sampler_view))
191b8e80941Smrg      old_dst->context->sampler_view_destroy(old_dst->context, old_dst);
192b8e80941Smrg   *dst = src;
193b8e80941Smrg}
194848b8605Smrg
195b8e80941Smrgstatic inline void
196b8e80941Smrgpipe_so_target_reference(struct pipe_stream_output_target **dst,
197b8e80941Smrg                         struct pipe_stream_output_target *src)
198848b8605Smrg{
199b8e80941Smrg   struct pipe_stream_output_target *old_dst = *dst;
200848b8605Smrg
201b8e80941Smrg   if (pipe_reference_described(&old_dst->reference, &src->reference,
202848b8605Smrg                     (debug_reference_descriptor)debug_describe_so_target))
203b8e80941Smrg      old_dst->context->stream_output_target_destroy(old_dst->context, old_dst);
204b8e80941Smrg   *dst = src;
205b8e80941Smrg}
206b8e80941Smrg
207b8e80941Smrgstatic inline void
208b8e80941Smrgpipe_vertex_buffer_unreference(struct pipe_vertex_buffer *dst)
209b8e80941Smrg{
210b8e80941Smrg   if (dst->is_user_buffer)
211b8e80941Smrg      dst->buffer.user = NULL;
212b8e80941Smrg   else
213b8e80941Smrg      pipe_resource_reference(&dst->buffer.resource, NULL);
214b8e80941Smrg}
215b8e80941Smrg
/**
 * Copy 'src' into 'dst' with proper reference counting.
 * The old contents of 'dst' are released first.  For a resource-backed
 * source the refcount is bumped via pipe_resource_reference() before the
 * memcpy; the memcpy then rewrites dst->buffer.resource with the same
 * pointer value, so the final state is consistent.
 */
static inline void
pipe_vertex_buffer_reference(struct pipe_vertex_buffer *dst,
                             const struct pipe_vertex_buffer *src)
{
   pipe_vertex_buffer_unreference(dst);
   if (!src->is_user_buffer)
      pipe_resource_reference(&dst->buffer.resource, src->buffer.resource);
   memcpy(dst, src, sizeof(*src));
}
225848b8605Smrg
226b8e80941Smrgstatic inline void
227848b8605Smrgpipe_surface_reset(struct pipe_context *ctx, struct pipe_surface* ps,
228848b8605Smrg                   struct pipe_resource *pt, unsigned level, unsigned layer)
229848b8605Smrg{
230848b8605Smrg   pipe_resource_reference(&ps->texture, pt);
231848b8605Smrg   ps->format = pt->format;
232848b8605Smrg   ps->width = u_minify(pt->width0, level);
233848b8605Smrg   ps->height = u_minify(pt->height0, level);
234848b8605Smrg   ps->u.tex.level = level;
235848b8605Smrg   ps->u.tex.first_layer = ps->u.tex.last_layer = layer;
236848b8605Smrg   ps->context = ctx;
237848b8605Smrg}
238848b8605Smrg
239b8e80941Smrgstatic inline void
240848b8605Smrgpipe_surface_init(struct pipe_context *ctx, struct pipe_surface* ps,
241848b8605Smrg                  struct pipe_resource *pt, unsigned level, unsigned layer)
242848b8605Smrg{
243848b8605Smrg   ps->texture = 0;
244848b8605Smrg   pipe_reference_init(&ps->reference, 1);
245848b8605Smrg   pipe_surface_reset(ctx, ps, pt, level, layer);
246848b8605Smrg}
247848b8605Smrg
248848b8605Smrg/* Return true if the surfaces are equal. */
249b8e80941Smrgstatic inline boolean
250848b8605Smrgpipe_surface_equal(struct pipe_surface *s1, struct pipe_surface *s2)
251848b8605Smrg{
252848b8605Smrg   return s1->texture == s2->texture &&
253848b8605Smrg          s1->format == s2->format &&
254848b8605Smrg          (s1->texture->target != PIPE_BUFFER ||
255848b8605Smrg           (s1->u.buf.first_element == s2->u.buf.first_element &&
256848b8605Smrg            s1->u.buf.last_element == s2->u.buf.last_element)) &&
257848b8605Smrg          (s1->texture->target == PIPE_BUFFER ||
258848b8605Smrg           (s1->u.tex.level == s2->u.tex.level &&
259848b8605Smrg            s1->u.tex.first_layer == s2->u.tex.first_layer &&
260848b8605Smrg            s1->u.tex.last_layer == s2->u.tex.last_layer));
261848b8605Smrg}
262848b8605Smrg
263848b8605Smrg/*
264848b8605Smrg * Convenience wrappers for screen buffer functions.
265848b8605Smrg */
266848b8605Smrg
267848b8605Smrg
268848b8605Smrg/**
269848b8605Smrg * Create a new resource.
270848b8605Smrg * \param bind  bitmask of PIPE_BIND_x flags
271b8e80941Smrg * \param usage  a PIPE_USAGE_x value
272848b8605Smrg */
273b8e80941Smrgstatic inline struct pipe_resource *
274b8e80941Smrgpipe_buffer_create(struct pipe_screen *screen,
275b8e80941Smrg                   unsigned bind,
276b8e80941Smrg                   enum pipe_resource_usage usage,
277b8e80941Smrg                   unsigned size)
278848b8605Smrg{
279848b8605Smrg   struct pipe_resource buffer;
280848b8605Smrg   memset(&buffer, 0, sizeof buffer);
281848b8605Smrg   buffer.target = PIPE_BUFFER;
282848b8605Smrg   buffer.format = PIPE_FORMAT_R8_UNORM; /* want TYPELESS or similar */
283848b8605Smrg   buffer.bind = bind;
284848b8605Smrg   buffer.usage = usage;
285848b8605Smrg   buffer.flags = 0;
286848b8605Smrg   buffer.width0 = size;
287848b8605Smrg   buffer.height0 = 1;
288848b8605Smrg   buffer.depth0 = 1;
289848b8605Smrg   buffer.array_size = 1;
290848b8605Smrg   return screen->resource_create(screen, &buffer);
291848b8605Smrg}
292848b8605Smrg
293848b8605Smrg
294b8e80941Smrgstatic inline struct pipe_resource *
295b8e80941Smrgpipe_buffer_create_const0(struct pipe_screen *screen,
296b8e80941Smrg                          unsigned bind,
297b8e80941Smrg                          enum pipe_resource_usage usage,
298b8e80941Smrg                          unsigned size)
299b8e80941Smrg{
300b8e80941Smrg   struct pipe_resource buffer;
301b8e80941Smrg   memset(&buffer, 0, sizeof buffer);
302b8e80941Smrg   buffer.target = PIPE_BUFFER;
303b8e80941Smrg   buffer.format = PIPE_FORMAT_R8_UNORM;
304b8e80941Smrg   buffer.bind = bind;
305b8e80941Smrg   buffer.usage = usage;
306b8e80941Smrg   buffer.flags = screen->get_param(screen, PIPE_CAP_CONSTBUF0_FLAGS);
307b8e80941Smrg   buffer.width0 = size;
308b8e80941Smrg   buffer.height0 = 1;
309b8e80941Smrg   buffer.depth0 = 1;
310b8e80941Smrg   buffer.array_size = 1;
311b8e80941Smrg   return screen->resource_create(screen, &buffer);
312b8e80941Smrg}
313b8e80941Smrg
314b8e80941Smrg
315848b8605Smrg/**
316848b8605Smrg * Map a range of a resource.
317b8e80941Smrg * \param offset  start of region, in bytes
318b8e80941Smrg * \param length  size of region, in bytes
319848b8605Smrg * \param access  bitmask of PIPE_TRANSFER_x flags
320848b8605Smrg * \param transfer  returns a transfer object
321848b8605Smrg */
322b8e80941Smrgstatic inline void *
323848b8605Smrgpipe_buffer_map_range(struct pipe_context *pipe,
324b8e80941Smrg                      struct pipe_resource *buffer,
325b8e80941Smrg                      unsigned offset,
326b8e80941Smrg                      unsigned length,
327b8e80941Smrg                      unsigned access,
328b8e80941Smrg                      struct pipe_transfer **transfer)
329848b8605Smrg{
330848b8605Smrg   struct pipe_box box;
331848b8605Smrg   void *map;
332848b8605Smrg
333848b8605Smrg   assert(offset < buffer->width0);
334848b8605Smrg   assert(offset + length <= buffer->width0);
335848b8605Smrg   assert(length);
336848b8605Smrg
337848b8605Smrg   u_box_1d(offset, length, &box);
338848b8605Smrg
339848b8605Smrg   map = pipe->transfer_map(pipe, buffer, 0, access, &box, transfer);
340b8e80941Smrg   if (!map) {
341848b8605Smrg      return NULL;
342848b8605Smrg   }
343848b8605Smrg
344848b8605Smrg   return map;
345848b8605Smrg}
346848b8605Smrg
347848b8605Smrg
348848b8605Smrg/**
349848b8605Smrg * Map whole resource.
350848b8605Smrg * \param access  bitmask of PIPE_TRANSFER_x flags
351848b8605Smrg * \param transfer  returns a transfer object
352848b8605Smrg */
353b8e80941Smrgstatic inline void *
354848b8605Smrgpipe_buffer_map(struct pipe_context *pipe,
355848b8605Smrg                struct pipe_resource *buffer,
356848b8605Smrg                unsigned access,
357848b8605Smrg                struct pipe_transfer **transfer)
358848b8605Smrg{
359b8e80941Smrg   return pipe_buffer_map_range(pipe, buffer, 0, buffer->width0,
360b8e80941Smrg                                access, transfer);
361848b8605Smrg}
362848b8605Smrg
363848b8605Smrg
/**
 * Unmap a buffer previously mapped with pipe_buffer_map[_range]().
 * Thin wrapper over context->transfer_unmap().
 */
static inline void
pipe_buffer_unmap(struct pipe_context *pipe,
                  struct pipe_transfer *transfer)
{
   pipe->transfer_unmap(pipe, transfer);
}
370848b8605Smrg
/**
 * Flush an explicit sub-range of a mapped buffer.
 * \param offset  start of the region to flush, in bytes, relative to the
 *                start of the buffer (not the mapped range)
 * \param length  size of the region, in bytes
 */
static inline void
pipe_buffer_flush_mapped_range(struct pipe_context *pipe,
                               struct pipe_transfer *transfer,
                               unsigned offset,
                               unsigned length)
{
   struct pipe_box box;
   int transfer_offset;

   /* the requested range must lie within the mapped box */
   assert(length);
   assert(transfer->box.x <= (int) offset);
   assert((int) (offset + length) <= transfer->box.x + transfer->box.width);

   /* Match old screen->buffer_flush_mapped_range() behaviour, where
    * offset parameter is relative to the start of the buffer, not the
    * mapped range.
    */
   transfer_offset = offset - transfer->box.x;

   u_box_1d(transfer_offset, length, &box);

   pipe->transfer_flush_region(pipe, transfer, &box);
}
394848b8605Smrg
/**
 * Upload 'size' bytes from 'data' into 'buf' starting at byte 'offset'.
 */
static inline void
pipe_buffer_write(struct pipe_context *pipe,
                  struct pipe_resource *buf,
                  unsigned offset,
                  unsigned size,
                  const void *data)
{
   /* Don't set any other usage bits. Drivers should derive them. */
   pipe->buffer_subdata(pipe, buf, PIPE_TRANSFER_WRITE, offset, size, data);
}
405848b8605Smrg
406848b8605Smrg/**
407848b8605Smrg * Special case for writing non-overlapping ranges.
408848b8605Smrg *
409848b8605Smrg * We can avoid GPU/CPU synchronization when writing range that has never
410848b8605Smrg * been written before.
411848b8605Smrg */
412b8e80941Smrgstatic inline void
413848b8605Smrgpipe_buffer_write_nooverlap(struct pipe_context *pipe,
414848b8605Smrg                            struct pipe_resource *buf,
415848b8605Smrg                            unsigned offset, unsigned size,
416848b8605Smrg                            const void *data)
417848b8605Smrg{
418b8e80941Smrg   pipe->buffer_subdata(pipe, buf,
419b8e80941Smrg                        (PIPE_TRANSFER_WRITE |
420b8e80941Smrg                         PIPE_TRANSFER_UNSYNCHRONIZED),
421b8e80941Smrg                        offset, size, data);
422848b8605Smrg}
423848b8605Smrg
424848b8605Smrg
425848b8605Smrg/**
426848b8605Smrg * Create a new resource and immediately put data into it
427848b8605Smrg * \param bind  bitmask of PIPE_BIND_x flags
428848b8605Smrg * \param usage  bitmask of PIPE_USAGE_x flags
429848b8605Smrg */
430b8e80941Smrgstatic inline struct pipe_resource *
431848b8605Smrgpipe_buffer_create_with_data(struct pipe_context *pipe,
432848b8605Smrg                             unsigned bind,
433b8e80941Smrg                             enum pipe_resource_usage usage,
434848b8605Smrg                             unsigned size,
435848b8605Smrg                             const void *ptr)
436848b8605Smrg{
437848b8605Smrg   struct pipe_resource *res = pipe_buffer_create(pipe->screen,
438848b8605Smrg                                                  bind, usage, size);
439848b8605Smrg   pipe_buffer_write_nooverlap(pipe, res, 0, size, ptr);
440848b8605Smrg   return res;
441848b8605Smrg}
442848b8605Smrg
443b8e80941Smrgstatic inline void
444848b8605Smrgpipe_buffer_read(struct pipe_context *pipe,
445848b8605Smrg                 struct pipe_resource *buf,
446848b8605Smrg                 unsigned offset,
447848b8605Smrg                 unsigned size,
448848b8605Smrg                 void *data)
449848b8605Smrg{
450848b8605Smrg   struct pipe_transfer *src_transfer;
451848b8605Smrg   ubyte *map;
452848b8605Smrg
453848b8605Smrg   map = (ubyte *) pipe_buffer_map_range(pipe,
454b8e80941Smrg                                         buf,
455b8e80941Smrg                                         offset, size,
456b8e80941Smrg                                         PIPE_TRANSFER_READ,
457b8e80941Smrg                                         &src_transfer);
458848b8605Smrg   if (!map)
459848b8605Smrg      return;
460848b8605Smrg
461848b8605Smrg   memcpy(data, map, size);
462848b8605Smrg   pipe_buffer_unmap(pipe, src_transfer);
463848b8605Smrg}
464848b8605Smrg
465848b8605Smrg
466848b8605Smrg/**
467848b8605Smrg * Map a resource for reading/writing.
468848b8605Smrg * \param access  bitmask of PIPE_TRANSFER_x flags
469848b8605Smrg */
470b8e80941Smrgstatic inline void *
471848b8605Smrgpipe_transfer_map(struct pipe_context *context,
472848b8605Smrg                  struct pipe_resource *resource,
473848b8605Smrg                  unsigned level, unsigned layer,
474848b8605Smrg                  unsigned access,
475848b8605Smrg                  unsigned x, unsigned y,
476848b8605Smrg                  unsigned w, unsigned h,
477848b8605Smrg                  struct pipe_transfer **transfer)
478848b8605Smrg{
479848b8605Smrg   struct pipe_box box;
480848b8605Smrg   u_box_2d_zslice(x, y, layer, w, h, &box);
481848b8605Smrg   return context->transfer_map(context,
482848b8605Smrg                                resource,
483848b8605Smrg                                level,
484848b8605Smrg                                access,
485848b8605Smrg                                &box, transfer);
486848b8605Smrg}
487848b8605Smrg
488848b8605Smrg
489848b8605Smrg/**
490848b8605Smrg * Map a 3D (texture) resource for reading/writing.
491848b8605Smrg * \param access  bitmask of PIPE_TRANSFER_x flags
492848b8605Smrg */
493b8e80941Smrgstatic inline void *
494848b8605Smrgpipe_transfer_map_3d(struct pipe_context *context,
495848b8605Smrg                     struct pipe_resource *resource,
496848b8605Smrg                     unsigned level,
497848b8605Smrg                     unsigned access,
498848b8605Smrg                     unsigned x, unsigned y, unsigned z,
499848b8605Smrg                     unsigned w, unsigned h, unsigned d,
500848b8605Smrg                     struct pipe_transfer **transfer)
501848b8605Smrg{
502848b8605Smrg   struct pipe_box box;
503848b8605Smrg   u_box_3d(x, y, z, w, h, d, &box);
504848b8605Smrg   return context->transfer_map(context,
505848b8605Smrg                                resource,
506848b8605Smrg                                level,
507848b8605Smrg                                access,
508848b8605Smrg                                &box, transfer);
509848b8605Smrg}
510848b8605Smrg
/**
 * Unmap a resource mapped with pipe_transfer_map[_3d]().
 * Thin wrapper over context->transfer_unmap().
 */
static inline void
pipe_transfer_unmap(struct pipe_context *context,
                    struct pipe_transfer *transfer)
{
   context->transfer_unmap(context, transfer);
}
517848b8605Smrg
518b8e80941Smrgstatic inline void
519b8e80941Smrgpipe_set_constant_buffer(struct pipe_context *pipe,
520b8e80941Smrg                         enum pipe_shader_type shader, uint index,
521848b8605Smrg                         struct pipe_resource *buf)
522848b8605Smrg{
523848b8605Smrg   if (buf) {
524848b8605Smrg      struct pipe_constant_buffer cb;
525848b8605Smrg      cb.buffer = buf;
526848b8605Smrg      cb.buffer_offset = 0;
527848b8605Smrg      cb.buffer_size = buf->width0;
528848b8605Smrg      cb.user_buffer = NULL;
529848b8605Smrg      pipe->set_constant_buffer(pipe, shader, index, &cb);
530848b8605Smrg   } else {
531848b8605Smrg      pipe->set_constant_buffer(pipe, shader, index, NULL);
532848b8605Smrg   }
533848b8605Smrg}
534848b8605Smrg
535848b8605Smrg
536848b8605Smrg/**
537848b8605Smrg * Get the polygon offset enable/disable flag for the given polygon fill mode.
538848b8605Smrg * \param fill_mode  one of PIPE_POLYGON_MODE_POINT/LINE/FILL
539848b8605Smrg */
540b8e80941Smrgstatic inline boolean
541848b8605Smrgutil_get_offset(const struct pipe_rasterizer_state *templ,
542848b8605Smrg                unsigned fill_mode)
543848b8605Smrg{
544848b8605Smrg   switch(fill_mode) {
545848b8605Smrg   case PIPE_POLYGON_MODE_POINT:
546848b8605Smrg      return templ->offset_point;
547848b8605Smrg   case PIPE_POLYGON_MODE_LINE:
548848b8605Smrg      return templ->offset_line;
549848b8605Smrg   case PIPE_POLYGON_MODE_FILL:
550848b8605Smrg      return templ->offset_tri;
551848b8605Smrg   default:
552848b8605Smrg      assert(0);
553848b8605Smrg      return FALSE;
554848b8605Smrg   }
555848b8605Smrg}
556848b8605Smrg
557b8e80941Smrgstatic inline float
558848b8605Smrgutil_get_min_point_size(const struct pipe_rasterizer_state *state)
559848b8605Smrg{
560848b8605Smrg   /* The point size should be clamped to this value at the rasterizer stage.
561848b8605Smrg    */
562848b8605Smrg   return !state->point_quad_rasterization &&
563848b8605Smrg          !state->point_smooth &&
564848b8605Smrg          !state->multisample ? 1.0f : 0.0f;
565848b8605Smrg}
566848b8605Smrg
/**
 * Zero-initialize the member of pipe_query_result that corresponds to
 * the given PIPE_QUERY_x type.
 */
static inline void
util_query_clear_result(union pipe_query_result *result, unsigned type)
{
   switch (type) {
   /* boolean-valued queries */
   case PIPE_QUERY_OCCLUSION_PREDICATE:
   case PIPE_QUERY_OCCLUSION_PREDICATE_CONSERVATIVE:
   case PIPE_QUERY_SO_OVERFLOW_PREDICATE:
   case PIPE_QUERY_SO_OVERFLOW_ANY_PREDICATE:
   case PIPE_QUERY_GPU_FINISHED:
      result->b = FALSE;
      break;
   /* 64-bit-counter queries */
   case PIPE_QUERY_OCCLUSION_COUNTER:
   case PIPE_QUERY_TIMESTAMP:
   case PIPE_QUERY_TIME_ELAPSED:
   case PIPE_QUERY_PRIMITIVES_GENERATED:
   case PIPE_QUERY_PRIMITIVES_EMITTED:
      result->u64 = 0;
      break;
   /* struct-valued queries */
   case PIPE_QUERY_SO_STATISTICS:
      memset(&result->so_statistics, 0, sizeof(result->so_statistics));
      break;
   case PIPE_QUERY_TIMESTAMP_DISJOINT:
      memset(&result->timestamp_disjoint, 0, sizeof(result->timestamp_disjoint));
      break;
   case PIPE_QUERY_PIPELINE_STATISTICS:
      memset(&result->pipeline_statistics, 0, sizeof(result->pipeline_statistics));
      break;
   default:
      /* unknown/driver-specific query: clear the whole union */
      memset(result, 0, sizeof(*result));
   }
}
598848b8605Smrg
/** Convert PIPE_TEXTURE_x to TGSI_TEXTURE_x */
static inline enum tgsi_texture_type
util_pipe_tex_to_tgsi_tex(enum pipe_texture_target pipe_tex_target,
                          unsigned nr_samples)
{
   switch (pipe_tex_target) {
   case PIPE_BUFFER:
      return TGSI_TEXTURE_BUFFER;

   case PIPE_TEXTURE_1D:
      assert(nr_samples <= 1);
      return TGSI_TEXTURE_1D;

   /* only 2D and 2D-array targets have MSAA variants */
   case PIPE_TEXTURE_2D:
      return nr_samples > 1 ? TGSI_TEXTURE_2D_MSAA : TGSI_TEXTURE_2D;

   case PIPE_TEXTURE_RECT:
      assert(nr_samples <= 1);
      return TGSI_TEXTURE_RECT;

   case PIPE_TEXTURE_3D:
      assert(nr_samples <= 1);
      return TGSI_TEXTURE_3D;

   case PIPE_TEXTURE_CUBE:
      assert(nr_samples <= 1);
      return TGSI_TEXTURE_CUBE;

   case PIPE_TEXTURE_1D_ARRAY:
      assert(nr_samples <= 1);
      return TGSI_TEXTURE_1D_ARRAY;

   case PIPE_TEXTURE_2D_ARRAY:
      return nr_samples > 1 ? TGSI_TEXTURE_2D_ARRAY_MSAA :
                              TGSI_TEXTURE_2D_ARRAY;

   case PIPE_TEXTURE_CUBE_ARRAY:
      return TGSI_TEXTURE_CUBE_ARRAY;

   default:
      assert(0 && "unexpected texture target");
      return TGSI_TEXTURE_UNKNOWN;
   }
}
643848b8605Smrg
644848b8605Smrg
645b8e80941Smrgstatic inline void
646848b8605Smrgutil_copy_constant_buffer(struct pipe_constant_buffer *dst,
647848b8605Smrg                          const struct pipe_constant_buffer *src)
648848b8605Smrg{
649848b8605Smrg   if (src) {
650848b8605Smrg      pipe_resource_reference(&dst->buffer, src->buffer);
651848b8605Smrg      dst->buffer_offset = src->buffer_offset;
652848b8605Smrg      dst->buffer_size = src->buffer_size;
653848b8605Smrg      dst->user_buffer = src->user_buffer;
654848b8605Smrg   }
655848b8605Smrg   else {
656848b8605Smrg      pipe_resource_reference(&dst->buffer, NULL);
657848b8605Smrg      dst->buffer_offset = 0;
658848b8605Smrg      dst->buffer_size = 0;
659848b8605Smrg      dst->user_buffer = NULL;
660848b8605Smrg   }
661848b8605Smrg}
662848b8605Smrg
663b8e80941Smrgstatic inline void
664b8e80941Smrgutil_copy_image_view(struct pipe_image_view *dst,
665b8e80941Smrg                     const struct pipe_image_view *src)
666b8e80941Smrg{
667b8e80941Smrg   if (src) {
668b8e80941Smrg      pipe_resource_reference(&dst->resource, src->resource);
669b8e80941Smrg      dst->format = src->format;
670b8e80941Smrg      dst->access = src->access;
671b8e80941Smrg      dst->shader_access = src->shader_access;
672b8e80941Smrg      dst->u = src->u;
673b8e80941Smrg   } else {
674b8e80941Smrg      pipe_resource_reference(&dst->resource, NULL);
675b8e80941Smrg      dst->format = PIPE_FORMAT_NONE;
676b8e80941Smrg      dst->access = 0;
677b8e80941Smrg      dst->shader_access = 0;
678b8e80941Smrg      memset(&dst->u, 0, sizeof(dst->u));
679b8e80941Smrg   }
680b8e80941Smrg}
681b8e80941Smrg
/**
 * Return the index of the last layer (or depth slice, for 3D textures)
 * of the given mip level.  Non-layered targets return 0.
 */
static inline unsigned
util_max_layer(const struct pipe_resource *r, unsigned level)
{
   switch (r->target) {
   case PIPE_TEXTURE_3D:
      /* 3D textures minify in depth per mip level */
      return u_minify(r->depth0, level) - 1;
   case PIPE_TEXTURE_CUBE:
      assert(r->array_size == 6);
      /* fall-through */
   case PIPE_TEXTURE_1D_ARRAY:
   case PIPE_TEXTURE_2D_ARRAY:
   case PIPE_TEXTURE_CUBE_ARRAY:
      /* array size is constant across mip levels */
      return r->array_size - 1;
   default:
      return 0;
   }
}
699848b8605Smrg
/**
 * Return the number of layers (or depth slices) of the given mip level.
 */
static inline unsigned
util_num_layers(const struct pipe_resource *r, unsigned level)
{
   return util_max_layer(r, level) + 1;
}
705b8e80941Smrg
706b8e80941Smrgstatic inline bool
707b8e80941Smrgutil_texrange_covers_whole_level(const struct pipe_resource *tex,
708b8e80941Smrg                                 unsigned level, unsigned x, unsigned y,
709b8e80941Smrg                                 unsigned z, unsigned width,
710b8e80941Smrg                                 unsigned height, unsigned depth)
711b8e80941Smrg{
712b8e80941Smrg   return x == 0 && y == 0 && z == 0 &&
713b8e80941Smrg          width == u_minify(tex->width0, level) &&
714b8e80941Smrg          height == u_minify(tex->height0, level) &&
715b8e80941Smrg          depth == util_num_layers(tex, level);
716b8e80941Smrg}
717b8e80941Smrg
718848b8605Smrg#ifdef __cplusplus
719848b8605Smrg}
720848b8605Smrg#endif
721848b8605Smrg
722848b8605Smrg#endif /* U_INLINES_H */
723