1/*
2 * Copyright 2010 Christoph Bumiller
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 */
22
23#include "pipe/p_defines.h"
24#include "util/u_framebuffer.h"
25#include "util/u_upload_mgr.h"
26
27#include "nvc0/nvc0_context.h"
28#include "nvc0/nvc0_screen.h"
29#include "nvc0/nvc0_resource.h"
30
31static void
32nvc0_flush(struct pipe_context *pipe,
33           struct pipe_fence_handle **fence,
34           unsigned flags)
35{
36   struct nvc0_context *nvc0 = nvc0_context(pipe);
37   struct nouveau_screen *screen = &nvc0->screen->base;
38
39   if (fence)
40      nouveau_fence_ref(screen->fence.current, (struct nouveau_fence **)fence);
41
42   PUSH_KICK(nvc0->base.pushbuf); /* fencing handled in kick_notify */
43
44   nouveau_context_update_frame_stats(&nvc0->base);
45}
46
47static void
48nvc0_texture_barrier(struct pipe_context *pipe, unsigned flags)
49{
50   struct nouveau_pushbuf *push = nvc0_context(pipe)->base.pushbuf;
51
52   IMMED_NVC0(push, NVC0_3D(SERIALIZE), 0);
53   IMMED_NVC0(push, NVC0_3D(TEX_CACHE_CTL), 0);
54}
55
56static void
57nvc0_memory_barrier(struct pipe_context *pipe, unsigned flags)
58{
59   struct nvc0_context *nvc0 = nvc0_context(pipe);
60   struct nouveau_pushbuf *push = nvc0->base.pushbuf;
61   int i, s;
62
63   if (!(flags & ~PIPE_BARRIER_UPDATE))
64      return;
65
66   if (flags & PIPE_BARRIER_MAPPED_BUFFER) {
67      for (i = 0; i < nvc0->num_vtxbufs; ++i) {
68         if (!nvc0->vtxbuf[i].buffer.resource && !nvc0->vtxbuf[i].is_user_buffer)
69            continue;
70         if (nvc0->vtxbuf[i].buffer.resource->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
71            nvc0->base.vbo_dirty = true;
72      }
73
74      for (s = 0; s < 5 && !nvc0->cb_dirty; ++s) {
75         uint32_t valid = nvc0->constbuf_valid[s];
76
77         while (valid && !nvc0->cb_dirty) {
78            const unsigned i = ffs(valid) - 1;
79            struct pipe_resource *res;
80
81            valid &= ~(1 << i);
82            if (nvc0->constbuf[s][i].user)
83               continue;
84
85            res = nvc0->constbuf[s][i].u.buf;
86            if (!res)
87               continue;
88
89            if (res->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
90               nvc0->cb_dirty = true;
91         }
92      }
93   } else {
94      /* Pretty much any writing by shaders needs a serialize after
95       * it. Especially when moving between 3d and compute pipelines, but even
96       * without that.
97       */
98      IMMED_NVC0(push, NVC0_3D(SERIALIZE), 0);
99   }
100
101   /* If we're going to texture from a buffer/image written by a shader, we
102    * must flush the texture cache.
103    */
104   if (flags & PIPE_BARRIER_TEXTURE)
105      IMMED_NVC0(push, NVC0_3D(TEX_CACHE_CTL), 0);
106
107   if (flags & PIPE_BARRIER_CONSTANT_BUFFER)
108      nvc0->cb_dirty = true;
109   if (flags & (PIPE_BARRIER_VERTEX_BUFFER | PIPE_BARRIER_INDEX_BUFFER))
110      nvc0->base.vbo_dirty = true;
111}
112
/* pipe_context::emit_string_marker implementation.
 *
 * Emits the caller's debug string into the command stream as the data of a
 * GRAPH_NOP method, so it shows up in pushbuf dumps/traces; the GPU itself
 * ignores it.
 */
static void
nvc0_emit_string_marker(struct pipe_context *pipe, const char *str, int len)
{
   struct nouveau_pushbuf *push = nvc0_context(pipe)->base.pushbuf;
   int string_words = len / 4;   /* whole 32-bit words covered by str */
   int data_words;               /* total data words to emit */

   if (len <= 0)
      return;
   /* One method header carries at most NV04_PFIFO_MAX_PACKET_LEN data
    * words; anything beyond that is silently truncated. */
   string_words = MIN2(string_words, NV04_PFIFO_MAX_PACKET_LEN);
   if (string_words == NV04_PFIFO_MAX_PACKET_LEN)
      data_words = string_words;
   else
      /* Reserve one extra word for the 1-3 trailing bytes, if any. */
      data_words = string_words + !!(len & 3);
   BEGIN_NIC0(push, SUBC_3D(NV04_GRAPH_NOP), data_words);
   if (string_words)
      PUSH_DATAp(push, str, string_words);
   if (string_words != data_words) {
      /* Pack the remaining 1-3 bytes into a zero-padded final word. */
      int data = 0;
      memcpy(&data, &str[string_words * 4], len & 3);
      PUSH_DATA (push, data);
   }
}
136
/* Drop every reference the context holds on pipe state objects and delete
 * its buffer validation contexts.  Called from nvc0_destroy() after the
 * final flush, so nothing will be revalidated afterwards.
 */
static void
nvc0_context_unreference_resources(struct nvc0_context *nvc0)
{
   unsigned s, i;

   nouveau_bufctx_del(&nvc0->bufctx_3d);
   nouveau_bufctx_del(&nvc0->bufctx);
   nouveau_bufctx_del(&nvc0->bufctx_cp);

   util_unreference_framebuffer_state(&nvc0->framebuffer);

   for (i = 0; i < nvc0->num_vtxbufs; ++i)
      pipe_vertex_buffer_unreference(&nvc0->vtxbuf[i]);

   /* Per-stage bindings; the loops below cover 6 shader stages. */
   for (s = 0; s < 6; ++s) {
      for (i = 0; i < nvc0->num_textures[s]; ++i)
         pipe_sampler_view_reference(&nvc0->textures[s][i], NULL);

      /* User constbufs hold a CPU pointer, not a resource reference. */
      for (i = 0; i < NVC0_MAX_PIPE_CONSTBUFS; ++i)
         if (!nvc0->constbuf[s][i].user)
            pipe_resource_reference(&nvc0->constbuf[s][i].u.buf, NULL);

      for (i = 0; i < NVC0_MAX_BUFFERS; ++i)
         pipe_resource_reference(&nvc0->buffers[s][i].buffer, NULL);

      for (i = 0; i < NVC0_MAX_IMAGES; ++i) {
         pipe_resource_reference(&nvc0->images[s][i].resource, NULL);
         /* GM107+ additionally tracks a sampler view per bound image. */
         if (nvc0->screen->base.class_3d >= GM107_3D_CLASS)
            pipe_sampler_view_reference(&nvc0->images_tic[s][i], NULL);
      }
   }

   for (s = 0; s < 2; ++s) {
      for (i = 0; i < NVC0_MAX_SURFACE_SLOTS; ++i)
         pipe_surface_reference(&nvc0->surfaces[s][i], NULL);
   }

   for (i = 0; i < nvc0->num_tfbbufs; ++i)
      pipe_so_target_reference(&nvc0->tfbbuf[i], NULL);

   /* Resources made resident via compute global bindings. */
   for (i = 0; i < nvc0->global_residents.size / sizeof(struct pipe_resource *);
        ++i) {
      struct pipe_resource **res = util_dynarray_element(
         &nvc0->global_residents, struct pipe_resource *, i);
      pipe_resource_reference(res, NULL);
   }
   util_dynarray_fini(&nvc0->global_residents);

   if (nvc0->tcp_empty)
      nvc0->base.pipe.delete_tcs_state(&nvc0->base.pipe, nvc0->tcp_empty);
}
188
/* pipe_context::destroy implementation.  Flushes pending work, detaches
 * this context from the shared pushbuf and releases everything it owns.
 */
static void
nvc0_destroy(struct pipe_context *pipe)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   /* If we are the screen's current context, stash our state so a later
    * context can inherit it; the TFB target is about to be unreferenced,
    * so it must not be carried over. */
   if (nvc0->screen->cur_ctx == nvc0) {
      nvc0->screen->cur_ctx = NULL;
      nvc0->screen->save_state = nvc0->state;
      nvc0->screen->save_state.tfb = NULL;
   }

   if (nvc0->base.pipe.stream_uploader)
      u_upload_destroy(nvc0->base.pipe.stream_uploader);

   /* Unset bufctx, we don't want to revalidate any resources after the flush.
    * Other contexts will always set their bufctx again on action calls.
    */
   nouveau_pushbuf_bufctx(nvc0->base.pushbuf, NULL);
   nouveau_pushbuf_kick(nvc0->base.pushbuf, nvc0->base.pushbuf->channel);

   nvc0_context_unreference_resources(nvc0);
   nvc0_blitctx_destroy(nvc0);

   /* Free the bookkeeping for bindless-resident textures and images. */
   list_for_each_entry_safe(struct nvc0_resident, pos, &nvc0->tex_head, list) {
      list_del(&pos->list);
      free(pos);
   }

   list_for_each_entry_safe(struct nvc0_resident, pos, &nvc0->img_head, list) {
      list_del(&pos->list);
      free(pos);
   }

   nouveau_context_destroy(&nvc0->base);
}
224
225void
226nvc0_default_kick_notify(struct nouveau_pushbuf *push)
227{
228   struct nvc0_screen *screen = push->user_priv;
229
230   if (screen) {
231      nouveau_fence_next(&screen->base);
232      nouveau_fence_update(&screen->base, true);
233      if (screen->cur_ctx)
234         screen->cur_ctx->state.flushed = true;
235      NOUVEAU_DRV_STAT(&screen->base, pushbuf_count, 1);
236   }
237}
238
239static int
240nvc0_invalidate_resource_storage(struct nouveau_context *ctx,
241                                 struct pipe_resource *res,
242                                 int ref)
243{
244   struct nvc0_context *nvc0 = nvc0_context(&ctx->pipe);
245   unsigned s, i;
246
247   if (res->bind & PIPE_BIND_RENDER_TARGET) {
248      for (i = 0; i < nvc0->framebuffer.nr_cbufs; ++i) {
249         if (nvc0->framebuffer.cbufs[i] &&
250             nvc0->framebuffer.cbufs[i]->texture == res) {
251            nvc0->dirty_3d |= NVC0_NEW_3D_FRAMEBUFFER;
252            nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_FB);
253            if (!--ref)
254               return ref;
255         }
256      }
257   }
258   if (res->bind & PIPE_BIND_DEPTH_STENCIL) {
259      if (nvc0->framebuffer.zsbuf &&
260          nvc0->framebuffer.zsbuf->texture == res) {
261         nvc0->dirty_3d |= NVC0_NEW_3D_FRAMEBUFFER;
262         nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_FB);
263         if (!--ref)
264            return ref;
265      }
266   }
267
268   if (res->target == PIPE_BUFFER) {
269      for (i = 0; i < nvc0->num_vtxbufs; ++i) {
270         if (nvc0->vtxbuf[i].buffer.resource == res) {
271            nvc0->dirty_3d |= NVC0_NEW_3D_ARRAYS;
272            nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_VTX);
273            if (!--ref)
274               return ref;
275         }
276      }
277
278      for (s = 0; s < 6; ++s) {
279         for (i = 0; i < nvc0->num_textures[s]; ++i) {
280            if (nvc0->textures[s][i] &&
281                nvc0->textures[s][i]->texture == res) {
282               nvc0->textures_dirty[s] |= 1 << i;
283               if (unlikely(s == 5)) {
284                  nvc0->dirty_cp |= NVC0_NEW_CP_TEXTURES;
285                  nouveau_bufctx_reset(nvc0->bufctx_cp, NVC0_BIND_CP_TEX(i));
286               } else {
287                  nvc0->dirty_3d |= NVC0_NEW_3D_TEXTURES;
288                  nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_TEX(s, i));
289               }
290               if (!--ref)
291                  return ref;
292            }
293         }
294      }
295
296      for (s = 0; s < 6; ++s) {
297         for (i = 0; i < NVC0_MAX_PIPE_CONSTBUFS; ++i) {
298            if (!(nvc0->constbuf_valid[s] & (1 << i)))
299               continue;
300            if (!nvc0->constbuf[s][i].user &&
301                nvc0->constbuf[s][i].u.buf == res) {
302               nvc0->constbuf_dirty[s] |= 1 << i;
303               if (unlikely(s == 5)) {
304                  nvc0->dirty_cp |= NVC0_NEW_CP_CONSTBUF;
305                  nouveau_bufctx_reset(nvc0->bufctx_cp, NVC0_BIND_CP_CB(i));
306               } else {
307                  nvc0->dirty_3d |= NVC0_NEW_3D_CONSTBUF;
308                  nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_CB(s, i));
309               }
310               if (!--ref)
311                  return ref;
312            }
313         }
314      }
315
316      for (s = 0; s < 6; ++s) {
317         for (i = 0; i < NVC0_MAX_BUFFERS; ++i) {
318            if (nvc0->buffers[s][i].buffer == res) {
319               nvc0->buffers_dirty[s] |= 1 << i;
320               if (unlikely(s == 5)) {
321                  nvc0->dirty_cp |= NVC0_NEW_CP_BUFFERS;
322                  nouveau_bufctx_reset(nvc0->bufctx_cp, NVC0_BIND_CP_BUF);
323               } else {
324                  nvc0->dirty_3d |= NVC0_NEW_3D_BUFFERS;
325                  nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_BUF);
326               }
327               if (!--ref)
328                  return ref;
329            }
330         }
331      }
332
333      for (s = 0; s < 6; ++s) {
334         for (i = 0; i < NVC0_MAX_IMAGES; ++i) {
335            if (nvc0->images[s][i].resource == res) {
336               nvc0->images_dirty[s] |= 1 << i;
337               if (unlikely(s == 5)) {
338                  nvc0->dirty_cp |= NVC0_NEW_CP_SURFACES;
339                  nouveau_bufctx_reset(nvc0->bufctx_cp, NVC0_BIND_CP_SUF);
340               } else {
341                  nvc0->dirty_3d |= NVC0_NEW_3D_SURFACES;
342                  nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_SUF);
343               }
344            }
345            if (!--ref)
346               return ref;
347         }
348      }
349   }
350
351   return ref;
352}
353
354static void
355nvc0_context_get_sample_position(struct pipe_context *, unsigned, unsigned,
356                                 float *);
357
/* pipe_screen::context_create implementation for nvc0.
 *
 * Allocates and wires up a new context: buffer validation contexts,
 * uploaders, the pipe_context vtable, per-generation function tables,
 * permanently resident buffers, and initial dirty state.  Returns NULL on
 * failure (everything allocated so far is released via out_err).
 */
struct pipe_context *
nvc0_create(struct pipe_screen *pscreen, void *priv, unsigned ctxflags)
{
   struct nvc0_screen *screen = nvc0_screen(pscreen);
   struct nvc0_context *nvc0;
   struct pipe_context *pipe;
   int ret;
   uint32_t flags;

   nvc0 = CALLOC_STRUCT(nvc0_context);
   if (!nvc0)
      return NULL;
   pipe = &nvc0->base.pipe;

   if (!nvc0_blitctx_create(nvc0))
      goto out_err;

   /* The pushbuf and DRM client are shared with the screen (and thus with
    * any other contexts on it). */
   nvc0->base.pushbuf = screen->base.pushbuf;
   nvc0->base.client = screen->base.client;

   /* Validation lists: misc (fence), 3D bindings, compute bindings. */
   ret = nouveau_bufctx_new(screen->base.client, 2, &nvc0->bufctx);
   if (!ret)
      ret = nouveau_bufctx_new(screen->base.client, NVC0_BIND_3D_COUNT,
                               &nvc0->bufctx_3d);
   if (!ret)
      ret = nouveau_bufctx_new(screen->base.client, NVC0_BIND_CP_COUNT,
                               &nvc0->bufctx_cp);
   if (ret)
      goto out_err;

   nvc0->screen = screen;
   nvc0->base.screen = &screen->base;

   pipe->screen = pscreen;
   pipe->priv = priv;
   pipe->stream_uploader = u_upload_create_default(pipe);
   if (!pipe->stream_uploader)
      goto out_err;
   /* Constants go through the same uploader as streamed vertex data. */
   pipe->const_uploader = pipe->stream_uploader;

   pipe->destroy = nvc0_destroy;

   pipe->draw_vbo = nvc0_draw_vbo;
   pipe->clear = nvc0_clear;
   /* Kepler+ uses a different compute dispatch path than Fermi. */
   pipe->launch_grid = (nvc0->screen->base.class_3d >= NVE4_3D_CLASS) ?
      nve4_launch_grid : nvc0_launch_grid;

   pipe->flush = nvc0_flush;
   pipe->texture_barrier = nvc0_texture_barrier;
   pipe->memory_barrier = nvc0_memory_barrier;
   pipe->get_sample_position = nvc0_context_get_sample_position;
   pipe->emit_string_marker = nvc0_emit_string_marker;

   nouveau_context_init(&nvc0->base);
   nvc0_init_query_functions(nvc0);
   nvc0_init_surface_functions(nvc0);
   nvc0_init_state_functions(nvc0);
   nvc0_init_transfer_functions(nvc0);
   nvc0_init_resource_functions(pipe);
   if (nvc0->screen->base.class_3d >= NVE4_3D_CLASS)
      nvc0_init_bindless_functions(pipe);

   /* Lists of bindless-resident textures/images. */
   list_inithead(&nvc0->tex_head);
   list_inithead(&nvc0->img_head);

   nvc0->base.invalidate_resource_storage = nvc0_invalidate_resource_storage;

   pipe->create_video_codec = nvc0_create_decoder;
   pipe->create_video_buffer = nvc0_video_buffer_create;

   /* shader builtin library is per-screen, but we need a context for m2mf */
   nvc0_program_library_upload(nvc0);
   nvc0_program_init_tcp_empty(nvc0);
   if (!nvc0->tcp_empty)
      goto out_err;
   /* set the empty tctl prog on next draw in case one is never set */
   nvc0->dirty_3d |= NVC0_NEW_3D_TCTLPROG;

   /* Do not bind the COMPUTE driver constbuf at screen initialization because
    * CBs are aliased between 3D and COMPUTE, but make sure it will be bound if
    * a grid is launched later. */
   nvc0->dirty_cp |= NVC0_NEW_CP_DRIVERCONST;

   /* now that there are no more opportunities for errors, set the current
    * context if there isn't already one.
    */
   if (!screen->cur_ctx) {
      nvc0->state = screen->save_state;
      screen->cur_ctx = nvc0;
      nouveau_pushbuf_bufctx(screen->base.pushbuf, nvc0->bufctx);
   }
   screen->base.pushbuf->kick_notify = nvc0_default_kick_notify;

   /* add permanently resident buffers to bufctxts */

   flags = NV_VRAM_DOMAIN(&screen->base) | NOUVEAU_BO_RD;

   BCTX_REFN_bo(nvc0->bufctx_3d, 3D_SCREEN, flags, screen->uniform_bo);
   BCTX_REFN_bo(nvc0->bufctx_3d, 3D_SCREEN, flags, screen->txc);
   if (screen->compute) {
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->uniform_bo);
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->txc);
   }

   flags = NV_VRAM_DOMAIN(&screen->base) | NOUVEAU_BO_RDWR;

   if (screen->poly_cache)
      BCTX_REFN_bo(nvc0->bufctx_3d, 3D_SCREEN, flags, screen->poly_cache);
   if (screen->compute)
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->tls);

   flags = NOUVEAU_BO_GART | NOUVEAU_BO_WR;

   BCTX_REFN_bo(nvc0->bufctx_3d, 3D_SCREEN, flags, screen->fence.bo);
   BCTX_REFN_bo(nvc0->bufctx, FENCE, flags, screen->fence.bo);
   if (screen->compute)
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->fence.bo);

   nvc0->base.scratch.bo_size = 2 << 20;

   /* ~0 marks every bindless texture handle slot as unused. */
   memset(nvc0->tex_handles, ~0, sizeof(nvc0->tex_handles));

   util_dynarray_init(&nvc0->global_residents, NULL);

   // Make sure that the first TSC entry has SRGB conversion bit set, since we
   // use it as a fallback on Fermi for TXF, and on Kepler+ generations for
   // FBFETCH handling (which also uses TXF).
   //
   // NOTE: Preliminary testing suggests that this isn't necessary at all at
   // least on GM20x (untested on Kepler). However this is ~free, so no reason
   // not to do it.
   if (!screen->tsc.entries[0])
      nvc0_upload_tsc0(nvc0);

   // On Fermi, mark samplers dirty so that the proper binding can happen
   if (screen->base.class_3d < NVE4_3D_CLASS) {
      for (int s = 0; s < 6; s++)
         nvc0->samplers_dirty[s] = 1;
      nvc0->dirty_3d |= NVC0_NEW_3D_SAMPLERS;
      nvc0->dirty_cp |= NVC0_NEW_CP_SAMPLERS;
   }

   return pipe;

out_err:
   /* Partial-construction cleanup; each member is NULL until created, so
    * checks below are safe in every failure order. */
   if (nvc0) {
      if (pipe->stream_uploader)
         u_upload_destroy(pipe->stream_uploader);
      if (nvc0->bufctx_3d)
         nouveau_bufctx_del(&nvc0->bufctx_3d);
      if (nvc0->bufctx_cp)
         nouveau_bufctx_del(&nvc0->bufctx_cp);
      if (nvc0->bufctx)
         nouveau_bufctx_del(&nvc0->bufctx);
      FREE(nvc0->blit);
      FREE(nvc0);
   }
   return NULL;
}
517
518void
519nvc0_bufctx_fence(struct nvc0_context *nvc0, struct nouveau_bufctx *bufctx,
520                  bool on_flush)
521{
522   struct nouveau_list *list = on_flush ? &bufctx->current : &bufctx->pending;
523   struct nouveau_list *it;
524   NOUVEAU_DRV_STAT_IFD(unsigned count = 0);
525
526   for (it = list->next; it != list; it = it->next) {
527      struct nouveau_bufref *ref = (struct nouveau_bufref *)it;
528      struct nv04_resource *res = ref->priv;
529      if (res)
530         nvc0_resource_validate(res, (unsigned)ref->priv_data);
531      NOUVEAU_DRV_STAT_IFD(count++);
532   }
533   NOUVEAU_DRV_STAT(&nvc0->screen->base, resource_validate_count, count);
534}
535
/* Return the table of sub-pixel sample locations for the given MSAA count.
 * Each entry is an (x, y) pair in 1/16ths of a pixel.  Sample counts 0 and
 * 1 share the single-sample table; an unsupported count returns NULL
 * (locations undefined).
 */
const void *
nvc0_get_sample_locations(unsigned sample_count)
{
   static const uint8_t ms1[1][2] = { { 0x8, 0x8 } };
   static const uint8_t ms2[2][2] = {
      { 0x4, 0x4 }, { 0xc, 0xc } }; /* surface coords (0,0), (1,0) */
   static const uint8_t ms4[4][2] = {
      { 0x6, 0x2 }, { 0xe, 0x6 },   /* (0,0), (1,0) */
      { 0x2, 0xa }, { 0xa, 0xe } }; /* (0,1), (1,1) */
   static const uint8_t ms8[8][2] = {
      { 0x1, 0x7 }, { 0x5, 0x3 },   /* (0,0), (1,0) */
      { 0x3, 0xd }, { 0x7, 0xb },   /* (0,1), (1,1) */
      { 0x9, 0x5 }, { 0xf, 0x1 },   /* (2,0), (3,0) */
      { 0xb, 0xf }, { 0xd, 0x9 } }; /* (2,1), (3,1) */
   /* NOTE: the hardware also supports alternative layouts for MS2 and MS8;
    * they are not used here. */

   if (sample_count <= 1)
      return ms1;
   if (sample_count == 2)
      return ms2;
   if (sample_count == 4)
      return ms4;
   if (sample_count == 8)
      return ms8;

   assert(0);
   return NULL; /* bad sample count -> undefined locations */
}
573
/* pipe_context::get_sample_position implementation: convert the 1/16th-
 * pixel integer sample location into a float position in [0, 1).  Leaves
 * *xy untouched for an unsupported sample count.
 */
static void
nvc0_context_get_sample_position(struct pipe_context *pipe,
                                 unsigned sample_count, unsigned sample_index,
                                 float *xy)
{
   const uint8_t (*loc)[2] = nvc0_get_sample_locations(sample_count);

   if (!loc)
      return;

   xy[0] = loc[sample_index][0] * 0.0625f;
   xy[1] = loc[sample_index][1] * 0.0625f;
}
588