1/*
2 * Copyright 2019 Google LLC
3 * SPDX-License-Identifier: MIT
4 *
5 * based in part on anv and radv which are:
6 * Copyright © 2015 Intel Corporation
7 * Copyright © 2016 Red Hat.
8 * Copyright © 2016 Bas Nieuwenhuizen
9 */
10
11#include "vn_command_buffer.h"
12
13#include "venus-protocol/vn_protocol_driver_command_buffer.h"
14#include "venus-protocol/vn_protocol_driver_command_pool.h"
15
16#include "vn_device.h"
17#include "vn_image.h"
18#include "vn_render_pass.h"
19
20static bool
21vn_image_memory_barrier_has_present_src(
22   const VkImageMemoryBarrier *img_barriers, uint32_t count)
23{
24   for (uint32_t i = 0; i < count; i++) {
25      if (img_barriers[i].oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR ||
26          img_barriers[i].newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
27         return true;
28   }
29   return false;
30}
31
32static VkImageMemoryBarrier *
33vn_cmd_get_image_memory_barriers(struct vn_command_buffer *cmd,
34                                 uint32_t count)
35{
36   /* avoid shrinking in case of non efficient reallocation implementation */
37   if (count > cmd->builder.image_barrier_count) {
38      size_t size = sizeof(VkImageMemoryBarrier) * count;
39      VkImageMemoryBarrier *img_barriers =
40         vk_realloc(&cmd->allocator, cmd->builder.image_barriers, size,
41                    VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
42      if (!img_barriers)
43         return NULL;
44
45      /* update upon successful reallocation */
46      cmd->builder.image_barrier_count = count;
47      cmd->builder.image_barriers = img_barriers;
48   }
49
50   return cmd->builder.image_barriers;
51}
52
53/* About VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, the spec says
54 *
55 *    VK_IMAGE_LAYOUT_PRESENT_SRC_KHR must only be used for presenting a
56 *    presentable image for display. A swapchain's image must be transitioned
57 *    to this layout before calling vkQueuePresentKHR, and must be
58 *    transitioned away from this layout after calling vkAcquireNextImageKHR.
59 *
60 * That allows us to treat the layout internally as
61 *
62 *  - VK_IMAGE_LAYOUT_GENERAL
63 *  - VK_QUEUE_FAMILY_FOREIGN_EXT has the ownership, if the image is not a
64 *    prime blit source
65 *
66 * while staying performant.
67 *
68 * About queue family ownerships, the spec says
69 *
70 *    A queue family can take ownership of an image subresource or buffer
71 *    range of a resource created with VK_SHARING_MODE_EXCLUSIVE, without an
72 *    ownership transfer, in the same way as for a resource that was just
73 *    created; however, taking ownership in this way has the effect that the
74 *    contents of the image subresource or buffer range are undefined.
75 *
76 * It is unclear if that is applicable to external resources, which supposedly
77 * have the same semantics
78 *
79 *    Binding a resource to a memory object shared between multiple Vulkan
80 *    instances or other APIs does not change the ownership of the underlying
81 *    memory. The first entity to access the resource implicitly acquires
82 *    ownership. Accessing a resource backed by memory that is owned by a
83 *    particular instance or API has the same semantics as accessing a
84 *    VK_SHARING_MODE_EXCLUSIVE resource[...]
85 *
86 * We should get the spec clarified, or get rid of this completely broken code
87 * (TODO).
88 *
89 * Assuming a queue family can acquire the ownership implicitly when the
90 * contents are not needed, we do not need to worry about
91 * VK_IMAGE_LAYOUT_UNDEFINED.  We can use VK_IMAGE_LAYOUT_PRESENT_SRC_KHR as
92 * the sole signal to trigger queue family ownership transfers.
93 *
94 * When the image has VK_SHARING_MODE_CONCURRENT, we can, and are required to,
95 * use VK_QUEUE_FAMILY_IGNORED as the other queue family whether we are
96 * transitioning to or from VK_IMAGE_LAYOUT_PRESENT_SRC_KHR.
97 *
98 * When the image has VK_SHARING_MODE_EXCLUSIVE, we have to work out who the
99 * other queue family is.  It is easier when the barrier does not also define
100 * a queue family ownership transfer (i.e., srcQueueFamilyIndex equals to
101 * dstQueueFamilyIndex).  The other queue family must be the queue family the
102 * command buffer was allocated for.
103 *
104 * When the barrier also defines a queue family ownership transfer, it is
105 * submitted both to the source queue family to release the ownership and to
106 * the destination queue family to acquire the ownership.  Depending on
107 * whether the barrier transitions to or from VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
108 * we are only interested in the ownership release or acquire respectively and
109 * should be careful to avoid double releases/acquires.
110 *
111 * I haven't followed all transition paths mentally to verify the correctness.
112 * I likely also violate some VUs or miss some cases below.  They are
113 * hopefully fixable and are left as TODOs.
114 */
/* Rewrite *src_barrier into *out_barrier, translating
 * VK_IMAGE_LAYOUT_PRESENT_SRC_KHR into VN_PRESENT_SRC_INTERNAL_LAYOUT and,
 * where needed, into a queue family ownership transfer involving
 * VK_QUEUE_FAMILY_FOREIGN_EXT.  See the long comment above for the
 * rationale.  src_barrier and out_barrier may alias (see
 * vn_cmd_transfer_present_src_images).
 */
static void
vn_cmd_fix_image_memory_barrier(const struct vn_command_buffer *cmd,
                                const VkImageMemoryBarrier *src_barrier,
                                VkImageMemoryBarrier *out_barrier)
{
   const struct vn_image *img = vn_image_from_handle(src_barrier->image);

   /* copy first; all fixes below are applied to the copy */
   *out_barrier = *src_barrier;

   /* no fix needed */
   if (out_barrier->oldLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR &&
       out_barrier->newLayout != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
      return;

   /* a present src layout implies a wsi image */
   assert(img->is_wsi);

   /* nothing to translate when the internal layout is the present layout */
   if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
      return;

   /* prime blit src or no layout transition: rewrite the layouts only, no
    * ownership transfer is added
    */
   if (img->is_prime_blit_src ||
       out_barrier->oldLayout == out_barrier->newLayout) {
      if (out_barrier->oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
         out_barrier->oldLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;
      if (out_barrier->newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
         out_barrier->newLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;
      return;
   }

   if (out_barrier->oldLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
      /* transitioning away from the present layout: acquire the ownership
       * from the foreign queue family
       */
      out_barrier->oldLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

      /* no availability operation needed */
      out_barrier->srcAccessMask = 0;

      const uint32_t dst_qfi = out_barrier->dstQueueFamilyIndex;
      if (img->sharing_mode == VK_SHARING_MODE_CONCURRENT) {
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
      } else if (dst_qfi == out_barrier->srcQueueFamilyIndex ||
                 dst_qfi == cmd->queue_family_index) {
         /* no ownership transfer in the original barrier: acquire for the
          * queue family the command buffer was allocated for
          */
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
         out_barrier->dstQueueFamilyIndex = cmd->queue_family_index;
      } else {
         /* The barrier also defines a queue family ownership transfer, and
          * this is the one that gets submitted to the source queue family to
          * release the ownership.  Skip both the transfer and the transition.
          */
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->newLayout = out_barrier->oldLayout;
      }
   } else {
      /* transitioning to the present layout: release the ownership to the
       * foreign queue family
       */
      out_barrier->newLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

      /* no visibility operation needed */
      out_barrier->dstAccessMask = 0;

      const uint32_t src_qfi = out_barrier->srcQueueFamilyIndex;
      if (img->sharing_mode == VK_SHARING_MODE_CONCURRENT) {
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
      } else if (src_qfi == out_barrier->dstQueueFamilyIndex ||
                 src_qfi == cmd->queue_family_index) {
         /* no ownership transfer in the original barrier: release from the
          * queue family the command buffer was allocated for
          */
         out_barrier->srcQueueFamilyIndex = cmd->queue_family_index;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT;
      } else {
         /* The barrier also defines a queue family ownership transfer, and
          * this is the one that gets submitted to the destination queue
          * family to acquire the ownership.  Skip both the transfer and the
          * transition.
          */
         out_barrier->srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
         out_barrier->oldLayout = out_barrier->newLayout;
      }
   }
}
193
/* Fix the image memory barriers for vkCmdWaitEvents.  Returns either
 * src_barriers unchanged or a fixed copy owned by the builder.  On return,
 * the first (count - *out_transfer_count) barriers are plain barriers while
 * the last *out_transfer_count barriers are queue family ownership
 * transfers moved to the tail for the caller to handle separately.
 */
static const VkImageMemoryBarrier *
vn_cmd_wait_events_fix_image_memory_barriers(
   struct vn_command_buffer *cmd,
   const VkImageMemoryBarrier *src_barriers,
   uint32_t count,
   uint32_t *out_transfer_count)
{
   *out_transfer_count = 0;

   /* no fixing needed inside a render pass or without present src layouts */
   if (cmd->builder.render_pass ||
       !vn_image_memory_barrier_has_present_src(src_barriers, count))
      return src_barriers;

   /* 2x count: the first half collects the fixed barriers and the second
    * half temporarily collects the ownership transfers
    */
   VkImageMemoryBarrier *img_barriers =
      vn_cmd_get_image_memory_barriers(cmd, count * 2);
   if (!img_barriers) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return src_barriers;
   }

   /* vkCmdWaitEvents cannot be used for queue family ownership transfers.
    * Nothing appears to be said about the submission order of image memory
    * barriers in the same array.  We take the liberty to move queue family
    * ownership transfers to the tail.
    */
   VkImageMemoryBarrier *transfer_barriers = img_barriers + count;
   uint32_t transfer_count = 0;
   uint32_t valid_count = 0;
   for (uint32_t i = 0; i < count; i++) {
      VkImageMemoryBarrier *img_barrier = &img_barriers[valid_count];
      vn_cmd_fix_image_memory_barrier(cmd, &src_barriers[i], img_barrier);

      /* no ownership transfers are generated when the internal layout is
       * the present layout itself
       */
      if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
         valid_count++;
         continue;
      }

      if (img_barrier->srcQueueFamilyIndex ==
          img_barrier->dstQueueFamilyIndex) {
         valid_count++;
      } else {
         /* stash ownership transfers in the scratch second half */
         transfer_barriers[transfer_count++] = *img_barrier;
      }
   }

   assert(valid_count + transfer_count == count);
   if (transfer_count) {
      /* copy back to the tail */
      memcpy(&img_barriers[valid_count], transfer_barriers,
             sizeof(*transfer_barriers) * transfer_count);
      *out_transfer_count = transfer_count;
   }

   return img_barriers;
}
249
250static const VkImageMemoryBarrier *
251vn_cmd_pipeline_barrier_fix_image_memory_barriers(
252   struct vn_command_buffer *cmd,
253   const VkImageMemoryBarrier *src_barriers,
254   uint32_t count)
255{
256   if (cmd->builder.render_pass ||
257       !vn_image_memory_barrier_has_present_src(src_barriers, count))
258      return src_barriers;
259
260   VkImageMemoryBarrier *img_barriers =
261      vn_cmd_get_image_memory_barriers(cmd, count);
262   if (!img_barriers) {
263      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
264      return src_barriers;
265   }
266
267   for (uint32_t i = 0; i < count; i++) {
268      vn_cmd_fix_image_memory_barrier(cmd, &src_barriers[i],
269                                      &img_barriers[i]);
270   }
271
272   return img_barriers;
273}
274
275static void
276vn_cmd_encode_memory_barriers(struct vn_command_buffer *cmd,
277                              VkPipelineStageFlags src_stage_mask,
278                              VkPipelineStageFlags dst_stage_mask,
279                              uint32_t buf_barrier_count,
280                              const VkBufferMemoryBarrier *buf_barriers,
281                              uint32_t img_barrier_count,
282                              const VkImageMemoryBarrier *img_barriers)
283{
284   const VkCommandBuffer cmd_handle = vn_command_buffer_to_handle(cmd);
285
286   const size_t cmd_size = vn_sizeof_vkCmdPipelineBarrier(
287      cmd_handle, src_stage_mask, dst_stage_mask, 0, 0, NULL,
288      buf_barrier_count, buf_barriers, img_barrier_count, img_barriers);
289   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size)) {
290      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
291      return;
292   }
293
294   vn_encode_vkCmdPipelineBarrier(
295      &cmd->cs, 0, cmd_handle, src_stage_mask, dst_stage_mask, 0, 0, NULL,
296      buf_barrier_count, buf_barriers, img_barrier_count, img_barriers);
297}
298
299static void
300vn_present_src_attachment_to_image_memory_barrier(
301   const struct vn_image *img,
302   const struct vn_present_src_attachment *att,
303   VkImageMemoryBarrier *img_barrier)
304{
305   *img_barrier = (VkImageMemoryBarrier)
306   {
307      .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
308      .srcAccessMask = att->src_access_mask,
309      .dstAccessMask = att->dst_access_mask,
310      .oldLayout = att->acquire ? VK_IMAGE_LAYOUT_PRESENT_SRC_KHR
311                                : VN_PRESENT_SRC_INTERNAL_LAYOUT,
312      .newLayout = att->acquire ? VN_PRESENT_SRC_INTERNAL_LAYOUT
313                                : VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
314      .image = vn_image_to_handle((struct vn_image *)img),
315      .subresourceRange = {
316         .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
317         .levelCount = 1,
318         .layerCount = 1,
319      },
320   };
321}
322
/* Record the layout transitions / ownership transfers for the given present
 * src attachments as a single pipeline barrier.  images and atts are
 * parallel arrays of count elements.
 */
static void
vn_cmd_transfer_present_src_images(
   struct vn_command_buffer *cmd,
   const struct vn_image *const *images,
   const struct vn_present_src_attachment *atts,
   uint32_t count)
{
   VkImageMemoryBarrier *img_barriers =
      vn_cmd_get_image_memory_barriers(cmd, count);
   if (!img_barriers) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return;
   }

   /* accumulate the stage masks over all attachments */
   VkPipelineStageFlags src_stage_mask = 0;
   VkPipelineStageFlags dst_stage_mask = 0;
   for (uint32_t i = 0; i < count; i++) {
      src_stage_mask |= atts[i].src_stage_mask;
      dst_stage_mask |= atts[i].dst_stage_mask;

      vn_present_src_attachment_to_image_memory_barrier(images[i], &atts[i],
                                                        &img_barriers[i]);
      /* fix in place; the src and out barriers alias here */
      vn_cmd_fix_image_memory_barrier(cmd, &img_barriers[i],
                                      &img_barriers[i]);
   }

   /* nothing to encode when the internal layout is the present layout */
   if (VN_PRESENT_SRC_INTERNAL_LAYOUT == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
      return;

   vn_cmd_encode_memory_barriers(cmd, src_stage_mask, dst_stage_mask, 0, NULL,
                                 count, img_barriers);
}
355
/* Track the render pass state in the builder and, for render passes with
 * present src attachments, record the acquire transitions.  The collected
 * image list is stored in cmd->builder.present_src_images and freed in
 * vn_cmd_end_render_pass.
 */
static void
vn_cmd_begin_render_pass(struct vn_command_buffer *cmd,
                         const struct vn_render_pass *pass,
                         const VkRenderPassBeginInfo *begin_info)
{
   cmd->builder.render_pass = pass;
   cmd->builder.framebuffer = fb;

   /* present src handling is skipped for secondary command buffers */
   if (!pass->present_src_count ||
       cmd->level == VK_COMMAND_BUFFER_LEVEL_SECONDARY)
      return;

   /* find fb attachments */
   const VkImageView *views;
   ASSERTED uint32_t view_count;
   if (fb->image_view_count) {
      views = fb->image_views;
      view_count = fb->image_view_count;
   } else {
      /* imageless framebuffer: the views come from the begin info chain */
      const VkRenderPassAttachmentBeginInfo *imageless_info =
         vk_find_struct_const(begin_info->pNext,
                              RENDER_PASS_ATTACHMENT_BEGIN_INFO);
      assert(imageless_info);
      views = imageless_info->pAttachments;
      view_count = imageless_info->attachmentCount;
   }

   const struct vn_image **images =
      vk_alloc(&cmd->allocator, sizeof(*images) * pass->present_src_count,
               VN_DEFAULT_ALIGN, VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!images) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return;
   }

   /* collect the images of the present src attachments */
   for (uint32_t i = 0; i < pass->present_src_count; i++) {
      const uint32_t index = pass->present_src_attachments[i].index;
      assert(index < view_count);
      images[i] = vn_image_view_from_handle(views[index])->image;
   }

   /* the acquire attachments lead the present_src_attachments array */
   if (pass->acquire_count) {
      vn_cmd_transfer_present_src_images(
         cmd, images, pass->present_src_attachments, pass->acquire_count);
   }

   /* freed in vn_cmd_end_render_pass */
   cmd->builder.present_src_images = images;
}
405
/* Clear the render pass state and record the release transitions for the
 * present src attachments collected in vn_cmd_begin_render_pass.  Frees
 * cmd->builder.present_src_images.
 */
static void
vn_cmd_end_render_pass(struct vn_command_buffer *cmd)
{
   const struct vn_render_pass *pass = cmd->builder.render_pass;

   cmd->builder.render_pass = NULL;
   cmd->builder.framebuffer = NULL;

   if (!pass->present_src_count || !cmd->builder.present_src_images)
      return;

   const struct vn_image **images = cmd->builder.present_src_images;
   cmd->builder.present_src_images = NULL;

   /* the release attachments follow the acquire attachments */
   if (pass->release_count) {
      vn_cmd_transfer_present_src_images(
         cmd, images + pass->acquire_count,
         pass->present_src_attachments + pass->acquire_count,
         pass->release_count);
   }

   vk_free(&cmd->allocator, images);
}
429
430/* command pool commands */
431
432VkResult
433vn_CreateCommandPool(VkDevice device,
434                     const VkCommandPoolCreateInfo *pCreateInfo,
435                     const VkAllocationCallbacks *pAllocator,
436                     VkCommandPool *pCommandPool)
437{
438   struct vn_device *dev = vn_device_from_handle(device);
439   const VkAllocationCallbacks *alloc =
440      pAllocator ? pAllocator : &dev->base.base.alloc;
441
442   struct vn_command_pool *pool =
443      vk_zalloc(alloc, sizeof(*pool), VN_DEFAULT_ALIGN,
444                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
445   if (!pool)
446      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
447
448   vn_object_base_init(&pool->base, VK_OBJECT_TYPE_COMMAND_POOL, &dev->base);
449
450   pool->allocator = *alloc;
451   pool->queue_family_index = pCreateInfo->queueFamilyIndex;
452   list_inithead(&pool->command_buffers);
453
454   VkCommandPool pool_handle = vn_command_pool_to_handle(pool);
455   vn_async_vkCreateCommandPool(dev->instance, device, pCreateInfo, NULL,
456                                &pool_handle);
457
458   *pCommandPool = pool_handle;
459
460   return VK_SUCCESS;
461}
462
463void
464vn_DestroyCommandPool(VkDevice device,
465                      VkCommandPool commandPool,
466                      const VkAllocationCallbacks *pAllocator)
467{
468   struct vn_device *dev = vn_device_from_handle(device);
469   struct vn_command_pool *pool = vn_command_pool_from_handle(commandPool);
470   const VkAllocationCallbacks *alloc;
471
472   if (!pool)
473      return;
474
475   alloc = pAllocator ? pAllocator : &pool->allocator;
476
477   /* We must emit vkDestroyCommandPool before freeing the command buffers in
478    * pool->command_buffers.  Otherwise, another thread might reuse their
479    * object ids while they still refer to the command buffers in the
480    * renderer.
481    */
482   vn_async_vkDestroyCommandPool(dev->instance, device, commandPool, NULL);
483
484   list_for_each_entry_safe(struct vn_command_buffer, cmd,
485                            &pool->command_buffers, head) {
486      vn_cs_encoder_fini(&cmd->cs);
487      vn_object_base_fini(&cmd->base);
488      vk_free(alloc, cmd);
489   }
490
491   vn_object_base_fini(&pool->base);
492   vk_free(alloc, pool);
493}
494
495VkResult
496vn_ResetCommandPool(VkDevice device,
497                    VkCommandPool commandPool,
498                    VkCommandPoolResetFlags flags)
499{
500   struct vn_device *dev = vn_device_from_handle(device);
501   struct vn_command_pool *pool = vn_command_pool_from_handle(commandPool);
502
503   list_for_each_entry_safe(struct vn_command_buffer, cmd,
504                            &pool->command_buffers, head) {
505      vn_cs_encoder_reset(&cmd->cs);
506      cmd->state = VN_COMMAND_BUFFER_STATE_INITIAL;
507   }
508
509   vn_async_vkResetCommandPool(dev->instance, device, commandPool, flags);
510
511   return VK_SUCCESS;
512}
513
514void
515vn_TrimCommandPool(VkDevice device,
516                   VkCommandPool commandPool,
517                   VkCommandPoolTrimFlags flags)
518{
519   struct vn_device *dev = vn_device_from_handle(device);
520
521   vn_async_vkTrimCommandPool(dev->instance, device, commandPool, flags);
522}
523
524/* command buffer commands */
525
/* Allocate commandBufferCount command buffers from the pool, then mirror the
 * allocation in the renderer.  On OOM, every command buffer created so far
 * is destroyed and the output array is zeroed.
 */
VkResult
vn_AllocateCommandBuffers(VkDevice device,
                          const VkCommandBufferAllocateInfo *pAllocateInfo,
                          VkCommandBuffer *pCommandBuffers)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_command_pool *pool =
      vn_command_pool_from_handle(pAllocateInfo->commandPool);
   const VkAllocationCallbacks *alloc = &pool->allocator;

   for (uint32_t i = 0; i < pAllocateInfo->commandBufferCount; i++) {
      struct vn_command_buffer *cmd =
         vk_zalloc(alloc, sizeof(*cmd), VN_DEFAULT_ALIGN,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
      if (!cmd) {
         /* unwind: destroy the command buffers created in earlier
          * iterations and zero the whole output array before failing
          */
         for (uint32_t j = 0; j < i; j++) {
            cmd = vn_command_buffer_from_handle(pCommandBuffers[j]);
            vn_cs_encoder_fini(&cmd->cs);
            list_del(&cmd->head);
            vn_object_base_fini(&cmd->base);
            vk_free(alloc, cmd);
         }
         memset(pCommandBuffers, 0,
                sizeof(*pCommandBuffers) * pAllocateInfo->commandBufferCount);
         return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);
      }

      vn_object_base_init(&cmd->base, VK_OBJECT_TYPE_COMMAND_BUFFER,
                          &dev->base);
      cmd->device = dev;
      cmd->allocator = pool->allocator;
      cmd->level = pAllocateInfo->level;
      cmd->queue_family_index = pool->queue_family_index;

      list_addtail(&cmd->head, &pool->command_buffers);

      cmd->state = VN_COMMAND_BUFFER_STATE_INITIAL;
      /* 16 * 1024 is the size hint passed to the indirect encoder */
      vn_cs_encoder_init_indirect(&cmd->cs, dev->instance, 16 * 1024);

      VkCommandBuffer cmd_handle = vn_command_buffer_to_handle(cmd);
      pCommandBuffers[i] = cmd_handle;
   }

   /* create the renderer-side command buffers asynchronously */
   vn_async_vkAllocateCommandBuffers(dev->instance, device, pAllocateInfo,
                                     pCommandBuffers);

   return VK_SUCCESS;
}
574
575void
576vn_FreeCommandBuffers(VkDevice device,
577                      VkCommandPool commandPool,
578                      uint32_t commandBufferCount,
579                      const VkCommandBuffer *pCommandBuffers)
580{
581   struct vn_device *dev = vn_device_from_handle(device);
582   struct vn_command_pool *pool = vn_command_pool_from_handle(commandPool);
583   const VkAllocationCallbacks *alloc = &pool->allocator;
584
585   vn_async_vkFreeCommandBuffers(dev->instance, device, commandPool,
586                                 commandBufferCount, pCommandBuffers);
587
588   for (uint32_t i = 0; i < commandBufferCount; i++) {
589      struct vn_command_buffer *cmd =
590         vn_command_buffer_from_handle(pCommandBuffers[i]);
591
592      if (!cmd)
593         continue;
594
595      if (cmd->builder.image_barriers)
596         vk_free(alloc, cmd->builder.image_barriers);
597
598      vn_cs_encoder_fini(&cmd->cs);
599      list_del(&cmd->head);
600
601      vn_object_base_fini(&cmd->base);
602      vk_free(alloc, cmd);
603   }
604}
605
606VkResult
607vn_ResetCommandBuffer(VkCommandBuffer commandBuffer,
608                      VkCommandBufferResetFlags flags)
609{
610   struct vn_command_buffer *cmd =
611      vn_command_buffer_from_handle(commandBuffer);
612
613   vn_cs_encoder_reset(&cmd->cs);
614   cmd->state = VN_COMMAND_BUFFER_STATE_INITIAL;
615
616   vn_async_vkResetCommandBuffer(cmd->device->instance, commandBuffer, flags);
617
618   return VK_SUCCESS;
619}
620
/* Begin recording: encode vkBeginCommandBuffer into the command stream and,
 * for a secondary command buffer continuing a render pass, set up the
 * builder's render pass state from the inheritance info.
 */
VkResult
vn_BeginCommandBuffer(VkCommandBuffer commandBuffer,
                      const VkCommandBufferBeginInfo *pBeginInfo)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   struct vn_instance *instance = cmd->device->instance;
   size_t cmd_size;

   /* vkBeginCommandBuffer implicitly resets */
   vn_cs_encoder_reset(&cmd->cs);

   /* drop pInheritanceInfo for primary command buffers before encoding;
    * NOTE(review): presumably because it is ignored for primaries — confirm
    * against the encoder's handling of the pointer
    */
   VkCommandBufferBeginInfo local_begin_info;
   if (pBeginInfo->pInheritanceInfo &&
       cmd->level == VK_COMMAND_BUFFER_LEVEL_PRIMARY) {
      local_begin_info = *pBeginInfo;
      local_begin_info.pInheritanceInfo = NULL;
      pBeginInfo = &local_begin_info;
   }

   cmd_size = vn_sizeof_vkBeginCommandBuffer(commandBuffer, pBeginInfo);
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
   }

   vn_encode_vkBeginCommandBuffer(&cmd->cs, 0, commandBuffer, pBeginInfo);

   cmd->state = VN_COMMAND_BUFFER_STATE_RECORDING;

   /* a secondary command buffer that continues a render pass needs the
    * builder render pass state initialized from the inheritance info
    */
   if (cmd->level == VK_COMMAND_BUFFER_LEVEL_SECONDARY &&
       (pBeginInfo->flags &
        VK_COMMAND_BUFFER_USAGE_RENDER_PASS_CONTINUE_BIT)) {
      const VkCommandBufferInheritanceInfo *inheritance_info =
         pBeginInfo->pInheritanceInfo;
      vn_cmd_begin_render_pass(
         cmd, vn_render_pass_from_handle(inheritance_info->renderPass),
         vn_framebuffer_from_handle(inheritance_info->framebuffer), NULL);
   }

   return VK_SUCCESS;
}
662
/* Commit the recorded command stream and submit it to the renderer ring.
 * The encoder is reset afterwards; on encode or submit failure the command
 * buffer is moved to the invalid state.
 */
static VkResult
vn_cmd_submit(struct vn_command_buffer *cmd)
{
   struct vn_instance *instance = cmd->device->instance;

   /* only a command buffer in the recording state can be submitted */
   if (cmd->state != VN_COMMAND_BUFFER_STATE_RECORDING)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   vn_cs_encoder_commit(&cmd->cs);
   if (vn_cs_encoder_get_fatal(&cmd->cs)) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      vn_cs_encoder_reset(&cmd->cs);
      return VK_ERROR_OUT_OF_HOST_MEMORY;
   }

   /* wait for the current buffer's roundtrip before the ring submission */
   vn_instance_wait_roundtrip(instance, cmd->cs.current_buffer_roundtrip);
   VkResult result = vn_instance_ring_submit(instance, &cmd->cs);
   if (result != VK_SUCCESS) {
      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
      return result;
   }

   vn_cs_encoder_reset(&cmd->cs);

   return VK_SUCCESS;
}
689
690VkResult
691vn_EndCommandBuffer(VkCommandBuffer commandBuffer)
692{
693   struct vn_command_buffer *cmd =
694      vn_command_buffer_from_handle(commandBuffer);
695   struct vn_instance *instance = cmd->device->instance;
696   size_t cmd_size;
697
698   cmd_size = vn_sizeof_vkEndCommandBuffer(commandBuffer);
699   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size)) {
700      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
701      return vn_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
702   }
703
704   vn_encode_vkEndCommandBuffer(&cmd->cs, 0, commandBuffer);
705
706   VkResult result = vn_cmd_submit(cmd);
707   if (result != VK_SUCCESS) {
708      cmd->state = VN_COMMAND_BUFFER_STATE_INVALID;
709      return vn_error(instance, result);
710   }
711
712   cmd->state = VN_COMMAND_BUFFER_STATE_EXECUTABLE;
713
714   return VK_SUCCESS;
715}
716
717void
718vn_CmdBindPipeline(VkCommandBuffer commandBuffer,
719                   VkPipelineBindPoint pipelineBindPoint,
720                   VkPipeline pipeline)
721{
722   struct vn_command_buffer *cmd =
723      vn_command_buffer_from_handle(commandBuffer);
724   size_t cmd_size;
725
726   cmd_size =
727      vn_sizeof_vkCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline);
728   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
729      return;
730
731   vn_encode_vkCmdBindPipeline(&cmd->cs, 0, commandBuffer, pipelineBindPoint,
732                               pipeline);
733}
734
735void
736vn_CmdSetViewport(VkCommandBuffer commandBuffer,
737                  uint32_t firstViewport,
738                  uint32_t viewportCount,
739                  const VkViewport *pViewports)
740{
741   struct vn_command_buffer *cmd =
742      vn_command_buffer_from_handle(commandBuffer);
743   size_t cmd_size;
744
745   cmd_size = vn_sizeof_vkCmdSetViewport(commandBuffer, firstViewport,
746                                         viewportCount, pViewports);
747   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
748      return;
749
750   vn_encode_vkCmdSetViewport(&cmd->cs, 0, commandBuffer, firstViewport,
751                              viewportCount, pViewports);
752}
753
754void
755vn_CmdSetScissor(VkCommandBuffer commandBuffer,
756                 uint32_t firstScissor,
757                 uint32_t scissorCount,
758                 const VkRect2D *pScissors)
759{
760   struct vn_command_buffer *cmd =
761      vn_command_buffer_from_handle(commandBuffer);
762   size_t cmd_size;
763
764   cmd_size = vn_sizeof_vkCmdSetScissor(commandBuffer, firstScissor,
765                                        scissorCount, pScissors);
766   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
767      return;
768
769   vn_encode_vkCmdSetScissor(&cmd->cs, 0, commandBuffer, firstScissor,
770                             scissorCount, pScissors);
771}
772
773void
774vn_CmdSetLineWidth(VkCommandBuffer commandBuffer, float lineWidth)
775{
776   struct vn_command_buffer *cmd =
777      vn_command_buffer_from_handle(commandBuffer);
778   size_t cmd_size;
779
780   cmd_size = vn_sizeof_vkCmdSetLineWidth(commandBuffer, lineWidth);
781   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
782      return;
783
784   vn_encode_vkCmdSetLineWidth(&cmd->cs, 0, commandBuffer, lineWidth);
785}
786
787void
788vn_CmdSetDepthBias(VkCommandBuffer commandBuffer,
789                   float depthBiasConstantFactor,
790                   float depthBiasClamp,
791                   float depthBiasSlopeFactor)
792{
793   struct vn_command_buffer *cmd =
794      vn_command_buffer_from_handle(commandBuffer);
795   size_t cmd_size;
796
797   cmd_size =
798      vn_sizeof_vkCmdSetDepthBias(commandBuffer, depthBiasConstantFactor,
799                                  depthBiasClamp, depthBiasSlopeFactor);
800   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
801      return;
802
803   vn_encode_vkCmdSetDepthBias(&cmd->cs, 0, commandBuffer,
804                               depthBiasConstantFactor, depthBiasClamp,
805                               depthBiasSlopeFactor);
806}
807
808void
809vn_CmdSetBlendConstants(VkCommandBuffer commandBuffer,
810                        const float blendConstants[4])
811{
812   struct vn_command_buffer *cmd =
813      vn_command_buffer_from_handle(commandBuffer);
814   size_t cmd_size;
815
816   cmd_size = vn_sizeof_vkCmdSetBlendConstants(commandBuffer, blendConstants);
817   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
818      return;
819
820   vn_encode_vkCmdSetBlendConstants(&cmd->cs, 0, commandBuffer,
821                                    blendConstants);
822}
823
824void
825vn_CmdSetDepthBounds(VkCommandBuffer commandBuffer,
826                     float minDepthBounds,
827                     float maxDepthBounds)
828{
829   struct vn_command_buffer *cmd =
830      vn_command_buffer_from_handle(commandBuffer);
831   size_t cmd_size;
832
833   cmd_size = vn_sizeof_vkCmdSetDepthBounds(commandBuffer, minDepthBounds,
834                                            maxDepthBounds);
835   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
836      return;
837
838   vn_encode_vkCmdSetDepthBounds(&cmd->cs, 0, commandBuffer, minDepthBounds,
839                                 maxDepthBounds);
840}
841
842void
843vn_CmdSetStencilCompareMask(VkCommandBuffer commandBuffer,
844                            VkStencilFaceFlags faceMask,
845                            uint32_t compareMask)
846{
847   struct vn_command_buffer *cmd =
848      vn_command_buffer_from_handle(commandBuffer);
849   size_t cmd_size;
850
851   cmd_size = vn_sizeof_vkCmdSetStencilCompareMask(commandBuffer, faceMask,
852                                                   compareMask);
853   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
854      return;
855
856   vn_encode_vkCmdSetStencilCompareMask(&cmd->cs, 0, commandBuffer, faceMask,
857                                        compareMask);
858}
859
860void
861vn_CmdSetStencilWriteMask(VkCommandBuffer commandBuffer,
862                          VkStencilFaceFlags faceMask,
863                          uint32_t writeMask)
864{
865   struct vn_command_buffer *cmd =
866      vn_command_buffer_from_handle(commandBuffer);
867   size_t cmd_size;
868
869   cmd_size =
870      vn_sizeof_vkCmdSetStencilWriteMask(commandBuffer, faceMask, writeMask);
871   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
872      return;
873
874   vn_encode_vkCmdSetStencilWriteMask(&cmd->cs, 0, commandBuffer, faceMask,
875                                      writeMask);
876}
877
878void
879vn_CmdSetStencilReference(VkCommandBuffer commandBuffer,
880                          VkStencilFaceFlags faceMask,
881                          uint32_t reference)
882{
883   struct vn_command_buffer *cmd =
884      vn_command_buffer_from_handle(commandBuffer);
885   size_t cmd_size;
886
887   cmd_size =
888      vn_sizeof_vkCmdSetStencilReference(commandBuffer, faceMask, reference);
889   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
890      return;
891
892   vn_encode_vkCmdSetStencilReference(&cmd->cs, 0, commandBuffer, faceMask,
893                                      reference);
894}
895
896void
897vn_CmdBindDescriptorSets(VkCommandBuffer commandBuffer,
898                         VkPipelineBindPoint pipelineBindPoint,
899                         VkPipelineLayout layout,
900                         uint32_t firstSet,
901                         uint32_t descriptorSetCount,
902                         const VkDescriptorSet *pDescriptorSets,
903                         uint32_t dynamicOffsetCount,
904                         const uint32_t *pDynamicOffsets)
905{
906   struct vn_command_buffer *cmd =
907      vn_command_buffer_from_handle(commandBuffer);
908   size_t cmd_size;
909
910   cmd_size = vn_sizeof_vkCmdBindDescriptorSets(
911      commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount,
912      pDescriptorSets, dynamicOffsetCount, pDynamicOffsets);
913   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
914      return;
915
916   vn_encode_vkCmdBindDescriptorSets(&cmd->cs, 0, commandBuffer,
917                                     pipelineBindPoint, layout, firstSet,
918                                     descriptorSetCount, pDescriptorSets,
919                                     dynamicOffsetCount, pDynamicOffsets);
920}
921
922void
923vn_CmdBindIndexBuffer(VkCommandBuffer commandBuffer,
924                      VkBuffer buffer,
925                      VkDeviceSize offset,
926                      VkIndexType indexType)
927{
928   struct vn_command_buffer *cmd =
929      vn_command_buffer_from_handle(commandBuffer);
930   size_t cmd_size;
931
932   cmd_size = vn_sizeof_vkCmdBindIndexBuffer(commandBuffer, buffer, offset,
933                                             indexType);
934   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
935      return;
936
937   vn_encode_vkCmdBindIndexBuffer(&cmd->cs, 0, commandBuffer, buffer, offset,
938                                  indexType);
939}
940
941void
942vn_CmdBindVertexBuffers(VkCommandBuffer commandBuffer,
943                        uint32_t firstBinding,
944                        uint32_t bindingCount,
945                        const VkBuffer *pBuffers,
946                        const VkDeviceSize *pOffsets)
947{
948   struct vn_command_buffer *cmd =
949      vn_command_buffer_from_handle(commandBuffer);
950   size_t cmd_size;
951
952   cmd_size = vn_sizeof_vkCmdBindVertexBuffers(
953      commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets);
954   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
955      return;
956
957   vn_encode_vkCmdBindVertexBuffers(&cmd->cs, 0, commandBuffer, firstBinding,
958                                    bindingCount, pBuffers, pOffsets);
959}
960
961void
962vn_CmdDraw(VkCommandBuffer commandBuffer,
963           uint32_t vertexCount,
964           uint32_t instanceCount,
965           uint32_t firstVertex,
966           uint32_t firstInstance)
967{
968   struct vn_command_buffer *cmd =
969      vn_command_buffer_from_handle(commandBuffer);
970   size_t cmd_size;
971
972   cmd_size = vn_sizeof_vkCmdDraw(commandBuffer, vertexCount, instanceCount,
973                                  firstVertex, firstInstance);
974   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
975      return;
976
977   vn_encode_vkCmdDraw(&cmd->cs, 0, commandBuffer, vertexCount, instanceCount,
978                       firstVertex, firstInstance);
979}
980
981void
982vn_CmdDrawIndexed(VkCommandBuffer commandBuffer,
983                  uint32_t indexCount,
984                  uint32_t instanceCount,
985                  uint32_t firstIndex,
986                  int32_t vertexOffset,
987                  uint32_t firstInstance)
988{
989   struct vn_command_buffer *cmd =
990      vn_command_buffer_from_handle(commandBuffer);
991   size_t cmd_size;
992
993   cmd_size =
994      vn_sizeof_vkCmdDrawIndexed(commandBuffer, indexCount, instanceCount,
995                                 firstIndex, vertexOffset, firstInstance);
996   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
997      return;
998
999   vn_encode_vkCmdDrawIndexed(&cmd->cs, 0, commandBuffer, indexCount,
1000                              instanceCount, firstIndex, vertexOffset,
1001                              firstInstance);
1002}
1003
1004void
1005vn_CmdDrawIndirect(VkCommandBuffer commandBuffer,
1006                   VkBuffer buffer,
1007                   VkDeviceSize offset,
1008                   uint32_t drawCount,
1009                   uint32_t stride)
1010{
1011   struct vn_command_buffer *cmd =
1012      vn_command_buffer_from_handle(commandBuffer);
1013   size_t cmd_size;
1014
1015   cmd_size = vn_sizeof_vkCmdDrawIndirect(commandBuffer, buffer, offset,
1016                                          drawCount, stride);
1017   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1018      return;
1019
1020   vn_encode_vkCmdDrawIndirect(&cmd->cs, 0, commandBuffer, buffer, offset,
1021                               drawCount, stride);
1022}
1023
1024void
1025vn_CmdDrawIndexedIndirect(VkCommandBuffer commandBuffer,
1026                          VkBuffer buffer,
1027                          VkDeviceSize offset,
1028                          uint32_t drawCount,
1029                          uint32_t stride)
1030{
1031   struct vn_command_buffer *cmd =
1032      vn_command_buffer_from_handle(commandBuffer);
1033   size_t cmd_size;
1034
1035   cmd_size = vn_sizeof_vkCmdDrawIndexedIndirect(commandBuffer, buffer,
1036                                                 offset, drawCount, stride);
1037   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1038      return;
1039
1040   vn_encode_vkCmdDrawIndexedIndirect(&cmd->cs, 0, commandBuffer, buffer,
1041                                      offset, drawCount, stride);
1042}
1043
1044void
1045vn_CmdDrawIndirectCount(VkCommandBuffer commandBuffer,
1046                        VkBuffer buffer,
1047                        VkDeviceSize offset,
1048                        VkBuffer countBuffer,
1049                        VkDeviceSize countBufferOffset,
1050                        uint32_t maxDrawCount,
1051                        uint32_t stride)
1052{
1053   struct vn_command_buffer *cmd =
1054      vn_command_buffer_from_handle(commandBuffer);
1055   size_t cmd_size;
1056
1057   cmd_size = vn_sizeof_vkCmdDrawIndirectCount(commandBuffer, buffer, offset,
1058                                               countBuffer, countBufferOffset,
1059                                               maxDrawCount, stride);
1060   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1061      return;
1062
1063   vn_encode_vkCmdDrawIndirectCount(&cmd->cs, 0, commandBuffer, buffer,
1064                                    offset, countBuffer, countBufferOffset,
1065                                    maxDrawCount, stride);
1066}
1067
1068void
1069vn_CmdDrawIndexedIndirectCount(VkCommandBuffer commandBuffer,
1070                               VkBuffer buffer,
1071                               VkDeviceSize offset,
1072                               VkBuffer countBuffer,
1073                               VkDeviceSize countBufferOffset,
1074                               uint32_t maxDrawCount,
1075                               uint32_t stride)
1076{
1077   struct vn_command_buffer *cmd =
1078      vn_command_buffer_from_handle(commandBuffer);
1079   size_t cmd_size;
1080
1081   cmd_size = vn_sizeof_vkCmdDrawIndexedIndirectCount(
1082      commandBuffer, buffer, offset, countBuffer, countBufferOffset,
1083      maxDrawCount, stride);
1084   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1085      return;
1086
1087   vn_encode_vkCmdDrawIndexedIndirectCount(
1088      &cmd->cs, 0, commandBuffer, buffer, offset, countBuffer,
1089      countBufferOffset, maxDrawCount, stride);
1090}
1091
1092void
1093vn_CmdDispatch(VkCommandBuffer commandBuffer,
1094               uint32_t groupCountX,
1095               uint32_t groupCountY,
1096               uint32_t groupCountZ)
1097{
1098   struct vn_command_buffer *cmd =
1099      vn_command_buffer_from_handle(commandBuffer);
1100   size_t cmd_size;
1101
1102   cmd_size = vn_sizeof_vkCmdDispatch(commandBuffer, groupCountX, groupCountY,
1103                                      groupCountZ);
1104   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1105      return;
1106
1107   vn_encode_vkCmdDispatch(&cmd->cs, 0, commandBuffer, groupCountX,
1108                           groupCountY, groupCountZ);
1109}
1110
1111void
1112vn_CmdDispatchIndirect(VkCommandBuffer commandBuffer,
1113                       VkBuffer buffer,
1114                       VkDeviceSize offset)
1115{
1116   struct vn_command_buffer *cmd =
1117      vn_command_buffer_from_handle(commandBuffer);
1118   size_t cmd_size;
1119
1120   cmd_size = vn_sizeof_vkCmdDispatchIndirect(commandBuffer, buffer, offset);
1121   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1122      return;
1123
1124   vn_encode_vkCmdDispatchIndirect(&cmd->cs, 0, commandBuffer, buffer,
1125                                   offset);
1126}
1127
1128void
1129vn_CmdCopyBuffer(VkCommandBuffer commandBuffer,
1130                 VkBuffer srcBuffer,
1131                 VkBuffer dstBuffer,
1132                 uint32_t regionCount,
1133                 const VkBufferCopy *pRegions)
1134{
1135   struct vn_command_buffer *cmd =
1136      vn_command_buffer_from_handle(commandBuffer);
1137   size_t cmd_size;
1138
1139   cmd_size = vn_sizeof_vkCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer,
1140                                        regionCount, pRegions);
1141   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1142      return;
1143
1144   vn_encode_vkCmdCopyBuffer(&cmd->cs, 0, commandBuffer, srcBuffer, dstBuffer,
1145                             regionCount, pRegions);
1146}
1147
1148void
1149vn_CmdCopyImage(VkCommandBuffer commandBuffer,
1150                VkImage srcImage,
1151                VkImageLayout srcImageLayout,
1152                VkImage dstImage,
1153                VkImageLayout dstImageLayout,
1154                uint32_t regionCount,
1155                const VkImageCopy *pRegions)
1156{
1157   struct vn_command_buffer *cmd =
1158      vn_command_buffer_from_handle(commandBuffer);
1159   size_t cmd_size;
1160
1161   cmd_size = vn_sizeof_vkCmdCopyImage(commandBuffer, srcImage,
1162                                       srcImageLayout, dstImage,
1163                                       dstImageLayout, regionCount, pRegions);
1164   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1165      return;
1166
1167   vn_encode_vkCmdCopyImage(&cmd->cs, 0, commandBuffer, srcImage,
1168                            srcImageLayout, dstImage, dstImageLayout,
1169                            regionCount, pRegions);
1170}
1171
1172void
1173vn_CmdBlitImage(VkCommandBuffer commandBuffer,
1174                VkImage srcImage,
1175                VkImageLayout srcImageLayout,
1176                VkImage dstImage,
1177                VkImageLayout dstImageLayout,
1178                uint32_t regionCount,
1179                const VkImageBlit *pRegions,
1180                VkFilter filter)
1181{
1182   struct vn_command_buffer *cmd =
1183      vn_command_buffer_from_handle(commandBuffer);
1184   size_t cmd_size;
1185
1186   cmd_size = vn_sizeof_vkCmdBlitImage(
1187      commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
1188      regionCount, pRegions, filter);
1189   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1190      return;
1191
1192   vn_encode_vkCmdBlitImage(&cmd->cs, 0, commandBuffer, srcImage,
1193                            srcImageLayout, dstImage, dstImageLayout,
1194                            regionCount, pRegions, filter);
1195}
1196
1197void
1198vn_CmdCopyBufferToImage(VkCommandBuffer commandBuffer,
1199                        VkBuffer srcBuffer,
1200                        VkImage dstImage,
1201                        VkImageLayout dstImageLayout,
1202                        uint32_t regionCount,
1203                        const VkBufferImageCopy *pRegions)
1204{
1205   struct vn_command_buffer *cmd =
1206      vn_command_buffer_from_handle(commandBuffer);
1207   size_t cmd_size;
1208
1209   cmd_size =
1210      vn_sizeof_vkCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage,
1211                                       dstImageLayout, regionCount, pRegions);
1212   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1213      return;
1214
1215   vn_encode_vkCmdCopyBufferToImage(&cmd->cs, 0, commandBuffer, srcBuffer,
1216                                    dstImage, dstImageLayout, regionCount,
1217                                    pRegions);
1218}
1219
void
vn_CmdCopyImageToBuffer(VkCommandBuffer commandBuffer,
                        VkImage srcImage,
                        VkImageLayout srcImageLayout,
                        VkBuffer dstBuffer,
                        uint32_t regionCount,
                        const VkBufferImageCopy *pRegions)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   /* Copies out of a present-src image go through the prime-blit path: the
    * renderer only knows VN_PRESENT_SRC_INTERNAL_LAYOUT, so rewrite the
    * layout before sizing/encoding the command.  Only taken when the two
    * layouts actually differ (compile-time/platform condition).
    */
   bool prime_blit = false;
   if (srcImageLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR &&
       VN_PRESENT_SRC_INTERNAL_LAYOUT != VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
      srcImageLayout = VN_PRESENT_SRC_INTERNAL_LAYOUT;

      /* sanity check: only a WSI image marked as a prime-blit source should
       * ever be copied while in present-src layout (assert is compiled out
       * in release builds; prime_blit then simply gates the barrier below)
       */
      const struct vn_image *img = vn_image_from_handle(srcImage);
      prime_blit = img->is_wsi && img->is_prime_blit_src;
      assert(prime_blit);
   }

   cmd_size = vn_sizeof_vkCmdCopyImageToBuffer(commandBuffer, srcImage,
                                               srcImageLayout, dstBuffer,
                                               regionCount, pRegions);
   /* drop the command if the encoder cannot fit it */
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdCopyImageToBuffer(&cmd->cs, 0, commandBuffer, srcImage,
                                    srcImageLayout, dstBuffer, regionCount,
                                    pRegions);

   /* After the prime-blit copy, release the whole destination buffer to
    * VK_QUEUE_FAMILY_FOREIGN_EXT so the external (display) consumer can
    * acquire it; srcAccessMask covers the transfer write just encoded.
    */
   if (prime_blit) {
      const VkBufferMemoryBarrier buf_barrier = {
         .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
         .srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
         .srcQueueFamilyIndex = cmd->queue_family_index,
         .dstQueueFamilyIndex = VK_QUEUE_FAMILY_FOREIGN_EXT,
         .buffer = dstBuffer,
         .size = VK_WHOLE_SIZE,
      };
      vn_cmd_encode_memory_barriers(cmd, VK_PIPELINE_STAGE_TRANSFER_BIT,
                                    VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 1,
                                    &buf_barrier, 0, NULL);
   }
}
1267
1268void
1269vn_CmdUpdateBuffer(VkCommandBuffer commandBuffer,
1270                   VkBuffer dstBuffer,
1271                   VkDeviceSize dstOffset,
1272                   VkDeviceSize dataSize,
1273                   const void *pData)
1274{
1275   struct vn_command_buffer *cmd =
1276      vn_command_buffer_from_handle(commandBuffer);
1277   size_t cmd_size;
1278
1279   cmd_size = vn_sizeof_vkCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset,
1280                                          dataSize, pData);
1281   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1282      return;
1283
1284   vn_encode_vkCmdUpdateBuffer(&cmd->cs, 0, commandBuffer, dstBuffer,
1285                               dstOffset, dataSize, pData);
1286}
1287
1288void
1289vn_CmdFillBuffer(VkCommandBuffer commandBuffer,
1290                 VkBuffer dstBuffer,
1291                 VkDeviceSize dstOffset,
1292                 VkDeviceSize size,
1293                 uint32_t data)
1294{
1295   struct vn_command_buffer *cmd =
1296      vn_command_buffer_from_handle(commandBuffer);
1297   size_t cmd_size;
1298
1299   cmd_size = vn_sizeof_vkCmdFillBuffer(commandBuffer, dstBuffer, dstOffset,
1300                                        size, data);
1301   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1302      return;
1303
1304   vn_encode_vkCmdFillBuffer(&cmd->cs, 0, commandBuffer, dstBuffer, dstOffset,
1305                             size, data);
1306}
1307
1308void
1309vn_CmdClearColorImage(VkCommandBuffer commandBuffer,
1310                      VkImage image,
1311                      VkImageLayout imageLayout,
1312                      const VkClearColorValue *pColor,
1313                      uint32_t rangeCount,
1314                      const VkImageSubresourceRange *pRanges)
1315{
1316   struct vn_command_buffer *cmd =
1317      vn_command_buffer_from_handle(commandBuffer);
1318   size_t cmd_size;
1319
1320   cmd_size = vn_sizeof_vkCmdClearColorImage(
1321      commandBuffer, image, imageLayout, pColor, rangeCount, pRanges);
1322   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1323      return;
1324
1325   vn_encode_vkCmdClearColorImage(&cmd->cs, 0, commandBuffer, image,
1326                                  imageLayout, pColor, rangeCount, pRanges);
1327}
1328
1329void
1330vn_CmdClearDepthStencilImage(VkCommandBuffer commandBuffer,
1331                             VkImage image,
1332                             VkImageLayout imageLayout,
1333                             const VkClearDepthStencilValue *pDepthStencil,
1334                             uint32_t rangeCount,
1335                             const VkImageSubresourceRange *pRanges)
1336{
1337   struct vn_command_buffer *cmd =
1338      vn_command_buffer_from_handle(commandBuffer);
1339   size_t cmd_size;
1340
1341   cmd_size = vn_sizeof_vkCmdClearDepthStencilImage(
1342      commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges);
1343   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1344      return;
1345
1346   vn_encode_vkCmdClearDepthStencilImage(&cmd->cs, 0, commandBuffer, image,
1347                                         imageLayout, pDepthStencil,
1348                                         rangeCount, pRanges);
1349}
1350
1351void
1352vn_CmdClearAttachments(VkCommandBuffer commandBuffer,
1353                       uint32_t attachmentCount,
1354                       const VkClearAttachment *pAttachments,
1355                       uint32_t rectCount,
1356                       const VkClearRect *pRects)
1357{
1358   struct vn_command_buffer *cmd =
1359      vn_command_buffer_from_handle(commandBuffer);
1360   size_t cmd_size;
1361
1362   cmd_size = vn_sizeof_vkCmdClearAttachments(
1363      commandBuffer, attachmentCount, pAttachments, rectCount, pRects);
1364   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1365      return;
1366
1367   vn_encode_vkCmdClearAttachments(&cmd->cs, 0, commandBuffer,
1368                                   attachmentCount, pAttachments, rectCount,
1369                                   pRects);
1370}
1371
1372void
1373vn_CmdResolveImage(VkCommandBuffer commandBuffer,
1374                   VkImage srcImage,
1375                   VkImageLayout srcImageLayout,
1376                   VkImage dstImage,
1377                   VkImageLayout dstImageLayout,
1378                   uint32_t regionCount,
1379                   const VkImageResolve *pRegions)
1380{
1381   struct vn_command_buffer *cmd =
1382      vn_command_buffer_from_handle(commandBuffer);
1383   size_t cmd_size;
1384
1385   cmd_size = vn_sizeof_vkCmdResolveImage(
1386      commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout,
1387      regionCount, pRegions);
1388   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1389      return;
1390
1391   vn_encode_vkCmdResolveImage(&cmd->cs, 0, commandBuffer, srcImage,
1392                               srcImageLayout, dstImage, dstImageLayout,
1393                               regionCount, pRegions);
1394}
1395
1396void
1397vn_CmdSetEvent(VkCommandBuffer commandBuffer,
1398               VkEvent event,
1399               VkPipelineStageFlags stageMask)
1400{
1401   struct vn_command_buffer *cmd =
1402      vn_command_buffer_from_handle(commandBuffer);
1403   size_t cmd_size;
1404
1405   cmd_size = vn_sizeof_vkCmdSetEvent(commandBuffer, event, stageMask);
1406   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1407      return;
1408
1409   vn_encode_vkCmdSetEvent(&cmd->cs, 0, commandBuffer, event, stageMask);
1410}
1411
1412void
1413vn_CmdResetEvent(VkCommandBuffer commandBuffer,
1414                 VkEvent event,
1415                 VkPipelineStageFlags stageMask)
1416{
1417   struct vn_command_buffer *cmd =
1418      vn_command_buffer_from_handle(commandBuffer);
1419   size_t cmd_size;
1420
1421   cmd_size = vn_sizeof_vkCmdResetEvent(commandBuffer, event, stageMask);
1422   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1423      return;
1424
1425   vn_encode_vkCmdResetEvent(&cmd->cs, 0, commandBuffer, event, stageMask);
1426}
1427
void
vn_CmdWaitEvents(VkCommandBuffer commandBuffer,
                 uint32_t eventCount,
                 const VkEvent *pEvents,
                 VkPipelineStageFlags srcStageMask,
                 VkPipelineStageFlags dstStageMask,
                 uint32_t memoryBarrierCount,
                 const VkMemoryBarrier *pMemoryBarriers,
                 uint32_t bufferMemoryBarrierCount,
                 const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                 uint32_t imageMemoryBarrierCount,
                 const VkImageMemoryBarrier *pImageMemoryBarriers)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   /* NOTE(review): the fix-up helper (defined earlier in this file) returns
    * a driver-owned copy of the image barriers; the tail arithmetic below
    * presumes it moves transfer_count barriers needing special handling to
    * the end of that array — confirm against the helper's definition.
    */
   uint32_t transfer_count;
   pImageMemoryBarriers = vn_cmd_wait_events_fix_image_memory_barriers(
      cmd, pImageMemoryBarriers, imageMemoryBarrierCount, &transfer_count);
   /* encode only the non-transferred head barriers with vkCmdWaitEvents */
   imageMemoryBarrierCount -= transfer_count;

   cmd_size = vn_sizeof_vkCmdWaitEvents(
      commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask,
      memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
      pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
   /* drop the command if the encoder cannot fit it */
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdWaitEvents(&cmd->cs, 0, commandBuffer, eventCount, pEvents,
                             srcStageMask, dstStageMask, memoryBarrierCount,
                             pMemoryBarriers, bufferMemoryBarrierCount,
                             pBufferMemoryBarriers, imageMemoryBarrierCount,
                             pImageMemoryBarriers);

   /* encode the tail (transferred) image barriers as a separate command */
   if (transfer_count) {
      pImageMemoryBarriers += imageMemoryBarrierCount;
      vn_cmd_encode_memory_barriers(cmd, srcStageMask, dstStageMask, 0, NULL,
                                    transfer_count, pImageMemoryBarriers);
   }
}
1469
void
vn_CmdPipelineBarrier(VkCommandBuffer commandBuffer,
                      VkPipelineStageFlags srcStageMask,
                      VkPipelineStageFlags dstStageMask,
                      VkDependencyFlags dependencyFlags,
                      uint32_t memoryBarrierCount,
                      const VkMemoryBarrier *pMemoryBarriers,
                      uint32_t bufferMemoryBarrierCount,
                      const VkBufferMemoryBarrier *pBufferMemoryBarriers,
                      uint32_t imageMemoryBarrierCount,
                      const VkImageMemoryBarrier *pImageMemoryBarriers)
{
   struct vn_command_buffer *cmd =
      vn_command_buffer_from_handle(commandBuffer);
   size_t cmd_size;

   /* Rewrite image barriers that reference the present-src layout before
    * encoding.  NOTE(review): the helper (defined earlier in this file)
    * returns a driver-owned array; the original pointer is not modified.
    */
   pImageMemoryBarriers = vn_cmd_pipeline_barrier_fix_image_memory_barriers(
      cmd, pImageMemoryBarriers, imageMemoryBarrierCount);

   cmd_size = vn_sizeof_vkCmdPipelineBarrier(
      commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
      memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
      pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
   /* drop the command if the encoder cannot fit it */
   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
      return;

   vn_encode_vkCmdPipelineBarrier(
      &cmd->cs, 0, commandBuffer, srcStageMask, dstStageMask, dependencyFlags,
      memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount,
      pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers);
}
1501
1502void
1503vn_CmdBeginQuery(VkCommandBuffer commandBuffer,
1504                 VkQueryPool queryPool,
1505                 uint32_t query,
1506                 VkQueryControlFlags flags)
1507{
1508   struct vn_command_buffer *cmd =
1509      vn_command_buffer_from_handle(commandBuffer);
1510   size_t cmd_size;
1511
1512   cmd_size =
1513      vn_sizeof_vkCmdBeginQuery(commandBuffer, queryPool, query, flags);
1514   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1515      return;
1516
1517   vn_encode_vkCmdBeginQuery(&cmd->cs, 0, commandBuffer, queryPool, query,
1518                             flags);
1519}
1520
1521void
1522vn_CmdEndQuery(VkCommandBuffer commandBuffer,
1523               VkQueryPool queryPool,
1524               uint32_t query)
1525{
1526   struct vn_command_buffer *cmd =
1527      vn_command_buffer_from_handle(commandBuffer);
1528   size_t cmd_size;
1529
1530   cmd_size = vn_sizeof_vkCmdEndQuery(commandBuffer, queryPool, query);
1531   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1532      return;
1533
1534   vn_encode_vkCmdEndQuery(&cmd->cs, 0, commandBuffer, queryPool, query);
1535}
1536
1537void
1538vn_CmdResetQueryPool(VkCommandBuffer commandBuffer,
1539                     VkQueryPool queryPool,
1540                     uint32_t firstQuery,
1541                     uint32_t queryCount)
1542{
1543   struct vn_command_buffer *cmd =
1544      vn_command_buffer_from_handle(commandBuffer);
1545   size_t cmd_size;
1546
1547   cmd_size = vn_sizeof_vkCmdResetQueryPool(commandBuffer, queryPool,
1548                                            firstQuery, queryCount);
1549   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1550      return;
1551
1552   vn_encode_vkCmdResetQueryPool(&cmd->cs, 0, commandBuffer, queryPool,
1553                                 firstQuery, queryCount);
1554}
1555
1556void
1557vn_CmdWriteTimestamp(VkCommandBuffer commandBuffer,
1558                     VkPipelineStageFlagBits pipelineStage,
1559                     VkQueryPool queryPool,
1560                     uint32_t query)
1561{
1562   struct vn_command_buffer *cmd =
1563      vn_command_buffer_from_handle(commandBuffer);
1564   size_t cmd_size;
1565
1566   cmd_size = vn_sizeof_vkCmdWriteTimestamp(commandBuffer, pipelineStage,
1567                                            queryPool, query);
1568   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1569      return;
1570
1571   vn_encode_vkCmdWriteTimestamp(&cmd->cs, 0, commandBuffer, pipelineStage,
1572                                 queryPool, query);
1573}
1574
1575void
1576vn_CmdCopyQueryPoolResults(VkCommandBuffer commandBuffer,
1577                           VkQueryPool queryPool,
1578                           uint32_t firstQuery,
1579                           uint32_t queryCount,
1580                           VkBuffer dstBuffer,
1581                           VkDeviceSize dstOffset,
1582                           VkDeviceSize stride,
1583                           VkQueryResultFlags flags)
1584{
1585   struct vn_command_buffer *cmd =
1586      vn_command_buffer_from_handle(commandBuffer);
1587   size_t cmd_size;
1588
1589   cmd_size = vn_sizeof_vkCmdCopyQueryPoolResults(
1590      commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset,
1591      stride, flags);
1592   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1593      return;
1594
1595   vn_encode_vkCmdCopyQueryPoolResults(&cmd->cs, 0, commandBuffer, queryPool,
1596                                       firstQuery, queryCount, dstBuffer,
1597                                       dstOffset, stride, flags);
1598}
1599
1600void
1601vn_CmdPushConstants(VkCommandBuffer commandBuffer,
1602                    VkPipelineLayout layout,
1603                    VkShaderStageFlags stageFlags,
1604                    uint32_t offset,
1605                    uint32_t size,
1606                    const void *pValues)
1607{
1608   struct vn_command_buffer *cmd =
1609      vn_command_buffer_from_handle(commandBuffer);
1610   size_t cmd_size;
1611
1612   cmd_size = vn_sizeof_vkCmdPushConstants(commandBuffer, layout, stageFlags,
1613                                           offset, size, pValues);
1614   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1615      return;
1616
1617   vn_encode_vkCmdPushConstants(&cmd->cs, 0, commandBuffer, layout,
1618                                stageFlags, offset, size, pValues);
1619}
1620
1621void
1622vn_CmdBeginRenderPass(VkCommandBuffer commandBuffer,
1623                      const VkRenderPassBeginInfo *pRenderPassBegin,
1624                      VkSubpassContents contents)
1625{
1626   struct vn_command_buffer *cmd =
1627      vn_command_buffer_from_handle(commandBuffer);
1628   size_t cmd_size;
1629
1630   vn_cmd_begin_render_pass(
1631      cmd, vn_render_pass_from_handle(pRenderPassBegin->renderPass),
1632      vn_framebuffer_from_handle(pRenderPassBegin->framebuffer),
1633      pRenderPassBegin);
1634
1635   cmd_size = vn_sizeof_vkCmdBeginRenderPass(commandBuffer, pRenderPassBegin,
1636                                             contents);
1637   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1638      return;
1639
1640   vn_encode_vkCmdBeginRenderPass(&cmd->cs, 0, commandBuffer,
1641                                  pRenderPassBegin, contents);
1642}
1643
1644void
1645vn_CmdNextSubpass(VkCommandBuffer commandBuffer, VkSubpassContents contents)
1646{
1647   struct vn_command_buffer *cmd =
1648      vn_command_buffer_from_handle(commandBuffer);
1649   size_t cmd_size;
1650
1651   cmd_size = vn_sizeof_vkCmdNextSubpass(commandBuffer, contents);
1652   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1653      return;
1654
1655   vn_encode_vkCmdNextSubpass(&cmd->cs, 0, commandBuffer, contents);
1656}
1657
1658void
1659vn_CmdEndRenderPass(VkCommandBuffer commandBuffer)
1660{
1661   struct vn_command_buffer *cmd =
1662      vn_command_buffer_from_handle(commandBuffer);
1663   size_t cmd_size;
1664
1665   cmd_size = vn_sizeof_vkCmdEndRenderPass(commandBuffer);
1666   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1667      return;
1668
1669   vn_encode_vkCmdEndRenderPass(&cmd->cs, 0, commandBuffer);
1670
1671   vn_cmd_end_render_pass(cmd);
1672}
1673
1674void
1675vn_CmdBeginRenderPass2(VkCommandBuffer commandBuffer,
1676                       const VkRenderPassBeginInfo *pRenderPassBegin,
1677                       const VkSubpassBeginInfo *pSubpassBeginInfo)
1678{
1679   struct vn_command_buffer *cmd =
1680      vn_command_buffer_from_handle(commandBuffer);
1681   size_t cmd_size;
1682
1683   vn_cmd_begin_render_pass(
1684      cmd, vn_render_pass_from_handle(pRenderPassBegin->renderPass),
1685      vn_framebuffer_from_handle(pRenderPassBegin->framebuffer),
1686      pRenderPassBegin);
1687
1688   cmd_size = vn_sizeof_vkCmdBeginRenderPass2(commandBuffer, pRenderPassBegin,
1689                                              pSubpassBeginInfo);
1690   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1691      return;
1692
1693   vn_encode_vkCmdBeginRenderPass2(&cmd->cs, 0, commandBuffer,
1694                                   pRenderPassBegin, pSubpassBeginInfo);
1695}
1696
1697void
1698vn_CmdNextSubpass2(VkCommandBuffer commandBuffer,
1699                   const VkSubpassBeginInfo *pSubpassBeginInfo,
1700                   const VkSubpassEndInfo *pSubpassEndInfo)
1701{
1702   struct vn_command_buffer *cmd =
1703      vn_command_buffer_from_handle(commandBuffer);
1704   size_t cmd_size;
1705
1706   cmd_size = vn_sizeof_vkCmdNextSubpass2(commandBuffer, pSubpassBeginInfo,
1707                                          pSubpassEndInfo);
1708   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1709      return;
1710
1711   vn_encode_vkCmdNextSubpass2(&cmd->cs, 0, commandBuffer, pSubpassBeginInfo,
1712                               pSubpassEndInfo);
1713}
1714
1715void
1716vn_CmdEndRenderPass2(VkCommandBuffer commandBuffer,
1717                     const VkSubpassEndInfo *pSubpassEndInfo)
1718{
1719   struct vn_command_buffer *cmd =
1720      vn_command_buffer_from_handle(commandBuffer);
1721   size_t cmd_size;
1722
1723   cmd_size = vn_sizeof_vkCmdEndRenderPass2(commandBuffer, pSubpassEndInfo);
1724   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1725      return;
1726
1727   vn_encode_vkCmdEndRenderPass2(&cmd->cs, 0, commandBuffer, pSubpassEndInfo);
1728
1729   vn_cmd_end_render_pass(cmd);
1730}
1731
1732void
1733vn_CmdExecuteCommands(VkCommandBuffer commandBuffer,
1734                      uint32_t commandBufferCount,
1735                      const VkCommandBuffer *pCommandBuffers)
1736{
1737   struct vn_command_buffer *cmd =
1738      vn_command_buffer_from_handle(commandBuffer);
1739   size_t cmd_size;
1740
1741   cmd_size = vn_sizeof_vkCmdExecuteCommands(
1742      commandBuffer, commandBufferCount, pCommandBuffers);
1743   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1744      return;
1745
1746   vn_encode_vkCmdExecuteCommands(&cmd->cs, 0, commandBuffer,
1747                                  commandBufferCount, pCommandBuffers);
1748}
1749
1750void
1751vn_CmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask)
1752{
1753   struct vn_command_buffer *cmd =
1754      vn_command_buffer_from_handle(commandBuffer);
1755   size_t cmd_size;
1756
1757   cmd_size = vn_sizeof_vkCmdSetDeviceMask(commandBuffer, deviceMask);
1758   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1759      return;
1760
1761   vn_encode_vkCmdSetDeviceMask(&cmd->cs, 0, commandBuffer, deviceMask);
1762}
1763
1764void
1765vn_CmdDispatchBase(VkCommandBuffer commandBuffer,
1766                   uint32_t baseGroupX,
1767                   uint32_t baseGroupY,
1768                   uint32_t baseGroupZ,
1769                   uint32_t groupCountX,
1770                   uint32_t groupCountY,
1771                   uint32_t groupCountZ)
1772{
1773   struct vn_command_buffer *cmd =
1774      vn_command_buffer_from_handle(commandBuffer);
1775   size_t cmd_size;
1776
1777   cmd_size = vn_sizeof_vkCmdDispatchBase(commandBuffer, baseGroupX,
1778                                          baseGroupY, baseGroupZ, groupCountX,
1779                                          groupCountY, groupCountZ);
1780   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1781      return;
1782
1783   vn_encode_vkCmdDispatchBase(&cmd->cs, 0, commandBuffer, baseGroupX,
1784                               baseGroupY, baseGroupZ, groupCountX,
1785                               groupCountY, groupCountZ);
1786}
1787
1788void
1789vn_CmdBeginQueryIndexedEXT(VkCommandBuffer commandBuffer,
1790                           VkQueryPool queryPool,
1791                           uint32_t query,
1792                           VkQueryControlFlags flags,
1793                           uint32_t index)
1794{
1795   struct vn_command_buffer *cmd =
1796      vn_command_buffer_from_handle(commandBuffer);
1797   size_t cmd_size;
1798
1799   cmd_size = vn_sizeof_vkCmdBeginQueryIndexedEXT(commandBuffer, queryPool,
1800                                                  query, flags, index);
1801   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1802      return;
1803
1804   vn_encode_vkCmdBeginQueryIndexedEXT(&cmd->cs, 0, commandBuffer, queryPool,
1805                                       query, flags, index);
1806}
1807
1808void
1809vn_CmdEndQueryIndexedEXT(VkCommandBuffer commandBuffer,
1810                         VkQueryPool queryPool,
1811                         uint32_t query,
1812                         uint32_t index)
1813{
1814   struct vn_command_buffer *cmd =
1815      vn_command_buffer_from_handle(commandBuffer);
1816   size_t cmd_size;
1817
1818   cmd_size = vn_sizeof_vkCmdEndQueryIndexedEXT(commandBuffer, queryPool,
1819                                                query, index);
1820   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1821      return;
1822
1823   vn_encode_vkCmdEndQueryIndexedEXT(&cmd->cs, 0, commandBuffer, queryPool,
1824                                     query, index);
1825}
1826
1827void
1828vn_CmdBindTransformFeedbackBuffersEXT(VkCommandBuffer commandBuffer,
1829                                      uint32_t firstBinding,
1830                                      uint32_t bindingCount,
1831                                      const VkBuffer *pBuffers,
1832                                      const VkDeviceSize *pOffsets,
1833                                      const VkDeviceSize *pSizes)
1834{
1835   struct vn_command_buffer *cmd =
1836      vn_command_buffer_from_handle(commandBuffer);
1837   size_t cmd_size;
1838
1839   cmd_size = vn_sizeof_vkCmdBindTransformFeedbackBuffersEXT(
1840      commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes);
1841   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1842      return;
1843
1844   vn_encode_vkCmdBindTransformFeedbackBuffersEXT(&cmd->cs, 0, commandBuffer,
1845                                                  firstBinding, bindingCount,
1846                                                  pBuffers, pOffsets, pSizes);
1847}
1848
1849void
1850vn_CmdBeginTransformFeedbackEXT(VkCommandBuffer commandBuffer,
1851                                uint32_t firstCounterBuffer,
1852                                uint32_t counterBufferCount,
1853                                const VkBuffer *pCounterBuffers,
1854                                const VkDeviceSize *pCounterBufferOffsets)
1855{
1856   struct vn_command_buffer *cmd =
1857      vn_command_buffer_from_handle(commandBuffer);
1858   size_t cmd_size;
1859
1860   cmd_size = vn_sizeof_vkCmdBeginTransformFeedbackEXT(
1861      commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers,
1862      pCounterBufferOffsets);
1863   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1864      return;
1865
1866   vn_encode_vkCmdBeginTransformFeedbackEXT(
1867      &cmd->cs, 0, commandBuffer, firstCounterBuffer, counterBufferCount,
1868      pCounterBuffers, pCounterBufferOffsets);
1869}
1870
1871void
1872vn_CmdEndTransformFeedbackEXT(VkCommandBuffer commandBuffer,
1873                              uint32_t firstCounterBuffer,
1874                              uint32_t counterBufferCount,
1875                              const VkBuffer *pCounterBuffers,
1876                              const VkDeviceSize *pCounterBufferOffsets)
1877{
1878   struct vn_command_buffer *cmd =
1879      vn_command_buffer_from_handle(commandBuffer);
1880   size_t cmd_size;
1881
1882   cmd_size = vn_sizeof_vkCmdEndTransformFeedbackEXT(
1883      commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers,
1884      pCounterBufferOffsets);
1885   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1886      return;
1887
1888   vn_encode_vkCmdEndTransformFeedbackEXT(
1889      &cmd->cs, 0, commandBuffer, firstCounterBuffer, counterBufferCount,
1890      pCounterBuffers, pCounterBufferOffsets);
1891}
1892
1893void
1894vn_CmdDrawIndirectByteCountEXT(VkCommandBuffer commandBuffer,
1895                               uint32_t instanceCount,
1896                               uint32_t firstInstance,
1897                               VkBuffer counterBuffer,
1898                               VkDeviceSize counterBufferOffset,
1899                               uint32_t counterOffset,
1900                               uint32_t vertexStride)
1901{
1902   struct vn_command_buffer *cmd =
1903      vn_command_buffer_from_handle(commandBuffer);
1904   size_t cmd_size;
1905
1906   cmd_size = vn_sizeof_vkCmdDrawIndirectByteCountEXT(
1907      commandBuffer, instanceCount, firstInstance, counterBuffer,
1908      counterBufferOffset, counterOffset, vertexStride);
1909   if (!vn_cs_encoder_reserve(&cmd->cs, cmd_size))
1910      return;
1911
1912   vn_encode_vkCmdDrawIndirectByteCountEXT(
1913      &cmd->cs, 0, commandBuffer, instanceCount, firstInstance, counterBuffer,
1914      counterBufferOffset, counterOffset, vertexStride);
1915}
1916