/*
 * Copyright 2019 Google LLC
 * SPDX-License-Identifier: MIT
 *
 * based in part on anv and radv which are:
 * Copyright © 2015 Intel Corporation
 * Copyright © 2016 Red Hat.
 * Copyright © 2016 Bas Nieuwenhuizen
 */

#include "vn_image.h"

#include "venus-protocol/vn_protocol_driver_image.h"
#include "venus-protocol/vn_protocol_driver_image_view.h"
#include "venus-protocol/vn_protocol_driver_sampler.h"
#include "venus-protocol/vn_protocol_driver_sampler_ycbcr_conversion.h"

#include "vn_android.h"
#include "vn_device.h"
#include "vn_device_memory.h"
#include "vn_wsi.h"

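/* Query the renderer for the image's (per-plane) memory requirements at
 * creation time and cache them in vn_image, so that later
 * vkGetImageMemoryRequirements* calls can be answered locally without a
 * renderer roundtrip.
 */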
static void
vn_image_init_memory_requirements(struct vn_image *img,
                                  struct vn_device *dev,
                                  const VkImageCreateInfo *create_info)
{
   uint32_t plane_count = 1;
   if (create_info->flags & VK_IMAGE_CREATE_DISJOINT_BIT) {
      /* TODO VkDrmFormatModifierPropertiesEXT::drmFormatModifierPlaneCount */
      assert(create_info->tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT);

      switch (create_info->format) {
      case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
      case VK_FORMAT_G8_B8R8_2PLANE_422_UNORM:
      case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16:
      case VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16:
      case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16:
      case VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16:
      case VK_FORMAT_G16_B16R16_2PLANE_420_UNORM:
      case VK_FORMAT_G16_B16R16_2PLANE_422_UNORM:
         plane_count = 2;
         break;
      case VK_FORMAT_G8_B8_R8_3PLANE_420_UNORM:
      case VK_FORMAT_G8_B8_R8_3PLANE_422_UNORM:
      case VK_FORMAT_G8_B8_R8_3PLANE_444_UNORM:
      case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_420_UNORM_3PACK16:
      case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_422_UNORM_3PACK16:
      case VK_FORMAT_G10X6_B10X6_R10X6_3PLANE_444_UNORM_3PACK16:
      case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_420_UNORM_3PACK16:
      case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_422_UNORM_3PACK16:
      case VK_FORMAT_G12X4_B12X4_R12X4_3PLANE_444_UNORM_3PACK16:
      case VK_FORMAT_G16_B16_R16_3PLANE_420_UNORM:
      case VK_FORMAT_G16_B16_R16_3PLANE_422_UNORM:
      case VK_FORMAT_G16_B16_R16_3PLANE_444_UNORM:
         plane_count = 3;
         break;
      default:
         plane_count = 1;
         break;
      }
   }
   assert(plane_count <= ARRAY_SIZE(img->memory_requirements));

   /* TODO add a per-device cache for the requirements */
   for (uint32_t i = 0; i < plane_count; i++) {
      img->memory_requirements[i].sType =
         VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2;
      img->memory_requirements[i].pNext = &img->dedicated_requirements[i];
      img->dedicated_requirements[i].sType =
         VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS;
      img->dedicated_requirements[i].pNext = NULL;
   }

   VkDevice dev_handle = vn_device_to_handle(dev);
   VkImage img_handle = vn_image_to_handle(img);
   if (plane_count == 1) {
      vn_call_vkGetImageMemoryRequirements2(
         dev->instance, dev_handle,
         &(VkImageMemoryRequirementsInfo2){
            .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
            .image = img_handle,
         },
         &img->memory_requirements[0]);

      /* AHB backed image requires dedicated allocation */
      if (img->deferred_info) {
         img->dedicated_requirements[0].prefersDedicatedAllocation = VK_TRUE;
         img->dedicated_requirements[0].requiresDedicatedAllocation = VK_TRUE;
      }
   } else {
      for (uint32_t i = 0; i < plane_count; i++) {
         vn_call_vkGetImageMemoryRequirements2(
            dev->instance, dev_handle,
            &(VkImageMemoryRequirementsInfo2){
               .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
               .pNext =
                  &(VkImagePlaneMemoryRequirementsInfo){
                     .sType =
                        VK_STRUCTURE_TYPE_IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO,
                     .planeAspect = VK_IMAGE_ASPECT_PLANE_0_BIT << i,
                  },
               .image = img_handle,
            },
            &img->memory_requirements[i]);
      }
   }
}

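/* Deep-copy the create info, along with the pNext structs that are
 * preserved (VkImageFormatListCreateInfo and VkImageStencilUsageCreateInfo),
 * so that renderer-side image creation can be deferred, e.g. for Android
 * hardware buffer backed images which are created later through
 * vn_image_init_deferred.
 */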
static VkResult
vn_image_store_deferred_create_info(
   const VkImageCreateInfo *create_info,
   const VkAllocationCallbacks *alloc,
   struct vn_image_create_deferred_info **out_info)
{
   struct vn_image_create_deferred_info *info = NULL;
   VkBaseOutStructure *dst = NULL;

   info = vk_zalloc(alloc, sizeof(*info), VN_DEFAULT_ALIGN,
                    VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!info)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   info->create = *create_info;
   dst = (void *)&info->create;

   vk_foreach_struct_const(src, create_info->pNext) {
      void *pnext = NULL;
      switch (src->sType) {
      case VK_STRUCTURE_TYPE_IMAGE_FORMAT_LIST_CREATE_INFO:
         memcpy(&info->list, src, sizeof(info->list));
         pnext = &info->list;
         break;
      case VK_STRUCTURE_TYPE_IMAGE_STENCIL_USAGE_CREATE_INFO:
         memcpy(&info->stencil, src, sizeof(info->stencil));
         pnext = &info->stencil;
         break;
      default:
         break;
      }

      if (pnext) {
         dst->pNext = pnext;
         dst = pnext;
      }
   }
   dst->pNext = NULL;

   *out_info = info;

   return VK_SUCCESS;
}

static VkResult
vn_image_init(struct vn_device *dev,
              const VkImageCreateInfo *create_info,
              struct vn_image *img)
{
   VkDevice device = vn_device_to_handle(dev);
   VkImage image = vn_image_to_handle(img);
   VkResult result = VK_SUCCESS;

   img->sharing_mode = create_info->sharingMode;

   /* TODO async */
   result =
      vn_call_vkCreateImage(dev->instance, device, create_info, NULL, &image);
   if (result != VK_SUCCESS)
      return result;

   vn_image_init_memory_requirements(img, dev, create_info);

   return VK_SUCCESS;
}

VkResult
vn_image_create(struct vn_device *dev,
                const VkImageCreateInfo *create_info,
                const VkAllocationCallbacks *alloc,
                struct vn_image **out_img)
{
   struct vn_image *img = NULL;
   VkResult result = VK_SUCCESS;

   img = vk_zalloc(alloc, sizeof(*img), VN_DEFAULT_ALIGN,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!img)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   vn_object_base_init(&img->base, VK_OBJECT_TYPE_IMAGE, &dev->base);

   result = vn_image_init(dev, create_info, img);
   if (result != VK_SUCCESS) {
      vn_object_base_fini(&img->base);
      vk_free(alloc, img);
      return result;
   }

   *out_img = img;

   return VK_SUCCESS;
}

VkResult
vn_image_init_deferred(struct vn_device *dev,
                       const VkImageCreateInfo *create_info,
                       struct vn_image *img)
{
   VkResult result = vn_image_init(dev, create_info, img);
   img->deferred_info->initialized = result == VK_SUCCESS;
   return result;
}

VkResult
vn_image_create_deferred(struct vn_device *dev,
                         const VkImageCreateInfo *create_info,
                         const VkAllocationCallbacks *alloc,
                         struct vn_image **out_img)
{
   struct vn_image *img = NULL;
   VkResult result = VK_SUCCESS;

   img = vk_zalloc(alloc, sizeof(*img), VN_DEFAULT_ALIGN,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!img)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   vn_object_base_init(&img->base, VK_OBJECT_TYPE_IMAGE, &dev->base);

   result = vn_image_store_deferred_create_info(create_info, alloc,
                                                &img->deferred_info);
   if (result != VK_SUCCESS) {
      vn_object_base_fini(&img->base);
      vk_free(alloc, img);
      return result;
   }

   *out_img = img;

   return VK_SUCCESS;
}

/* image commands */

VkResult
vn_CreateImage(VkDevice device,
               const VkImageCreateInfo *pCreateInfo,
               const VkAllocationCallbacks *pAllocator,
               VkImage *pImage)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;
   struct vn_image *img;
   VkResult result;

   const struct wsi_image_create_info *wsi_info =
      vn_wsi_find_wsi_image_create_info(pCreateInfo);
   const VkNativeBufferANDROID *anb_info =
      vn_android_find_native_buffer(pCreateInfo);
   const VkExternalMemoryImageCreateInfo *external_info =
      vk_find_struct_const(pCreateInfo->pNext,
                           EXTERNAL_MEMORY_IMAGE_CREATE_INFO);
   const bool ahb_info =
      external_info &&
      external_info->handleTypes ==
         VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;

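   /* pick the creation path based on the pNext chain: WSI images, Android
    * native buffers, AHB external memory, or a plain image
    */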
   if (wsi_info) {
      result = vn_wsi_create_image(dev, pCreateInfo, wsi_info, alloc, &img);
   } else if (anb_info) {
      result =
         vn_android_image_from_anb(dev, pCreateInfo, anb_info, alloc, &img);
   } else if (ahb_info) {
      result = vn_android_image_from_ahb(dev, pCreateInfo, alloc, &img);
   } else {
      result = vn_image_create(dev, pCreateInfo, alloc, &img);
   }

   if (result != VK_SUCCESS)
      return vn_error(dev->instance, result);

   *pImage = vn_image_to_handle(img);
   return VK_SUCCESS;
}

void
vn_DestroyImage(VkDevice device,
                VkImage image,
                const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_image *img = vn_image_from_handle(image);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!img)
      return;

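   /* release the image-owned memory, if any, that the driver allocated on
    * the image's behalf (e.g. for an Android native buffer import)
    */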
   if (img->private_memory != VK_NULL_HANDLE)
      vn_FreeMemory(device, img->private_memory, pAllocator);

   /* must not ask renderer to destroy uninitialized deferred image */
   if (!img->deferred_info || img->deferred_info->initialized)
      vn_async_vkDestroyImage(dev->instance, device, image, NULL);

   if (img->deferred_info)
      vk_free(alloc, img->deferred_info);

   vn_object_base_fini(&img->base);
   vk_free(alloc, img);
}

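/* served from the requirements cached in vn_image at creation time */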
void
vn_GetImageMemoryRequirements(VkDevice device,
                              VkImage image,
                              VkMemoryRequirements *pMemoryRequirements)
{
   const struct vn_image *img = vn_image_from_handle(image);

   *pMemoryRequirements = img->memory_requirements[0].memoryRequirements;
}

void
vn_GetImageSparseMemoryRequirements(
   VkDevice device,
   VkImage image,
   uint32_t *pSparseMemoryRequirementCount,
   VkSparseImageMemoryRequirements *pSparseMemoryRequirements)
{
   struct vn_device *dev = vn_device_from_handle(device);

   /* TODO per-device cache */
   vn_call_vkGetImageSparseMemoryRequirements(dev->instance, device, image,
                                              pSparseMemoryRequirementCount,
                                              pSparseMemoryRequirements);
}

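/* Fill the caller's output pNext chain from the cached requirements: the
 * plane is selected via a chained VkImagePlaneMemoryRequirementsInfo, and
 * both VkMemoryRequirements2 and VkMemoryDedicatedRequirements in the
 * output chain are filled.
 */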
void
vn_GetImageMemoryRequirements2(VkDevice device,
                               const VkImageMemoryRequirementsInfo2 *pInfo,
                               VkMemoryRequirements2 *pMemoryRequirements)
{
   const struct vn_image *img = vn_image_from_handle(pInfo->image);
   union {
      VkBaseOutStructure *pnext;
      VkMemoryRequirements2 *two;
      VkMemoryDedicatedRequirements *dedicated;
   } u = { .two = pMemoryRequirements };

   uint32_t plane = 0;
   const VkImagePlaneMemoryRequirementsInfo *plane_info =
      vk_find_struct_const(pInfo->pNext,
                           IMAGE_PLANE_MEMORY_REQUIREMENTS_INFO);
   if (plane_info) {
      switch (plane_info->planeAspect) {
      case VK_IMAGE_ASPECT_PLANE_1_BIT:
         plane = 1;
         break;
      case VK_IMAGE_ASPECT_PLANE_2_BIT:
         plane = 2;
         break;
      default:
         plane = 0;
         break;
      }
   }

   while (u.pnext) {
      switch (u.pnext->sType) {
      case VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2:
         u.two->memoryRequirements =
            img->memory_requirements[plane].memoryRequirements;
         break;
      case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS:
         u.dedicated->prefersDedicatedAllocation =
            img->dedicated_requirements[plane].prefersDedicatedAllocation;
         u.dedicated->requiresDedicatedAllocation =
            img->dedicated_requirements[plane].requiresDedicatedAllocation;
         break;
      default:
         break;
      }
      u.pnext = u.pnext->pNext;
   }
}

void
vn_GetImageSparseMemoryRequirements2(
   VkDevice device,
   const VkImageSparseMemoryRequirementsInfo2 *pInfo,
   uint32_t *pSparseMemoryRequirementCount,
   VkSparseImageMemoryRequirements2 *pSparseMemoryRequirements)
{
   struct vn_device *dev = vn_device_from_handle(device);

   /* TODO per-device cache */
   vn_call_vkGetImageSparseMemoryRequirements2(dev->instance, device, pInfo,
                                               pSparseMemoryRequirementCount,
                                               pSparseMemoryRequirements);
}

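/* If the VkDeviceMemory is suballocated from a larger base allocation,
 * rewrite the bind to use the base memory handle and offset before
 * forwarding it to the renderer asynchronously.
 */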
VkResult
vn_BindImageMemory(VkDevice device,
                   VkImage image,
                   VkDeviceMemory memory,
                   VkDeviceSize memoryOffset)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_device_memory *mem = vn_device_memory_from_handle(memory);

   if (mem->base_memory) {
      memory = vn_device_memory_to_handle(mem->base_memory);
      memoryOffset += mem->base_offset;
   }

   vn_async_vkBindImageMemory(dev->instance, device, image, memory,
                              memoryOffset);

   return VK_SUCCESS;
}

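/* Same suballocation fixup as vn_BindImageMemory, but the input array is
 * const, so the bind infos are copied into a temporary array on the first
 * rewrite.
 */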
VkResult
vn_BindImageMemory2(VkDevice device,
                    uint32_t bindInfoCount,
                    const VkBindImageMemoryInfo *pBindInfos)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc = &dev->base.base.alloc;

   VkBindImageMemoryInfo *local_infos = NULL;
   for (uint32_t i = 0; i < bindInfoCount; i++) {
      const VkBindImageMemoryInfo *info = &pBindInfos[i];
      struct vn_device_memory *mem =
         vn_device_memory_from_handle(info->memory);
      /* TODO handle VkBindImageMemorySwapchainInfoKHR */
      if (!mem || !mem->base_memory)
         continue;

      if (!local_infos) {
         const size_t size = sizeof(*local_infos) * bindInfoCount;
         local_infos = vk_alloc(alloc, size, VN_DEFAULT_ALIGN,
                                VK_SYSTEM_ALLOCATION_SCOPE_COMMAND);
         if (!local_infos)
            return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

         memcpy(local_infos, pBindInfos, size);
      }

      local_infos[i].memory = vn_device_memory_to_handle(mem->base_memory);
      local_infos[i].memoryOffset += mem->base_offset;
   }
   if (local_infos)
      pBindInfos = local_infos;

   vn_async_vkBindImageMemory2(dev->instance, device, bindInfoCount,
                               pBindInfos);

   vk_free(alloc, local_infos);

   return VK_SUCCESS;
}

VkResult
vn_GetImageDrmFormatModifierPropertiesEXT(
   VkDevice device,
   VkImage image,
   VkImageDrmFormatModifierPropertiesEXT *pProperties)
{
   struct vn_device *dev = vn_device_from_handle(device);

   /* TODO local cache */
   return vn_call_vkGetImageDrmFormatModifierPropertiesEXT(
      dev->instance, device, image, pProperties);
}

void
vn_GetImageSubresourceLayout(VkDevice device,
                             VkImage image,
                             const VkImageSubresource *pSubresource,
                             VkSubresourceLayout *pLayout)
{
   struct vn_device *dev = vn_device_from_handle(device);

   /* TODO local cache */
   vn_call_vkGetImageSubresourceLayout(dev->instance, device, image,
                                       pSubresource, pLayout);
}

/* image view commands */

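/* Image views, samplers, and ycbcr conversions follow the same pattern: the
 * driver-side object and its handle are allocated locally, the create
 * command is sent to the renderer asynchronously, and success is assumed
 * without waiting for a reply.
 */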
VkResult
vn_CreateImageView(VkDevice device,
                   const VkImageViewCreateInfo *pCreateInfo,
                   const VkAllocationCallbacks *pAllocator,
                   VkImageView *pView)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_image_view *view =
      vk_zalloc(alloc, sizeof(*view), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!view)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&view->base, VK_OBJECT_TYPE_IMAGE_VIEW, &dev->base);
   view->image = vn_image_from_handle(pCreateInfo->image);

   VkImageView view_handle = vn_image_view_to_handle(view);
   vn_async_vkCreateImageView(dev->instance, device, pCreateInfo, NULL,
                              &view_handle);

   *pView = view_handle;

   return VK_SUCCESS;
}

void
vn_DestroyImageView(VkDevice device,
                    VkImageView imageView,
                    const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_image_view *view = vn_image_view_from_handle(imageView);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!view)
      return;

   vn_async_vkDestroyImageView(dev->instance, device, imageView, NULL);

   vn_object_base_fini(&view->base);
   vk_free(alloc, view);
}

/* sampler commands */

VkResult
vn_CreateSampler(VkDevice device,
                 const VkSamplerCreateInfo *pCreateInfo,
                 const VkAllocationCallbacks *pAllocator,
                 VkSampler *pSampler)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   struct vn_sampler *sampler =
      vk_zalloc(alloc, sizeof(*sampler), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!sampler)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&sampler->base, VK_OBJECT_TYPE_SAMPLER, &dev->base);

   VkSampler sampler_handle = vn_sampler_to_handle(sampler);
   vn_async_vkCreateSampler(dev->instance, device, pCreateInfo, NULL,
                            &sampler_handle);

   *pSampler = sampler_handle;

   return VK_SUCCESS;
}

void
vn_DestroySampler(VkDevice device,
                  VkSampler _sampler,
                  const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_sampler *sampler = vn_sampler_from_handle(_sampler);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!sampler)
      return;

   vn_async_vkDestroySampler(dev->instance, device, _sampler, NULL);

   vn_object_base_fini(&sampler->base);
   vk_free(alloc, sampler);
}

/* sampler YCbCr conversion commands */

VkResult
vn_CreateSamplerYcbcrConversion(
   VkDevice device,
   const VkSamplerYcbcrConversionCreateInfo *pCreateInfo,
   const VkAllocationCallbacks *pAllocator,
   VkSamplerYcbcrConversion *pYcbcrConversion)
{
   struct vn_device *dev = vn_device_from_handle(device);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;
   const VkExternalFormatANDROID *ext_info =
      vk_find_struct_const(pCreateInfo->pNext, EXTERNAL_FORMAT_ANDROID);

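   /* translate the Android external format to a concrete VkFormat and reset
    * the component swizzles to identity before forwarding the create info
    * to the renderer
    */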
   VkSamplerYcbcrConversionCreateInfo local_info;
   if (ext_info && ext_info->externalFormat) {
      assert(pCreateInfo->format == VK_FORMAT_UNDEFINED);

      local_info = *pCreateInfo;
      local_info.format =
         vn_android_drm_format_to_vk_format(ext_info->externalFormat);
      local_info.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
      local_info.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
      local_info.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
      local_info.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
      pCreateInfo = &local_info;

      assert(pCreateInfo->format != VK_FORMAT_UNDEFINED);
   }

   struct vn_sampler_ycbcr_conversion *conv =
      vk_zalloc(alloc, sizeof(*conv), VN_DEFAULT_ALIGN,
                VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (!conv)
      return vn_error(dev->instance, VK_ERROR_OUT_OF_HOST_MEMORY);

   vn_object_base_init(&conv->base, VK_OBJECT_TYPE_SAMPLER_YCBCR_CONVERSION,
                       &dev->base);

   VkSamplerYcbcrConversion conv_handle =
      vn_sampler_ycbcr_conversion_to_handle(conv);
   vn_async_vkCreateSamplerYcbcrConversion(dev->instance, device, pCreateInfo,
                                           NULL, &conv_handle);

   *pYcbcrConversion = conv_handle;

   return VK_SUCCESS;
}

void
vn_DestroySamplerYcbcrConversion(VkDevice device,
                                 VkSamplerYcbcrConversion ycbcrConversion,
                                 const VkAllocationCallbacks *pAllocator)
{
   struct vn_device *dev = vn_device_from_handle(device);
   struct vn_sampler_ycbcr_conversion *conv =
      vn_sampler_ycbcr_conversion_from_handle(ycbcrConversion);
   const VkAllocationCallbacks *alloc =
      pAllocator ? pAllocator : &dev->base.base.alloc;

   if (!conv)
      return;

   vn_async_vkDestroySamplerYcbcrConversion(dev->instance, device,
                                            ycbcrConversion, NULL);

   vn_object_base_fini(&conv->base);
   vk_free(alloc, conv);
}