lvp_device.c revision 7ec681f3
1/*
2 * Copyright © 2019 Red Hat.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice (including the next
12 * paragraph) shall be included in all copies or substantial portions of the
13 * Software.
14 *
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
20 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
21 * IN THE SOFTWARE.
22 */
23
24#include "lvp_private.h"
25
26#include "pipe-loader/pipe_loader.h"
27#include "git_sha1.h"
28#include "vk_util.h"
29#include "pipe/p_config.h"
30#include "pipe/p_defines.h"
31#include "pipe/p_state.h"
32#include "pipe/p_context.h"
33#include "frontend/drisw_api.h"
34
35#include "util/u_inlines.h"
36#include "util/os_memory.h"
37#include "util/u_thread.h"
38#include "util/u_atomic.h"
39#include "util/timespec.h"
40#include "os_time.h"
41
42#if defined(VK_USE_PLATFORM_WAYLAND_KHR) || \
43    defined(VK_USE_PLATFORM_WIN32_KHR) || \
44    defined(VK_USE_PLATFORM_XCB_KHR) || \
45    defined(VK_USE_PLATFORM_XLIB_KHR)
46#define LVP_USE_WSI_PLATFORM
47#endif
48#define LVP_API_VERSION VK_MAKE_VERSION(1, 2, VK_HEADER_VERSION)
49
/* vkEnumerateInstanceVersion: advertise the highest instance-level Vulkan
 * version this driver supports (LVP_API_VERSION, i.e. Vulkan 1.2 with the
 * current header patch level).  Always succeeds. */
VKAPI_ATTR VkResult VKAPI_CALL lvp_EnumerateInstanceVersion(uint32_t* pApiVersion)
{
   *pApiVersion = LVP_API_VERSION;
   return VK_SUCCESS;
}
55
/* Instance-level extensions exposed by lavapipe.  The base set is
 * unconditional; WSI surface extensions are only advertised when the
 * build enables a matching window-system platform (see the
 * LVP_USE_WSI_PLATFORM / VK_USE_PLATFORM_* guards). */
static const struct vk_instance_extension_table lvp_instance_extensions_supported = {
   .KHR_device_group_creation                = true,
   .KHR_external_fence_capabilities          = true,
   .KHR_external_memory_capabilities         = true,
   .KHR_external_semaphore_capabilities      = true,
   .KHR_get_physical_device_properties2      = true,
   .EXT_debug_report                         = true,
#ifdef LVP_USE_WSI_PLATFORM
   .KHR_get_surface_capabilities2            = true,
   .KHR_surface                              = true,
   .KHR_surface_protected_capabilities       = true,
#endif
#ifdef VK_USE_PLATFORM_WAYLAND_KHR
   .KHR_wayland_surface                      = true,
#endif
#ifdef VK_USE_PLATFORM_WIN32_KHR
   .KHR_win32_surface                        = true,
#endif
#ifdef VK_USE_PLATFORM_XCB_KHR
   .KHR_xcb_surface                          = true,
#endif
#ifdef VK_USE_PLATFORM_XLIB_KHR
   .KHR_xlib_surface                         = true,
#endif
};
81
/* Device-level extensions exposed by lavapipe.  Installed into each
 * physical device in lvp_physical_device_init().  Entries under
 * preprocessor guards depend on build configuration (FD-based external
 * memory, WSI swapchain support). */
static const struct vk_device_extension_table lvp_device_extensions_supported = {
   .KHR_8bit_storage                      = true,
   .KHR_16bit_storage                     = true,
   .KHR_bind_memory2                      = true,
   .KHR_buffer_device_address             = true,
   .KHR_create_renderpass2                = true,
   .KHR_copy_commands2                    = true,
   .KHR_dedicated_allocation              = true,
   .KHR_depth_stencil_resolve             = true,
   .KHR_descriptor_update_template        = true,
   .KHR_device_group                      = true,
   .KHR_draw_indirect_count               = true,
   .KHR_driver_properties                 = true,
   .KHR_external_fence                    = true,
   .KHR_external_memory                   = true,
#ifdef PIPE_MEMORY_FD
   .KHR_external_memory_fd                = true,
#endif
   .KHR_external_semaphore                = true,
   .KHR_shader_float_controls             = true,
   .KHR_get_memory_requirements2          = true,
#ifdef LVP_USE_WSI_PLATFORM
   .KHR_incremental_present               = true,
#endif
   .KHR_image_format_list                 = true,
   .KHR_imageless_framebuffer             = true,
   .KHR_maintenance1                      = true,
   .KHR_maintenance2                      = true,
   .KHR_maintenance3                      = true,
   .KHR_multiview                         = true,
   .KHR_push_descriptor                   = true,
   .KHR_relaxed_block_layout              = true,
   .KHR_sampler_mirror_clamp_to_edge      = true,
   .KHR_separate_depth_stencil_layouts    = true,
   .KHR_shader_atomic_int64               = true,
   .KHR_shader_draw_parameters            = true,
   .KHR_shader_float16_int8               = true,
   .KHR_shader_subgroup_extended_types    = true,
   .KHR_spirv_1_4                         = true,
   .KHR_storage_buffer_storage_class      = true,
#ifdef LVP_USE_WSI_PLATFORM
   .KHR_swapchain                         = true,
#endif
   .KHR_timeline_semaphore                = true,
   .KHR_uniform_buffer_standard_layout    = true,
   .KHR_variable_pointers                 = true,
   .EXT_4444_formats                      = true,
   .EXT_calibrated_timestamps             = true,
   .EXT_color_write_enable                = true,
   .EXT_conditional_rendering             = true,
   .EXT_depth_clip_enable                 = true,
   .EXT_extended_dynamic_state            = true,
   .EXT_extended_dynamic_state2           = true,
   .EXT_external_memory_host              = true,
   .EXT_host_query_reset                  = true,
   .EXT_index_type_uint8                  = true,
   .EXT_multi_draw                        = true,
   .EXT_post_depth_coverage               = true,
   .EXT_private_data                      = true,
   .EXT_primitive_topology_list_restart   = true,
   .EXT_sampler_filter_minmax             = true,
   .EXT_scalar_block_layout               = true,
   .EXT_separate_stencil_usage            = true,
   .EXT_shader_stencil_export             = true,
   .EXT_shader_viewport_index_layer       = true,
   .EXT_transform_feedback                = true,
   .EXT_vertex_attribute_divisor          = true,
   .EXT_vertex_input_dynamic_state        = true,
   .EXT_custom_border_color               = true,
   .EXT_provoking_vertex                  = true,
   .EXT_line_rasterization                = true,
   .GOOGLE_decorate_string                = true,
   .GOOGLE_hlsl_functionality1            = true,
};
156
157static VkResult VKAPI_CALL
158lvp_physical_device_init(struct lvp_physical_device *device,
159                         struct lvp_instance *instance,
160                         struct pipe_loader_device *pld)
161{
162   VkResult result;
163
164   struct vk_physical_device_dispatch_table dispatch_table;
165   vk_physical_device_dispatch_table_from_entrypoints(
166      &dispatch_table, &lvp_physical_device_entrypoints, true);
167   vk_physical_device_dispatch_table_from_entrypoints(
168      &dispatch_table, &wsi_physical_device_entrypoints, false);
169   result = vk_physical_device_init(&device->vk, &instance->vk,
170                                    NULL, &dispatch_table);
171   if (result != VK_SUCCESS) {
172      vk_error(instance, result);
173      goto fail;
174   }
175   device->pld = pld;
176
177   device->pscreen = pipe_loader_create_screen_vk(device->pld, true);
178   if (!device->pscreen)
179      return vk_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
180
181   device->max_images = device->pscreen->get_shader_param(device->pscreen, PIPE_SHADER_FRAGMENT, PIPE_SHADER_CAP_MAX_SHADER_IMAGES);
182   device->vk.supported_extensions = lvp_device_extensions_supported;
183   result = lvp_init_wsi(device);
184   if (result != VK_SUCCESS) {
185      vk_physical_device_finish(&device->vk);
186      vk_error(instance, result);
187      goto fail;
188   }
189
190   return VK_SUCCESS;
191 fail:
192   return result;
193}
194
/* Tear down a physical device in reverse order of initialization:
 * WSI state first, then the gallium screen it owns, and finally the
 * common vk_physical_device base. */
static void VKAPI_CALL
lvp_physical_device_finish(struct lvp_physical_device *device)
{
   lvp_finish_wsi(device);
   device->pscreen->destroy(device->pscreen);
   vk_physical_device_finish(&device->vk);
}
202
203VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateInstance(
204   const VkInstanceCreateInfo*                 pCreateInfo,
205   const VkAllocationCallbacks*                pAllocator,
206   VkInstance*                                 pInstance)
207{
208   struct lvp_instance *instance;
209   VkResult result;
210
211   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO);
212
213   if (pAllocator == NULL)
214      pAllocator = vk_default_allocator();
215
216   instance = vk_zalloc(pAllocator, sizeof(*instance), 8,
217                        VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE);
218   if (!instance)
219      return vk_error(NULL, VK_ERROR_OUT_OF_HOST_MEMORY);
220
221   struct vk_instance_dispatch_table dispatch_table;
222   vk_instance_dispatch_table_from_entrypoints(
223      &dispatch_table, &lvp_instance_entrypoints, true);
224   vk_instance_dispatch_table_from_entrypoints(
225      &dispatch_table, &wsi_instance_entrypoints, false);
226
227   result = vk_instance_init(&instance->vk,
228                             &lvp_instance_extensions_supported,
229                             &dispatch_table,
230                             pCreateInfo,
231                             pAllocator);
232   if (result != VK_SUCCESS) {
233      vk_free(pAllocator, instance);
234      return vk_error(instance, result);
235   }
236
237   instance->apiVersion = LVP_API_VERSION;
238   instance->physicalDeviceCount = -1;
239
240   //   _mesa_locale_init();
241   //   VG(VALGRIND_CREATE_MEMPOOL(instance, 0, false));
242
243   *pInstance = lvp_instance_to_handle(instance);
244
245   return VK_SUCCESS;
246}
247
/* vkDestroyInstance: release everything lvp_CreateInstance and the lazy
 * physical-device enumeration acquired.  Accepts VK_NULL_HANDLE as a
 * no-op, per the Vulkan spec. */
VKAPI_ATTR void VKAPI_CALL lvp_DestroyInstance(
   VkInstance                                  _instance,
   const VkAllocationCallbacks*                pAllocator)
{
   LVP_FROM_HANDLE(lvp_instance, instance, _instance);

   if (!instance)
      return;
   /* physicalDeviceCount is -1 until enumeration has run and 0 if it
    * found no compatible device; only finish a device that exists. */
   if (instance->physicalDeviceCount > 0)
      lvp_physical_device_finish(&instance->physicalDevice);
   //   _mesa_locale_fini();

   pipe_loader_release(&instance->devs, instance->num_devices);

   vk_instance_finish(&instance->vk);
   vk_free(&instance->vk.alloc, instance);
}
265
266#if defined(HAVE_PIPE_LOADER_DRI)
/* drisw loader get_image callback: intentionally an empty stub —
 * lavapipe only needs the hook to exist for the sw/dri winsys probe. */
static void lvp_get_image(struct dri_drawable *dri_drawable,
                          int x, int y, unsigned width, unsigned height, unsigned stride,
                          void *data)
{

}
273
/* drisw loader put_image callback: debug stub that just logs the
 * request dimensions to stderr. */
static void lvp_put_image(struct dri_drawable *dri_drawable,
                          void *data, unsigned width, unsigned height)
{
   /* width/height are unsigned, so use %u — printing unsigned values
    * through %d is a format/argument mismatch (undefined behavior). */
   fprintf(stderr, "put image %ux%u\n", width, height);
}
279
/* drisw loader put_image2 callback: debug stub that just logs the
 * request offset and dimensions to stderr. */
static void lvp_put_image2(struct dri_drawable *dri_drawable,
                           void *data, int x, int y, unsigned width, unsigned height,
                           unsigned stride)
{
   /* x/y are int (%d); width/height are unsigned and need %u to keep
    * the format specifiers matched to the argument types. */
   fprintf(stderr, "put image 2 %d,%d %ux%u\n", x, y, width, height);
}
286
/* Loader callbacks handed to pipe_loader_sw_probe_dri(); lavapipe only
 * needs the stub implementations above. */
static struct drisw_loader_funcs lvp_sw_lf = {
   .get_image = lvp_get_image,
   .put_image = lvp_put_image,
   .put_image2 = lvp_put_image2,
};
292#endif
293
/* Lazily probe and initialize the single software physical device.
 * instance->physicalDeviceCount acts as the state flag: -1 means not
 * yet enumerated, 0 means the device was incompatible, 1 means it is
 * initialized and usable. */
static VkResult
lvp_enumerate_physical_devices(struct lvp_instance *instance)
{
   VkResult result;

   /* Already enumerated (with whatever outcome) — nothing to do. */
   if (instance->physicalDeviceCount != -1)
      return VK_SUCCESS;

   /* sw only for now */
   instance->num_devices = pipe_loader_sw_probe(NULL, 0);

   assert(instance->num_devices == 1);

#if defined(HAVE_PIPE_LOADER_DRI)
   pipe_loader_sw_probe_dri(&instance->devs, &lvp_sw_lf);
#else
   pipe_loader_sw_probe_null(&instance->devs);
#endif

   result = lvp_physical_device_init(&instance->physicalDevice,
                                     instance, &instance->devs[0]);
   if (result == VK_ERROR_INCOMPATIBLE_DRIVER) {
      instance->physicalDeviceCount = 0;
   } else if (result == VK_SUCCESS) {
      instance->physicalDeviceCount = 1;
   }
   /* Any other error leaves the count at -1, so a later call retries. */

   return result;
}
323
324VKAPI_ATTR VkResult VKAPI_CALL lvp_EnumeratePhysicalDevices(
325   VkInstance                                  _instance,
326   uint32_t*                                   pPhysicalDeviceCount,
327   VkPhysicalDevice*                           pPhysicalDevices)
328{
329   LVP_FROM_HANDLE(lvp_instance, instance, _instance);
330   VkResult result;
331
332   result = lvp_enumerate_physical_devices(instance);
333   if (result != VK_SUCCESS)
334      return result;
335
336   if (!pPhysicalDevices) {
337      *pPhysicalDeviceCount = instance->physicalDeviceCount;
338   } else if (*pPhysicalDeviceCount >= 1) {
339      pPhysicalDevices[0] = lvp_physical_device_to_handle(&instance->physicalDevice);
340      *pPhysicalDeviceCount = 1;
341   } else {
342      *pPhysicalDeviceCount = 0;
343   }
344
345   return VK_SUCCESS;
346}
347
/* vkEnumeratePhysicalDeviceGroups: lavapipe exposes exactly one group
 * containing its single physical device.  The vk_outarray machinery
 * handles both the count query and the fill, returning VK_INCOMPLETE
 * when the caller's array is too small. */
VKAPI_ATTR VkResult VKAPI_CALL lvp_EnumeratePhysicalDeviceGroups(
   VkInstance                                 _instance,
   uint32_t*                                   pPhysicalDeviceGroupCount,
   VkPhysicalDeviceGroupProperties*            pPhysicalDeviceGroupProperties)
{
   LVP_FROM_HANDLE(lvp_instance, instance, _instance);
   VK_OUTARRAY_MAKE_TYPED(VkPhysicalDeviceGroupProperties, out,
                          pPhysicalDeviceGroupProperties,
                          pPhysicalDeviceGroupCount);

   VkResult result = lvp_enumerate_physical_devices(instance);
   if (result != VK_SUCCESS)
      return result;

   vk_outarray_append_typed(VkPhysicalDeviceGroupProperties, &out, p) {
      p->physicalDeviceCount = 1;
      memset(p->physicalDevices, 0, sizeof(p->physicalDevices));
      p->physicalDevices[0] = lvp_physical_device_to_handle(&instance->physicalDevice);
      p->subsetAllocation = false;
   }

   return vk_outarray_status(&out);
}
371
372static int
373min_vertex_pipeline_param(struct pipe_screen *pscreen, enum pipe_shader_cap param)
374{
375   int val = INT_MAX;
376   for (int i = 0; i < PIPE_SHADER_COMPUTE; ++i) {
377      if (i == PIPE_SHADER_FRAGMENT ||
378          !pscreen->get_shader_param(pscreen, i,
379                                     PIPE_SHADER_CAP_MAX_INSTRUCTIONS))
380         continue;
381
382      val = MAX2(val, pscreen->get_shader_param(pscreen, i, param));
383   }
384   return val;
385}
386
/* Return the smallest value of @param across all relevant shader
 * stages: the vertex pipeline (via min_vertex_pipeline_param), the
 * fragment stage, and the compute stage. */
static int
min_shader_param(struct pipe_screen *pscreen, enum pipe_shader_cap param)
{
   return MIN3(min_vertex_pipeline_param(pscreen, param),
               pscreen->get_shader_param(pscreen, PIPE_SHADER_FRAGMENT, param),
               pscreen->get_shader_param(pscreen, PIPE_SHADER_COMPUTE, param));
}
394
/* vkGetPhysicalDeviceFeatures: report the core Vulkan 1.0 feature set.
 * Features that depend on the software rasterizer's capabilities are
 * derived from gallium screen caps / shader caps; the rest are fixed
 * true/false for this driver. */
VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceFeatures(
   VkPhysicalDevice                            physicalDevice,
   VkPhysicalDeviceFeatures*                   pFeatures)
{
   LVP_FROM_HANDLE(lvp_physical_device, pdevice, physicalDevice);
   /* Dynamic indexing of sampled/storage image arrays is currently
    * forced off; the commented-out query shows the intended cap check. */
   bool indirect = false;//pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_GLSL_FEATURE_LEVEL) >= 400;
   /* Zero first so any padding in the output struct is cleared too. */
   memset(pFeatures, 0, sizeof(*pFeatures));
   *pFeatures = (VkPhysicalDeviceFeatures) {
      .robustBufferAccess                       = true,
      .fullDrawIndexUint32                      = true,
      .imageCubeArray                           = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_CUBE_MAP_ARRAY) != 0),
      .independentBlend                         = true,
      .geometryShader                           = (pdevice->pscreen->get_shader_param(pdevice->pscreen, PIPE_SHADER_GEOMETRY, PIPE_SHADER_CAP_MAX_INSTRUCTIONS) != 0),
      .tessellationShader                       = (pdevice->pscreen->get_shader_param(pdevice->pscreen, PIPE_SHADER_TESS_EVAL, PIPE_SHADER_CAP_MAX_INSTRUCTIONS) != 0),
      .sampleRateShading                        = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_SAMPLE_SHADING) != 0),
      .dualSrcBlend                             = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_DUAL_SOURCE_RENDER_TARGETS) != 0),
      .logicOp                                  = true,
      .multiDrawIndirect                        = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MULTI_DRAW_INDIRECT) != 0),
      .drawIndirectFirstInstance                = true,
      .depthClamp                               = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_DEPTH_CLIP_DISABLE) != 0),
      .depthBiasClamp                           = true,
      .fillModeNonSolid                         = true,
      .depthBounds                              = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_DEPTH_BOUNDS_TEST) != 0),
      .wideLines                                = true,
      .largePoints                              = true,
      .alphaToOne                               = true,
      .multiViewport                            = true,
      .samplerAnisotropy                        = true,
      .textureCompressionETC2                   = false,
      .textureCompressionASTC_LDR               = false,
      .textureCompressionBC                     = true,
      .occlusionQueryPrecise                    = true,
      .pipelineStatisticsQuery                  = true,
      .vertexPipelineStoresAndAtomics           = (min_vertex_pipeline_param(pdevice->pscreen, PIPE_SHADER_CAP_MAX_SHADER_BUFFERS) != 0),
      .fragmentStoresAndAtomics                 = (pdevice->pscreen->get_shader_param(pdevice->pscreen, PIPE_SHADER_FRAGMENT, PIPE_SHADER_CAP_MAX_SHADER_BUFFERS) != 0),
      .shaderTessellationAndGeometryPointSize   = true,
      .shaderImageGatherExtended                = true,
      .shaderStorageImageExtendedFormats        = (min_shader_param(pdevice->pscreen, PIPE_SHADER_CAP_MAX_SHADER_IMAGES) != 0),
      .shaderStorageImageMultisample            = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_TEXTURE_MULTISAMPLE) != 0),
      .shaderUniformBufferArrayDynamicIndexing  = true,
      .shaderSampledImageArrayDynamicIndexing   = indirect,
      .shaderStorageBufferArrayDynamicIndexing  = true,
      .shaderStorageImageArrayDynamicIndexing   = indirect,
      .shaderStorageImageReadWithoutFormat      = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_IMAGE_LOAD_FORMATTED) != 0),
      .shaderStorageImageWriteWithoutFormat     = (min_shader_param(pdevice->pscreen, PIPE_SHADER_CAP_MAX_SHADER_IMAGES) != 0),
      .shaderClipDistance                       = true,
      .shaderCullDistance                       = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_CULL_DISTANCE) == 1),
      .shaderFloat64                            = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_DOUBLES) == 1),
      .shaderInt64                              = (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_INT64) == 1),
      .shaderInt16                              = (min_shader_param(pdevice->pscreen, PIPE_SHADER_CAP_INT16) == 1),
      .variableMultisampleRate                  = false,
      .inheritedQueries                         = false,
   };
}
449
/* Fill the Vulkan 1.1 promoted-feature struct.  Shared by
 * lvp_GetPhysicalDeviceFeatures2, which routes both the
 * VkPhysicalDeviceVulkan11Features struct and the individual
 * promoted extension structs through these values. */
static void
lvp_get_physical_device_features_1_1(struct lvp_physical_device *pdevice,
                                     VkPhysicalDeviceVulkan11Features *f)
{
   assert(f->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES);

   f->storageBuffer16BitAccess            = true;
   f->uniformAndStorageBuffer16BitAccess  = true;
   f->storagePushConstant16               = true;
   f->storageInputOutput16                = false;
   f->multiview                           = true;
   f->multiviewGeometryShader             = true;
   f->multiviewTessellationShader         = true;
   f->variablePointersStorageBuffer       = true;
   f->variablePointers                    = false;
   f->protectedMemory                     = false;
   f->samplerYcbcrConversion              = false;
   f->shaderDrawParameters                = true;
}
469
/* Fill the Vulkan 1.2 promoted-feature struct.  Descriptor indexing is
 * entirely unsupported (the whole block of those features is false);
 * shaderFloat16 is the only value derived from a gallium shader cap. */
static void
lvp_get_physical_device_features_1_2(struct lvp_physical_device *pdevice,
                                     VkPhysicalDeviceVulkan12Features *f)
{
   assert(f->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES);

   f->samplerMirrorClampToEdge = true;
   f->drawIndirectCount = true;
   f->storageBuffer8BitAccess = true;
   f->uniformAndStorageBuffer8BitAccess = true;
   f->storagePushConstant8 = true;
   f->shaderBufferInt64Atomics = true;
   f->shaderSharedInt64Atomics = true;
   f->shaderFloat16 = pdevice->pscreen->get_shader_param(pdevice->pscreen, PIPE_SHADER_FRAGMENT, PIPE_SHADER_CAP_FP16) != 0;
   f->shaderInt8 = true;

   /* Descriptor indexing: not supported. */
   f->descriptorIndexing = false;
   f->shaderInputAttachmentArrayDynamicIndexing = false;
   f->shaderUniformTexelBufferArrayDynamicIndexing = false;
   f->shaderStorageTexelBufferArrayDynamicIndexing = false;
   f->shaderUniformBufferArrayNonUniformIndexing = false;
   f->shaderSampledImageArrayNonUniformIndexing = false;
   f->shaderStorageBufferArrayNonUniformIndexing = false;
   f->shaderStorageImageArrayNonUniformIndexing = false;
   f->shaderInputAttachmentArrayNonUniformIndexing = false;
   f->shaderUniformTexelBufferArrayNonUniformIndexing = false;
   f->shaderStorageTexelBufferArrayNonUniformIndexing = false;
   f->descriptorBindingUniformBufferUpdateAfterBind = false;
   f->descriptorBindingSampledImageUpdateAfterBind = false;
   f->descriptorBindingStorageImageUpdateAfterBind = false;
   f->descriptorBindingStorageBufferUpdateAfterBind = false;
   f->descriptorBindingUniformTexelBufferUpdateAfterBind = false;
   f->descriptorBindingStorageTexelBufferUpdateAfterBind = false;
   f->descriptorBindingUpdateUnusedWhilePending = false;
   f->descriptorBindingPartiallyBound = false;
   f->descriptorBindingVariableDescriptorCount = false;
   f->runtimeDescriptorArray = false;

   f->samplerFilterMinmax = true;
   f->scalarBlockLayout = true;
   f->imagelessFramebuffer = true;
   f->uniformBufferStandardLayout = true;
   f->shaderSubgroupExtendedTypes = true;
   f->separateDepthStencilLayouts = true;
   f->hostQueryReset = true;
   f->timelineSemaphore = true;
   f->bufferDeviceAddress = true;
   f->bufferDeviceAddressCaptureReplay = false;
   f->bufferDeviceAddressMultiDevice = false;
   f->vulkanMemoryModel = false;
   f->vulkanMemoryModelDeviceScope = false;
   f->vulkanMemoryModelAvailabilityVisibilityChains = false;
   f->shaderOutputViewportIndex = true;
   f->shaderOutputLayer = true;
   f->subgroupBroadcastDynamicId = true;
}
526
/* vkGetPhysicalDeviceFeatures2: fill the core 1.0 features, then walk
 * the pNext chain.  Structs covered by the Vulkan 1.1/1.2 promoted
 * features are answered from the shared core_1_1/core_1_2 helpers;
 * the remaining EXT feature structs are handled in the switch below.
 * Unrecognized structs are left untouched, per the spec. */
VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceFeatures2(
   VkPhysicalDevice                            physicalDevice,
   VkPhysicalDeviceFeatures2                  *pFeatures)
{
   LVP_FROM_HANDLE(lvp_physical_device, pdevice, physicalDevice);
   lvp_GetPhysicalDeviceFeatures(physicalDevice, &pFeatures->features);

   VkPhysicalDeviceVulkan11Features core_1_1 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES,
   };
   lvp_get_physical_device_features_1_1(pdevice, &core_1_1);

   VkPhysicalDeviceVulkan12Features core_1_2 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES,
   };
   lvp_get_physical_device_features_1_2(pdevice, &core_1_2);

   vk_foreach_struct(ext, pFeatures->pNext) {

      /* Promoted features: copied out of core_1_1/core_1_2 by the
       * common helpers, which also recognize the aggregate structs. */
      if (vk_get_physical_device_core_1_1_feature_ext(ext, &core_1_1))
         continue;
      if (vk_get_physical_device_core_1_2_feature_ext(ext, &core_1_2))
         continue;

      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIVATE_DATA_FEATURES_EXT: {
         VkPhysicalDevicePrivateDataFeaturesEXT *features =
            (VkPhysicalDevicePrivateDataFeaturesEXT *)ext;
         features->privateData = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_FEATURES_EXT: {
         VkPhysicalDeviceLineRasterizationFeaturesEXT *features =
            (VkPhysicalDeviceLineRasterizationFeaturesEXT *)ext;
         features->rectangularLines = true;
         features->bresenhamLines = true;
         features->smoothLines = true;
         features->stippledRectangularLines = true;
         features->stippledBresenhamLines = true;
         features->stippledSmoothLines = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_FEATURES_EXT: {
         VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *features =
            (VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT *)ext;
         features->vertexAttributeInstanceRateZeroDivisor = false;
         /* Divisor support depends on the gallium instance-divisor cap. */
         if (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_VERTEX_ELEMENT_INSTANCE_DIVISOR) != 0) {
            features->vertexAttributeInstanceRateDivisor = true;
         } else {
            features->vertexAttributeInstanceRateDivisor = false;
         }
         break;
      }

      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INDEX_TYPE_UINT8_FEATURES_EXT: {
         VkPhysicalDeviceIndexTypeUint8FeaturesEXT *features =
            (VkPhysicalDeviceIndexTypeUint8FeaturesEXT *)ext;
         features->indexTypeUint8 = true;
         break;
      }

      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_INPUT_DYNAMIC_STATE_FEATURES_EXT: {
         VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *features =
            (VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT *)ext;
         features->vertexInputDynamicState = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_FEATURES_EXT: {
         VkPhysicalDeviceTransformFeedbackFeaturesEXT *features =
            (VkPhysicalDeviceTransformFeedbackFeaturesEXT*)ext;

         features->transformFeedback = true;
         features->geometryStreams = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CONDITIONAL_RENDERING_FEATURES_EXT: {
         VkPhysicalDeviceConditionalRenderingFeaturesEXT *features =
            (VkPhysicalDeviceConditionalRenderingFeaturesEXT*)ext;
         features->conditionalRendering = true;
         features->inheritedConditionalRendering = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT: {
         VkPhysicalDeviceExtendedDynamicStateFeaturesEXT *features =
            (VkPhysicalDeviceExtendedDynamicStateFeaturesEXT*)ext;
         features->extendedDynamicState = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_4444_FORMATS_FEATURES_EXT: {
         VkPhysicalDevice4444FormatsFeaturesEXT *features =
            (VkPhysicalDevice4444FormatsFeaturesEXT*)ext;
         features->formatA4R4G4B4 = true;
         features->formatA4B4G4R4 = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_FEATURES_EXT: {
         VkPhysicalDeviceCustomBorderColorFeaturesEXT *features =
            (VkPhysicalDeviceCustomBorderColorFeaturesEXT *)ext;
         features->customBorderColors = true;
         features->customBorderColorWithoutFormat = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COLOR_WRITE_ENABLE_FEATURES_EXT: {
         VkPhysicalDeviceColorWriteEnableFeaturesEXT *features =
            (VkPhysicalDeviceColorWriteEnableFeaturesEXT *)ext;
         features->colorWriteEnable = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_FEATURES_EXT: {
         VkPhysicalDeviceProvokingVertexFeaturesEXT *features =
            (VkPhysicalDeviceProvokingVertexFeaturesEXT*)ext;
         features->provokingVertexLast = true;
         features->transformFeedbackPreservesProvokingVertex = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_FEATURES_EXT: {
         VkPhysicalDeviceMultiDrawFeaturesEXT *features = (VkPhysicalDeviceMultiDrawFeaturesEXT *)ext;
         features->multiDraw = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DEPTH_CLIP_ENABLE_FEATURES_EXT: {
         VkPhysicalDeviceDepthClipEnableFeaturesEXT *features =
            (VkPhysicalDeviceDepthClipEnableFeaturesEXT *)ext;
         if (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_DEPTH_CLAMP_ENABLE) != 0)
            features->depthClipEnable = true;
         else
            features->depthClipEnable = false;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_2_FEATURES_EXT: {
         VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *features = (VkPhysicalDeviceExtendedDynamicState2FeaturesEXT *)ext;
         features->extendedDynamicState2 = true;
         features->extendedDynamicState2LogicOp = true;
         features->extendedDynamicState2PatchControlPoints = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRIMITIVE_TOPOLOGY_LIST_RESTART_FEATURES_EXT: {
         VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *features = (VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT *)ext;
         features->primitiveTopologyListRestart = true;
         features->primitiveTopologyPatchListRestart = true;
         break;
      }
      default:
         break;
      }
   }
}
674
675void
676lvp_device_get_cache_uuid(void *uuid)
677{
678   memset(uuid, 0, VK_UUID_SIZE);
679   snprintf(uuid, VK_UUID_SIZE, "val-%s", MESA_GIT_SHA1 + 4);
680}
681
682VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceProperties(VkPhysicalDevice physicalDevice,
683                                     VkPhysicalDeviceProperties *pProperties)
684{
685   LVP_FROM_HANDLE(lvp_physical_device, pdevice, physicalDevice);
686
687   VkSampleCountFlags sample_counts = VK_SAMPLE_COUNT_1_BIT | VK_SAMPLE_COUNT_4_BIT;
688
689   uint64_t grid_size[3], block_size[3];
690   uint64_t max_threads_per_block, max_local_size;
691
692   pdevice->pscreen->get_compute_param(pdevice->pscreen, PIPE_SHADER_IR_NIR,
693                                       PIPE_COMPUTE_CAP_MAX_GRID_SIZE, grid_size);
694   pdevice->pscreen->get_compute_param(pdevice->pscreen, PIPE_SHADER_IR_NIR,
695                                       PIPE_COMPUTE_CAP_MAX_BLOCK_SIZE, block_size);
696   pdevice->pscreen->get_compute_param(pdevice->pscreen, PIPE_SHADER_IR_NIR,
697                                       PIPE_COMPUTE_CAP_MAX_THREADS_PER_BLOCK,
698                                       &max_threads_per_block);
699   pdevice->pscreen->get_compute_param(pdevice->pscreen, PIPE_SHADER_IR_NIR,
700                                       PIPE_COMPUTE_CAP_MAX_LOCAL_SIZE,
701                                       &max_local_size);
702
703   VkPhysicalDeviceLimits limits = {
704      .maxImageDimension1D                      = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_TEXTURE_2D_SIZE),
705      .maxImageDimension2D                      = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_TEXTURE_2D_SIZE),
706      .maxImageDimension3D                      = (1 << pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_TEXTURE_3D_LEVELS)),
707      .maxImageDimensionCube                    = (1 << pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_TEXTURE_CUBE_LEVELS)),
708      .maxImageArrayLayers                      = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_TEXTURE_ARRAY_LAYERS),
709      .maxTexelBufferElements                   = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_TEXTURE_BUFFER_SIZE),
710      .maxUniformBufferRange                    = min_shader_param(pdevice->pscreen, PIPE_SHADER_CAP_MAX_CONST_BUFFER_SIZE),
711      .maxStorageBufferRange                    = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_SHADER_BUFFER_SIZE),
712      .maxPushConstantsSize                     = MAX_PUSH_CONSTANTS_SIZE,
713      .maxMemoryAllocationCount                 = UINT32_MAX,
714      .maxSamplerAllocationCount                = 32 * 1024,
715      .bufferImageGranularity                   = 64, /* A cache line */
716      .sparseAddressSpaceSize                   = 0,
717      .maxBoundDescriptorSets                   = MAX_SETS,
718      .maxPerStageDescriptorSamplers            = min_shader_param(pdevice->pscreen, PIPE_SHADER_CAP_MAX_TEXTURE_SAMPLERS),
719      .maxPerStageDescriptorUniformBuffers      = min_shader_param(pdevice->pscreen, PIPE_SHADER_CAP_MAX_CONST_BUFFERS) - 1,
720      .maxPerStageDescriptorStorageBuffers      = min_shader_param(pdevice->pscreen, PIPE_SHADER_CAP_MAX_SHADER_BUFFERS),
721      .maxPerStageDescriptorSampledImages       = min_shader_param(pdevice->pscreen, PIPE_SHADER_CAP_MAX_SAMPLER_VIEWS),
722      .maxPerStageDescriptorStorageImages       = min_shader_param(pdevice->pscreen, PIPE_SHADER_CAP_MAX_SHADER_IMAGES),
723      .maxPerStageDescriptorInputAttachments    = 8,
724      .maxPerStageResources                     = 128,
725      .maxDescriptorSetSamplers                 = 32 * 1024,
726      .maxDescriptorSetUniformBuffers           = 256,
727      .maxDescriptorSetUniformBuffersDynamic    = 256,
728      .maxDescriptorSetStorageBuffers           = 256,
729      .maxDescriptorSetStorageBuffersDynamic    = 256,
730      .maxDescriptorSetSampledImages            = 256,
731      .maxDescriptorSetStorageImages            = 256,
732      .maxDescriptorSetInputAttachments         = 256,
733      .maxVertexInputAttributes                 = 32,
734      .maxVertexInputBindings                   = 32,
735      .maxVertexInputAttributeOffset            = 2047,
736      .maxVertexInputBindingStride              = 2048,
737      .maxVertexOutputComponents                = 128,
738      .maxTessellationGenerationLevel           = 64,
739      .maxTessellationPatchSize                 = 32,
740      .maxTessellationControlPerVertexInputComponents = 128,
741      .maxTessellationControlPerVertexOutputComponents = 128,
742      .maxTessellationControlPerPatchOutputComponents = 128,
743      .maxTessellationControlTotalOutputComponents = 4096,
744      .maxTessellationEvaluationInputComponents = 128,
745      .maxTessellationEvaluationOutputComponents = 128,
746      .maxGeometryShaderInvocations             = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_GS_INVOCATIONS),
747      .maxGeometryInputComponents               = 64,
748      .maxGeometryOutputComponents              = 128,
749      .maxGeometryOutputVertices                = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_GEOMETRY_OUTPUT_VERTICES),
750      .maxGeometryTotalOutputComponents         = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_GEOMETRY_TOTAL_OUTPUT_COMPONENTS),
751      .maxFragmentInputComponents               = 128,
752      .maxFragmentOutputAttachments             = 8,
753      .maxFragmentDualSrcAttachments            = 2,
754      .maxFragmentCombinedOutputResources       = 8,
755      .maxComputeSharedMemorySize               = max_local_size,
756      .maxComputeWorkGroupCount                 = { grid_size[0], grid_size[1], grid_size[2] },
757      .maxComputeWorkGroupInvocations           = max_threads_per_block,
758      .maxComputeWorkGroupSize = { block_size[0], block_size[1], block_size[2] },
759      .subPixelPrecisionBits                    = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_RASTERIZER_SUBPIXEL_BITS),
760      .subTexelPrecisionBits                    = 8,
761      .mipmapPrecisionBits                      = 4,
762      .maxDrawIndexedIndexValue                 = UINT32_MAX,
763      .maxDrawIndirectCount                     = UINT32_MAX,
764      .maxSamplerLodBias                        = 16,
765      .maxSamplerAnisotropy                     = 16,
766      .maxViewports                             = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_VIEWPORTS),
767      .maxViewportDimensions                    = { (1 << 14), (1 << 14) },
768      .viewportBoundsRange                      = { -32768.0, 32768.0 },
769      .viewportSubPixelBits                     = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_VIEWPORT_SUBPIXEL_BITS),
770      .minMemoryMapAlignment                    = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MIN_MAP_BUFFER_ALIGNMENT),
771      .minTexelBufferOffsetAlignment            = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_TEXTURE_BUFFER_OFFSET_ALIGNMENT),
772      .minUniformBufferOffsetAlignment          = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_CONSTANT_BUFFER_OFFSET_ALIGNMENT),
773      .minStorageBufferOffsetAlignment          = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_SHADER_BUFFER_OFFSET_ALIGNMENT),
774      .minTexelOffset                           = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MIN_TEXEL_OFFSET),
775      .maxTexelOffset                           = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_TEXEL_OFFSET),
776      .minTexelGatherOffset                     = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MIN_TEXTURE_GATHER_OFFSET),
777      .maxTexelGatherOffset                     = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_TEXTURE_GATHER_OFFSET),
778      .minInterpolationOffset                   = -2, /* FIXME */
779      .maxInterpolationOffset                   = 2, /* FIXME */
780      .subPixelInterpolationOffsetBits          = 8, /* FIXME */
781      .maxFramebufferWidth                      = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_TEXTURE_2D_SIZE),
782      .maxFramebufferHeight                     = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_TEXTURE_2D_SIZE),
783      .maxFramebufferLayers                     = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_TEXTURE_ARRAY_LAYERS),
784      .framebufferColorSampleCounts             = sample_counts,
785      .framebufferDepthSampleCounts             = sample_counts,
786      .framebufferStencilSampleCounts           = sample_counts,
787      .framebufferNoAttachmentsSampleCounts     = sample_counts,
788      .maxColorAttachments                      = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_RENDER_TARGETS),
789      .sampledImageColorSampleCounts            = sample_counts,
790      .sampledImageIntegerSampleCounts          = sample_counts,
791      .sampledImageDepthSampleCounts            = sample_counts,
792      .sampledImageStencilSampleCounts          = sample_counts,
793      .storageImageSampleCounts                 = sample_counts,
794      .maxSampleMaskWords                       = 1,
795      .timestampComputeAndGraphics              = true,
796      .timestampPeriod                          = 1,
797      .maxClipDistances                         = 8,
798      .maxCullDistances                         = 8,
799      .maxCombinedClipAndCullDistances          = 8,
800      .discreteQueuePriorities                  = 2,
801      .pointSizeRange                           = { 0.0, pdevice->pscreen->get_paramf(pdevice->pscreen, PIPE_CAPF_MAX_POINT_WIDTH) },
802      .lineWidthRange                           = { 1.0, pdevice->pscreen->get_paramf(pdevice->pscreen, PIPE_CAPF_MAX_LINE_WIDTH) },
803      .pointSizeGranularity                     = (1.0 / 8.0),
804      .lineWidthGranularity                     = 1.0 / 128.0,
805      .strictLines                              = true,
806      .standardSampleLocations                  = true,
807      .optimalBufferCopyOffsetAlignment         = 128,
808      .optimalBufferCopyRowPitchAlignment       = 128,
809      .nonCoherentAtomSize                      = 64,
810   };
811
812   *pProperties = (VkPhysicalDeviceProperties) {
813      .apiVersion = LVP_API_VERSION,
814      .driverVersion = 1,
815      .vendorID = VK_VENDOR_ID_MESA,
816      .deviceID = 0,
817      .deviceType = VK_PHYSICAL_DEVICE_TYPE_CPU,
818      .limits = limits,
819      .sparseProperties = {0},
820   };
821
822   strcpy(pProperties->deviceName, pdevice->pscreen->get_name(pdevice->pscreen));
823   lvp_device_get_cache_uuid(pProperties->pipelineCacheUUID);
824
825}
826
/* Width (in bits) of LLVM's native vector unit, exported by llvmpipe;
 * used below to derive the advertised subgroup size. */
extern unsigned lp_native_vector_width;
/* Fill in the Vulkan 1.1 core properties for lavapipe. */
static void
lvp_get_physical_device_properties_1_1(struct lvp_physical_device *pdevice,
                                       VkPhysicalDeviceVulkan11Properties *p)
{
   assert(p->sType == VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES);

   /* No stable device/driver identity is reported: all-zero UUIDs. */
   memset(p->deviceUUID, 0, VK_UUID_SIZE);
   memset(p->driverUUID, 0, VK_UUID_SIZE);
   memset(p->deviceLUID, 0, VK_LUID_SIZE);
   /* The LUID is for Windows. */
   p->deviceLUIDValid = false;
   p->deviceNodeMask = 0;

   /* One subgroup lane per 32-bit slot of the native vector,
    * e.g. a 128-bit vector unit yields a subgroup size of 4. */
   p->subgroupSize = lp_native_vector_width / 32;
   p->subgroupSupportedStages = VK_SHADER_STAGE_FRAGMENT_BIT | VK_SHADER_STAGE_COMPUTE_BIT;
   p->subgroupSupportedOperations = VK_SUBGROUP_FEATURE_BASIC_BIT | VK_SUBGROUP_FEATURE_VOTE_BIT | VK_SUBGROUP_FEATURE_ARITHMETIC_BIT | VK_SUBGROUP_FEATURE_BALLOT_BIT;
   p->subgroupQuadOperationsInAllStages = false;

   p->pointClippingBehavior = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES;
   p->maxMultiviewViewCount = 6;
   p->maxMultiviewInstanceIndex = INT_MAX;
   p->protectedNoFault = false;
   p->maxPerSetDescriptors = 1024;
   /* 2 GiB maximum single allocation. */
   p->maxMemoryAllocationSize = (1u << 31);
}
853
/* Fill in the Vulkan 1.2 core properties for lavapipe. */
static void
lvp_get_physical_device_properties_1_2(struct lvp_physical_device *pdevice,
                                       VkPhysicalDeviceVulkan12Properties *p)
{
   p->driverID = VK_DRIVER_ID_MESA_LLVMPIPE;
   snprintf(p->driverName, VK_MAX_DRIVER_NAME_SIZE, "llvmpipe");
   snprintf(p->driverInfo, VK_MAX_DRIVER_INFO_SIZE, "Mesa " PACKAGE_VERSION MESA_GIT_SHA1
#ifdef MESA_LLVM_VERSION_STRING
                  " (LLVM " MESA_LLVM_VERSION_STRING ")"
#endif
            );

   /* 0.0.0.0: the driver has not passed the Vulkan CTS. */
   p->conformanceVersion = (VkConformanceVersion){
      .major = 0,
      .minor = 0,
      .subminor = 0,
      .patch = 0,
   };

   /* Float-controls behavior, per bit width. */
   p->denormBehaviorIndependence = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR;
   p->roundingModeIndependence = VK_SHADER_FLOAT_CONTROLS_INDEPENDENCE_ALL_KHR;
   p->shaderDenormFlushToZeroFloat16 = false;
   p->shaderDenormPreserveFloat16 = false;
   p->shaderRoundingModeRTEFloat16 = true;
   p->shaderRoundingModeRTZFloat16 = false;
   p->shaderSignedZeroInfNanPreserveFloat16 = true;

   p->shaderDenormFlushToZeroFloat32 = false;
   p->shaderDenormPreserveFloat32 = false;
   p->shaderRoundingModeRTEFloat32 = true;
   p->shaderRoundingModeRTZFloat32 = false;
   p->shaderSignedZeroInfNanPreserveFloat32 = true;

   p->shaderDenormFlushToZeroFloat64 = false;
   p->shaderDenormPreserveFloat64 = false;
   p->shaderRoundingModeRTEFloat64 = true;
   p->shaderRoundingModeRTZFloat64 = false;
   p->shaderSignedZeroInfNanPreserveFloat64 = true;

   /* Descriptor indexing limits. */
   p->maxUpdateAfterBindDescriptorsInAllPools = UINT32_MAX / 64;
   p->shaderUniformBufferArrayNonUniformIndexingNative = false;
   p->shaderSampledImageArrayNonUniformIndexingNative = false;
   p->shaderStorageBufferArrayNonUniformIndexingNative = false;
   p->shaderStorageImageArrayNonUniformIndexingNative = false;
   p->shaderInputAttachmentArrayNonUniformIndexingNative = false;
   p->robustBufferAccessUpdateAfterBind = true;
   p->quadDivergentImplicitLod = false;

   size_t max_descriptor_set_size = 65536; //TODO
   p->maxPerStageDescriptorUpdateAfterBindSamplers = max_descriptor_set_size;
   p->maxPerStageDescriptorUpdateAfterBindUniformBuffers = max_descriptor_set_size;
   p->maxPerStageDescriptorUpdateAfterBindStorageBuffers = max_descriptor_set_size;
   p->maxPerStageDescriptorUpdateAfterBindSampledImages = max_descriptor_set_size;
   p->maxPerStageDescriptorUpdateAfterBindStorageImages = max_descriptor_set_size;
   p->maxPerStageDescriptorUpdateAfterBindInputAttachments = max_descriptor_set_size;
   p->maxPerStageUpdateAfterBindResources = max_descriptor_set_size;
   p->maxDescriptorSetUpdateAfterBindSamplers = max_descriptor_set_size;
   p->maxDescriptorSetUpdateAfterBindUniformBuffers = max_descriptor_set_size;
   p->maxDescriptorSetUpdateAfterBindUniformBuffersDynamic = 16;
   p->maxDescriptorSetUpdateAfterBindStorageBuffers = max_descriptor_set_size;
   p->maxDescriptorSetUpdateAfterBindStorageBuffersDynamic = 16;
   p->maxDescriptorSetUpdateAfterBindSampledImages = max_descriptor_set_size;
   p->maxDescriptorSetUpdateAfterBindStorageImages = max_descriptor_set_size;
   p->maxDescriptorSetUpdateAfterBindInputAttachments = max_descriptor_set_size;

   /* Depth/stencil resolve support. */
   p->supportedDepthResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT | VK_RESOLVE_MODE_AVERAGE_BIT;
   p->supportedStencilResolveModes = VK_RESOLVE_MODE_SAMPLE_ZERO_BIT;
   p->independentResolveNone = false;
   p->independentResolve = false;

   p->filterMinmaxImageComponentMapping = true;
   p->filterMinmaxSingleComponentFormats = true;

   p->maxTimelineSemaphoreValueDifference = UINT64_MAX;
   p->framebufferIntegerColorSampleCounts = VK_SAMPLE_COUNT_1_BIT;
}
930
/* Extended physical-device property query: fills the core properties,
 * then walks the caller's pNext chain, answering the core 1.1/1.2
 * property structs via the vk_util helpers and the per-extension
 * structs in the switch below.  Unknown structs are left untouched. */
VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceProperties2(
   VkPhysicalDevice                            physicalDevice,
   VkPhysicalDeviceProperties2                *pProperties)
{
   LVP_FROM_HANDLE(lvp_physical_device, pdevice, physicalDevice);
   lvp_GetPhysicalDeviceProperties(physicalDevice, &pProperties->properties);

   VkPhysicalDeviceVulkan11Properties core_1_1 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_PROPERTIES,
   };
   lvp_get_physical_device_properties_1_1(pdevice, &core_1_1);

   VkPhysicalDeviceVulkan12Properties core_1_2 = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_PROPERTIES,
   };
   lvp_get_physical_device_properties_1_2(pdevice, &core_1_2);

   vk_foreach_struct(ext, pProperties->pNext) {

      /* Core 1.1/1.2 property structs (and their promoted aliases)
       * are answered wholesale from the structs filled above. */
      if (vk_get_physical_device_core_1_1_property_ext(ext, &core_1_1))
         continue;
      if (vk_get_physical_device_core_1_2_property_ext(ext, &core_1_2))
         continue;
      switch (ext->sType) {
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PUSH_DESCRIPTOR_PROPERTIES_KHR: {
         VkPhysicalDevicePushDescriptorPropertiesKHR *properties =
            (VkPhysicalDevicePushDescriptorPropertiesKHR *) ext;
         properties->maxPushDescriptors = MAX_PUSH_DESCRIPTORS;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_POINT_CLIPPING_PROPERTIES: {
         VkPhysicalDevicePointClippingProperties *properties =
            (VkPhysicalDevicePointClippingProperties*)ext;
         properties->pointClippingBehavior = VK_POINT_CLIPPING_BEHAVIOR_ALL_CLIP_PLANES;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VERTEX_ATTRIBUTE_DIVISOR_PROPERTIES_EXT: {
         VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *props =
            (VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT *)ext;
         /* Non-zero cap means arbitrary divisors are supported. */
         if (pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_VERTEX_ELEMENT_INSTANCE_DIVISOR) != 0)
            props->maxVertexAttribDivisor = UINT32_MAX;
         else
            props->maxVertexAttribDivisor = 1;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TRANSFORM_FEEDBACK_PROPERTIES_EXT: {
         VkPhysicalDeviceTransformFeedbackPropertiesEXT *properties =
            (VkPhysicalDeviceTransformFeedbackPropertiesEXT*)ext;
         properties->maxTransformFeedbackStreams = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_VERTEX_STREAMS);
         properties->maxTransformFeedbackBuffers = pdevice->pscreen->get_param(pdevice->pscreen, PIPE_CAP_MAX_STREAM_OUTPUT_BUFFERS);
         properties->maxTransformFeedbackBufferSize = UINT32_MAX;
         properties->maxTransformFeedbackStreamDataSize = 512;
         properties->maxTransformFeedbackBufferDataSize = 512;
         properties->maxTransformFeedbackBufferDataStride = 512;
         properties->transformFeedbackQueries = true;
         properties->transformFeedbackStreamsLinesTriangles = false;
         properties->transformFeedbackRasterizationStreamSelect = false;
         properties->transformFeedbackDraw = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_LINE_RASTERIZATION_PROPERTIES_EXT: {
         VkPhysicalDeviceLineRasterizationPropertiesEXT *properties =
            (VkPhysicalDeviceLineRasterizationPropertiesEXT *)ext;
         properties->lineSubPixelPrecisionBits =
            pdevice->pscreen->get_param(pdevice->pscreen,
                                        PIPE_CAP_RASTERIZER_SUBPIXEL_BITS);
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT: {
         VkPhysicalDeviceExternalMemoryHostPropertiesEXT *properties =
            (VkPhysicalDeviceExternalMemoryHostPropertiesEXT *)ext;
         /* Host pointers must be page-aligned. */
         properties->minImportedHostPointerAlignment = 4096;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_CUSTOM_BORDER_COLOR_PROPERTIES_EXT: {
         VkPhysicalDeviceCustomBorderColorPropertiesEXT *properties =
            (VkPhysicalDeviceCustomBorderColorPropertiesEXT *)ext;
         properties->maxCustomBorderColorSamplers = 32 * 1024;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROVOKING_VERTEX_PROPERTIES_EXT: {
         VkPhysicalDeviceProvokingVertexPropertiesEXT *properties =
            (VkPhysicalDeviceProvokingVertexPropertiesEXT*)ext;
         properties->provokingVertexModePerPipeline = true;
         properties->transformFeedbackPreservesTriangleFanProvokingVertex = true;
         break;
      }
      case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MULTI_DRAW_PROPERTIES_EXT: {
         VkPhysicalDeviceMultiDrawPropertiesEXT *props = (VkPhysicalDeviceMultiDrawPropertiesEXT *)ext;
         props->maxMultiDrawCount = 2048;
         break;
      }
      default:
         break;
      }
   }
}
1028
1029static void lvp_get_physical_device_queue_family_properties(
1030   VkQueueFamilyProperties*                    pQueueFamilyProperties)
1031{
1032   *pQueueFamilyProperties = (VkQueueFamilyProperties) {
1033      .queueFlags = VK_QUEUE_GRAPHICS_BIT |
1034      VK_QUEUE_COMPUTE_BIT |
1035      VK_QUEUE_TRANSFER_BIT,
1036      .queueCount = 1,
1037      .timestampValidBits = 64,
1038      .minImageTransferGranularity = (VkExtent3D) { 1, 1, 1 },
1039   };
1040}
1041
1042VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceQueueFamilyProperties(
1043   VkPhysicalDevice                            physicalDevice,
1044   uint32_t*                                   pCount,
1045   VkQueueFamilyProperties*                    pQueueFamilyProperties)
1046{
1047   if (pQueueFamilyProperties == NULL) {
1048      *pCount = 1;
1049      return;
1050   }
1051
1052   assert(*pCount >= 1);
1053   lvp_get_physical_device_queue_family_properties(pQueueFamilyProperties);
1054}
1055
1056VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceQueueFamilyProperties2(
1057   VkPhysicalDevice                            physicalDevice,
1058   uint32_t*                                   pCount,
1059   VkQueueFamilyProperties2                   *pQueueFamilyProperties)
1060{
1061   if (pQueueFamilyProperties == NULL) {
1062      *pCount = 1;
1063      return;
1064   }
1065
1066   assert(*pCount >= 1);
1067   lvp_get_physical_device_queue_family_properties(&pQueueFamilyProperties->queueFamilyProperties);
1068}
1069
1070VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceMemoryProperties(
1071   VkPhysicalDevice                            physicalDevice,
1072   VkPhysicalDeviceMemoryProperties*           pMemoryProperties)
1073{
1074   pMemoryProperties->memoryTypeCount = 1;
1075   pMemoryProperties->memoryTypes[0] = (VkMemoryType) {
1076      .propertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
1077      VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
1078      VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
1079      VK_MEMORY_PROPERTY_HOST_CACHED_BIT,
1080      .heapIndex = 0,
1081   };
1082
1083   pMemoryProperties->memoryHeapCount = 1;
1084   pMemoryProperties->memoryHeaps[0] = (VkMemoryHeap) {
1085      .size = 2ULL*1024*1024*1024,
1086      .flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT,
1087   };
1088}
1089
/* Properties2 variant: fills the embedded core struct only; no pNext
 * memory-property extensions are handled here. */
VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceMemoryProperties2(
   VkPhysicalDevice                            physicalDevice,
   VkPhysicalDeviceMemoryProperties2          *pMemoryProperties)
{
   lvp_GetPhysicalDeviceMemoryProperties(physicalDevice,
                                         &pMemoryProperties->memoryProperties);
}
1097
1098VKAPI_ATTR VkResult VKAPI_CALL
1099lvp_GetMemoryHostPointerPropertiesEXT(
1100   VkDevice _device,
1101   VkExternalMemoryHandleTypeFlagBits handleType,
1102   const void *pHostPointer,
1103   VkMemoryHostPointerPropertiesEXT *pMemoryHostPointerProperties)
1104{
1105   switch (handleType) {
1106   case VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT: {
1107      pMemoryHostPointerProperties->memoryTypeBits = 1;
1108      return VK_SUCCESS;
1109   }
1110   default:
1111      return VK_ERROR_INVALID_EXTERNAL_HANDLE;
1112   }
1113}
1114
/* Resolve a Vulkan entry point by name via the shared vk_instance
 * dispatch helper and the driver's entrypoint table. */
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL lvp_GetInstanceProcAddr(
   VkInstance                                  _instance,
   const char*                                 pName)
{
   LVP_FROM_HANDLE(lvp_instance, instance, _instance);
   return vk_instance_get_proc_addr(&instance->vk,
                                    &lvp_instance_entrypoints,
                                    pName);
}
1124
1125/* Windows will use a dll definition file to avoid build errors. */
1126#ifdef _WIN32
1127#undef PUBLIC
1128#define PUBLIC
1129#endif
1130
/* The loader wants us to expose a second GetInstanceProcAddr function
 * to work around certain LD_PRELOAD issues seen in apps.
 */
PUBLIC
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(
   VkInstance                                  instance,
   const char*                                 pName);

/* ICD entrypoint: forwards directly to the driver's
 * GetInstanceProcAddr. */
PUBLIC
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(
   VkInstance                                  instance,
   const char*                                 pName)
{
   return lvp_GetInstanceProcAddr(instance, pName);
}
1146
/* ICD entrypoint used by the loader to resolve physical-device-level
 * functions; delegates to the shared vk_instance helper. */
PUBLIC
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(
   VkInstance                                  _instance,
   const char*                                 pName);

PUBLIC
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(
   VkInstance                                  _instance,
   const char*                                 pName)
{
   LVP_FROM_HANDLE(lvp_instance, instance, _instance);
   return vk_instance_get_physical_device_proc_addr(&instance->vk, pName);
}
1160
/* Record the most recently flushed fence and its timeline id on the
 * device queue.  queue.last_lock is held so readers observe the fence
 * and the timeline id as a consistent pair. */
static void
set_last_fence(struct lvp_device *device, struct pipe_fence_handle *handle, uint64_t timeline)
{
   simple_mtx_lock(&device->queue.last_lock);
   device->queue.last_fence_timeline = timeline;
   device->pscreen->fence_reference(device->pscreen, &device->queue.last_fence, handle);
   simple_mtx_unlock(&device->queue.last_lock);
}
1169
/* Flush the queue's context and distribute references to the resulting
 * fence: to the optional VkFence, to the device's "last fence" slot,
 * and to each signaling timeline-semaphore link.  The local reference
 * is dropped at the end so the fence's lifetime is owned by those
 * holders. */
static void
thread_flush(struct lvp_device *device, struct lvp_fence *fence, uint64_t timeline,
             unsigned num_timelines, struct lvp_semaphore_timeline **timelines)
{
   struct pipe_fence_handle *handle = NULL;
   device->queue.ctx->flush(device->queue.ctx, &handle, 0);
   if (fence)
      device->pscreen->fence_reference(device->pscreen, &fence->handle, handle);
   set_last_fence(device, handle, timeline);
   /* this is the array of signaling timeline semaphore links */
   for (unsigned i = 0; i < num_timelines; i++)
      device->pscreen->fence_reference(device->pscreen, &timelines[i]->fence, handle);

   /* drop the local reference taken by flush() */
   device->pscreen->fence_reference(device->pscreen, &handle, NULL);
}
1185
/* get a new timeline link for creating a new signal event
 * sema->lock MUST be locked before calling
 *
 * Links are recycled through sema->links (a dynarray free list); when
 * the free list is empty a bucket of NUM_LINKS fresh links is
 * allocated from the semaphore's ralloc context.  The returned link is
 * appended to the tail of the semaphore's singly-linked timeline list
 * (sema->timeline is the head, sema->latest the tail).
 */
static struct lvp_semaphore_timeline *
get_semaphore_link(struct lvp_semaphore *sema)
{
   if (!util_dynarray_num_elements(&sema->links, struct lvp_semaphore_timeline*)) {
#define NUM_LINKS 50
      /* bucket allocate using the ralloc ctx because I like buckets */
      struct lvp_semaphore_timeline *link = ralloc_array(sema->mem, struct lvp_semaphore_timeline, NUM_LINKS);
      for (unsigned i = 0; i < NUM_LINKS; i++) {
         link[i].next = NULL;
         link[i].fence = NULL;
         util_dynarray_append(&sema->links, struct lvp_semaphore_timeline*, &link[i]);
      }
   }
   struct lvp_semaphore_timeline *tl = util_dynarray_pop(&sema->links, struct lvp_semaphore_timeline*);
   /* append to the tail, or start a new list if it was empty */
   if (sema->timeline)
      sema->latest->next = tl;
   else
      sema->timeline = tl;
   sema->latest = tl;
   return tl;
}
1210
/* prune any timeline links which are older than the current device timeline id
 * sema->lock MUST be locked before calling
 *
 * Pruned links have their fence reference dropped and are returned to
 * the sema->links free list for reuse by get_semaphore_link().  The
 * semaphore's current value is advanced monotonically as completed
 * signal values are popped.
 */
static void
prune_semaphore_links(struct lvp_device *device,
                      struct lvp_semaphore *sema, uint64_t timeline)
{
   if (!timeline)
      /* zero isn't a valid id to prune with */
      return;
   struct lvp_semaphore_timeline *tl = sema->timeline;
   /* walk the timeline links and pop all the ones that are old */
   while (tl && ((tl->timeline <= timeline) || (tl->signal <= sema->current))) {
      struct lvp_semaphore_timeline *cur = tl;
      /* only update current timeline id if the update is monotonic */
      if (sema->current < tl->signal)
         sema->current = tl->signal;
      /* recycle the link before unhooking it from the list */
      util_dynarray_append(&sema->links, struct lvp_semaphore_timeline*, tl);
      tl = tl->next;
      cur->next = NULL;
      device->pscreen->fence_reference(device->pscreen, &cur->fence, NULL);
   }
   /* this is now the current timeline link */
   sema->timeline = tl;
}
1236
1237/* find a timeline id that can be waited on to satisfy the signal condition
1238 * sema->lock MUST be locked before calling
1239 */
1240static struct lvp_semaphore_timeline *
1241find_semaphore_timeline(struct lvp_semaphore *sema, uint64_t signal)
1242{
1243   for (struct lvp_semaphore_timeline *tl = sema->timeline; tl; tl = tl->next) {
1244      if (tl->signal >= signal)
1245         return tl;
1246   }
1247   /* never submitted or is completed */
1248   return NULL;
1249}
1250
/* Per-semaphore polling state used by wait_semaphores(). */
struct timeline_wait {
   bool done; /* this semaphore's wait condition has been satisfied */
   struct lvp_semaphore_timeline *tl; /* timeline link being polled, if found */
};
1255
1256static VkResult wait_semaphores(struct lvp_device *device,
1257    const VkSemaphoreWaitInfo*                  pWaitInfo,
1258    uint64_t                                    timeout)
1259{
1260   /* build array of timeline links to poll */
1261   VkResult ret = VK_TIMEOUT;
1262   bool any = (pWaitInfo->flags & VK_SEMAPHORE_WAIT_ANY_BIT) == VK_SEMAPHORE_WAIT_ANY_BIT;
1263   unsigned num_remaining = any ? 1 : pWaitInfo->semaphoreCount;
1264   /* just allocate an array for simplicity */
1265   struct timeline_wait *tl_array = calloc(pWaitInfo->semaphoreCount, sizeof(struct timeline_wait));
1266
1267   int64_t abs_timeout = os_time_get_absolute_timeout(timeout);
1268   /* UINT64_MAX will always overflow, so special case it
1269    * otherwise, calculate ((timeout / num_semaphores) / 10) to allow waiting 10 times on every semaphore
1270    */
1271   uint64_t wait_interval = timeout == UINT64_MAX ? 5000 : timeout / pWaitInfo->semaphoreCount / 10;
1272   while (num_remaining) {
1273      for (unsigned i = 0; num_remaining && i < pWaitInfo->semaphoreCount; i++) {
1274         if (tl_array[i].done) //completed
1275            continue;
1276         if (timeout && timeout != UINT64_MAX) {
1277            /* update remaining timeout on every loop */
1278            int64_t time_ns = os_time_get_nano();
1279            if (abs_timeout <= time_ns)
1280               goto end;
1281            timeout = abs_timeout > time_ns ? abs_timeout - time_ns : 0;
1282         }
1283         const uint64_t waitval = pWaitInfo->pValues[i];
1284         LVP_FROM_HANDLE(lvp_semaphore, sema, pWaitInfo->pSemaphores[i]);
1285         if (sema->current >= waitval) {
1286            tl_array[i].done = true;
1287            num_remaining--;
1288            continue;
1289         }
1290         if (!tl_array[i].tl) {
1291            /* no timeline link was available yet: try to find one */
1292            simple_mtx_lock(&sema->lock);
1293            /* always prune first to update current timeline id */
1294            prune_semaphore_links(device, sema, device->queue.last_finished);
1295            tl_array[i].tl = find_semaphore_timeline(sema, waitval);
1296            if (timeout && !tl_array[i].tl) {
1297               /* still no timeline link available:
1298                * try waiting on the conditional for a broadcast instead of melting the cpu
1299                */
1300               mtx_lock(&sema->submit_lock);
1301               struct timespec t;
1302               t.tv_nsec = wait_interval % 1000000000u;
1303               t.tv_sec = (wait_interval - t.tv_nsec) / 1000000000u;
1304               cnd_timedwait(&sema->submit, &sema->submit_lock, &t);
1305               mtx_unlock(&sema->submit_lock);
1306               tl_array[i].tl = find_semaphore_timeline(sema, waitval);
1307            }
1308            simple_mtx_unlock(&sema->lock);
1309         }
1310         /* mark semaphore as done if:
1311          * - timeline id comparison passes
1312          * - fence for timeline id exists and completes
1313          */
1314         if (sema->current >= waitval ||
1315             (tl_array[i].tl &&
1316              tl_array[i].tl->fence &&
1317              device->pscreen->fence_finish(device->pscreen, NULL, tl_array[i].tl->fence, wait_interval))) {
1318            tl_array[i].done = true;
1319            num_remaining--;
1320         }
1321      }
1322      if (!timeout)
1323         break;
1324   }
1325   if (!num_remaining)
1326      ret = VK_SUCCESS;
1327
1328end:
1329   free(tl_array);
1330   return ret;
1331}
1332
/* util_queue callback for a submission with no command buffers: just
 * flush the context so the fence for this submission can signal. */
void
queue_thread_noop(void *data, void *gdata, int thread_index)
{
   struct lvp_device *device = gdata;
   struct lvp_fence *fence = data;
   thread_flush(device, fence, fence->timeline, 0, NULL);
}
1340
1341static void
1342queue_thread(void *data, void *gdata, int thread_index)
1343{
1344   struct lvp_queue_work *task = data;
1345   struct lvp_device *device = gdata;
1346   struct lvp_queue *queue = &device->queue;
1347
1348   if (task->wait_count) {
1349      /* identical to WaitSemaphores */
1350      VkSemaphoreWaitInfo wait;
1351      wait.flags = 0; //wait on all semaphores
1352      wait.semaphoreCount = task->wait_count;
1353      wait.pSemaphores = task->waits;
1354      wait.pValues = task->wait_vals;
1355      //wait
1356      wait_semaphores(device, &wait, UINT64_MAX);
1357   }
1358
1359   //execute
1360   for (unsigned i = 0; i < task->cmd_buffer_count; i++) {
1361      lvp_execute_cmds(queue->device, queue, task->cmd_buffers[i]);
1362   }
1363
1364   thread_flush(device, task->fence, task->timeline, task->timeline_count, task->timelines);
1365   free(task);
1366}
1367
1368static VkResult
1369lvp_queue_init(struct lvp_device *device, struct lvp_queue *queue,
1370               const VkDeviceQueueCreateInfo *create_info,
1371               uint32_t index_in_family)
1372{
1373   VkResult result = vk_queue_init(&queue->vk, &device->vk, create_info,
1374                                   index_in_family);
1375   if (result != VK_SUCCESS)
1376      return result;
1377
1378   queue->device = device;
1379
1380   simple_mtx_init(&queue->last_lock, mtx_plain);
1381   queue->timeline = 0;
1382   queue->ctx = device->pscreen->context_create(device->pscreen, NULL, PIPE_CONTEXT_ROBUST_BUFFER_ACCESS);
1383   queue->cso = cso_create_context(queue->ctx, CSO_NO_VBUF);
1384   util_queue_init(&queue->queue, "lavapipe", 8, 1, UTIL_QUEUE_INIT_RESIZE_IF_FULL, device);
1385   p_atomic_set(&queue->count, 0);
1386
1387   return VK_SUCCESS;
1388}
1389
/* Tear down a queue in strict reverse order of lvp_queue_init:
 * drain and destroy the submission thread first so no worker can still
 * be touching the context/CSO state while they are destroyed.
 */
static void
lvp_queue_finish(struct lvp_queue *queue)
{
   util_queue_finish(&queue->queue);
   util_queue_destroy(&queue->queue);

   cso_destroy_context(queue->cso);
   queue->ctx->destroy(queue->ctx);
   simple_mtx_destroy(&queue->last_lock);

   vk_queue_finish(&queue->vk);
}
1402
1403VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateDevice(
1404   VkPhysicalDevice                            physicalDevice,
1405   const VkDeviceCreateInfo*                   pCreateInfo,
1406   const VkAllocationCallbacks*                pAllocator,
1407   VkDevice*                                   pDevice)
1408{
1409   fprintf(stderr, "WARNING: lavapipe is not a conformant vulkan implementation, testing use only.\n");
1410
1411   LVP_FROM_HANDLE(lvp_physical_device, physical_device, physicalDevice);
1412   struct lvp_device *device;
1413   struct lvp_instance *instance = (struct lvp_instance *)physical_device->vk.instance;
1414
1415   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO);
1416
1417   device = vk_zalloc2(&physical_device->vk.instance->alloc, pAllocator,
1418                       sizeof(*device), 8,
1419                       VK_SYSTEM_ALLOCATION_SCOPE_DEVICE);
1420   if (!device)
1421      return vk_error(instance, VK_ERROR_OUT_OF_HOST_MEMORY);
1422
1423   struct vk_device_dispatch_table dispatch_table;
1424   vk_device_dispatch_table_from_entrypoints(&dispatch_table,
1425      &lvp_device_entrypoints, true);
1426   vk_device_dispatch_table_from_entrypoints(&dispatch_table,
1427      &wsi_device_entrypoints, false);
1428   VkResult result = vk_device_init(&device->vk,
1429                                    &physical_device->vk,
1430                                    &dispatch_table, pCreateInfo,
1431                                    pAllocator);
1432   if (result != VK_SUCCESS) {
1433      vk_free(&device->vk.alloc, device);
1434      return result;
1435   }
1436
1437   device->instance = (struct lvp_instance *)physical_device->vk.instance;
1438   device->physical_device = physical_device;
1439
1440   device->pscreen = physical_device->pscreen;
1441
1442   assert(pCreateInfo->queueCreateInfoCount == 1);
1443   assert(pCreateInfo->pQueueCreateInfos[0].queueFamilyIndex == 0);
1444   assert(pCreateInfo->pQueueCreateInfos[0].queueCount == 1);
1445   lvp_queue_init(device, &device->queue, pCreateInfo->pQueueCreateInfos, 0);
1446
1447   *pDevice = lvp_device_to_handle(device);
1448
1449   return VK_SUCCESS;
1450
1451}
1452
1453VKAPI_ATTR void VKAPI_CALL lvp_DestroyDevice(
1454   VkDevice                                    _device,
1455   const VkAllocationCallbacks*                pAllocator)
1456{
1457   LVP_FROM_HANDLE(lvp_device, device, _device);
1458
1459   if (device->queue.last_fence)
1460      device->pscreen->fence_reference(device->pscreen, &device->queue.last_fence, NULL);
1461   lvp_queue_finish(&device->queue);
1462   vk_device_finish(&device->vk);
1463   vk_free(&device->vk.alloc, device);
1464}
1465
1466VKAPI_ATTR VkResult VKAPI_CALL lvp_EnumerateInstanceExtensionProperties(
1467   const char*                                 pLayerName,
1468   uint32_t*                                   pPropertyCount,
1469   VkExtensionProperties*                      pProperties)
1470{
1471   if (pLayerName)
1472      return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
1473
1474   return vk_enumerate_instance_extension_properties(
1475      &lvp_instance_extensions_supported, pPropertyCount, pProperties);
1476}
1477
1478VKAPI_ATTR VkResult VKAPI_CALL lvp_EnumerateInstanceLayerProperties(
1479   uint32_t*                                   pPropertyCount,
1480   VkLayerProperties*                          pProperties)
1481{
1482   if (pProperties == NULL) {
1483      *pPropertyCount = 0;
1484      return VK_SUCCESS;
1485   }
1486
1487   /* None supported at this time */
1488   return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
1489}
1490
1491VKAPI_ATTR VkResult VKAPI_CALL lvp_EnumerateDeviceLayerProperties(
1492   VkPhysicalDevice                            physicalDevice,
1493   uint32_t*                                   pPropertyCount,
1494   VkLayerProperties*                          pProperties)
1495{
1496   if (pProperties == NULL) {
1497      *pPropertyCount = 0;
1498      return VK_SUCCESS;
1499   }
1500
1501   /* None supported at this time */
1502   return vk_error(NULL, VK_ERROR_LAYER_NOT_PRESENT);
1503}
1504
1505VKAPI_ATTR VkResult VKAPI_CALL lvp_QueueSubmit(
1506   VkQueue                                     _queue,
1507   uint32_t                                    submitCount,
1508   const VkSubmitInfo*                         pSubmits,
1509   VkFence                                     _fence)
1510{
1511   LVP_FROM_HANDLE(lvp_queue, queue, _queue);
1512   LVP_FROM_HANDLE(lvp_fence, fence, _fence);
1513
1514   /* each submit is a separate job to simplify/streamline semaphore waits */
1515   for (uint32_t i = 0; i < submitCount; i++) {
1516      uint64_t timeline = ++queue->timeline;
1517      struct lvp_queue_work *task = malloc(sizeof(struct lvp_queue_work) +
1518                                           pSubmits[i].commandBufferCount * sizeof(struct lvp_cmd_buffer *) +
1519                                           pSubmits[i].signalSemaphoreCount * sizeof(struct lvp_semaphore_timeline*) +
1520                                           pSubmits[i].waitSemaphoreCount * (sizeof(VkSemaphore) + sizeof(uint64_t)));
1521      task->cmd_buffer_count = pSubmits[i].commandBufferCount;
1522      task->timeline_count = pSubmits[i].signalSemaphoreCount;
1523      task->wait_count = pSubmits[i].waitSemaphoreCount;
1524      task->fence = fence;
1525      task->timeline = timeline;
1526      task->cmd_buffers = (struct lvp_cmd_buffer **)(task + 1);
1527      task->timelines = (struct lvp_semaphore_timeline**)((uint8_t*)task->cmd_buffers + pSubmits[i].commandBufferCount * sizeof(struct lvp_cmd_buffer *));
1528      task->waits = (VkSemaphore*)((uint8_t*)task->timelines + pSubmits[i].signalSemaphoreCount * sizeof(struct lvp_semaphore_timeline *));
1529      task->wait_vals = (uint64_t*)((uint8_t*)task->waits + pSubmits[i].waitSemaphoreCount * sizeof(VkSemaphore));
1530
1531      unsigned c = 0;
1532      for (uint32_t j = 0; j < pSubmits[i].commandBufferCount; j++) {
1533         task->cmd_buffers[c++] = lvp_cmd_buffer_from_handle(pSubmits[i].pCommandBuffers[j]);
1534      }
1535      const VkTimelineSemaphoreSubmitInfo *info = vk_find_struct_const(pSubmits[i].pNext, TIMELINE_SEMAPHORE_SUBMIT_INFO);
1536      unsigned s = 0;
1537      for (unsigned j = 0; j < pSubmits[i].signalSemaphoreCount; j++) {
1538         LVP_FROM_HANDLE(lvp_semaphore, sema, pSubmits[i].pSignalSemaphores[j]);
1539         if (!sema->is_timeline) {
1540            /* non-timeline semaphores never matter to lavapipe */
1541            task->timeline_count--;
1542            continue;
1543         }
1544         simple_mtx_lock(&sema->lock);
1545         /* always prune first to make links available and update timeline id */
1546         prune_semaphore_links(queue->device, sema, queue->last_finished);
1547         if (sema->current < info->pSignalSemaphoreValues[j]) {
1548            /* only signal semaphores if the new id is >= the current one */
1549            struct lvp_semaphore_timeline *tl = get_semaphore_link(sema);
1550            tl->signal = info->pSignalSemaphoreValues[j];
1551            tl->timeline = timeline;
1552            task->timelines[s] = tl;
1553            s++;
1554         } else
1555            task->timeline_count--;
1556         simple_mtx_unlock(&sema->lock);
1557      }
1558      unsigned w = 0;
1559      for (unsigned j = 0; j < pSubmits[i].waitSemaphoreCount; j++) {
1560         LVP_FROM_HANDLE(lvp_semaphore, sema, pSubmits[i].pWaitSemaphores[j]);
1561         if (!sema->is_timeline) {
1562            /* non-timeline semaphores never matter to lavapipe */
1563            task->wait_count--;
1564            continue;
1565         }
1566         simple_mtx_lock(&sema->lock);
1567         /* always prune first to update timeline id */
1568         prune_semaphore_links(queue->device, sema, queue->last_finished);
1569         if (info->pWaitSemaphoreValues[j] &&
1570             pSubmits[i].pWaitDstStageMask && pSubmits[i].pWaitDstStageMask[j] &&
1571             sema->current < info->pWaitSemaphoreValues[j]) {
1572            /* only wait on semaphores if the new id is > the current one and a wait mask is set
1573             *
1574             * technically the mask could be used to check whether there's gfx/compute ops on a cmdbuf and no-op,
1575             * but probably that's not worth the complexity
1576             */
1577            task->waits[w] = pSubmits[i].pWaitSemaphores[j];
1578            task->wait_vals[w] = info->pWaitSemaphoreValues[j];
1579            w++;
1580         } else
1581            task->wait_count--;
1582         simple_mtx_unlock(&sema->lock);
1583      }
1584      if (fence && i == submitCount - 1) {
1585         /* u_queue fences should only be signaled for the last submit, as this is the one that
1586          * the vk fence represents
1587          */
1588         fence->timeline = timeline;
1589         util_queue_add_job(&queue->queue, task, &fence->fence, queue_thread, NULL, 0);
1590      } else
1591         util_queue_add_job(&queue->queue, task, NULL, queue_thread, NULL, 0);
1592   }
1593   if (!submitCount && fence) {
1594      /* special case where a fence is created to use as a synchronization point */
1595      fence->timeline = p_atomic_inc_return(&queue->timeline);
1596      util_queue_add_job(&queue->queue, fence, &fence->fence, queue_thread_noop, NULL, 0);
1597   }
1598
1599   return VK_SUCCESS;
1600}
1601
/* Drain the queue: run all pending u_queue jobs, then block on the most
 * recent gallium fence and retire it so timeline bookkeeping can advance.
 */
VKAPI_ATTR VkResult VKAPI_CALL lvp_QueueWaitIdle(
   VkQueue                                     _queue)
{
   LVP_FROM_HANDLE(lvp_queue, queue, _queue);

   /* wait for the submit thread to process every queued job first */
   util_queue_finish(&queue->queue);
   simple_mtx_lock(&queue->last_lock);
   /* snapshot the timeline id paired with last_fence while holding the lock */
   uint64_t timeline = queue->last_fence_timeline;
   if (queue->last_fence) {
      /* finish the last fence, drop the cached reference, and advance
       * last_finished so semaphore-link pruning can make progress
       * (queue->device->queue is this queue — there is only one)
       */
      queue->device->pscreen->fence_finish(queue->device->pscreen, NULL, queue->last_fence, PIPE_TIMEOUT_INFINITE);
      queue->device->pscreen->fence_reference(queue->device->pscreen, &queue->device->queue.last_fence, NULL);
      queue->last_finished = timeline;
   }
   simple_mtx_unlock(&queue->last_lock);
   return VK_SUCCESS;
}
1618
1619VKAPI_ATTR VkResult VKAPI_CALL lvp_DeviceWaitIdle(
1620   VkDevice                                    _device)
1621{
1622   LVP_FROM_HANDLE(lvp_device, device, _device);
1623
1624   lvp_QueueWaitIdle(lvp_queue_to_handle(&device->queue));
1625
1626   return VK_SUCCESS;
1627}
1628
/* Allocate device memory. Four backing flavors are supported:
 * plain screen allocation (default), host-pointer import
 * (VK_EXT_external_memory_host), opaque-fd import, and opaque-fd export.
 */
VKAPI_ATTR VkResult VKAPI_CALL lvp_AllocateMemory(
   VkDevice                                    _device,
   const VkMemoryAllocateInfo*                 pAllocateInfo,
   const VkAllocationCallbacks*                pAllocator,
   VkDeviceMemory*                             pMem)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_device_memory *mem;
   const VkExportMemoryAllocateInfo *export_info = NULL;
   const VkImportMemoryFdInfoKHR *import_info = NULL;
   const VkImportMemoryHostPointerInfoEXT *host_ptr_info = NULL;
   /* default error for the fail: path; overridden on fd-import failure */
   VkResult error = VK_ERROR_OUT_OF_DEVICE_MEMORY;
   assert(pAllocateInfo->sType == VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO);

   if (pAllocateInfo->allocationSize == 0) {
      /* Apparently, this is allowed */
      *pMem = VK_NULL_HANDLE;
      return VK_SUCCESS;
   }

   /* scan the pNext chain for host-pointer import, fd export and fd import */
   vk_foreach_struct_const(ext, pAllocateInfo->pNext) {
      switch ((unsigned)ext->sType) {
      case VK_STRUCTURE_TYPE_IMPORT_MEMORY_HOST_POINTER_INFO_EXT:
         host_ptr_info = (VkImportMemoryHostPointerInfoEXT*)ext;
         assert(host_ptr_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_HOST_ALLOCATION_BIT_EXT);
         break;
      case VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO:
         export_info = (VkExportMemoryAllocateInfo*)ext;
         assert(export_info->handleTypes == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT);
         break;
      case VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR:
         import_info = (VkImportMemoryFdInfoKHR*)ext;
         assert(import_info->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT);
         break;
      default:
         break;
      }
   }

#ifdef PIPE_MEMORY_FD
   /* reject obviously-invalid fds before allocating anything */
   if (import_info != NULL && import_info->fd < 0) {
      return vk_error(device->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);
   }
#endif

   mem = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*mem), 8,
                   VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
   if (mem == NULL)
      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);

   vk_object_base_init(&device->vk, &mem->base,
                       VK_OBJECT_TYPE_DEVICE_MEMORY);

   mem->memory_type = LVP_DEVICE_MEMORY_TYPE_DEFAULT;
   mem->backed_fd = -1;

   if (host_ptr_info) {
      /* application-owned memory: never freed by lavapipe */
      mem->pmem = host_ptr_info->pHostPointer;
      mem->memory_type = LVP_DEVICE_MEMORY_TYPE_USER_PTR;
   }
#ifdef PIPE_MEMORY_FD
   else if(import_info) {
      /* fd ownership: the import consumes the fd — it is kept only when
       * re-export is requested, and closed in every other path
       */
      uint64_t size;
      if(!device->pscreen->import_memory_fd(device->pscreen, import_info->fd, &mem->pmem, &size)) {
         close(import_info->fd);
         error = VK_ERROR_INVALID_EXTERNAL_HANDLE;
         goto fail;
      }
      if(size < pAllocateInfo->allocationSize) {
         /* imported region is too small to back this allocation */
         device->pscreen->free_memory_fd(device->pscreen, mem->pmem);
         close(import_info->fd);
         goto fail;
      }
      if (export_info) {
         /* keep the fd so lvp_GetMemoryFdKHR can dup() it later */
         mem->backed_fd = import_info->fd;
      }
      else {
         close(import_info->fd);
      }
      mem->memory_type = LVP_DEVICE_MEMORY_TYPE_OPAQUE_FD;
   }
   else if (export_info) {
      /* exportable allocation: the screen hands back a backing fd */
      mem->pmem = device->pscreen->allocate_memory_fd(device->pscreen, pAllocateInfo->allocationSize, &mem->backed_fd);
      if (!mem->pmem || mem->backed_fd < 0) {
         goto fail;
      }
      mem->memory_type = LVP_DEVICE_MEMORY_TYPE_OPAQUE_FD;
   }
#endif
   else {
      /* plain host allocation through the gallium screen */
      mem->pmem = device->pscreen->allocate_memory(device->pscreen, pAllocateInfo->allocationSize);
      if (!mem->pmem) {
         goto fail;
      }
   }

   mem->type_index = pAllocateInfo->memoryTypeIndex;

   *pMem = lvp_device_memory_to_handle(mem);

   return VK_SUCCESS;

fail:
   vk_free2(&device->vk.alloc, pAllocator, mem);
   return vk_error(device, error);
}
1735
1736VKAPI_ATTR void VKAPI_CALL lvp_FreeMemory(
1737   VkDevice                                    _device,
1738   VkDeviceMemory                              _mem,
1739   const VkAllocationCallbacks*                pAllocator)
1740{
1741   LVP_FROM_HANDLE(lvp_device, device, _device);
1742   LVP_FROM_HANDLE(lvp_device_memory, mem, _mem);
1743
1744   if (mem == NULL)
1745      return;
1746
1747   switch(mem->memory_type) {
1748   case LVP_DEVICE_MEMORY_TYPE_DEFAULT:
1749      device->pscreen->free_memory(device->pscreen, mem->pmem);
1750      break;
1751#ifdef PIPE_MEMORY_FD
1752   case LVP_DEVICE_MEMORY_TYPE_OPAQUE_FD:
1753      device->pscreen->free_memory_fd(device->pscreen, mem->pmem);
1754      if(mem->backed_fd >= 0)
1755         close(mem->backed_fd);
1756      break;
1757#endif
1758   case LVP_DEVICE_MEMORY_TYPE_USER_PTR:
1759   default:
1760      break;
1761   }
1762   vk_object_base_finish(&mem->base);
1763   vk_free2(&device->vk.alloc, pAllocator, mem);
1764
1765}
1766
1767VKAPI_ATTR VkResult VKAPI_CALL lvp_MapMemory(
1768   VkDevice                                    _device,
1769   VkDeviceMemory                              _memory,
1770   VkDeviceSize                                offset,
1771   VkDeviceSize                                size,
1772   VkMemoryMapFlags                            flags,
1773   void**                                      ppData)
1774{
1775   LVP_FROM_HANDLE(lvp_device, device, _device);
1776   LVP_FROM_HANDLE(lvp_device_memory, mem, _memory);
1777   void *map;
1778   if (mem == NULL) {
1779      *ppData = NULL;
1780      return VK_SUCCESS;
1781   }
1782
1783   map = device->pscreen->map_memory(device->pscreen, mem->pmem);
1784
1785   *ppData = (char *)map + offset;
1786   return VK_SUCCESS;
1787}
1788
1789VKAPI_ATTR void VKAPI_CALL lvp_UnmapMemory(
1790   VkDevice                                    _device,
1791   VkDeviceMemory                              _memory)
1792{
1793   LVP_FROM_HANDLE(lvp_device, device, _device);
1794   LVP_FROM_HANDLE(lvp_device_memory, mem, _memory);
1795
1796   if (mem == NULL)
1797      return;
1798
1799   device->pscreen->unmap_memory(device->pscreen, mem->pmem);
1800}
1801
/* No-op: presumably all lavapipe memory behaves as host-coherent, so there
 * is nothing to flush — TODO confirm against the advertised memory types.
 */
VKAPI_ATTR VkResult VKAPI_CALL lvp_FlushMappedMemoryRanges(
   VkDevice                                    _device,
   uint32_t                                    memoryRangeCount,
   const VkMappedMemoryRange*                  pMemoryRanges)
{
   return VK_SUCCESS;
}
1809
/* No-op: presumably all lavapipe memory behaves as host-coherent, so there
 * is nothing to invalidate — TODO confirm against the advertised memory types.
 */
VKAPI_ATTR VkResult VKAPI_CALL lvp_InvalidateMappedMemoryRanges(
   VkDevice                                    _device,
   uint32_t                                    memoryRangeCount,
   const VkMappedMemoryRange*                  pMemoryRanges)
{
   return VK_SUCCESS;
}
1817
1818VKAPI_ATTR void VKAPI_CALL lvp_GetBufferMemoryRequirements(
1819   VkDevice                                    device,
1820   VkBuffer                                    _buffer,
1821   VkMemoryRequirements*                       pMemoryRequirements)
1822{
1823   LVP_FROM_HANDLE(lvp_buffer, buffer, _buffer);
1824
1825   /* The Vulkan spec (git aaed022) says:
1826    *
1827    *    memoryTypeBits is a bitfield and contains one bit set for every
1828    *    supported memory type for the resource. The bit `1<<i` is set if and
1829    *    only if the memory type `i` in the VkPhysicalDeviceMemoryProperties
1830    *    structure for the physical device is supported.
1831    *
1832    * We support exactly one memory type.
1833    */
1834   pMemoryRequirements->memoryTypeBits = 1;
1835
1836   pMemoryRequirements->size = buffer->total_size;
1837   pMemoryRequirements->alignment = 64;
1838}
1839
1840VKAPI_ATTR void VKAPI_CALL lvp_GetBufferMemoryRequirements2(
1841   VkDevice                                     device,
1842   const VkBufferMemoryRequirementsInfo2       *pInfo,
1843   VkMemoryRequirements2                       *pMemoryRequirements)
1844{
1845   lvp_GetBufferMemoryRequirements(device, pInfo->buffer,
1846                                   &pMemoryRequirements->memoryRequirements);
1847   vk_foreach_struct(ext, pMemoryRequirements->pNext) {
1848      switch (ext->sType) {
1849      case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: {
1850         VkMemoryDedicatedRequirements *req =
1851            (VkMemoryDedicatedRequirements *) ext;
1852         req->requiresDedicatedAllocation = false;
1853         req->prefersDedicatedAllocation = req->requiresDedicatedAllocation;
1854         break;
1855      }
1856      default:
1857         break;
1858      }
1859   }
1860}
1861
1862VKAPI_ATTR void VKAPI_CALL lvp_GetImageMemoryRequirements(
1863   VkDevice                                    device,
1864   VkImage                                     _image,
1865   VkMemoryRequirements*                       pMemoryRequirements)
1866{
1867   LVP_FROM_HANDLE(lvp_image, image, _image);
1868   pMemoryRequirements->memoryTypeBits = 1;
1869
1870   pMemoryRequirements->size = image->size;
1871   pMemoryRequirements->alignment = image->alignment;
1872}
1873
1874VKAPI_ATTR void VKAPI_CALL lvp_GetImageMemoryRequirements2(
1875   VkDevice                                    device,
1876   const VkImageMemoryRequirementsInfo2       *pInfo,
1877   VkMemoryRequirements2                      *pMemoryRequirements)
1878{
1879   lvp_GetImageMemoryRequirements(device, pInfo->image,
1880                                  &pMemoryRequirements->memoryRequirements);
1881
1882   vk_foreach_struct(ext, pMemoryRequirements->pNext) {
1883      switch (ext->sType) {
1884      case VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS: {
1885         VkMemoryDedicatedRequirements *req =
1886            (VkMemoryDedicatedRequirements *) ext;
1887         req->requiresDedicatedAllocation = false;
1888         req->prefersDedicatedAllocation = req->requiresDedicatedAllocation;
1889         break;
1890      }
1891      default:
1892         break;
1893      }
1894   }
1895}
1896
/* Sparse resources are not implemented; stub() logs the unreachable call. */
VKAPI_ATTR void VKAPI_CALL lvp_GetImageSparseMemoryRequirements(
   VkDevice                                    device,
   VkImage                                     image,
   uint32_t*                                   pSparseMemoryRequirementCount,
   VkSparseImageMemoryRequirements*            pSparseMemoryRequirements)
{
   stub();
}
1905
/* Sparse resources are not implemented; stub() logs the unreachable call. */
VKAPI_ATTR void VKAPI_CALL lvp_GetImageSparseMemoryRequirements2(
   VkDevice                                    device,
   const VkImageSparseMemoryRequirementsInfo2* pInfo,
   uint32_t* pSparseMemoryRequirementCount,
   VkSparseImageMemoryRequirements2* pSparseMemoryRequirements)
{
   stub();
}
1914
/* Always report zero committed bytes — presumably no lazily-allocated
 * memory types are exposed, so this value is never meaningful here.
 */
VKAPI_ATTR void VKAPI_CALL lvp_GetDeviceMemoryCommitment(
   VkDevice                                    device,
   VkDeviceMemory                              memory,
   VkDeviceSize*                               pCommittedMemoryInBytes)
{
   *pCommittedMemoryInBytes = 0;
}
1922
1923VKAPI_ATTR VkResult VKAPI_CALL lvp_BindBufferMemory2(VkDevice _device,
1924                               uint32_t bindInfoCount,
1925                               const VkBindBufferMemoryInfo *pBindInfos)
1926{
1927   LVP_FROM_HANDLE(lvp_device, device, _device);
1928   for (uint32_t i = 0; i < bindInfoCount; ++i) {
1929      LVP_FROM_HANDLE(lvp_device_memory, mem, pBindInfos[i].memory);
1930      LVP_FROM_HANDLE(lvp_buffer, buffer, pBindInfos[i].buffer);
1931
1932      buffer->pmem = mem->pmem;
1933      device->pscreen->resource_bind_backing(device->pscreen,
1934                                             buffer->bo,
1935                                             mem->pmem,
1936                                             pBindInfos[i].memoryOffset);
1937   }
1938   return VK_SUCCESS;
1939}
1940
1941VKAPI_ATTR VkResult VKAPI_CALL lvp_BindImageMemory2(VkDevice _device,
1942                              uint32_t bindInfoCount,
1943                              const VkBindImageMemoryInfo *pBindInfos)
1944{
1945   LVP_FROM_HANDLE(lvp_device, device, _device);
1946   for (uint32_t i = 0; i < bindInfoCount; ++i) {
1947      const VkBindImageMemoryInfo *bind_info = &pBindInfos[i];
1948      LVP_FROM_HANDLE(lvp_device_memory, mem, bind_info->memory);
1949      LVP_FROM_HANDLE(lvp_image, image, bind_info->image);
1950      bool did_bind = false;
1951
1952      vk_foreach_struct_const(s, bind_info->pNext) {
1953         switch (s->sType) {
1954         case VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_SWAPCHAIN_INFO_KHR: {
1955            const VkBindImageMemorySwapchainInfoKHR *swapchain_info =
1956               (const VkBindImageMemorySwapchainInfoKHR *) s;
1957            struct lvp_image *swapchain_image =
1958               lvp_swapchain_get_image(swapchain_info->swapchain,
1959                                       swapchain_info->imageIndex);
1960
1961            image->pmem = swapchain_image->pmem;
1962            image->memory_offset = swapchain_image->memory_offset;
1963            device->pscreen->resource_bind_backing(device->pscreen,
1964                                                   image->bo,
1965                                                   image->pmem,
1966                                                   image->memory_offset);
1967            did_bind = true;
1968         }
1969         default:
1970            break;
1971         }
1972      }
1973
1974      if (!did_bind) {
1975         if (!device->pscreen->resource_bind_backing(device->pscreen,
1976                                                     image->bo,
1977                                                     mem->pmem,
1978                                                     bind_info->memoryOffset)) {
1979            /* This is probably caused by the texture being too large, so let's
1980             * report this as the *closest* allowed error-code. It's not ideal,
1981             * but it's unlikely that anyone will care too much.
1982             */
1983            return vk_error(device, VK_ERROR_OUT_OF_DEVICE_MEMORY);
1984         }
1985         image->pmem = mem->pmem;
1986         image->memory_offset = bind_info->memoryOffset;
1987      }
1988   }
1989   return VK_SUCCESS;
1990}
1991
1992#ifdef PIPE_MEMORY_FD
1993
1994VkResult
1995lvp_GetMemoryFdKHR(VkDevice _device, const VkMemoryGetFdInfoKHR *pGetFdInfo, int *pFD)
1996{
1997   LVP_FROM_HANDLE(lvp_device_memory, memory, pGetFdInfo->memory);
1998
1999   assert(pGetFdInfo->sType == VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR);
2000   assert(pGetFdInfo->handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT);
2001
2002   *pFD = dup(memory->backed_fd);
2003   assert(*pFD >= 0);
2004   return VK_SUCCESS;
2005}
2006
2007VkResult
2008lvp_GetMemoryFdPropertiesKHR(VkDevice _device,
2009                             VkExternalMemoryHandleTypeFlagBits handleType,
2010                             int fd,
2011                             VkMemoryFdPropertiesKHR *pMemoryFdProperties)
2012{
2013   LVP_FROM_HANDLE(lvp_device, device, _device);
2014
2015   assert(pMemoryFdProperties->sType == VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR);
2016
2017   if(handleType == VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT) {
2018      // There is only one memoryType so select this one
2019      pMemoryFdProperties->memoryTypeBits = 1;
2020   }
2021   else
2022      return vk_error(device->instance, VK_ERROR_INVALID_EXTERNAL_HANDLE);
2023   return VK_SUCCESS;
2024}
2025
2026#endif
2027
/* Sparse binding is not implemented; stub_return logs and fails. */
VKAPI_ATTR VkResult VKAPI_CALL lvp_QueueBindSparse(
   VkQueue                                     queue,
   uint32_t                                    bindInfoCount,
   const VkBindSparseInfo*                     pBindInfo,
   VkFence                                     fence)
{
   stub_return(VK_ERROR_INCOMPATIBLE_DRIVER);
}
2036
2037
2038VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateFence(
2039   VkDevice                                    _device,
2040   const VkFenceCreateInfo*                    pCreateInfo,
2041   const VkAllocationCallbacks*                pAllocator,
2042   VkFence*                                    pFence)
2043{
2044   LVP_FROM_HANDLE(lvp_device, device, _device);
2045   struct lvp_fence *fence;
2046
2047   fence = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*fence), 8,
2048                     VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2049   if (fence == NULL)
2050      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2051   vk_object_base_init(&device->vk, &fence->base, VK_OBJECT_TYPE_FENCE);
2052   util_queue_fence_init(&fence->fence);
2053   fence->signalled = (pCreateInfo->flags & VK_FENCE_CREATE_SIGNALED_BIT) == VK_FENCE_CREATE_SIGNALED_BIT;
2054
2055   fence->handle = NULL;
2056   fence->timeline = 0;
2057   *pFence = lvp_fence_to_handle(fence);
2058
2059   return VK_SUCCESS;
2060}
2061
/* Destroy a fence, dropping any gallium fence reference it still holds. */
VKAPI_ATTR void VKAPI_CALL lvp_DestroyFence(
   VkDevice                                    _device,
   VkFence                                     _fence,
   const VkAllocationCallbacks*                pAllocator)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_fence, fence, _fence);

   /* destroying VK_NULL_HANDLE is a valid no-op */
   if (!_fence)
      return;
   /* evade annoying destroy assert: re-init puts the u_queue fence into a
    * state util_queue_fence_destroy accepts regardless of prior use
    */
   util_queue_fence_init(&fence->fence);
   util_queue_fence_destroy(&fence->fence);
   if (fence->handle)
      device->pscreen->fence_reference(device->pscreen, &fence->handle, NULL);

   vk_object_base_finish(&fence->base);
   vk_free2(&device->vk.alloc, pAllocator, fence);
}
2081
/* Reset fences to the unsignalled state, releasing any gallium fence they
 * accumulated from a prior submit.
 */
VKAPI_ATTR VkResult VKAPI_CALL lvp_ResetFences(
   VkDevice                                    _device,
   uint32_t                                    fenceCount,
   const VkFence*                              pFences)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   for (unsigned i = 0; i < fenceCount; i++) {
      struct lvp_fence *fence = lvp_fence_from_handle(pFences[i]);
      /* ensure u_queue doesn't explode when submitting a completed lvp_fence
       * which has not yet signalled its u_queue fence
       */
      util_queue_fence_wait(&fence->fence);

      if (fence->handle) {
         /* if this fence is also the queue's cached "last" fence, drop
          * that cached reference under the lock so it can't dangle
          */
         simple_mtx_lock(&device->queue.last_lock);
         if (fence->handle == device->queue.last_fence)
            device->pscreen->fence_reference(device->pscreen, &device->queue.last_fence, NULL);
         simple_mtx_unlock(&device->queue.last_lock);
         device->pscreen->fence_reference(device->pscreen, &fence->handle, NULL);
      }
      fence->signalled = false;
   }
   return VK_SUCCESS;
}
2106
/* Non-blocking fence poll: VK_SUCCESS once the submit thread has run the
 * job AND its gallium fence has completed, VK_NOT_READY otherwise.
 */
VKAPI_ATTR VkResult VKAPI_CALL lvp_GetFenceStatus(
   VkDevice                                    _device,
   VkFence                                     _fence)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   LVP_FROM_HANDLE(lvp_fence, fence, _fence);

   /* fast path: already observed as signalled */
   if (fence->signalled)
      return VK_SUCCESS;

   /* not ready if the submit thread hasn't processed the job yet, no
    * gallium fence was ever attached, or that fence hasn't completed
    * (timeout 0 == poll, never block)
    */
   if (!util_queue_fence_is_signalled(&fence->fence) ||
       !fence->handle ||
       !device->pscreen->fence_finish(device->pscreen, NULL, fence->handle, 0))
      return VK_NOT_READY;

   fence->signalled = true;
   /* if this was the queue's most recent fence, retire the cached
    * reference and advance last_finished so semaphore pruning can progress
    */
   simple_mtx_lock(&device->queue.last_lock);
   if (fence->handle == device->queue.last_fence) {
      device->pscreen->fence_reference(device->pscreen, &device->queue.last_fence, NULL);
      device->queue.last_finished = fence->timeline;
   }
   simple_mtx_unlock(&device->queue.last_lock);
   return VK_SUCCESS;
}
2131
2132VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateFramebuffer(
2133   VkDevice                                    _device,
2134   const VkFramebufferCreateInfo*              pCreateInfo,
2135   const VkAllocationCallbacks*                pAllocator,
2136   VkFramebuffer*                              pFramebuffer)
2137{
2138   LVP_FROM_HANDLE(lvp_device, device, _device);
2139   struct lvp_framebuffer *framebuffer;
2140   const VkFramebufferAttachmentsCreateInfo *imageless_create_info =
2141      vk_find_struct_const(pCreateInfo->pNext,
2142                           FRAMEBUFFER_ATTACHMENTS_CREATE_INFO);
2143
2144   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO);
2145
2146   size_t size = sizeof(*framebuffer);
2147
2148   if (!imageless_create_info)
2149      size += sizeof(struct lvp_image_view *) * pCreateInfo->attachmentCount;
2150   framebuffer = vk_alloc2(&device->vk.alloc, pAllocator, size, 8,
2151                           VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2152   if (framebuffer == NULL)
2153      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2154
2155   vk_object_base_init(&device->vk, &framebuffer->base,
2156                       VK_OBJECT_TYPE_FRAMEBUFFER);
2157
2158   if (!imageless_create_info) {
2159      framebuffer->attachment_count = pCreateInfo->attachmentCount;
2160      for (uint32_t i = 0; i < pCreateInfo->attachmentCount; i++) {
2161         VkImageView _iview = pCreateInfo->pAttachments[i];
2162         framebuffer->attachments[i] = lvp_image_view_from_handle(_iview);
2163      }
2164   }
2165
2166   framebuffer->width = pCreateInfo->width;
2167   framebuffer->height = pCreateInfo->height;
2168   framebuffer->layers = pCreateInfo->layers;
2169   framebuffer->imageless = !!imageless_create_info;
2170
2171   *pFramebuffer = lvp_framebuffer_to_handle(framebuffer);
2172
2173   return VK_SUCCESS;
2174}
2175
2176VKAPI_ATTR void VKAPI_CALL lvp_DestroyFramebuffer(
2177   VkDevice                                    _device,
2178   VkFramebuffer                               _fb,
2179   const VkAllocationCallbacks*                pAllocator)
2180{
2181   LVP_FROM_HANDLE(lvp_device, device, _device);
2182   LVP_FROM_HANDLE(lvp_framebuffer, fb, _fb);
2183
2184   if (!fb)
2185      return;
2186   vk_object_base_finish(&fb->base);
2187   vk_free2(&device->vk.alloc, pAllocator, fb);
2188}
2189
VKAPI_ATTR VkResult VKAPI_CALL lvp_WaitForFences(
   VkDevice                                    _device,
   uint32_t                                    fenceCount,
   const VkFence*                              pFences,
   VkBool32                                    waitAll,
   uint64_t                                    timeout)
{
   LVP_FROM_HANDLE(lvp_device, device, _device);
   struct lvp_fence *fence = NULL;

   /* lavapipe is completely synchronous, so only one fence needs to be waited on */
   if (waitAll) {
      /* find highest timeline id */
      for (unsigned i = 0; i < fenceCount; i++) {
         struct lvp_fence *f = lvp_fence_from_handle(pFences[i]);

         /* this is an unsubmitted fence: immediately bail out */
         if (!f->timeline && !f->signalled)
            return VK_TIMEOUT;
         if (!fence || f->timeline > fence->timeline)
            fence = f;
      }
   } else {
      /* find lowest timeline id */
      for (unsigned i = 0; i < fenceCount; i++) {
         struct lvp_fence *f = lvp_fence_from_handle(pFences[i]);
         /* waitAny: any already-signalled fence satisfies the wait */
         if (f->signalled)
            return VK_SUCCESS;
         if (f->timeline && (!fence || f->timeline < fence->timeline))
            fence = f;
      }
   }
   /* no submitted fence found to wait on */
   if (!fence)
      return VK_TIMEOUT;
   if (fence->signalled)
      return VK_SUCCESS;

   /* first wait for the submit thread to hand the work to the queue,
    * charging that wait against the caller's timeout budget
    */
   if (!util_queue_fence_is_signalled(&fence->fence)) {
      int64_t abs_timeout = os_time_get_absolute_timeout(timeout);
      if (!util_queue_fence_wait_timeout(&fence->fence, abs_timeout))
         return VK_TIMEOUT;

      /* reduce the remaining timeout by however long we just waited */
      int64_t time_ns = os_time_get_nano();
      timeout = abs_timeout > time_ns ? abs_timeout - time_ns : 0;
   }

   /* then wait on the gallium fence itself with whatever budget remains */
   if (!fence->handle ||
       !device->pscreen->fence_finish(device->pscreen, NULL, fence->handle, timeout))
      return VK_TIMEOUT;
   /* retire the queue's cached last fence and advance last_finished if this
    * was the most recent submission
    */
   simple_mtx_lock(&device->queue.last_lock);
   if (fence->handle == device->queue.last_fence) {
      device->pscreen->fence_reference(device->pscreen, &device->queue.last_fence, NULL);
      device->queue.last_finished = fence->timeline;
   }
   simple_mtx_unlock(&device->queue.last_lock);
   fence->signalled = true;
   return VK_SUCCESS;
}
2248
2249VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateSemaphore(
2250   VkDevice                                    _device,
2251   const VkSemaphoreCreateInfo*                pCreateInfo,
2252   const VkAllocationCallbacks*                pAllocator,
2253   VkSemaphore*                                pSemaphore)
2254{
2255   LVP_FROM_HANDLE(lvp_device, device, _device);
2256
2257   struct lvp_semaphore *sema = vk_alloc2(&device->vk.alloc, pAllocator,
2258                                          sizeof(*sema), 8,
2259                                          VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2260
2261   if (!sema)
2262      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2263   vk_object_base_init(&device->vk, &sema->base,
2264                       VK_OBJECT_TYPE_SEMAPHORE);
2265
2266   const VkSemaphoreTypeCreateInfo *info = vk_find_struct_const(pCreateInfo->pNext, SEMAPHORE_TYPE_CREATE_INFO);
2267   sema->is_timeline = info && info->semaphoreType == VK_SEMAPHORE_TYPE_TIMELINE;
2268   if (sema->is_timeline) {
2269      sema->is_timeline = true;
2270      sema->timeline = NULL;
2271      sema->current = info->initialValue;
2272      sema->mem = ralloc_context(NULL);
2273      util_dynarray_init(&sema->links, sema->mem);
2274      simple_mtx_init(&sema->lock, mtx_plain);
2275      mtx_init(&sema->submit_lock, mtx_plain);
2276      cnd_init(&sema->submit);
2277   }
2278
2279   *pSemaphore = lvp_semaphore_to_handle(sema);
2280
2281   return VK_SUCCESS;
2282}
2283
2284VKAPI_ATTR void VKAPI_CALL lvp_DestroySemaphore(
2285   VkDevice                                    _device,
2286   VkSemaphore                                 _semaphore,
2287   const VkAllocationCallbacks*                pAllocator)
2288{
2289   LVP_FROM_HANDLE(lvp_device, device, _device);
2290   LVP_FROM_HANDLE(lvp_semaphore, sema, _semaphore);
2291
2292   if (!_semaphore)
2293      return;
2294   if (sema->is_timeline) {
2295      ralloc_free(sema->mem);
2296      simple_mtx_destroy(&sema->lock);
2297      mtx_destroy(&sema->submit_lock);
2298      cnd_destroy(&sema->submit);
2299   }
2300   vk_object_base_finish(&sema->base);
2301   vk_free2(&device->vk.alloc, pAllocator, sema);
2302}
2303
2304VKAPI_ATTR VkResult VKAPI_CALL lvp_WaitSemaphores(
2305    VkDevice                                    _device,
2306    const VkSemaphoreWaitInfo*                  pWaitInfo,
2307    uint64_t                                    timeout)
2308{
2309   LVP_FROM_HANDLE(lvp_device, device, _device);
2310   /* same mechanism as used by queue submit */
2311   return wait_semaphores(device, pWaitInfo, timeout);
2312}
2313
2314VKAPI_ATTR VkResult VKAPI_CALL lvp_GetSemaphoreCounterValue(
2315    VkDevice                                    _device,
2316    VkSemaphore                                 _semaphore,
2317    uint64_t*                                   pValue)
2318{
2319   LVP_FROM_HANDLE(lvp_device, device, _device);
2320   LVP_FROM_HANDLE(lvp_semaphore, sema, _semaphore);
2321   simple_mtx_lock(&sema->lock);
2322   prune_semaphore_links(device, sema, device->queue.last_finished);
2323   *pValue = sema->current;
2324   simple_mtx_unlock(&sema->lock);
2325   return VK_SUCCESS;
2326}
2327
2328VKAPI_ATTR VkResult VKAPI_CALL lvp_SignalSemaphore(
2329    VkDevice                                    _device,
2330    const VkSemaphoreSignalInfo*                pSignalInfo)
2331{
2332   LVP_FROM_HANDLE(lvp_device, device, _device);
2333   LVP_FROM_HANDLE(lvp_semaphore, sema, pSignalInfo->semaphore);
2334
2335   /* try to remain monotonic */
2336   if (sema->current < pSignalInfo->value)
2337      sema->current = pSignalInfo->value;
2338   cnd_broadcast(&sema->submit);
2339   simple_mtx_lock(&sema->lock);
2340   prune_semaphore_links(device, sema, device->queue.last_finished);
2341   simple_mtx_unlock(&sema->lock);
2342   return VK_SUCCESS;
2343}
2344
2345VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateEvent(
2346   VkDevice                                    _device,
2347   const VkEventCreateInfo*                    pCreateInfo,
2348   const VkAllocationCallbacks*                pAllocator,
2349   VkEvent*                                    pEvent)
2350{
2351   LVP_FROM_HANDLE(lvp_device, device, _device);
2352   struct lvp_event *event = vk_alloc2(&device->vk.alloc, pAllocator,
2353                                       sizeof(*event), 8,
2354                                       VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2355
2356   if (!event)
2357      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2358
2359   vk_object_base_init(&device->vk, &event->base, VK_OBJECT_TYPE_EVENT);
2360   *pEvent = lvp_event_to_handle(event);
2361   event->event_storage = 0;
2362
2363   return VK_SUCCESS;
2364}
2365
2366VKAPI_ATTR void VKAPI_CALL lvp_DestroyEvent(
2367   VkDevice                                    _device,
2368   VkEvent                                     _event,
2369   const VkAllocationCallbacks*                pAllocator)
2370{
2371   LVP_FROM_HANDLE(lvp_device, device, _device);
2372   LVP_FROM_HANDLE(lvp_event, event, _event);
2373
2374   if (!event)
2375      return;
2376
2377   vk_object_base_finish(&event->base);
2378   vk_free2(&device->vk.alloc, pAllocator, event);
2379}
2380
2381VKAPI_ATTR VkResult VKAPI_CALL lvp_GetEventStatus(
2382   VkDevice                                    _device,
2383   VkEvent                                     _event)
2384{
2385   LVP_FROM_HANDLE(lvp_event, event, _event);
2386   if (event->event_storage == 1)
2387      return VK_EVENT_SET;
2388   return VK_EVENT_RESET;
2389}
2390
2391VKAPI_ATTR VkResult VKAPI_CALL lvp_SetEvent(
2392   VkDevice                                    _device,
2393   VkEvent                                     _event)
2394{
2395   LVP_FROM_HANDLE(lvp_event, event, _event);
2396   event->event_storage = 1;
2397
2398   return VK_SUCCESS;
2399}
2400
2401VKAPI_ATTR VkResult VKAPI_CALL lvp_ResetEvent(
2402   VkDevice                                    _device,
2403   VkEvent                                     _event)
2404{
2405   LVP_FROM_HANDLE(lvp_event, event, _event);
2406   event->event_storage = 0;
2407
2408   return VK_SUCCESS;
2409}
2410
2411VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateSampler(
2412   VkDevice                                    _device,
2413   const VkSamplerCreateInfo*                  pCreateInfo,
2414   const VkAllocationCallbacks*                pAllocator,
2415   VkSampler*                                  pSampler)
2416{
2417   LVP_FROM_HANDLE(lvp_device, device, _device);
2418   struct lvp_sampler *sampler;
2419   const VkSamplerReductionModeCreateInfo *reduction_mode_create_info =
2420      vk_find_struct_const(pCreateInfo->pNext,
2421                           SAMPLER_REDUCTION_MODE_CREATE_INFO);
2422   const VkSamplerCustomBorderColorCreateInfoEXT *custom_border_color_create_info =
2423      vk_find_struct_const(pCreateInfo->pNext,
2424                           SAMPLER_CUSTOM_BORDER_COLOR_CREATE_INFO_EXT);
2425
2426   assert(pCreateInfo->sType == VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO);
2427
2428   sampler = vk_alloc2(&device->vk.alloc, pAllocator, sizeof(*sampler), 8,
2429                       VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2430   if (!sampler)
2431      return vk_error(device, VK_ERROR_OUT_OF_HOST_MEMORY);
2432
2433   vk_object_base_init(&device->vk, &sampler->base,
2434                       VK_OBJECT_TYPE_SAMPLER);
2435   sampler->create_info = *pCreateInfo;
2436
2437   switch (pCreateInfo->borderColor) {
2438   case VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK:
2439   case VK_BORDER_COLOR_INT_TRANSPARENT_BLACK:
2440   default:
2441      memset(&sampler->border_color, 0, sizeof(union pipe_color_union));
2442      break;
2443   case VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK:
2444      sampler->border_color.f[0] = sampler->border_color.f[1] =
2445      sampler->border_color.f[2] = 0.0f;
2446      sampler->border_color.f[3] = 1.0f;
2447      break;
2448   case VK_BORDER_COLOR_INT_OPAQUE_BLACK:
2449      sampler->border_color.i[0] = sampler->border_color.i[1] =
2450      sampler->border_color.i[2] = 0;
2451      sampler->border_color.i[3] = 1;
2452      break;
2453   case VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE:
2454      sampler->border_color.f[0] = sampler->border_color.f[1] =
2455      sampler->border_color.f[2] = 1.0f;
2456      sampler->border_color.f[3] = 1.0f;
2457      break;
2458   case VK_BORDER_COLOR_INT_OPAQUE_WHITE:
2459      sampler->border_color.i[0] = sampler->border_color.i[1] =
2460      sampler->border_color.i[2] = 1;
2461      sampler->border_color.i[3] = 1;
2462      break;
2463   case VK_BORDER_COLOR_FLOAT_CUSTOM_EXT:
2464   case VK_BORDER_COLOR_INT_CUSTOM_EXT:
2465      assert(custom_border_color_create_info != NULL);
2466      memcpy(&sampler->border_color,
2467             &custom_border_color_create_info->customBorderColor,
2468             sizeof(union pipe_color_union));
2469      break;
2470   }
2471
2472   sampler->reduction_mode = VK_SAMPLER_REDUCTION_MODE_WEIGHTED_AVERAGE;
2473   if (reduction_mode_create_info)
2474      sampler->reduction_mode = reduction_mode_create_info->reductionMode;
2475
2476   *pSampler = lvp_sampler_to_handle(sampler);
2477
2478   return VK_SUCCESS;
2479}
2480
2481VKAPI_ATTR void VKAPI_CALL lvp_DestroySampler(
2482   VkDevice                                    _device,
2483   VkSampler                                   _sampler,
2484   const VkAllocationCallbacks*                pAllocator)
2485{
2486   LVP_FROM_HANDLE(lvp_device, device, _device);
2487   LVP_FROM_HANDLE(lvp_sampler, sampler, _sampler);
2488
2489   if (!_sampler)
2490      return;
2491   vk_object_base_finish(&sampler->base);
2492   vk_free2(&device->vk.alloc, pAllocator, sampler);
2493}
2494
/* Ycbcr conversion is not implemented in lavapipe; creation always fails.
 * NOTE(review): VK_ERROR_OUT_OF_HOST_MEMORY looks like a placeholder error
 * code for an unsupported feature — confirm intent before changing.
 */
VKAPI_ATTR VkResult VKAPI_CALL lvp_CreateSamplerYcbcrConversionKHR(
    VkDevice                                    device,
    const VkSamplerYcbcrConversionCreateInfo*   pCreateInfo,
    const VkAllocationCallbacks*                pAllocator,
    VkSamplerYcbcrConversion*                   pYcbcrConversion)
{
   return VK_ERROR_OUT_OF_HOST_MEMORY;
}
2503
/* No-op: Ycbcr conversion objects are never created (see create above). */
VKAPI_ATTR void VKAPI_CALL lvp_DestroySamplerYcbcrConversionKHR(
    VkDevice                                    device,
    VkSamplerYcbcrConversion                    ycbcrConversion,
    const VkAllocationCallbacks*                pAllocator)
{
}
2510
2511/* vk_icd.h does not declare this function, so we declare it here to
2512 * suppress Wmissing-prototypes.
2513 */
2514PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
2515vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion);
2516
2517PUBLIC VKAPI_ATTR VkResult VKAPI_CALL
2518vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pSupportedVersion)
2519{
2520   /* For the full details on loader interface versioning, see
2521    * <https://github.com/KhronosGroup/Vulkan-LoaderAndValidationLayers/blob/master/loader/LoaderAndLayerInterface.md>.
2522    * What follows is a condensed summary, to help you navigate the large and
2523    * confusing official doc.
2524    *
2525    *   - Loader interface v0 is incompatible with later versions. We don't
2526    *     support it.
2527    *
2528    *   - In loader interface v1:
2529    *       - The first ICD entrypoint called by the loader is
2530    *         vk_icdGetInstanceProcAddr(). The ICD must statically expose this
2531    *         entrypoint.
2532    *       - The ICD must statically expose no other Vulkan symbol unless it is
2533    *         linked with -Bsymbolic.
2534    *       - Each dispatchable Vulkan handle created by the ICD must be
2535    *         a pointer to a struct whose first member is VK_LOADER_DATA. The
2536    *         ICD must initialize VK_LOADER_DATA.loadMagic to ICD_LOADER_MAGIC.
2537    *       - The loader implements vkCreate{PLATFORM}SurfaceKHR() and
2538    *         vkDestroySurfaceKHR(). The ICD must be capable of working with
2539    *         such loader-managed surfaces.
2540    *
2541    *    - Loader interface v2 differs from v1 in:
2542    *       - The first ICD entrypoint called by the loader is
2543    *         vk_icdNegotiateLoaderICDInterfaceVersion(). The ICD must
2544    *         statically expose this entrypoint.
2545    *
2546    *    - Loader interface v3 differs from v2 in:
2547    *        - The ICD must implement vkCreate{PLATFORM}SurfaceKHR(),
2548    *          vkDestroySurfaceKHR(), and other API which uses VKSurfaceKHR,
2549    *          because the loader no longer does so.
2550    *
2551    *    - Loader interface v4 differs from v3 in:
2552    *        - The ICD must implement vk_icdGetPhysicalDeviceProcAddr().
2553    */
2554   *pSupportedVersion = MIN2(*pSupportedVersion, 4u);
2555   return VK_SUCCESS;
2556}
2557
2558VKAPI_ATTR VkResult VKAPI_CALL lvp_CreatePrivateDataSlotEXT(
2559   VkDevice                                    _device,
2560   const VkPrivateDataSlotCreateInfoEXT*       pCreateInfo,
2561   const VkAllocationCallbacks*                pAllocator,
2562   VkPrivateDataSlotEXT*                       pPrivateDataSlot)
2563{
2564   LVP_FROM_HANDLE(lvp_device, device, _device);
2565   return vk_private_data_slot_create(&device->vk, pCreateInfo, pAllocator,
2566                                      pPrivateDataSlot);
2567}
2568
2569VKAPI_ATTR void VKAPI_CALL lvp_DestroyPrivateDataSlotEXT(
2570   VkDevice                                    _device,
2571   VkPrivateDataSlotEXT                        privateDataSlot,
2572   const VkAllocationCallbacks*                pAllocator)
2573{
2574   LVP_FROM_HANDLE(lvp_device, device, _device);
2575   vk_private_data_slot_destroy(&device->vk, privateDataSlot, pAllocator);
2576}
2577
2578VKAPI_ATTR VkResult VKAPI_CALL lvp_SetPrivateDataEXT(
2579   VkDevice                                    _device,
2580   VkObjectType                                objectType,
2581   uint64_t                                    objectHandle,
2582   VkPrivateDataSlotEXT                        privateDataSlot,
2583   uint64_t                                    data)
2584{
2585   LVP_FROM_HANDLE(lvp_device, device, _device);
2586   return vk_object_base_set_private_data(&device->vk, objectType,
2587                                          objectHandle, privateDataSlot,
2588                                          data);
2589}
2590
2591VKAPI_ATTR void VKAPI_CALL lvp_GetPrivateDataEXT(
2592   VkDevice                                    _device,
2593   VkObjectType                                objectType,
2594   uint64_t                                    objectHandle,
2595   VkPrivateDataSlotEXT                        privateDataSlot,
2596   uint64_t*                                   pData)
2597{
2598   LVP_FROM_HANDLE(lvp_device, device, _device);
2599   vk_object_base_get_private_data(&device->vk, objectType, objectHandle,
2600                                   privateDataSlot, pData);
2601}
2602
2603VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceExternalFenceProperties(
2604   VkPhysicalDevice                           physicalDevice,
2605   const VkPhysicalDeviceExternalFenceInfo    *pExternalFenceInfo,
2606   VkExternalFenceProperties                  *pExternalFenceProperties)
2607{
2608   pExternalFenceProperties->exportFromImportedHandleTypes = 0;
2609   pExternalFenceProperties->compatibleHandleTypes = 0;
2610   pExternalFenceProperties->externalFenceFeatures = 0;
2611}
2612
2613VKAPI_ATTR void VKAPI_CALL lvp_GetPhysicalDeviceExternalSemaphoreProperties(
2614   VkPhysicalDevice                            physicalDevice,
2615   const VkPhysicalDeviceExternalSemaphoreInfo *pExternalSemaphoreInfo,
2616   VkExternalSemaphoreProperties               *pExternalSemaphoreProperties)
2617{
2618   pExternalSemaphoreProperties->exportFromImportedHandleTypes = 0;
2619   pExternalSemaphoreProperties->compatibleHandleTypes = 0;
2620   pExternalSemaphoreProperties->externalSemaphoreFeatures = 0;
2621}
2622
/* Calibrateable time domains advertised by lvp_GetPhysicalDeviceCalibrateableTimeDomainsEXT.
 * Both map to CLOCK_MONOTONIC-style host time since lavapipe runs on the CPU.
 */
static const VkTimeDomainEXT lvp_time_domains[] = {
        VK_TIME_DOMAIN_DEVICE_EXT,
        VK_TIME_DOMAIN_CLOCK_MONOTONIC_EXT,
};
2627
2628VKAPI_ATTR VkResult VKAPI_CALL lvp_GetPhysicalDeviceCalibrateableTimeDomainsEXT(
2629   VkPhysicalDevice physicalDevice,
2630   uint32_t *pTimeDomainCount,
2631   VkTimeDomainEXT *pTimeDomains)
2632{
2633   int d;
2634   VK_OUTARRAY_MAKE_TYPED(VkTimeDomainEXT, out, pTimeDomains,
2635                          pTimeDomainCount);
2636
2637   for (d = 0; d < ARRAY_SIZE(lvp_time_domains); d++) {
2638      vk_outarray_append_typed(VkTimeDomainEXT, &out, i) {
2639         *i = lvp_time_domains[d];
2640      }
2641    }
2642
2643    return vk_outarray_status(&out);
2644}
2645
2646VKAPI_ATTR VkResult VKAPI_CALL lvp_GetCalibratedTimestampsEXT(
2647   VkDevice device,
2648   uint32_t timestampCount,
2649   const VkCalibratedTimestampInfoEXT *pTimestampInfos,
2650   uint64_t *pTimestamps,
2651   uint64_t *pMaxDeviation)
2652{
2653   *pMaxDeviation = 1;
2654
2655   uint64_t now = os_time_get_nano();
2656   for (unsigned i = 0; i < timestampCount; i++) {
2657      pTimestamps[i] = now;
2658   }
2659   return VK_SUCCESS;
2660}
2661
2662VKAPI_ATTR void VKAPI_CALL lvp_GetDeviceGroupPeerMemoryFeaturesKHR(
2663    VkDevice device,
2664    uint32_t heapIndex,
2665    uint32_t localDeviceIndex,
2666    uint32_t remoteDeviceIndex,
2667    VkPeerMemoryFeatureFlags *pPeerMemoryFeatures)
2668{
2669   *pPeerMemoryFeatures = 0;
2670}
2671