/*
 * Copyright © 2017, Google Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <hardware/gralloc.h>

#if ANDROID_API_LEVEL >= 26
#include <hardware/gralloc1.h>
#endif

#include <hardware/hardware.h>
#include <hardware/hwvulkan.h>
#include <vulkan/vk_android_native_buffer.h>
#include <vulkan/vk_icd.h>
#include <sync/sync.h>

#include "anv_private.h"
#include "vk_common_entrypoints.h"
#include "vk_util.h"

static int anv_hal_open(const struct hw_module_t* mod, const char* id, struct hw_device_t** dev);
static int anv_hal_close(struct hw_device_t *dev);

static void UNUSED
static_asserts(void)
{
   STATIC_ASSERT(HWVULKAN_DISPATCH_MAGIC == ICD_LOADER_MAGIC);
}

PUBLIC struct hwvulkan_module_t HAL_MODULE_INFO_SYM = {
   .common = {
      .tag = HARDWARE_MODULE_TAG,
      .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
      .hal_api_version = HARDWARE_MAKE_API_VERSION(1, 0),
      .id = HWVULKAN_HARDWARE_MODULE_ID,
      .name = "Intel Vulkan HAL",
      .author = "Intel",
      .methods = &(hw_module_methods_t) {
         .open = anv_hal_open,
      },
   },
};

/* If any bits in test_mask are set, then unset them and return true. */
static inline bool
unmask32(uint32_t *inout_mask, uint32_t test_mask)
{
   uint32_t orig_mask = *inout_mask;
   *inout_mask &= ~test_mask;
   return *inout_mask != orig_mask;
}

static int
anv_hal_open(const struct hw_module_t* mod, const char* id,
             struct hw_device_t** dev)
{
   assert(mod == &HAL_MODULE_INFO_SYM.common);
   assert(strcmp(id, HWVULKAN_DEVICE_0) == 0);

   hwvulkan_device_t *hal_dev = malloc(sizeof(*hal_dev));
   if (!hal_dev)
      return -1;

   *hal_dev = (hwvulkan_device_t) {
      .common = {
         .tag = HARDWARE_DEVICE_TAG,
         .version = HWVULKAN_DEVICE_API_VERSION_0_1,
         .module = &HAL_MODULE_INFO_SYM.common,
         .close = anv_hal_close,
      },
      .EnumerateInstanceExtensionProperties = anv_EnumerateInstanceExtensionProperties,
      .CreateInstance = anv_CreateInstance,
      .GetInstanceProcAddr = anv_GetInstanceProcAddr,
   };

   *dev = &hal_dev->common;
   return 0;
}

static int
anv_hal_close(struct hw_device_t *dev)
{
   /* hwvulkan.h claims that hw_device_t::close() is never called. */
   return -1;
}
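
/*
 * Everything from here to the matching #endif implements AHardwareBuffer
 * support (format/property queries, import, and export) for
 * VK_ANDROID_external_memory_android_hardware_buffer, which requires
 * Android API level 26 or newer.
 */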
#if ANDROID_API_LEVEL >= 26
#include <vndk/hardware_buffer.h>
/* See i915_private_android_types.h in minigbm. */
#define HAL_PIXEL_FORMAT_NV12_Y_TILED_INTEL 0x100

enum {
   /* Usage bit equal to GRALLOC_USAGE_HW_CAMERA_MASK */
   BUFFER_USAGE_CAMERA_MASK = 0x00060000U,
};

inline VkFormat
vk_format_from_android(unsigned android_format, unsigned android_usage)
{
   switch (android_format) {
   case AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM:
      return VK_FORMAT_R8G8B8A8_UNORM;
   case AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM:
   case AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM:
      return VK_FORMAT_R8G8B8_UNORM;
   case AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM:
      return VK_FORMAT_R5G6B5_UNORM_PACK16;
   case AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT:
      return VK_FORMAT_R16G16B16A16_SFLOAT;
   case AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM:
      return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
   case AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420:
   case HAL_PIXEL_FORMAT_NV12_Y_TILED_INTEL:
      return VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
   case AHARDWAREBUFFER_FORMAT_IMPLEMENTATION_DEFINED:
      if (android_usage & BUFFER_USAGE_CAMERA_MASK)
         return VK_FORMAT_G8_B8R8_2PLANE_420_UNORM;
      else
         return VK_FORMAT_R8G8B8_UNORM;
   case AHARDWAREBUFFER_FORMAT_BLOB:
   default:
      return VK_FORMAT_UNDEFINED;
   }
}

static inline unsigned
android_format_from_vk(unsigned vk_format)
{
   switch (vk_format) {
   case VK_FORMAT_R8G8B8A8_UNORM:
      return AHARDWAREBUFFER_FORMAT_R8G8B8A8_UNORM;
   case VK_FORMAT_R8G8B8_UNORM:
      return AHARDWAREBUFFER_FORMAT_R8G8B8_UNORM;
   case VK_FORMAT_R5G6B5_UNORM_PACK16:
      return AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM;
   case VK_FORMAT_R16G16B16A16_SFLOAT:
      return AHARDWAREBUFFER_FORMAT_R16G16B16A16_FLOAT;
   case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
      return AHARDWAREBUFFER_FORMAT_R10G10B10A2_UNORM;
   case VK_FORMAT_G8_B8R8_2PLANE_420_UNORM:
#ifdef HAVE_CROS_GRALLOC
      return AHARDWAREBUFFER_FORMAT_Y8Cb8Cr8_420;
#else
      return HAL_PIXEL_FORMAT_NV12_Y_TILED_INTEL;
#endif
   default:
      return AHARDWAREBUFFER_FORMAT_BLOB;
   }
}
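
/*
 * Note that the two mappings above are not exact inverses: several Android
 * formats (e.g. AHARDWAREBUFFER_FORMAT_R8G8B8X8_UNORM and, outside of camera
 * usage, AHARDWAREBUFFER_FORMAT_IMPLEMENTATION_DEFINED) collapse onto the
 * same VkFormat, and any Vulkan format without an Android equivalent falls
 * back to AHARDWAREBUFFER_FORMAT_BLOB.
 */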

static VkFormatFeatureFlags
features2_to_features(VkFormatFeatureFlags2KHR features2)
{
   return features2 & VK_ALL_FORMAT_FEATURE_FLAG_BITS;
}

static VkResult
get_ahw_buffer_format_properties2(
   VkDevice device_h,
   const struct AHardwareBuffer *buffer,
   VkAndroidHardwareBufferFormatProperties2ANDROID *pProperties)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);

   /* Get a description of buffer contents. */
   AHardwareBuffer_Desc desc;
   AHardwareBuffer_describe(buffer, &desc);

   /* Verify description. */
   uint64_t gpu_usage =
      AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE |
      AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT |
      AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER;

   /* "Buffer must be a valid Android hardware buffer object with at least
    * one of the AHARDWAREBUFFER_USAGE_GPU_* usage flags."
    */
   if (!(desc.usage & (gpu_usage)))
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   /* Fill properties fields based on description. */
   VkAndroidHardwareBufferFormatProperties2ANDROID *p = pProperties;

   p->format = vk_format_from_android(desc.format, desc.usage);

   const struct anv_format *anv_format = anv_get_format(p->format);
   p->externalFormat = (uint64_t) (uintptr_t) anv_format;

   /* Default to OPTIMAL tiling but set to linear in case
    * of AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER usage.
    */
   VkImageTiling tiling = VK_IMAGE_TILING_OPTIMAL;

   if (desc.usage & AHARDWAREBUFFER_USAGE_GPU_DATA_BUFFER)
      tiling = VK_IMAGE_TILING_LINEAR;

   p->formatFeatures =
      anv_get_image_format_features2(&device->info, p->format, anv_format,
                                     tiling, NULL);

   /* "Images can be created with an external format even if the Android hardware
    * buffer has a format which has an equivalent Vulkan format to enable
    * consistent handling of images from sources that might use either category
    * of format. However, all images created with an external format are subject
    * to the valid usage requirements associated with external formats, even if
    * the Android hardware buffer’s format has a Vulkan equivalent."
    *
    * "The formatFeatures member *must* include
    * VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT and at least one of
    * VK_FORMAT_FEATURE_MIDPOINT_CHROMA_SAMPLES_BIT or
    * VK_FORMAT_FEATURE_COSITED_CHROMA_SAMPLES_BIT"
    */
   p->formatFeatures |=
      VK_FORMAT_FEATURE_2_MIDPOINT_CHROMA_SAMPLES_BIT_KHR;

   /* "Implementations may not always be able to determine the color model,
    * numerical range, or chroma offsets of the image contents, so the values
    * in VkAndroidHardwareBufferFormatPropertiesANDROID are only suggestions.
    * Applications should treat these values as sensible defaults to use in
    * the absence of more reliable information obtained through some other
    * means."
    */
   p->samplerYcbcrConversionComponents.r = VK_COMPONENT_SWIZZLE_IDENTITY;
   p->samplerYcbcrConversionComponents.g = VK_COMPONENT_SWIZZLE_IDENTITY;
   p->samplerYcbcrConversionComponents.b = VK_COMPONENT_SWIZZLE_IDENTITY;
   p->samplerYcbcrConversionComponents.a = VK_COMPONENT_SWIZZLE_IDENTITY;

   p->suggestedYcbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601;
   p->suggestedYcbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_FULL;

   p->suggestedXChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
   p->suggestedYChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;

   return VK_SUCCESS;
}
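
/*
 * The externalFormat reported above is an opaque driver-specific value (here
 * the anv_format pointer). Applications pass it back through
 * VkExternalFormatANDROID when creating an image or sampler Y'CbCr
 * conversion that uses an external format.
 */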

VkResult
anv_GetAndroidHardwareBufferPropertiesANDROID(
   VkDevice device_h,
   const struct AHardwareBuffer *buffer,
   VkAndroidHardwareBufferPropertiesANDROID *pProperties)
{
   ANV_FROM_HANDLE(anv_device, dev, device_h);

   VkAndroidHardwareBufferFormatPropertiesANDROID *format_prop =
      vk_find_struct(pProperties->pNext,
                     ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_ANDROID);
   /* Fill format properties of an Android hardware buffer. */
   if (format_prop) {
      VkAndroidHardwareBufferFormatProperties2ANDROID format_prop2 = {
         .sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID,
      };
      get_ahw_buffer_format_properties2(device_h, buffer, &format_prop2);

      format_prop->format = format_prop2.format;
      format_prop->externalFormat = format_prop2.externalFormat;
      format_prop->formatFeatures =
         features2_to_features(format_prop2.formatFeatures);
      format_prop->samplerYcbcrConversionComponents =
         format_prop2.samplerYcbcrConversionComponents;
      format_prop->suggestedYcbcrModel = format_prop2.suggestedYcbcrModel;
      format_prop->suggestedYcbcrRange = format_prop2.suggestedYcbcrRange;
      format_prop->suggestedXChromaOffset = format_prop2.suggestedXChromaOffset;
      format_prop->suggestedYChromaOffset = format_prop2.suggestedYChromaOffset;
   }

   VkAndroidHardwareBufferFormatProperties2ANDROID *format_prop2 =
      vk_find_struct(pProperties->pNext,
                     ANDROID_HARDWARE_BUFFER_FORMAT_PROPERTIES_2_ANDROID);
   if (format_prop2)
      get_ahw_buffer_format_properties2(device_h, buffer, format_prop2);

   /* NOTE - We support buffers with only one handle but do not error in the
    * multiple-handle case. The reason is that we want to support YUV formats
    * where we have many logical planes but they all point to the same
    * buffer, as is the case with VK_FORMAT_G8_B8R8_2PLANE_420_UNORM.
    */
   const native_handle_t *handle =
      AHardwareBuffer_getNativeHandle(buffer);
   int dma_buf = (handle && handle->numFds) ? handle->data[0] : -1;
   if (dma_buf < 0)
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   /* All memory types. */
   uint32_t memory_types = (1ull << dev->physical->memory.type_count) - 1;

   pProperties->allocationSize = lseek(dma_buf, 0, SEEK_END);
   pProperties->memoryTypeBits = memory_types;

   return VK_SUCCESS;
}

VkResult
anv_GetMemoryAndroidHardwareBufferANDROID(
   VkDevice device_h,
   const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
   struct AHardwareBuffer **pBuffer)
{
   ANV_FROM_HANDLE(anv_device_memory, mem, pInfo->memory);

   /* Some quotes from Vulkan spec:
    *
    * "If the device memory was created by importing an Android hardware
    * buffer, vkGetMemoryAndroidHardwareBufferANDROID must return that same
    * Android hardware buffer object."
    *
    * "VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID must
    * have been included in VkExportMemoryAllocateInfo::handleTypes when
    * memory was created."
    */
   if (mem->ahw) {
      *pBuffer = mem->ahw;
      /* Increase refcount. */
      AHardwareBuffer_acquire(mem->ahw);
      return VK_SUCCESS;
   }

   return VK_ERROR_OUT_OF_HOST_MEMORY;
}
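
/*
 * Illustrative application-side flow for the two entry points above (not
 * driver code, shown only to document how they are meant to be used):
 *
 *    VkAndroidHardwareBufferPropertiesANDROID props = {
 *       .sType = VK_STRUCTURE_TYPE_ANDROID_HARDWARE_BUFFER_PROPERTIES_ANDROID,
 *    };
 *    vkGetAndroidHardwareBufferPropertiesANDROID(device, ahb, &props);
 *
 *    VkImportAndroidHardwareBufferInfoANDROID import = {
 *       .sType = VK_STRUCTURE_TYPE_IMPORT_ANDROID_HARDWARE_BUFFER_INFO_ANDROID,
 *       .buffer = ahb,
 *    };
 *    VkMemoryAllocateInfo alloc = {
 *       .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
 *       .pNext = &import,
 *       .allocationSize = props.allocationSize,
 *       .memoryTypeIndex = ...,  chosen from props.memoryTypeBits
 *    };
 *    vkAllocateMemory(device, &alloc, NULL, &mem);
 *
 * vkGetMemoryAndroidHardwareBufferANDROID() covers the export direction: it
 * returns (and acquires a reference to) the AHardwareBuffer backing an
 * existing exportable VkDeviceMemory.
 */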

#endif

/* Construct ahw usage mask from image usage bits, see
 * 'AHardwareBuffer Usage Equivalence' in Vulkan spec.
 */
uint64_t
anv_ahw_usage_from_vk_usage(const VkImageCreateFlags vk_create,
                            const VkImageUsageFlags vk_usage)
{
   uint64_t ahw_usage = 0;
#if ANDROID_API_LEVEL >= 26
   if (vk_usage & VK_IMAGE_USAGE_SAMPLED_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   if (vk_usage & VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;

   if (vk_usage & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_COLOR_OUTPUT;

   if (vk_create & VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_GPU_CUBE_MAP;

   if (vk_create & VK_IMAGE_CREATE_PROTECTED_BIT)
      ahw_usage |= AHARDWAREBUFFER_USAGE_PROTECTED_CONTENT;

   /* No usage bits set - set at least one GPU usage. */
   if (ahw_usage == 0)
      ahw_usage = AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
#endif
   return ahw_usage;
}

/*
 * Called from anv_AllocateMemory when importing an AHardwareBuffer.
 */
VkResult
anv_import_ahw_memory(VkDevice device_h,
                      struct anv_device_memory *mem,
                      const VkImportAndroidHardwareBufferInfoANDROID *info)
{
#if ANDROID_API_LEVEL >= 26
   ANV_FROM_HANDLE(anv_device, device, device_h);

   /* Import from AHardwareBuffer to anv_device_memory. */
   const native_handle_t *handle =
      AHardwareBuffer_getNativeHandle(info->buffer);

   /* NOTE - We support buffers with only one handle but do not error in the
    * multiple-handle case. The reason is that we want to support YUV formats
    * where we have many logical planes but they all point to the same
    * buffer, as is the case with VK_FORMAT_G8_B8R8_2PLANE_420_UNORM.
    */
   int dma_buf = (handle && handle->numFds) ? handle->data[0] : -1;
   if (dma_buf < 0)
      return VK_ERROR_INVALID_EXTERNAL_HANDLE;

   VkResult result = anv_device_import_bo(device, dma_buf, 0,
                                          0 /* client_address */,
                                          &mem->bo);
   assert(result == VK_SUCCESS);

   /* "If the vkAllocateMemory command succeeds, the implementation must
    * acquire a reference to the imported hardware buffer, which it must
    * release when the device memory object is freed. If the command fails,
    * the implementation must not retain a reference."
    */
   AHardwareBuffer_acquire(info->buffer);
   mem->ahw = info->buffer;

   return VK_SUCCESS;
#else
   return VK_ERROR_EXTENSION_NOT_PRESENT;
#endif
}
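
/*
 * Counterpart of anv_import_ahw_memory() for the export path: allocate a new
 * AHardwareBuffer sized from the dedicated image or buffer (or a BLOB of
 * allocationSize bytes when no dedicated resource is given) and then import
 * it into the anv_device_memory.
 */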
VkResult
anv_create_ahw_memory(VkDevice device_h,
                      struct anv_device_memory *mem,
                      const VkMemoryAllocateInfo *pAllocateInfo)
{
#if ANDROID_API_LEVEL >= 26
   const VkMemoryDedicatedAllocateInfo *dedicated_info =
      vk_find_struct_const(pAllocateInfo->pNext,
                           MEMORY_DEDICATED_ALLOCATE_INFO);

   uint32_t w = 0;
   uint32_t h = 1;
   uint32_t layers = 1;
   uint32_t format = 0;
   uint64_t usage = 0;

   /* If the caller passed dedicated allocation information, size the
    * AHardwareBuffer from the dedicated image or buffer.
    */
   if (dedicated_info && dedicated_info->image) {
      ANV_FROM_HANDLE(anv_image, image, dedicated_info->image);
      w = image->vk.extent.width;
      h = image->vk.extent.height;
      layers = image->vk.array_layers;
      format = android_format_from_vk(image->vk.format);
      usage = anv_ahw_usage_from_vk_usage(image->vk.create_flags, image->vk.usage);
   } else if (dedicated_info && dedicated_info->buffer) {
      ANV_FROM_HANDLE(anv_buffer, buffer, dedicated_info->buffer);
      w = buffer->size;
      format = AHARDWAREBUFFER_FORMAT_BLOB;
      usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
              AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
   } else {
      w = pAllocateInfo->allocationSize;
      format = AHARDWAREBUFFER_FORMAT_BLOB;
      usage = AHARDWAREBUFFER_USAGE_CPU_READ_OFTEN |
              AHARDWAREBUFFER_USAGE_CPU_WRITE_OFTEN;
   }

   struct AHardwareBuffer *ahw = NULL;
   struct AHardwareBuffer_Desc desc = {
      .width = w,
      .height = h,
      .layers = layers,
      .format = format,
      .usage = usage,
   };

   if (AHardwareBuffer_allocate(&desc, &ahw) != 0)
      return VK_ERROR_OUT_OF_HOST_MEMORY;

   const VkImportAndroidHardwareBufferInfoANDROID import_info = {
      .buffer = ahw,
   };
   VkResult result = anv_import_ahw_memory(device_h, mem, &import_info);

   /* Release our allocation reference to avoid a leak;
    * anv_import_ahw_memory() acquired its own reference above.
    */
   AHardwareBuffer_release(ahw);

   return result;
#else
   return VK_ERROR_EXTENSION_NOT_PRESENT;
#endif
}
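
/*
 * Initialize a VkImage directly from a gralloc buffer
 * (VkNativeBufferANDROID): import the gralloc dma-buf, derive the isl tiling
 * flags from the BO's i915 tiling mode, and attach the BO as the image's
 * main memory binding.
 */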
VkResult
anv_image_init_from_gralloc(struct anv_device *device,
                            struct anv_image *image,
                            const VkImageCreateInfo *base_info,
                            const VkNativeBufferANDROID *gralloc_info)
{
   struct anv_bo *bo = NULL;
   VkResult result;

   struct anv_image_create_info anv_info = {
      .vk_info = base_info,
      .isl_extra_usage_flags = ISL_SURF_USAGE_DISABLE_AUX_BIT,
   };

   if (gralloc_info->handle->numFds != 1) {
      return vk_errorf(device, VK_ERROR_INVALID_EXTERNAL_HANDLE,
                       "VkNativeBufferANDROID::handle::numFds is %d, "
                       "expected 1", gralloc_info->handle->numFds);
   }

   /* Do not close the gralloc handle's dma_buf. The lifetime of the dma_buf
    * must exceed that of the gralloc handle, and we do not own the gralloc
    * handle.
    */
   int dma_buf = gralloc_info->handle->data[0];

   /* We need to set the WRITE flag on window system buffers so that GEM will
    * know we're writing to them and synchronize uses on other rings (for
    * example, if the display server uses the blitter ring).
    *
    * If this function fails and if the imported bo was resident in the cache,
    * we should avoid updating the bo's flags. Therefore, we defer updating
    * the flags until success is certain.
    */
   result = anv_device_import_bo(device, dma_buf,
                                 ANV_BO_ALLOC_IMPLICIT_SYNC |
                                 ANV_BO_ALLOC_IMPLICIT_WRITE,
                                 0 /* client_address */,
                                 &bo);
   if (result != VK_SUCCESS) {
      return vk_errorf(device, result,
                       "failed to import dma-buf from VkNativeBufferANDROID");
   }

   int i915_tiling = anv_gem_get_tiling(device, bo->gem_handle);
   switch (i915_tiling) {
   case I915_TILING_NONE:
      anv_info.isl_tiling_flags = ISL_TILING_LINEAR_BIT;
      break;
   case I915_TILING_X:
      anv_info.isl_tiling_flags = ISL_TILING_X_BIT;
      break;
   case I915_TILING_Y:
      anv_info.isl_tiling_flags = ISL_TILING_Y0_BIT;
      break;
   case -1:
      result = vk_errorf(device, VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "DRM_IOCTL_I915_GEM_GET_TILING failed for "
                         "VkNativeBufferANDROID");
      goto fail_tiling;
   default:
      result = vk_errorf(device, VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "DRM_IOCTL_I915_GEM_GET_TILING returned unknown "
                         "tiling %d for VkNativeBufferANDROID", i915_tiling);
      goto fail_tiling;
   }

   enum isl_format format = anv_get_isl_format(&device->info,
                                               base_info->format,
                                               VK_IMAGE_ASPECT_COLOR_BIT,
                                               base_info->tiling);
   assert(format != ISL_FORMAT_UNSUPPORTED);

   result = anv_image_init(device, image, &anv_info);
   if (result != VK_SUCCESS)
      goto fail_init;

   VkMemoryRequirements2 mem_reqs = {
      .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
   };

   anv_image_get_memory_requirements(device, image, image->vk.aspects,
                                     &mem_reqs);

   VkDeviceSize aligned_image_size =
      align_u64(mem_reqs.memoryRequirements.size,
                mem_reqs.memoryRequirements.alignment);

   if (bo->size < aligned_image_size) {
      result = vk_errorf(device, VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "dma-buf from VkNativeBufferANDROID is too small for "
                         "VkImage: %"PRIu64"B < %"PRIu64"B",
                         bo->size, aligned_image_size);
      goto fail_size;
   }

   assert(!image->disjoint);
   assert(image->n_planes == 1);
   assert(image->planes[0].primary_surface.memory_range.binding ==
          ANV_IMAGE_MEMORY_BINDING_MAIN);
   assert(image->bindings[ANV_IMAGE_MEMORY_BINDING_MAIN].address.bo == NULL);
   assert(image->bindings[ANV_IMAGE_MEMORY_BINDING_MAIN].address.offset == 0);
   image->bindings[ANV_IMAGE_MEMORY_BINDING_MAIN].address.bo = bo;
   image->from_gralloc = true;

   return VK_SUCCESS;

 fail_size:
   anv_image_finish(image);
 fail_init:
 fail_tiling:
   anv_device_release_bo(device, bo);

   return result;
}
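
/*
 * Like anv_image_init_from_gralloc(), but for an image that has already been
 * initialized: import the gralloc dma-buf, check that it is large enough for
 * the image's main binding, and attach it. Tiling is not re-derived here.
 */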
VkResult
anv_image_bind_from_gralloc(struct anv_device *device,
                            struct anv_image *image,
                            const VkNativeBufferANDROID *gralloc_info)
{
   /* Do not close the gralloc handle's dma_buf. The lifetime of the dma_buf
    * must exceed that of the gralloc handle, and we do not own the gralloc
    * handle.
    */
   int dma_buf = gralloc_info->handle->data[0];

   /* We need to set the WRITE flag on window system buffers so that GEM will
    * know we're writing to them and synchronize uses on other rings (for
    * example, if the display server uses the blitter ring).
    *
    * If this function fails and if the imported bo was resident in the cache,
    * we should avoid updating the bo's flags. Therefore, we defer updating
    * the flags until success is certain.
    */
   struct anv_bo *bo = NULL;
   VkResult result = anv_device_import_bo(device, dma_buf,
                                          ANV_BO_ALLOC_IMPLICIT_SYNC |
                                          ANV_BO_ALLOC_IMPLICIT_WRITE,
                                          0 /* client_address */,
                                          &bo);
   if (result != VK_SUCCESS) {
      return vk_errorf(device, result,
                       "failed to import dma-buf from VkNativeBufferANDROID");
   }

   uint64_t img_size = image->bindings[ANV_IMAGE_MEMORY_BINDING_MAIN].memory_range.size;
   if (bo->size < img_size) {
      result = vk_errorf(device, VK_ERROR_INVALID_EXTERNAL_HANDLE,
                         "dma-buf from VkNativeBufferANDROID is too small for "
                         "VkImage: %"PRIu64"B < %"PRIu64"B",
                         bo->size, img_size);
      anv_device_release_bo(device, bo);
      return result;
   }

   assert(!image->disjoint);
   assert(image->n_planes == 1);
   assert(image->planes[0].primary_surface.memory_range.binding ==
          ANV_IMAGE_MEMORY_BINDING_MAIN);
   assert(image->bindings[ANV_IMAGE_MEMORY_BINDING_MAIN].address.bo == NULL);
   assert(image->bindings[ANV_IMAGE_MEMORY_BINDING_MAIN].address.offset == 0);
   image->bindings[ANV_IMAGE_MEMORY_BINDING_MAIN].address.bo = bo;
   image->from_gralloc = true;

   return VK_SUCCESS;
}

static VkResult
format_supported_with_usage(VkDevice device_h, VkFormat format,
                            VkImageUsageFlags imageUsage)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkPhysicalDevice phys_dev_h = anv_physical_device_to_handle(device->physical);
   VkResult result;

   const VkPhysicalDeviceImageFormatInfo2 image_format_info = {
      .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
      .format = format,
      .type = VK_IMAGE_TYPE_2D,
      .tiling = VK_IMAGE_TILING_OPTIMAL,
      .usage = imageUsage,
   };

   VkImageFormatProperties2 image_format_props = {
      .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
   };

   /* Check that requested format and usage are supported. */
   result = anv_GetPhysicalDeviceImageFormatProperties2(phys_dev_h,
               &image_format_info, &image_format_props);
   if (result != VK_SUCCESS) {
      return vk_errorf(device, result,
                       "anv_GetPhysicalDeviceImageFormatProperties2 failed "
                       "inside %s", __func__);
   }

   return VK_SUCCESS;
}
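
/*
 * Translate the requested VkImageUsageFlags into gralloc0 usage bits; for
 * example, TRANSFER_DST or COLOR_ATTACHMENT usage maps to
 * GRALLOC_USAGE_HW_RENDER and SAMPLED usage maps to GRALLOC_USAGE_HW_TEXTURE.
 * Any usage bit not handled below is rejected with
 * VK_ERROR_FORMAT_NOT_SUPPORTED.
 */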
static VkResult
setup_gralloc0_usage(struct anv_device *device, VkFormat format,
                     VkImageUsageFlags imageUsage, int *grallocUsage)
{
   /* WARNING: Android's libvulkan.so hardcodes the VkImageUsageFlags
    * returned to applications via VkSurfaceCapabilitiesKHR::supportedUsageFlags.
    * The relevant code in libvulkan/swapchain.cpp contains this fun comment:
    *
    *     TODO(jessehall): I think these are right, but haven't thought hard
    *     about it. Do we need to query the driver for support of any of
    *     these?
    *
    * Any disagreement between this function and the hardcoded
    * VkSurfaceCapabilitiesKHR::supportedUsageFlags causes tests
    * dEQP-VK.wsi.android.swapchain.*.image_usage to fail.
    */

   if (unmask32(&imageUsage, VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                             VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT))
      *grallocUsage |= GRALLOC_USAGE_HW_RENDER;

   if (unmask32(&imageUsage, VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
                             VK_IMAGE_USAGE_SAMPLED_BIT |
                             VK_IMAGE_USAGE_STORAGE_BIT |
                             VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT))
      *grallocUsage |= GRALLOC_USAGE_HW_TEXTURE;

   /* All VkImageUsageFlags not explicitly checked here are unsupported for
    * gralloc swapchains.
    */
   if (imageUsage != 0) {
      return vk_errorf(device, VK_ERROR_FORMAT_NOT_SUPPORTED,
                       "unsupported VkImageUsageFlags(0x%x) for gralloc "
                       "swapchain", imageUsage);
   }

   /* The below formats support GRALLOC_USAGE_HW_FB (that is, display
    * scanout). This short list of formats is universally supported on Intel
    * but is incomplete. The full set of supported formats is dependent on
    * kernel and hardware.
    *
    * FINISHME: Advertise all display-supported formats.
    */
   switch (format) {
   case VK_FORMAT_B8G8R8A8_UNORM:
   case VK_FORMAT_R5G6B5_UNORM_PACK16:
   case VK_FORMAT_R8G8B8A8_UNORM:
   case VK_FORMAT_R8G8B8A8_SRGB:
      *grallocUsage |= GRALLOC_USAGE_HW_FB |
                       GRALLOC_USAGE_HW_COMPOSER |
                       GRALLOC_USAGE_EXTERNAL_DISP;
      break;
   default:
      mesa_logw("%s: unsupported format=%d", __func__, format);
   }

   if (*grallocUsage == 0)
      return VK_ERROR_FORMAT_NOT_SUPPORTED;

   return VK_SUCCESS;
}

#if ANDROID_API_LEVEL >= 26
VkResult anv_GetSwapchainGrallocUsage2ANDROID(
   VkDevice device_h,
   VkFormat format,
   VkImageUsageFlags imageUsage,
   VkSwapchainImageUsageFlagsANDROID swapchainImageUsage,
   uint64_t* grallocConsumerUsage,
   uint64_t* grallocProducerUsage)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkResult result;

   *grallocConsumerUsage = 0;
   *grallocProducerUsage = 0;
   mesa_logd("%s: format=%d, usage=0x%x", __func__, format, imageUsage);

   result = format_supported_with_usage(device_h, format, imageUsage);
   if (result != VK_SUCCESS)
      return result;

   int32_t grallocUsage = 0;
   result = setup_gralloc0_usage(device, format, imageUsage, &grallocUsage);
   if (result != VK_SUCCESS)
      return result;

   /* Set up gralloc1 usage flags from gralloc0 flags. */

   if (grallocUsage & GRALLOC_USAGE_HW_RENDER) {
      *grallocProducerUsage |= GRALLOC1_PRODUCER_USAGE_GPU_RENDER_TARGET;
      *grallocConsumerUsage |= GRALLOC1_CONSUMER_USAGE_CLIENT_TARGET;
   }

   if (grallocUsage & GRALLOC_USAGE_HW_TEXTURE) {
      *grallocConsumerUsage |= GRALLOC1_CONSUMER_USAGE_GPU_TEXTURE;
   }

   if (grallocUsage & (GRALLOC_USAGE_HW_FB |
                       GRALLOC_USAGE_HW_COMPOSER |
                       GRALLOC_USAGE_EXTERNAL_DISP)) {
      *grallocProducerUsage |= GRALLOC1_PRODUCER_USAGE_GPU_RENDER_TARGET;
      *grallocConsumerUsage |= GRALLOC1_CONSUMER_USAGE_HWCOMPOSER;
   }

   return VK_SUCCESS;
}
#endif

VkResult anv_GetSwapchainGrallocUsageANDROID(
   VkDevice device_h,
   VkFormat format,
   VkImageUsageFlags imageUsage,
   int* grallocUsage)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkResult result;

   *grallocUsage = 0;
   mesa_logd("%s: format=%d, usage=0x%x", __func__, format, imageUsage);

   result = format_supported_with_usage(device_h, format, imageUsage);
   if (result != VK_SUCCESS)
      return result;

   return setup_gralloc0_usage(device, format, imageUsage, grallocUsage);
}
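
/*
 * Implementation of vkAcquireImageANDROID: import the incoming native fence
 * fd into the provided semaphore and/or fence (duplicating the fd when both
 * are given) and take ownership of the fd in every case, including failure.
 */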
VkResult
anv_AcquireImageANDROID(
   VkDevice device_h,
   VkImage image_h,
   int nativeFenceFd,
   VkSemaphore semaphore_h,
   VkFence fence_h)
{
   ANV_FROM_HANDLE(anv_device, device, device_h);
   VkResult result = VK_SUCCESS;

   /* From https://source.android.com/devices/graphics/implement-vulkan :
    *
    *    "The driver takes ownership of the fence file descriptor and closes
    *    the fence file descriptor when no longer needed. The driver must do
    *    so even if neither a semaphore or fence object is provided, or even
    *    if vkAcquireImageANDROID fails and returns an error."
    *
    * The Vulkan spec for VkImportFence/SemaphoreFdKHR(), however, requires
    * the file descriptor to be left alone on failure.
    */
   int semaphore_fd = -1, fence_fd = -1;
   if (nativeFenceFd >= 0) {
      if (semaphore_h != VK_NULL_HANDLE && fence_h != VK_NULL_HANDLE) {
         /* We have both so we have to import the sync file twice. One of
          * them needs to be a dup.
          */
         semaphore_fd = nativeFenceFd;
         fence_fd = dup(nativeFenceFd);
         if (fence_fd < 0) {
            VkResult err = (errno == EMFILE) ? VK_ERROR_TOO_MANY_OBJECTS :
                                               VK_ERROR_OUT_OF_HOST_MEMORY;
            close(nativeFenceFd);
            return vk_error(device, err);
         }
      } else if (semaphore_h != VK_NULL_HANDLE) {
         semaphore_fd = nativeFenceFd;
      } else if (fence_h != VK_NULL_HANDLE) {
         fence_fd = nativeFenceFd;
      } else {
         /* Nothing to import into so we have to close the file */
         close(nativeFenceFd);
      }
   }

   if (semaphore_h != VK_NULL_HANDLE) {
      const VkImportSemaphoreFdInfoKHR info = {
         .sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
         .semaphore = semaphore_h,
         .flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
         .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
         .fd = semaphore_fd,
      };
      result = anv_ImportSemaphoreFdKHR(device_h, &info);
      if (result == VK_SUCCESS)
         semaphore_fd = -1; /* ANV took ownership */
   }

   if (result == VK_SUCCESS && fence_h != VK_NULL_HANDLE) {
      const VkImportFenceFdInfoKHR info = {
         .sType = VK_STRUCTURE_TYPE_IMPORT_FENCE_FD_INFO_KHR,
         .fence = fence_h,
         .flags = VK_FENCE_IMPORT_TEMPORARY_BIT,
         .handleType = VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT,
         .fd = fence_fd,
      };
      result = anv_ImportFenceFdKHR(device_h, &info);
      if (result == VK_SUCCESS)
         fence_fd = -1; /* ANV took ownership */
   }

   if (semaphore_fd >= 0)
      close(semaphore_fd);
   if (fence_fd >= 0)
      close(fence_fd);

   return result;
}

VkResult
anv_QueueSignalReleaseImageANDROID(
   VkQueue queue,
   uint32_t waitSemaphoreCount,
   const VkSemaphore* pWaitSemaphores,
   VkImage image,
   int* pNativeFenceFd)
{
   VkResult result;

   if (waitSemaphoreCount == 0)
      goto done;

   result = vk_common_QueueSubmit(queue, 1,
      &(VkSubmitInfo) {
         .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
         .waitSemaphoreCount = 1,
         .pWaitSemaphores = pWaitSemaphores,
         .pWaitDstStageMask = &(VkPipelineStageFlags) {
            VK_PIPELINE_STAGE_ALL_COMMANDS_BIT
         },
      },
      (VkFence) VK_NULL_HANDLE);
   if (result != VK_SUCCESS)
      return result;

 done:
   if (pNativeFenceFd) {
      /* We can rely on implicit sync because we submitted all semaphores
       * to the queue above.
       */
      *pNativeFenceFd = -1;
   }

   return VK_SUCCESS;
}