/*
 * Copyright © 2016 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 */

#include <assert.h>
#include <stdbool.h>

#include "radv_meta.h"
#include "radv_private.h"
#include "vk_format.h"
#include "nir/nir_builder.h"
#include "sid.h"

/* emit 0, 0, 0, 1 */
static nir_shader *
build_nir_fs(void)
{
	const struct glsl_type *vec4 = glsl_vec4_type();
	nir_builder b;
	nir_variable *f_color; /* vec4, fragment output color */

	nir_builder_init_simple_shader(&b, NULL, MESA_SHADER_FRAGMENT, NULL);
	b.shader->info.name = ralloc_asprintf(b.shader, "meta_resolve_fs");

	f_color = nir_variable_create(b.shader, nir_var_shader_out, vec4,
				      "f_color");
	f_color->data.location = FRAG_RESULT_DATA0;
	nir_store_var(&b, f_color, nir_imm_vec4(&b, 0.0, 0.0, 0.0, 1.0), 0xf);

	return b.shader;
}

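/* Render pass for the HW resolve path: two color attachments of the given
 * format, attachment 0 (the source) in VK_IMAGE_LAYOUT_GENERAL and
 * attachment 1 (the destination) in VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
 * both loaded and stored. No Vulkan resolve attachment is used; the resolve
 * itself is performed by the CB hardware.
 */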
static VkResult
create_pass(struct radv_device *device, VkFormat vk_format, VkRenderPass *pass)
{
	VkResult result;
	VkDevice device_h = radv_device_to_handle(device);
	const VkAllocationCallbacks *alloc = &device->meta_state.alloc;
	VkAttachmentDescription attachments[2];
	int i;

	for (i = 0; i < 2; i++) {
		attachments[i].flags = 0;
		attachments[i].format = vk_format;
		attachments[i].samples = 1;
		attachments[i].loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
		attachments[i].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
	}
	attachments[0].initialLayout = VK_IMAGE_LAYOUT_GENERAL;
	attachments[0].finalLayout = VK_IMAGE_LAYOUT_GENERAL;
	attachments[1].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
	attachments[1].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

	result = radv_CreateRenderPass(device_h,
				       &(VkRenderPassCreateInfo) {
					       .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
					       .attachmentCount = 2,
					       .pAttachments = attachments,
					       .subpassCount = 1,
					       .pSubpasses = &(VkSubpassDescription) {
						       .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
						       .inputAttachmentCount = 0,
						       .colorAttachmentCount = 2,
						       .pColorAttachments = (VkAttachmentReference[]) {
							       {
								       .attachment = 0,
								       .layout = VK_IMAGE_LAYOUT_GENERAL,
							       },
							       {
								       .attachment = 1,
								       .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
							       },
						       },
						       .pResolveAttachments = NULL,
						       .pDepthStencilAttachment = &(VkAttachmentReference) {
							       .attachment = VK_ATTACHMENT_UNUSED,
						       },
						       .preserveAttachmentCount = 0,
						       .pPreserveAttachments = NULL,
					       },
					       .dependencyCount = 0,
				       },
				       alloc,
				       pass);

	return result;
}

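/* Graphics pipeline for the HW resolve path: a rect-list draw using the
 * caller's vertex shader module and the fragment shader above. Only the first
 * color attachment is written, and the custom blend mode V_028808_CB_RESOLVE
 * makes the CB hardware perform the actual resolve. The shared pipeline
 * layout is created on first use.
 */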
static VkResult
create_pipeline(struct radv_device *device,
		VkShaderModule vs_module_h,
		VkPipeline *pipeline,
		VkRenderPass pass)
{
	VkResult result;
	VkDevice device_h = radv_device_to_handle(device);

	struct radv_shader_module fs_module = {
		.nir = build_nir_fs(),
	};

	if (!fs_module.nir) {
		/* XXX: Need more accurate error */
		result = VK_ERROR_OUT_OF_HOST_MEMORY;
		goto cleanup;
	}

	VkPipelineLayoutCreateInfo pl_create_info = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
		.setLayoutCount = 0,
		.pSetLayouts = NULL,
		.pushConstantRangeCount = 0,
		.pPushConstantRanges = NULL,
	};

	if (!device->meta_state.resolve.p_layout) {
		result = radv_CreatePipelineLayout(radv_device_to_handle(device),
						   &pl_create_info,
						   &device->meta_state.alloc,
						   &device->meta_state.resolve.p_layout);
		if (result != VK_SUCCESS)
			goto cleanup;
	}

	result = radv_graphics_pipeline_create(device_h,
					       radv_pipeline_cache_to_handle(&device->meta_state.cache),
					       &(VkGraphicsPipelineCreateInfo) {
						       .sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
						       .stageCount = 2,
						       .pStages = (VkPipelineShaderStageCreateInfo[]) {
							       {
								       .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
								       .stage = VK_SHADER_STAGE_VERTEX_BIT,
								       .module = vs_module_h,
								       .pName = "main",
							       },
							       {
								       .sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,
								       .stage = VK_SHADER_STAGE_FRAGMENT_BIT,
								       .module = radv_shader_module_to_handle(&fs_module),
								       .pName = "main",
							       },
						       },
						       .pVertexInputState = &(VkPipelineVertexInputStateCreateInfo) {
							       .sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
							       .vertexBindingDescriptionCount = 0,
							       .vertexAttributeDescriptionCount = 0,
						       },
						       .pInputAssemblyState = &(VkPipelineInputAssemblyStateCreateInfo) {
							       .sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
							       .topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
							       .primitiveRestartEnable = false,
						       },
						       .pViewportState = &(VkPipelineViewportStateCreateInfo) {
							       .sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
							       .viewportCount = 1,
							       .scissorCount = 1,
						       },
						       .pRasterizationState = &(VkPipelineRasterizationStateCreateInfo) {
							       .sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
							       .depthClampEnable = false,
							       .rasterizerDiscardEnable = false,
							       .polygonMode = VK_POLYGON_MODE_FILL,
							       .cullMode = VK_CULL_MODE_NONE,
							       .frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE,
						       },
						       .pMultisampleState = &(VkPipelineMultisampleStateCreateInfo) {
							       .sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
							       .rasterizationSamples = 1,
							       .sampleShadingEnable = false,
							       .pSampleMask = NULL,
							       .alphaToCoverageEnable = false,
							       .alphaToOneEnable = false,
						       },
						       .pColorBlendState = &(VkPipelineColorBlendStateCreateInfo) {
							       .sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
							       .logicOpEnable = false,
							       .attachmentCount = 2,
							       .pAttachments = (VkPipelineColorBlendAttachmentState []) {
								       {
									       .colorWriteMask = VK_COLOR_COMPONENT_R_BIT |
												 VK_COLOR_COMPONENT_G_BIT |
												 VK_COLOR_COMPONENT_B_BIT |
												 VK_COLOR_COMPONENT_A_BIT,
								       },
								       {
									       .colorWriteMask = 0,
								       },
							       },
						       },
						       .pDynamicState = &(VkPipelineDynamicStateCreateInfo) {
							       .sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
							       .dynamicStateCount = 2,
							       .pDynamicStates = (VkDynamicState[]) {
								       VK_DYNAMIC_STATE_VIEWPORT,
								       VK_DYNAMIC_STATE_SCISSOR,
							       },
						       },
						       .layout = device->meta_state.resolve.p_layout,
						       .renderPass = pass,
						       .subpass = 0,
					       },
					       &(struct radv_graphics_pipeline_create_info) {
						       .use_rectlist = true,
						       .custom_blend_mode = V_028808_CB_RESOLVE,
					       },
					       &device->meta_state.alloc, pipeline);

cleanup:
	ralloc_free(fs_module.nir);
	return result;
}

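/* Destroy the render passes, pipelines and pipeline layout owned by the HW
 * resolve meta state.
 */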
void
radv_device_finish_meta_resolve_state(struct radv_device *device)
{
	struct radv_meta_state *state = &device->meta_state;

	for (uint32_t j = 0; j < NUM_META_FS_KEYS; j++) {
		radv_DestroyRenderPass(radv_device_to_handle(device),
				       state->resolve.pass[j], &state->alloc);
		radv_DestroyPipeline(radv_device_to_handle(device),
				     state->resolve.pipeline[j], &state->alloc);
	}
	radv_DestroyPipelineLayout(radv_device_to_handle(device),
				   state->resolve.p_layout, &state->alloc);
}

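/* Create the HW resolve render pass and pipeline for every meta FS key.
 * When on_demand is set, creation is deferred to build_resolve_pipeline(),
 * which builds them lazily the first time a key is needed.
 */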
VkResult
radv_device_init_meta_resolve_state(struct radv_device *device, bool on_demand)
{
	if (on_demand)
		return VK_SUCCESS;

	VkResult res = VK_SUCCESS;
	struct radv_meta_state *state = &device->meta_state;
	struct radv_shader_module vs_module = { .nir = radv_meta_build_nir_vs_generate_vertices() };
	if (!vs_module.nir) {
		/* XXX: Need more accurate error */
		res = VK_ERROR_OUT_OF_HOST_MEMORY;
		goto fail;
	}

	for (uint32_t i = 0; i < NUM_META_FS_KEYS; ++i) {
		VkFormat format = radv_fs_key_format_exemplars[i];
		unsigned fs_key = radv_format_meta_fs_key(format);
		res = create_pass(device, format, &state->resolve.pass[fs_key]);
		if (res != VK_SUCCESS)
			goto fail;

		VkShaderModule vs_module_h = radv_shader_module_to_handle(&vs_module);
		res = create_pipeline(device, vs_module_h,
				      &state->resolve.pipeline[fs_key], state->resolve.pass[fs_key]);
		if (res != VK_SUCCESS)
			goto fail;
	}

	goto cleanup;

fail:
	radv_device_finish_meta_resolve_state(device);

cleanup:
	ralloc_free(vs_module.nir);

	return res;
}

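/* Record the HW resolve draw: bind the resolve pipeline for the format, set
 * the viewport and scissor to the destination rectangle and draw a single
 * rectangle. CB cache flushes are requested before and after the draw.
 */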
static void
emit_resolve(struct radv_cmd_buffer *cmd_buffer,
	     VkFormat vk_format,
	     const VkOffset2D *dest_offset,
	     const VkExtent2D *resolve_extent)
{
	struct radv_device *device = cmd_buffer->device;
	VkCommandBuffer cmd_buffer_h = radv_cmd_buffer_to_handle(cmd_buffer);
	unsigned fs_key = radv_format_meta_fs_key(vk_format);

	cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB;

	radv_CmdBindPipeline(cmd_buffer_h, VK_PIPELINE_BIND_POINT_GRAPHICS,
			     device->meta_state.resolve.pipeline[fs_key]);

	radv_CmdSetViewport(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &(VkViewport) {
		.x = dest_offset->x,
		.y = dest_offset->y,
		.width = resolve_extent->width,
		.height = resolve_extent->height,
		.minDepth = 0.0f,
		.maxDepth = 1.0f
	});

	radv_CmdSetScissor(radv_cmd_buffer_to_handle(cmd_buffer), 0, 1, &(VkRect2D) {
		.offset = *dest_offset,
		.extent = *resolve_extent,
	});

	radv_CmdDraw(cmd_buffer_h, 3, 1, 0, 0);
	cmd_buffer->state.flush_bits |= RADV_CMD_FLAG_FLUSH_AND_INV_CB;
}

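/* The three ways radv can perform a resolve: the CB hardware path, a compute
 * shader fallback, or a fragment shader fallback.
 */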
enum radv_resolve_method {
	RESOLVE_HW,
	RESOLVE_COMPUTE,
	RESOLVE_FRAGMENT,
};

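/* Downgrade from the HW path when the images cannot be resolved by the CB:
 * R16G16 norm formats, integer formats, layered images and mismatched micro
 * tile modes go through compute, while DCC-compressed destinations use the
 * fragment path.
 */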
static void radv_pick_resolve_method_images(struct radv_image *src_image,
					    struct radv_image *dest_image,
					    VkImageLayout dest_image_layout,
					    struct radv_cmd_buffer *cmd_buffer,
					    enum radv_resolve_method *method)
{
	uint32_t queue_mask = radv_image_queue_family_mask(dest_image,
	                                                   cmd_buffer->queue_family_index,
	                                                   cmd_buffer->queue_family_index);

	if (src_image->vk_format == VK_FORMAT_R16G16_UNORM ||
	    src_image->vk_format == VK_FORMAT_R16G16_SNORM)
		*method = RESOLVE_COMPUTE;
	else if (vk_format_is_int(src_image->vk_format))
		*method = RESOLVE_COMPUTE;
	else if (src_image->info.array_size > 1 ||
		 dest_image->info.array_size > 1)
		*method = RESOLVE_COMPUTE;

	if (radv_layout_dcc_compressed(dest_image, dest_image_layout, queue_mask)) {
		*method = RESOLVE_FRAGMENT;
	} else if (dest_image->planes[0].surface.micro_tile_mode !=
	           src_image->planes[0].surface.micro_tile_mode) {
		*method = RESOLVE_COMPUTE;
	}
}

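/* On-demand creation of the render pass and pipeline for a given FS key,
 * guarded by the meta state mutex so concurrent command buffers only build
 * them once.
 */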
static VkResult
build_resolve_pipeline(struct radv_device *device, unsigned fs_key)
{
	VkResult result = VK_SUCCESS;

	if (device->meta_state.resolve.pipeline[fs_key])
		return result;

	mtx_lock(&device->meta_state.mtx);
	if (device->meta_state.resolve.pipeline[fs_key]) {
		mtx_unlock(&device->meta_state.mtx);
		return result;
	}

	struct radv_shader_module vs_module = { .nir = radv_meta_build_nir_vs_generate_vertices() };

	result = create_pass(device, radv_fs_key_format_exemplars[fs_key], &device->meta_state.resolve.pass[fs_key]);
	if (result != VK_SUCCESS)
		goto fail;

	VkShaderModule vs_module_h = radv_shader_module_to_handle(&vs_module);
	result = create_pipeline(device, vs_module_h, &device->meta_state.resolve.pipeline[fs_key], device->meta_state.resolve.pass[fs_key]);

fail:
	ralloc_free(vs_module.nir);
	mtx_unlock(&device->meta_state.mtx);
	return result;
}

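/* Implementation of vkCmdResolveImage. Only single, full-image, zero-offset
 * resolves can use the CB hardware path; everything else is routed to the
 * fragment or compute fallbacks chosen above.
 */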
void radv_CmdResolveImage(
	VkCommandBuffer                             cmd_buffer_h,
	VkImage                                     src_image_h,
	VkImageLayout                               src_image_layout,
	VkImage                                     dest_image_h,
	VkImageLayout                               dest_image_layout,
	uint32_t                                    region_count,
	const VkImageResolve*                       regions)
{
	RADV_FROM_HANDLE(radv_cmd_buffer, cmd_buffer, cmd_buffer_h);
	RADV_FROM_HANDLE(radv_image, src_image, src_image_h);
	RADV_FROM_HANDLE(radv_image, dest_image, dest_image_h);
	struct radv_device *device = cmd_buffer->device;
	struct radv_meta_saved_state saved_state;
	VkDevice device_h = radv_device_to_handle(device);
	enum radv_resolve_method resolve_method = RESOLVE_HW;

	/* We can use the HW resolve only for single, full resolves. */
	if (region_count == 1) {
		if (regions[0].srcOffset.x ||
		    regions[0].srcOffset.y ||
		    regions[0].srcOffset.z)
			resolve_method = RESOLVE_COMPUTE;
		if (regions[0].dstOffset.x ||
		    regions[0].dstOffset.y ||
		    regions[0].dstOffset.z)
			resolve_method = RESOLVE_COMPUTE;

		if (regions[0].extent.width != src_image->info.width ||
		    regions[0].extent.height != src_image->info.height ||
		    regions[0].extent.depth != src_image->info.depth)
			resolve_method = RESOLVE_COMPUTE;
	} else
		resolve_method = RESOLVE_COMPUTE;

	radv_pick_resolve_method_images(src_image, dest_image,
					dest_image_layout, cmd_buffer,
					&resolve_method);

	if (resolve_method == RESOLVE_FRAGMENT) {
		radv_meta_resolve_fragment_image(cmd_buffer,
						 src_image,
						 src_image_layout,
						 dest_image,
						 dest_image_layout,
						 region_count, regions);
		return;
	}

	if (resolve_method == RESOLVE_COMPUTE) {
		radv_meta_resolve_compute_image(cmd_buffer,
						src_image,
						src_image_layout,
						dest_image,
						dest_image_layout,
						region_count, regions);
		return;
	}

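	/* HW resolve path: save graphics state and render a rectangle into the
	 * destination for each region and layer, with the CB in resolve mode.
	 */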
	radv_meta_save(&saved_state, cmd_buffer,
		       RADV_META_SAVE_GRAPHICS_PIPELINE);

	assert(src_image->info.samples > 1);
	if (src_image->info.samples <= 1) {
		/* this causes GPU hangs if we get past here */
		fprintf(stderr, "radv: Illegal resolve operation (src not multisampled), will hang GPU.\n");
		return;
	}
	assert(dest_image->info.samples == 1);

	if (src_image->info.array_size > 1)
		radv_finishme("vkCmdResolveImage: multisample array images");

	if (radv_image_has_dcc(dest_image)) {
		radv_initialize_dcc(cmd_buffer, dest_image, 0xffffffff);
	}
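
	/* Each region/layer pair gets its own image views, framebuffer and
	 * render pass instance; the resolve pipeline is built on demand for
	 * the destination format's FS key.
	 */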
	unsigned fs_key = radv_format_meta_fs_key(dest_image->vk_format);
	for (uint32_t r = 0; r < region_count; ++r) {
		const VkImageResolve *region = &regions[r];

		/* From the Vulkan 1.0 spec:
		 *
		 *    - The aspectMask member of srcSubresource and dstSubresource must
		 *      only contain VK_IMAGE_ASPECT_COLOR_BIT
		 *
		 *    - The layerCount member of srcSubresource and dstSubresource must
		 *      match
		 */
		assert(region->srcSubresource.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
		assert(region->dstSubresource.aspectMask == VK_IMAGE_ASPECT_COLOR_BIT);
		assert(region->srcSubresource.layerCount ==
		       region->dstSubresource.layerCount);

		const uint32_t src_base_layer =
			radv_meta_get_iview_layer(src_image, &region->srcSubresource,
						  &region->srcOffset);

		const uint32_t dest_base_layer =
			radv_meta_get_iview_layer(dest_image, &region->dstSubresource,
						  &region->dstOffset);

		/**
		 * From Vulkan 1.0.6 spec: 18.6 Resolving Multisample Images
		 *
		 *    extent is the size in texels of the source image to resolve in width,
		 *    height and depth. 1D images use only x and width. 2D images use x, y,
		 *    width and height. 3D images use x, y, z, width, height and depth.
		 *
		 *    srcOffset and dstOffset select the initial x, y, and z offsets in
		 *    texels of the sub-regions of the source and destination image data.
		 *    extent is the size in texels of the source image to resolve in width,
		 *    height and depth. 1D images use only x and width. 2D images use x, y,
		 *    width and height. 3D images use x, y, z, width, height and depth.
		 */
		const struct VkExtent3D extent =
			radv_sanitize_image_extent(src_image->type, region->extent);
		const struct VkOffset3D dstOffset =
			radv_sanitize_image_offset(dest_image->type, region->dstOffset);

		for (uint32_t layer = 0; layer < region->srcSubresource.layerCount;
		     ++layer) {

			VkResult ret = build_resolve_pipeline(device, fs_key);
			if (ret != VK_SUCCESS) {
				cmd_buffer->record_result = ret;
				break;
			}

			struct radv_image_view src_iview;
			radv_image_view_init(&src_iview, cmd_buffer->device,
					     &(VkImageViewCreateInfo) {
						     .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
						     .image = src_image_h,
						     .viewType = radv_meta_get_view_type(src_image),
						     .format = src_image->vk_format,
						     .subresourceRange = {
							     .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
							     .baseMipLevel = region->srcSubresource.mipLevel,
							     .levelCount = 1,
							     .baseArrayLayer = src_base_layer + layer,
							     .layerCount = 1,
						     },
					     });

			struct radv_image_view dest_iview;
			radv_image_view_init(&dest_iview, cmd_buffer->device,
					     &(VkImageViewCreateInfo) {
						     .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
						     .image = dest_image_h,
						     .viewType = radv_meta_get_view_type(dest_image),
						     .format = dest_image->vk_format,
						     .subresourceRange = {
							     .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
							     .baseMipLevel = region->dstSubresource.mipLevel,
							     .levelCount = 1,
							     .baseArrayLayer = dest_base_layer + layer,
							     .layerCount = 1,
						     },
					     });

			VkFramebuffer fb_h;
			radv_CreateFramebuffer(device_h,
					       &(VkFramebufferCreateInfo) {
						       .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
						       .attachmentCount = 2,
						       .pAttachments = (VkImageView[]) {
							       radv_image_view_to_handle(&src_iview),
							       radv_image_view_to_handle(&dest_iview),
						       },
						       .width = radv_minify(dest_image->info.width,
									    region->dstSubresource.mipLevel),
						       .height = radv_minify(dest_image->info.height,
									     region->dstSubresource.mipLevel),
						       .layers = 1
					       },
					       &cmd_buffer->pool->alloc,
					       &fb_h);

			radv_CmdBeginRenderPass(cmd_buffer_h,
						&(VkRenderPassBeginInfo) {
							.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
							.renderPass = device->meta_state.resolve.pass[fs_key],
							.framebuffer = fb_h,
							.renderArea = {
								.offset = {
									dstOffset.x,
									dstOffset.y,
								},
								.extent = {
									extent.width,
									extent.height,
								}
							},
							.clearValueCount = 0,
							.pClearValues = NULL,
						},
						VK_SUBPASS_CONTENTS_INLINE);

			emit_resolve(cmd_buffer,
				     dest_iview.vk_format,
				     &(VkOffset2D) {
					     .x = dstOffset.x,
					     .y = dstOffset.y,
				     },
				     &(VkExtent2D) {
					     .width = extent.width,
					     .height = extent.height,
				     });

			radv_CmdEndRenderPass(cmd_buffer_h);

			radv_DestroyFramebuffer(device_h, fb_h,
						&cmd_buffer->pool->alloc);
		}
	}

	radv_meta_restore(&saved_state, cmd_buffer);
}

/**
 * Emit any needed resolves for the current subpass.
 */
void
radv_cmd_buffer_resolve_subpass(struct radv_cmd_buffer *cmd_buffer)
{
	struct radv_framebuffer *fb = cmd_buffer->state.framebuffer;
	const struct radv_subpass *subpass = cmd_buffer->state.subpass;
	struct radv_meta_saved_state saved_state;
	enum radv_resolve_method resolve_method = RESOLVE_HW;

	/* FINISHME(perf): Skip clears for resolve attachments.
	 *
	 * From the Vulkan 1.0 spec:
	 *
	 *    If the first use of an attachment in a render pass is as a resolve
	 *    attachment, then the loadOp is effectively ignored as the resolve is
	 *    guaranteed to overwrite all pixels in the render area.
	 */

	if (!subpass->has_resolve)
		return;

	for (uint32_t i = 0; i < subpass->color_count; ++i) {
		struct radv_subpass_attachment src_att = subpass->color_attachments[i];
		struct radv_subpass_attachment dest_att = subpass->resolve_attachments[i];

		if (dest_att.attachment == VK_ATTACHMENT_UNUSED)
			continue;

		struct radv_image *dst_img = cmd_buffer->state.framebuffer->attachments[dest_att.attachment].attachment->image;
		struct radv_image *src_img = cmd_buffer->state.framebuffer->attachments[src_att.attachment].attachment->image;

		radv_pick_resolve_method_images(src_img, dst_img, dest_att.layout, cmd_buffer, &resolve_method);
		if (resolve_method == RESOLVE_FRAGMENT) {
			break;
		}
	}

	if (resolve_method == RESOLVE_COMPUTE) {
		radv_cmd_buffer_resolve_subpass_cs(cmd_buffer);
		return;
	} else if (resolve_method == RESOLVE_FRAGMENT) {
		radv_cmd_buffer_resolve_subpass_fs(cmd_buffer);
		return;
	}

	radv_meta_save(&saved_state, cmd_buffer,
		       RADV_META_SAVE_GRAPHICS_PIPELINE);

	for (uint32_t i = 0; i < subpass->color_count; ++i) {
		struct radv_subpass_attachment src_att = subpass->color_attachments[i];
		struct radv_subpass_attachment dest_att = subpass->resolve_attachments[i];

		if (dest_att.attachment == VK_ATTACHMENT_UNUSED)
			continue;

		struct radv_image *dst_img = cmd_buffer->state.framebuffer->attachments[dest_att.attachment].attachment->image;

		if (radv_image_has_dcc(dst_img)) {
			radv_initialize_dcc(cmd_buffer, dst_img, 0xffffffff);
			cmd_buffer->state.attachments[dest_att.attachment].current_layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
		}

		struct radv_subpass resolve_subpass = {
			.color_count = 2,
			.color_attachments = (struct radv_subpass_attachment[]) { src_att, dest_att },
			.depth_stencil_attachment = NULL,
		};

		radv_cmd_buffer_set_subpass(cmd_buffer, &resolve_subpass);

		VkResult ret = build_resolve_pipeline(cmd_buffer->device, radv_format_meta_fs_key(dst_img->vk_format));
		if (ret != VK_SUCCESS) {
			cmd_buffer->record_result = ret;
			continue;
		}

		emit_resolve(cmd_buffer,
			     dst_img->vk_format,
			     &(VkOffset2D) { 0, 0 },
			     &(VkExtent2D) { fb->width, fb->height });
	}

	cmd_buffer->state.subpass = subpass;
	radv_meta_restore(&saved_state, cmd_buffer);
}

/**
 * Decompress CMask/FMask before resolving a multisampled source image inside a
 * subpass.
 */
void
radv_decompress_resolve_subpass_src(struct radv_cmd_buffer *cmd_buffer)
{
	const struct radv_subpass *subpass = cmd_buffer->state.subpass;
	struct radv_framebuffer *fb = cmd_buffer->state.framebuffer;

	for (uint32_t i = 0; i < subpass->color_count; ++i) {
		struct radv_subpass_attachment src_att = subpass->color_attachments[i];
		struct radv_subpass_attachment dest_att = subpass->resolve_attachments[i];

		if (dest_att.attachment == VK_ATTACHMENT_UNUSED)
			continue;

		struct radv_image *src_image =
			fb->attachments[src_att.attachment].attachment->image;

		VkImageResolve region = {};
		region.srcSubresource.baseArrayLayer = 0;
		region.srcSubresource.mipLevel = 0;
		region.srcSubresource.layerCount = src_image->info.array_size;

		radv_decompress_resolve_src(cmd_buffer, src_image,
					    src_att.layout, 1, &region);
	}
}

/**
 * Decompress CMask/FMask before resolving a multisampled source image.
 */
void
radv_decompress_resolve_src(struct radv_cmd_buffer *cmd_buffer,
			    struct radv_image *src_image,
			    VkImageLayout src_image_layout,
			    uint32_t region_count,
			    const VkImageResolve *regions)
{
	for (uint32_t r = 0; r < region_count; ++r) {
		const VkImageResolve *region = &regions[r];
		const uint32_t src_base_layer =
			radv_meta_get_iview_layer(src_image, &region->srcSubresource,
						  &region->srcOffset);
		VkImageSubresourceRange range;
		range.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		range.baseMipLevel = region->srcSubresource.mipLevel;
		range.levelCount = 1;
		range.baseArrayLayer = src_base_layer;
		range.layerCount = region->srcSubresource.layerCount;

		uint32_t queue_mask =
			radv_image_queue_family_mask(src_image,
						     cmd_buffer->queue_family_index,
						     cmd_buffer->queue_family_index);

		if (radv_layout_dcc_compressed(src_image, src_image_layout,
					       queue_mask)) {
			radv_decompress_dcc(cmd_buffer, src_image, &range);
		} else {
			radv_fast_clear_flush_image_inplace(cmd_buffer,
							    src_image, &range);
		}
	}
}