Mirror of https://gitlab.freedesktop.org/gstreamer/gstreamer.git
vulkan: implement the correct memory barriers necessary
Vulkan validation layers are finally silent!
parent 1230a6c723
commit ac09f88eb9
11 changed files with 334 additions and 212 deletions
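At a high level, the change drops gst_vulkan_image_memory_set_layout() in favour of explicit VkImageMemoryBarrier / VkBufferMemoryBarrier submissions: each GstVulkanImageMemory and GstVulkanBufferMemory now carries a small barrier-state record (pipeline stages, access flags and, for images, the current layout and subresource range), every command that touches the memory records a barrier from that remembered state to the state it needs, and then updates the record. The following is a minimal sketch of that idea, not code from this commit; the struct and function names are illustrative, and it uses VK_QUEUE_FAMILY_IGNORED where the commit leaves the queue family indices at 0 with a FIXME.

#include <vulkan/vulkan.h>

/* Illustrative barrier-tracking state; the commit stores the equivalent
 * fields on the GstVulkanImageMemory itself. */
typedef struct
{
  VkPipelineStageFlags pipeline_stages;     /* stages of the last barrier */
  VkAccessFlags access_flags;               /* accesses made visible so far */
  VkImageLayout image_layout;               /* layout the image was left in */
  VkImageSubresourceRange subresource_range;
} TrackedImageState;

static void
transition_image (VkCommandBuffer cmd, VkImage image,
    TrackedImageState * state, VkPipelineStageFlags dst_stages,
    VkAccessFlags dst_access, VkImageLayout new_layout)
{
  VkImageMemoryBarrier barrier = {
    .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
    .pNext = NULL,
    .srcAccessMask = state->access_flags,   /* what last wrote/read the image */
    .dstAccessMask = dst_access,            /* what the next command will do */
    .oldLayout = state->image_layout,
    .newLayout = new_layout,
    .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
    .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
    .image = image,
    .subresourceRange = state->subresource_range,
  };

  vkCmdPipelineBarrier (cmd, state->pipeline_stages, dst_stages, 0,
      0, NULL, 0, NULL, 1, &barrier);

  /* remember what the image was last transitioned to/for, so the next
   * barrier chains correctly */
  state->pipeline_stages = dst_stages;
  state->access_flags = dst_access;
  state->image_layout = new_layout;
}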
@@ -1089,9 +1089,57 @@ gst_vulkan_image_identity_transform (GstBaseTransform * bt, GstBuffer * inbuf,
      .clearValueCount = 1,
      .pClearValues = &clearColor
    };

    VkImageMemoryBarrier in_image_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = in_img_mem->barrier.parent.access_flags,
        .dstAccessMask = VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
        .oldLayout = in_img_mem->barrier.image_layout,
        .newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
        /* FIXME: implement exclusive transfers */
        .srcQueueFamilyIndex = 0,
        .dstQueueFamilyIndex = 0,
        .image = in_img_mem->image,
        .subresourceRange = in_img_mem->barrier.subresource_range
    };

    VkImageMemoryBarrier out_image_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = out_img_mem->barrier.parent.access_flags,
        .dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
        .oldLayout = out_img_mem->barrier.image_layout,
        .newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
        /* FIXME: implement exclusive transfers */
        .srcQueueFamilyIndex = 0,
        .dstQueueFamilyIndex = 0,
        .image = out_img_mem->image,
        .subresourceRange = out_img_mem->barrier.subresource_range
    };
    /* *INDENT-ON* */
    VkDeviceSize offsets[] = { 0 };

    vkCmdPipelineBarrier (cmd, in_img_mem->barrier.parent.pipeline_stages,
        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT, 0, 0, NULL, 0, NULL, 1,
        &in_image_memory_barrier);

    in_img_mem->barrier.parent.pipeline_stages =
        VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
    in_img_mem->barrier.parent.access_flags =
        in_image_memory_barrier.dstAccessMask;
    in_img_mem->barrier.image_layout = in_image_memory_barrier.newLayout;

    vkCmdPipelineBarrier (cmd, out_img_mem->barrier.parent.pipeline_stages,
        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT, 0, 0, NULL, 0, NULL, 1,
        &out_image_memory_barrier);

    out_img_mem->barrier.parent.pipeline_stages =
        VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
    out_img_mem->barrier.parent.access_flags =
        out_image_memory_barrier.dstAccessMask;
    out_img_mem->barrier.image_layout = out_image_memory_barrier.newLayout;

    vkCmdBeginRenderPass (cmd, &render_pass_info, VK_SUBPASS_CONTENTS_INLINE);
    vkCmdBindPipeline (cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
        vk_identity->graphics_pipeline);

@@ -489,100 +489,6 @@ gst_vulkan_swapper_get_supported_caps (GstVulkanSwapper * swapper,
  return caps;
}

static gboolean
_swapper_set_image_layout_with_cmd (GstVulkanSwapper * swapper,
    VkCommandBuffer cmd, GstVulkanImageMemory * image,
    VkImageLayout new_image_layout, GError ** error)
{
  VkPipelineStageFlags src_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
  VkPipelineStageFlags dest_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
  VkImageMemoryBarrier image_memory_barrier;

  gst_vulkan_image_memory_set_layout (image, new_image_layout,
      &image_memory_barrier);

  vkCmdPipelineBarrier (cmd, src_stages, dest_stages, 0, 0, NULL, 0, NULL, 1,
      &image_memory_barrier);

  return TRUE;
}

static gboolean
_swapper_set_image_layout (GstVulkanSwapper * swapper,
    GstVulkanImageMemory * image, VkImageLayout new_image_layout,
    GError ** error)
{
  VkCommandBuffer cmd = VK_NULL_HANDLE;
  GstVulkanFence *fence = NULL;
  VkResult err;

  if (!(cmd = gst_vulkan_command_pool_create (swapper->cmd_pool, error)))
    goto error;

  fence = gst_vulkan_fence_new (swapper->device, 0, error);
  if (!fence)
    goto error;

  {
    /* *INDENT-OFF* */
    VkCommandBufferBeginInfo cmd_buf_info = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .pNext = NULL,
        .flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,
        .pInheritanceInfo = NULL
    };
    /* *INDENT-ON* */

    err = vkBeginCommandBuffer (cmd, &cmd_buf_info);
    if (gst_vulkan_error_to_g_error (err, error, "vkBeginCommandBuffer") < 0)
      goto error;
  }

  if (!_swapper_set_image_layout_with_cmd (swapper, cmd, image,
          new_image_layout, error))
    goto error;

  err = vkEndCommandBuffer (cmd);
  if (gst_vulkan_error_to_g_error (err, error, "vkEndCommandBuffer") < 0)
    goto error;

  {
    VkSubmitInfo submit_info = { 0, };
    VkPipelineStageFlags stages = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;

    /* *INDENT-OFF* */
    submit_info = (VkSubmitInfo) {
        .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
        .pNext = NULL,
        .waitSemaphoreCount = 0,
        .pWaitSemaphores = NULL,
        .pWaitDstStageMask = &stages,
        .commandBufferCount = 1,
        .pCommandBuffers = &cmd,
        .signalSemaphoreCount = 0,
        .pSignalSemaphores = NULL,
    };
    /* *INDENT-ON* */

    err =
        vkQueueSubmit (swapper->queue->queue, 1, &submit_info,
        GST_VULKAN_FENCE_FENCE (fence));
    if (gst_vulkan_error_to_g_error (err, error, "vkQueueSubmit") < 0)
      goto error;
  }

  swapper->priv->trash_list = g_list_prepend (swapper->priv->trash_list,
      gst_vulkan_trash_new_free_command_buffer (fence, swapper->cmd_pool, cmd));
  fence = NULL;

  return TRUE;

error:
  if (fence)
    gst_vulkan_fence_unref (fence);
  return FALSE;
}

static gboolean
_allocate_swapchain (GstVulkanSwapper * swapper, GstCaps * caps,
    GError ** error)

@@ -754,11 +660,12 @@ _allocate_swapchain (GstVulkanSwapper * swapper, GstCaps * caps,
        format, swapchain_dims.width, swapchain_dims.height,
        VK_IMAGE_TILING_OPTIMAL, usage, NULL, NULL);

    if (!_swapper_set_image_layout (swapper, swapper->swap_chain_images[i],
            VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, error)) {
      g_free (swap_chain_images);
      return FALSE;
    }
    swapper->swap_chain_images[i]->barrier.parent.pipeline_stages =
        VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
    swapper->swap_chain_images[i]->barrier.parent.access_flags =
        VK_ACCESS_MEMORY_READ_BIT;
    swapper->swap_chain_images[i]->barrier.image_layout =
        VK_IMAGE_LAYOUT_UNDEFINED;
  }

  g_free (swap_chain_images);

@@ -808,13 +715,13 @@ _build_render_buffer_cmd (GstVulkanSwapper * swapper, guint32 swap_idx,
    GstBuffer * buffer, VkCommandBuffer * cmd_ret, GError ** error)
{
  GstMemory *in_mem;
  GstVulkanImageMemory *swap_mem;
  GstVulkanImageMemory *swap_img;
  VkCommandBuffer cmd;
  GstVideoRectangle src, dst, rslt;
  VkResult err;

  g_return_val_if_fail (swap_idx < swapper->n_swap_chain_images, FALSE);
  swap_mem = swapper->swap_chain_images[swap_idx];
  swap_img = swapper->swap_chain_images[swap_idx];

  if (!(cmd = gst_vulkan_command_pool_create (swapper->cmd_pool, error)))
    return FALSE;

@@ -834,9 +741,30 @@ _build_render_buffer_cmd (GstVulkanSwapper * swapper, guint32 swap_idx,
    return FALSE;
  }

  if (!_swapper_set_image_layout_with_cmd (swapper, cmd, swap_mem,
          VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, error)) {
    return FALSE;
  {
    /* *INDENT-OFF* */
    VkImageMemoryBarrier image_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = swap_img->barrier.parent.access_flags,
        .dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
        .oldLayout = swap_img->barrier.image_layout,
        .newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
        /* FIXME: implement exclusive transfers */
        .srcQueueFamilyIndex = 0,
        .dstQueueFamilyIndex = 0,
        .image = swap_img->image,
        .subresourceRange = swap_img->barrier.subresource_range
    };
    /* *INDENT-ON* */

    vkCmdPipelineBarrier (cmd, swap_img->barrier.parent.pipeline_stages,
        VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, NULL, 0, NULL, 1,
        &image_memory_barrier);

    swap_img->barrier.parent.pipeline_stages = VK_PIPELINE_STAGE_TRANSFER_BIT;
    swap_img->barrier.parent.access_flags = image_memory_barrier.dstAccessMask;
    swap_img->barrier.image_layout = image_memory_barrier.newLayout;
  }

  src.x = src.y = 0;

@@ -844,8 +772,8 @@ _build_render_buffer_cmd (GstVulkanSwapper * swapper, guint32 swap_idx,
  src.h = GST_VIDEO_INFO_HEIGHT (&swapper->v_info);

  dst.x = dst.y = 0;
  dst.w = gst_vulkan_image_memory_get_width (swap_mem);
  dst.h = gst_vulkan_image_memory_get_height (swap_mem);
  dst.w = gst_vulkan_image_memory_get_width (swap_img);
  dst.h = gst_vulkan_image_memory_get_height (swap_img);

  gst_video_sink_center_rect (src, dst, &rslt, FALSE);

@@ -856,10 +784,8 @@ _build_render_buffer_cmd (GstVulkanSwapper * swapper, guint32 swap_idx,
  in_mem = gst_buffer_peek_memory (buffer, 0);
  if (gst_is_vulkan_buffer_memory (in_mem)) {
    GstVulkanBufferMemory *buf_mem = (GstVulkanBufferMemory *) in_mem;
    VkBufferImageCopy region = { 0, };

    /* *INDENT-OFF* */
    region = (VkBufferImageCopy) {
    VkBufferImageCopy region = {
        .bufferOffset = 0,
        .bufferRowLength = src.w,
        .bufferImageHeight = src.h,

@@ -876,17 +802,33 @@ _build_render_buffer_cmd (GstVulkanSwapper * swapper, guint32 swap_idx,
            .depth = 1,
        }
    };
    VkBufferMemoryBarrier buffer_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = buf_mem->barrier.parent.access_flags,
        .dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT,
        /* FIXME: implement exclusive transfers */
        .srcQueueFamilyIndex = 0,
        .dstQueueFamilyIndex = 0,
        .buffer = buf_mem->buffer,
        .offset = region.bufferOffset,
        .size = region.bufferRowLength * region.bufferImageHeight
    };
    /* *INDENT-ON* */
    vkCmdPipelineBarrier (cmd, buf_mem->barrier.parent.pipeline_stages,
        VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, NULL, 1, &buffer_memory_barrier,
        0, NULL);

    vkCmdCopyBufferToImage (cmd, buf_mem->buffer, swap_mem->image,
        swap_mem->image_layout, 1, &region);
    buf_mem->barrier.parent.pipeline_stages = VK_PIPELINE_STAGE_TRANSFER_BIT;
    buf_mem->barrier.parent.access_flags = buffer_memory_barrier.dstAccessMask;

    vkCmdCopyBufferToImage (cmd, buf_mem->buffer, swap_img->image,
        swap_img->barrier.image_layout, 1, &region);
  } else if (gst_is_vulkan_image_memory (in_mem)) {
    GstVulkanImageMemory *img_mem = (GstVulkanImageMemory *) in_mem;
    VkImageCopy region = { 0, };

    /* FIXME: should really be a blit to resize to the output dimensions */
    /* *INDENT-OFF* */
    region = (VkImageCopy) {
    VkImageCopy region = {
        .srcSubresource = {
            .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
            .mipLevel = 0,

@@ -903,20 +845,57 @@ _build_render_buffer_cmd (GstVulkanSwapper * swapper, guint32 swap_idx,
        .dstOffset = { rslt.x, rslt.y, 0 },
        .extent = { rslt.w, rslt.h, 1 }
    };
    VkImageMemoryBarrier image_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = img_mem->barrier.parent.access_flags,
        .dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT,
        .oldLayout = img_mem->barrier.image_layout,
        .newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
        /* FIXME: implement exclusive transfers */
        .srcQueueFamilyIndex = 0,
        .dstQueueFamilyIndex = 0,
        .image = img_mem->image,
        .subresourceRange = img_mem->barrier.subresource_range
    };
    /* *INDENT-ON* */

    if (!_swapper_set_image_layout_with_cmd (swapper, cmd, img_mem,
            VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, error)) {
      return FALSE;
    }
    vkCmdPipelineBarrier (cmd, img_mem->barrier.parent.pipeline_stages,
        VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, NULL, 0, NULL, 1,
        &image_memory_barrier);

    vkCmdCopyImage (cmd, img_mem->image, img_mem->image_layout, swap_mem->image,
        swap_mem->image_layout, 1, &region);
    img_mem->barrier.parent.pipeline_stages = VK_PIPELINE_STAGE_TRANSFER_BIT;
    img_mem->barrier.parent.access_flags = image_memory_barrier.dstAccessMask;
    img_mem->barrier.image_layout = image_memory_barrier.newLayout;

    vkCmdCopyImage (cmd, img_mem->image, img_mem->barrier.image_layout,
        swap_img->image, swap_img->barrier.image_layout, 1, &region);
  }

  if (!_swapper_set_image_layout_with_cmd (swapper, cmd, swap_mem,
          VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, error)) {
    return FALSE;
  {
    /* *INDENT-OFF* */
    VkImageMemoryBarrier image_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = swap_img->barrier.parent.access_flags,
        .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT,
        .oldLayout = swap_img->barrier.image_layout,
        .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
        /* FIXME: implement exclusive transfers */
        .srcQueueFamilyIndex = 0,
        .dstQueueFamilyIndex = 0,
        .image = swap_img->image,
        .subresourceRange = swap_img->barrier.subresource_range
    };
    /* *INDENT-ON* */

    vkCmdPipelineBarrier (cmd, swap_img->barrier.parent.pipeline_stages,
        VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, NULL, 0, NULL, 1,
        &image_memory_barrier);

    swap_img->barrier.parent.pipeline_stages = VK_PIPELINE_STAGE_TRANSFER_BIT;
    swap_img->barrier.parent.access_flags = image_memory_barrier.dstAccessMask;
    swap_img->barrier.image_layout = image_memory_barrier.newLayout;
  }

  err = vkEndCommandBuffer (cmd);

@@ -496,6 +496,7 @@ _buffer_to_image_perform (gpointer impl, GstBuffer * inbuf, GstBuffer ** outbuf)
    GstVulkanBufferMemory *buf_mem;
    GstVulkanImageMemory *img_mem;
    VkImageMemoryBarrier image_memory_barrier;
    VkBufferMemoryBarrier buffer_memory_barrier;

    in_mem = gst_buffer_peek_memory (inbuf, i);
    if (!gst_is_vulkan_buffer_memory (in_mem)) {

@@ -529,17 +530,49 @@ _buffer_to_image_perform (gpointer impl, GstBuffer * inbuf, GstBuffer ** outbuf)
            .depth = 1,
        }
    };

    image_memory_barrier = (VkImageMemoryBarrier) {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = img_mem->barrier.parent.access_flags,
        .dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
        .oldLayout = img_mem->barrier.image_layout,
        .newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
        /* FIXME: implement exclusive transfers */
        .srcQueueFamilyIndex = 0,
        .dstQueueFamilyIndex = 0,
        .image = img_mem->image,
        .subresourceRange = img_mem->barrier.subresource_range
    };

    buffer_memory_barrier = (VkBufferMemoryBarrier) {
        .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = buf_mem->barrier.parent.access_flags,
        .dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT,
        /* FIXME: implement exclusive transfers */
        .srcQueueFamilyIndex = 0,
        .dstQueueFamilyIndex = 0,
        .buffer = buf_mem->buffer,
        .offset = region.bufferOffset,
        .size = region.bufferRowLength * region.bufferImageHeight
    };
    /* *INDENT-ON* */

    gst_vulkan_image_memory_set_layout (img_mem,
        VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &image_memory_barrier);
    vkCmdPipelineBarrier (cmd,
        buf_mem->barrier.parent.pipeline_stages | img_mem->barrier.
        parent.pipeline_stages, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, NULL, 1,
        &buffer_memory_barrier, 1, &image_memory_barrier);

    vkCmdPipelineBarrier (cmd, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
        VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0, NULL, 0, NULL, 1,
        &image_memory_barrier);
    buf_mem->barrier.parent.pipeline_stages = VK_PIPELINE_STAGE_TRANSFER_BIT;
    buf_mem->barrier.parent.access_flags = buffer_memory_barrier.dstAccessMask;

    img_mem->barrier.parent.pipeline_stages = VK_PIPELINE_STAGE_TRANSFER_BIT;
    img_mem->barrier.parent.access_flags = image_memory_barrier.dstAccessMask;
    img_mem->barrier.image_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;

    vkCmdCopyBufferToImage (cmd, buf_mem->buffer, img_mem->image,
        img_mem->image_layout, 1, &region);
        img_mem->barrier.image_layout, 1, &region);
  }

  err = vkEndCommandBuffer (cmd);

@@ -768,6 +801,7 @@ _raw_to_image_perform (gpointer impl, GstBuffer * inbuf, GstBuffer ** outbuf)
    GstVulkanBufferMemory *buf_mem;
    GstVulkanImageMemory *img_mem;
    VkImageMemoryBarrier image_memory_barrier;
    VkBufferMemoryBarrier buffer_memory_barrier;

    in_mem = gst_buffer_peek_memory (inbuf, i);
    if (!gst_is_vulkan_buffer_memory (in_mem)) {

@@ -801,17 +835,49 @@ _raw_to_image_perform (gpointer impl, GstBuffer * inbuf, GstBuffer ** outbuf)
            .depth = 1,
        }
    };

    buffer_memory_barrier = (VkBufferMemoryBarrier) {
        .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = buf_mem->barrier.parent.access_flags,
        .dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT,
        /* FIXME: implement exclusive transfers */
        .srcQueueFamilyIndex = 0,
        .dstQueueFamilyIndex = 0,
        .buffer = buf_mem->buffer,
        .offset = region.bufferOffset,
        .size = region.bufferRowLength * region.bufferImageHeight
    };

    image_memory_barrier = (VkImageMemoryBarrier) {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = img_mem->barrier.parent.access_flags,
        .dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT,
        .oldLayout = img_mem->barrier.image_layout,
        .newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
        /* FIXME: implement exclusive transfers */
        .srcQueueFamilyIndex = 0,
        .dstQueueFamilyIndex = 0,
        .image = img_mem->image,
        .subresourceRange = img_mem->barrier.subresource_range
    };
    /* *INDENT-ON* */

    gst_vulkan_image_memory_set_layout (img_mem,
        VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, &image_memory_barrier);
    vkCmdPipelineBarrier (cmd,
        buf_mem->barrier.parent.pipeline_stages | img_mem->barrier.
        parent.pipeline_stages, VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, NULL, 1,
        &buffer_memory_barrier, 1, &image_memory_barrier);

    vkCmdPipelineBarrier (cmd, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
        VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0, NULL, 0, NULL, 1,
        &image_memory_barrier);
    buf_mem->barrier.parent.pipeline_stages = VK_PIPELINE_STAGE_TRANSFER_BIT;
    buf_mem->barrier.parent.access_flags = buffer_memory_barrier.dstAccessMask;

    img_mem->barrier.parent.pipeline_stages = VK_PIPELINE_STAGE_TRANSFER_BIT;
    img_mem->barrier.parent.access_flags = image_memory_barrier.dstAccessMask;
    img_mem->barrier.image_layout = image_memory_barrier.newLayout;

    vkCmdCopyBufferToImage (cmd, buf_mem->buffer, img_mem->image,
        img_mem->image_layout, 1, &region);
        img_mem->barrier.image_layout, 1, &region);
  }

  err = vkEndCommandBuffer (cmd);

@@ -820,7 +886,6 @@ _raw_to_image_perform (gpointer impl, GstBuffer * inbuf, GstBuffer ** outbuf)

  {
    VkSubmitInfo submit_info = { 0, };
    VkPipelineStageFlags stages = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
    GstVulkanFence *fence;

    /* *INDENT-OFF* */

@@ -829,7 +894,7 @@ _raw_to_image_perform (gpointer impl, GstBuffer * inbuf, GstBuffer ** outbuf)
        .pNext = NULL,
        .waitSemaphoreCount = 0,
        .pWaitSemaphores = NULL,
        .pWaitDstStageMask = &stages,
        .pWaitDstStageMask = NULL,
        .commandBufferCount = 1,
        .pCommandBuffers = &cmd,
        .signalSemaphoreCount = 0,

gst-libs/gst/vulkan/gstvkbarrier.h (new file, 53 lines)
@@ -0,0 +1,53 @@
/*
 * GStreamer
 * Copyright (C) 2019 Matthew Waters <matthew@centricular.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

#ifndef __GST_VULKAN_BARRIER_H__
#define __GST_VULKAN_BARRIER_H__

#include <gst/vulkan/vulkan_fwd.h>
#include <gst/vulkan/gstvkapi.h>

G_BEGIN_DECLS

typedef enum
{
  GST_VULKAN_BARRIER_NONE = 0,
  GST_VULKAN_BARRIER_TYPE_MEMORY = 1,
  GST_VULKAN_BARRIER_TYPE_BUFFER = 2,
  GST_VULKAN_BARRIER_TYPE_IMAGE = 3,
} GstVulkanBarrierType;

typedef enum
{
  GST_VULKAN_BARRIER_FLAG_NONE = 0,
} GstVulkanBarrierFlags;

struct _GstVulkanBarrierMemoryInfo
{
  GstVulkanBarrierType type;
  GstVulkanBarrierFlags flags;
  GstVulkanQueue * queue;
  VkPipelineStageFlags pipeline_stages;
  VkAccessFlags access_flags;
};

G_END_DECLS

#endif /* __GST_VULKAN_BARRIER_H__ */

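The new header only declares the common part of the barrier state; the buffer- and image-specific structs added later in this commit embed it as their first member, so a consumer can get from the generic info back to the concrete one via the type field. An illustrative sketch of that use, not code from this commit (the helper name is made up; the cast relies on parent being the first struct member):

#include <gst/vulkan/vulkan.h>

/* Illustrative only: recover the current layout from a generic barrier info. */
static VkImageLayout
current_image_layout (GstVulkanBarrierMemoryInfo * info)
{
  if (info->type == GST_VULKAN_BARRIER_TYPE_IMAGE) {
    GstVulkanBarrierImageInfo *image_info = (GstVulkanBarrierImageInfo *) info;
    return image_info->image_layout;
  }
  /* plain memory and buffer barriers carry no image layout */
  return VK_IMAGE_LAYOUT_UNDEFINED;
}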
@@ -102,10 +102,15 @@ _vk_buffer_mem_init (GstVulkanBufferMemory * mem, GstAllocator * allocator,
      align, offset, size);

  mem->device = gst_object_ref (device);
  mem->usage = usage;
  mem->wrapped = FALSE;
  mem->notify = notify;
  mem->user_data = user_data;

  mem->barrier.parent.type = GST_VULKAN_BARRIER_TYPE_BUFFER;
  mem->barrier.parent.pipeline_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
  mem->barrier.parent.access_flags = 0;

  g_mutex_init (&mem->lock);

  GST_CAT_DEBUG (GST_CAT_VULKAN_BUFFER_MEMORY,

@@ -141,7 +146,7 @@ _vk_buffer_mem_new_alloc (GstAllocator * allocator, GstMemory * parent,
  if ((mem->requirements.alignment & (mem->requirements.alignment - 1)) != 0) {
    g_set_error_literal (&error, GST_VULKAN_ERROR, GST_VULKAN_FAILED,
        "Vulkan implementation requires unsupported non-power-of 2 memory alignment");
    goto error;
    goto vk_error;
  }

  params.align = mem->requirements.alignment - 1;

@@ -25,6 +25,7 @@
#include <gst/gstallocator.h>
#include <gst/gstmemory.h>

#include <gst/vulkan/gstvkbarrier.h>
#include <gst/vulkan/vulkan.h>

G_BEGIN_DECLS

@@ -43,6 +44,15 @@ GType gst_vulkan_buffer_memory_allocator_get_type(void);
#define GST_VULKAN_BUFFER_MEMORY_ALLOCATOR_NAME "VulkanBuffer"
#define GST_CAPS_FEATURE_MEMORY_VULKAN_BUFFER "memory:VulkanBuffer"

struct _GstVulkanBarrierBufferInfo
{
  GstVulkanBarrierMemoryInfo parent;

  VkDeviceSize offset;
  VkDeviceSize size;
};

struct _GstVulkanBufferMemory
{
  GstMemory parent;

@@ -56,6 +66,8 @@ struct _GstVulkanBufferMemory
  VkMemoryRequirements requirements;
  VkBufferUsageFlags usage;

  GstVulkanBarrierBufferInfo barrier;

  GMutex lock;
  gboolean wrapped;
  GDestroyNotify notify;

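With the barrier field now embedded in GstVulkanBufferMemory, buffer access can be synchronized the same way the image paths above do it. A hedged sketch of such a helper follows; the function name is illustrative (the commit itself writes the barrier inline at each call site, covers only the copied region rather than VK_WHOLE_SIZE, and leaves the queue family indices at 0 with a FIXME):

#include <gst/vulkan/vulkan.h>

/* Illustrative only: barrier the whole buffer from its tracked state to
 * whatever stage/access the next command needs, then update that state. */
static void
barrier_buffer_for (VkCommandBuffer cmd, GstVulkanBufferMemory * buf_mem,
    VkPipelineStageFlags dst_stages, VkAccessFlags dst_access)
{
  VkBufferMemoryBarrier barrier = {
    .sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER,
    .pNext = NULL,
    .srcAccessMask = buf_mem->barrier.parent.access_flags,
    .dstAccessMask = dst_access,
    .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
    .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
    .buffer = buf_mem->buffer,
    .offset = 0,
    .size = VK_WHOLE_SIZE,
  };

  vkCmdPipelineBarrier (cmd, buf_mem->barrier.parent.pipeline_stages,
      dst_stages, 0, 0, NULL, 1, &barrier, 0, NULL);

  /* remember what the buffer was last transitioned for */
  buf_mem->barrier.parent.pipeline_stages = dst_stages;
  buf_mem->barrier.parent.access_flags = dst_access;
}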
@@ -190,7 +190,19 @@ _vk_image_mem_init (GstVulkanImageMemory * mem, GstAllocator * allocator,
      align, offset, size);

  mem->device = gst_object_ref (device);
  mem->image_layout = VK_IMAGE_LAYOUT_UNDEFINED;
  mem->barrier.parent.type = GST_VULKAN_BARRIER_TYPE_IMAGE;
  mem->barrier.parent.pipeline_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
  mem->barrier.parent.access_flags = 0;
  mem->barrier.image_layout = VK_IMAGE_LAYOUT_UNDEFINED;
  /* *INDENT-OFF* */
  mem->barrier.subresource_range = (VkImageSubresourceRange) {
      .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
      .baseMipLevel = 0,
      .levelCount = 1,
      .baseArrayLayer = 0,
      .layerCount = 1,
  };
  /* *INDENT-ON* */
  mem->usage = usage;
  mem->wrapped = FALSE;
  mem->notify = notify;

@@ -443,70 +455,6 @@ _vk_image_mem_free (GstAllocator * allocator, GstMemory * memory)
  g_free (mem);
}

static VkAccessFlags
_access_flags_from_layout (VkImageLayout image_layout)
{
  if (image_layout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
    return VK_ACCESS_TRANSFER_WRITE_BIT;

  if (image_layout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
    return VK_ACCESS_TRANSFER_READ_BIT;

  if (image_layout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
    return VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;

  if (image_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL)
    return VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;

  if (image_layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
    return VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;

  return 0;
}

/**
 * gst_vulkan_image_memory_set_layout:
 * @vk_mem: a #GstVulkanImageMemory
 * @image_layout: the new iamge layout
 * @barrier: (inout): the barrier to fill
 *
 * Returns: wether the layout could be performed
 *
 * Since: 1.18
 */
gboolean
gst_vulkan_image_memory_set_layout (GstVulkanImageMemory * vk_mem,
    VkImageLayout image_layout, VkImageMemoryBarrier * barrier)
{
  /* validate vk_mem->usage with image_layout */

  /* *INDENT-OFF* */
  *barrier = (VkImageMemoryBarrier) {
      .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
      .pNext = NULL,
      .dstAccessMask = _access_flags_from_layout (image_layout),
      .srcAccessMask = _access_flags_from_layout (vk_mem->image_layout),
      .oldLayout = vk_mem->image_layout,
      .newLayout = image_layout,
      .srcQueueFamilyIndex = 0,
      .dstQueueFamilyIndex = 0,
      .image = vk_mem->image,
      .subresourceRange = (VkImageSubresourceRange) {
          .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
          .baseMipLevel = 0,
          .levelCount = 1,
          .baseArrayLayer = 0,
          .layerCount = 1
      }
  };
  /* *INDENT-ON* */

  /* FIXME: what if the barrier is never submitted or is submitted out of order? */
  vk_mem->image_layout = image_layout;

  return TRUE;
}

/**
 * gst_vulkan_image_memory_alloc:
 * @device: a #GstVulkanDevice

@@ -21,6 +21,7 @@
#ifndef __GST_VULKAN_IMAGE_MEMORY_H__
#define __GST_VULKAN_IMAGE_MEMORY_H__

#include <gst/vulkan/gstvkbarrier.h>
#include <gst/vulkan/gstvkdevice.h>

#include <gst/video/video.h>

@@ -41,9 +42,14 @@ GType gst_vulkan_image_memory_allocator_get_type(void);
#define GST_VULKAN_IMAGE_MEMORY_ALLOCATOR_NAME "VulkanImage"
#define GST_CAPS_FEATURE_MEMORY_VULKAN_IMAGE "memory:VulkanImage"

typedef struct _GstVulkanImageMemory GstVulkanImageMemory;
typedef struct _GstVulkanImageMemoryAllocator GstVulkanImageMemoryAllocator;
typedef struct _GstVulkanImageMemoryAllocatorClass GstVulkanImageMemoryAllocatorClass;
struct _GstVulkanBarrierImageInfo
{
  GstVulkanBarrierMemoryInfo parent;

  VkImageLayout image_layout;
  /* FIXME: multiple layers or mipmap levels may require multiple barriers */
  VkImageSubresourceRange subresource_range;
};

struct _GstVulkanImageMemory
{

@@ -52,7 +58,6 @@ struct _GstVulkanImageMemory
  GstVulkanDevice * device;

  VkImage image;
  VkImageLayout image_layout;
  VkImageView view;
  GstVulkanMemory *vk_mem;

@@ -61,6 +66,8 @@ struct _GstVulkanImageMemory
  VkImageFormatProperties format_properties;
  VkImageUsageFlags usage;

  GstVulkanBarrierImageInfo barrier;

  GMutex lock;
  gboolean wrapped;
  GDestroyNotify notify;

@@ -112,11 +119,6 @@ GstMemory * gst_vulkan_image_memory_wrapped (GstVulkanDevice * devic
                                                         gpointer user_data,
                                                         GDestroyNotify notify);

GST_VULKAN_API
gboolean    gst_vulkan_image_memory_set_layout (GstVulkanImageMemory * vk_mem,
                                                VkImageLayout image_layout,
                                                VkImageMemoryBarrier * barrier);

GST_VULKAN_API
guint32     gst_vulkan_image_memory_get_width (GstVulkanImageMemory * image);
GST_VULKAN_API

@@ -21,6 +21,7 @@ vulkan_sources = [
]

vulkan_headers = [
  'gstvkbarrier.h',
  'gstvkbuffermemory.h',
  'gstvkbufferpool.h',
  'gstvkcommandpool.h',

@@ -33,6 +33,7 @@
#include <gst/vulkan/gstvkdisplay.h>
#include <gst/vulkan/gstvkwindow.h>
#include <gst/vulkan/gstvkmemory.h>
#include <gst/vulkan/gstvkbarrier.h>
#include <gst/vulkan/gstvkbuffermemory.h>
#include <gst/vulkan/gstvkimagememory.h>
#include <gst/vulkan/gstvkbufferpool.h>

@@ -62,10 +62,18 @@ typedef struct _GstVulkanBufferPool GstVulkanBufferPool;
typedef struct _GstVulkanBufferPoolClass GstVulkanBufferPoolClass;
typedef struct _GstVulkanBufferPoolPrivate GstVulkanBufferPoolPrivate;

typedef struct _GstVulkanImageMemory GstVulkanImageMemory;
typedef struct _GstVulkanImageMemoryAllocator GstVulkanImageMemoryAllocator;
typedef struct _GstVulkanImageMemoryAllocatorClass GstVulkanImageMemoryAllocatorClass;

typedef struct _GstVulkanImageBufferPool GstVulkanImageBufferPool;
typedef struct _GstVulkanImageBufferPoolClass GstVulkanImageBufferPoolClass;
typedef struct _GstVulkanImageBufferPoolPrivate GstVulkanImageBufferPoolPrivate;

typedef struct _GstVulkanBarrierMemoryInfo GstVulkanBarrierMemoryInfo;
typedef struct _GstVulkanBarrierBufferInfo GstVulkanBarrierBufferInfo;
typedef struct _GstVulkanBarrierImageInfo GstVulkanBarrierImageInfo;

G_END_DECLS

#endif /* __GST_VULKAN_FWD_H__ */