vulkan: separate allocation and binding of memory

Matthew Waters 2016-02-08 12:22:11 +11:00
parent 914c1a5193
commit 300f4e03b2
8 changed files with 283 additions and 124 deletions
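The patch splits what gst_vulkan_buffer_memory_alloc () and gst_vulkan_image_memory_alloc () used to do in one step into separate stages: create the VkBuffer/VkImage (which now only queries its VkMemoryRequirements), allocate a GstVulkanMemory of a suitable memory type, and attach the two with the new gst_vulkan_buffer_memory_bind () / gst_vulkan_image_memory_bind (). The *_alloc_bind () convenience functions added below keep the old one-call behaviour. A rough sketch of the buffer path, using only functions from this patch (the helper name is illustrative, error handling is simplified):

static GstMemory *
example_alloc_then_bind (GstVulkanDevice * device, VkFormat format,
    gsize size, VkBufferUsageFlags usage, VkMemoryPropertyFlags mem_props)
{
  GstAllocationParams params = { 0, };
  GstVulkanBufferMemory *buf;
  GstVulkanMemory *dev_mem;
  guint32 type_idx;

  /* 1. create the VkBuffer; no device memory is bound to it yet */
  buf = (GstVulkanBufferMemory *)
      gst_vulkan_buffer_memory_alloc (device, format, size, usage, mem_props);
  if (!buf)
    return NULL;

  /* 2. pick a memory type index matching the buffer's requirements */
  if (!gst_vulkan_memory_find_memory_type_index_with_type_properties (device,
          buf->requirements.memoryTypeBits, mem_props, &type_idx))
    return NULL;

  /* 3. allocate device memory and bind it; vkBindBufferMemory () is called
   * with the memory's vk_offset inside gst_vulkan_buffer_memory_bind () */
  params.align = buf->requirements.alignment - 1;   /* power-of-two assumed */
  dev_mem = (GstVulkanMemory *) gst_vulkan_memory_alloc (device, type_idx,
      &params, buf->requirements.size, mem_props);
  if (!dev_mem || !gst_vulkan_buffer_memory_bind (buf, dev_mem))
    return NULL;
  gst_memory_unref (GST_MEMORY_CAST (dev_mem));

  return GST_MEMORY_CAST (buf);
}

Keeping allocation and binding apart is what lets callers place several buffers or images into one larger device allocation; the vk_offset field and the new _vk_mem_share () implementation below exist to support that.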

View file

@ -38,28 +38,6 @@ GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFUALT);
static GstAllocator *_vulkan_buffer_memory_allocator;
static gboolean
_find_memory_type_index_with_type_properties (GstVulkanDevice * device,
guint32 typeBits, VkFlags properties, guint32 * typeIndex)
{
guint32 i;
/* Search memtypes to find first index with those properties */
for (i = 0; i < 32; i++) {
if ((typeBits & 1) == 1) {
/* Type is available, does it match user properties? */
if ((device->memory_properties.memoryTypes[i].
propertyFlags & properties) == properties) {
*typeIndex = i;
return TRUE;
}
}
typeBits >>= 1;
}
return FALSE;
}
#define GST_VK_BUFFER_CREATE_INFO_INIT GST_VK_STRUCT_8
#define GST_VK_BUFFER_CREATE_INFO(info, pNext, flags, size, usage, sharingMode, queueFamilyIndexCount, pQueueFamilyIndices ) \
G_STMT_START { \
@ -100,8 +78,9 @@ _create_view_from_args (VkBufferViewCreateInfo * info, VkBuffer buffer,
static void
_vk_buffer_mem_init (GstVulkanBufferMemory * mem, GstAllocator * allocator,
GstMemory * parent, GstVulkanDevice * device, GstAllocationParams * params,
gsize size, gpointer user_data, GDestroyNotify notify)
GstMemory * parent, GstVulkanDevice * device, VkBufferUsageFlags usage,
GstAllocationParams * params, gsize size, gpointer user_data,
GDestroyNotify notify)
{
gsize align = gst_memory_alignment, offset = 0, maxsize = size;
GstMemoryFlags flags = 0;
@ -136,7 +115,6 @@ _vk_buffer_mem_new_alloc (GstAllocator * allocator, GstMemory * parent,
GstVulkanBufferMemory *mem = NULL;
GstAllocationParams params = { 0, };
VkBufferCreateInfo buffer_info;
guint32 memory_type_index;
GError *error = NULL;
VkBuffer buffer;
VkResult err;
@ -154,33 +132,10 @@ _vk_buffer_mem_new_alloc (GstAllocator * allocator, GstMemory * parent,
vkGetBufferMemoryRequirements (device->device, buffer, &mem->requirements);
params.align = mem->requirements.alignment;
_vk_buffer_mem_init (mem, allocator, parent, device, &params,
_vk_buffer_mem_init (mem, allocator, parent, device, usage, &params,
mem->requirements.size, user_data, notify);
mem->buffer = buffer;
if (!_find_memory_type_index_with_type_properties (device,
mem->requirements.memoryTypeBits, mem_prop_flags,
&memory_type_index)) {
GST_CAT_ERROR (GST_CAT_VULKAN_BUFFER_MEMORY,
"Could not find suitable memory type");
goto error;
}
mem->vk_mem = (GstVulkanMemory *)
gst_vulkan_memory_alloc (device, memory_type_index, &params,
mem->requirements.size, mem_prop_flags);
if (!mem->vk_mem) {
GST_CAT_ERROR (GST_CAT_VULKAN_BUFFER_MEMORY,
"Failed to allocate device memory");
goto error;
}
err =
vkBindBufferMemory (device->device, mem->buffer, mem->vk_mem->mem_ptr,
0 /* offset */ );
if (gst_vulkan_error_to_g_error (err, &error, "vkBindBufferMemory") < 0)
goto vk_error;
if (usage & (VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT |
VK_BUFFER_USAGE_STORAGE_TEXEL_BUFFER_BIT |
VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT |
@ -232,7 +187,7 @@ _vk_buffer_mem_new_wrapped (GstAllocator * allocator, GstMemory * parent,
/* no device memory so no mapping */
params.flags = GST_MEMORY_FLAG_NOT_MAPPABLE | GST_MEMORY_FLAG_READONLY;
_vk_buffer_mem_init (mem, allocator, parent, device, &params,
_vk_buffer_mem_init (mem, allocator, parent, device, usage, &params,
mem->requirements.size, user_data, notify);
mem->wrapped = TRUE;
@ -276,16 +231,21 @@ _vk_buffer_mem_map_full (GstVulkanBufferMemory * mem, GstMapInfo * info,
GstMapInfo *vk_map_info;
/* FIXME: possible barrier needed */
g_mutex_lock (&mem->lock);
if (!mem->vk_mem)
if (!mem->vk_mem) {
g_mutex_unlock (&mem->lock);
return NULL;
}
vk_map_info = g_new0 (GstMapInfo, 1);
info->user_data[0] = vk_map_info;
if (!gst_memory_map ((GstMemory *) mem->vk_mem, vk_map_info, info->flags)) {
g_free (vk_map_info);
g_mutex_unlock (&mem->lock);
return NULL;
}
g_mutex_unlock (&mem->lock);
return vk_map_info->data;
}
@ -293,7 +253,9 @@ _vk_buffer_mem_map_full (GstVulkanBufferMemory * mem, GstMapInfo * info,
static void
_vk_buffer_mem_unmap_full (GstVulkanBufferMemory * mem, GstMapInfo * info)
{
g_mutex_lock (&mem->lock);
gst_memory_unmap ((GstMemory *) mem->vk_mem, info->user_data[0]);
g_mutex_unlock (&mem->lock);
g_free (info->user_data[0]);
}
@ -372,6 +334,46 @@ gst_vulkan_buffer_memory_alloc (GstVulkanDevice * device, VkFormat format,
return (GstMemory *) mem;
}
GstMemory *
gst_vulkan_buffer_memory_alloc_bind (GstVulkanDevice * device, VkFormat format,
gsize size, VkBufferUsageFlags usage, VkMemoryPropertyFlags mem_prop_flags)
{
GstAllocationParams params = { 0, };
GstVulkanBufferMemory *mem;
GstVulkanMemory *dev_mem;
guint32 type_idx;
mem =
(GstVulkanBufferMemory *) gst_vulkan_buffer_memory_alloc (device, format,
size, usage, mem_prop_flags);
if (!mem)
return NULL;
if (!gst_vulkan_memory_find_memory_type_index_with_type_properties (device,
mem->requirements.memoryTypeBits, mem_prop_flags, &type_idx)) {
gst_memory_unref (GST_MEMORY_CAST (mem));
return NULL;
}
/* XXX: assumes alignment is a power of 2 */
params.align = mem->requirements.alignment - 1;
dev_mem = (GstVulkanMemory *) gst_vulkan_memory_alloc (device, type_idx,
&params, mem->requirements.size, mem_prop_flags);
if (!dev_mem) {
gst_memory_unref (GST_MEMORY_CAST (mem));
return NULL;
}
if (!gst_vulkan_buffer_memory_bind (mem, dev_mem)) {
gst_memory_unref (GST_MEMORY_CAST (dev_mem));
gst_memory_unref (GST_MEMORY_CAST (mem));
return NULL;
}
gst_memory_unref (GST_MEMORY_CAST (dev_mem));
return (GstMemory *) mem;
}
GstMemory *
gst_vulkan_buffer_memory_wrapped (GstVulkanDevice * device, VkBuffer buffer,
VkFormat format, VkBufferUsageFlags usage, gpointer user_data,
@ -386,6 +388,42 @@ gst_vulkan_buffer_memory_wrapped (GstVulkanDevice * device, VkBuffer buffer,
return (GstMemory *) mem;
}
gboolean
gst_vulkan_buffer_memory_bind (GstVulkanBufferMemory * buf_mem,
GstVulkanMemory * memory)
{
gsize maxsize;
g_return_val_if_fail (gst_is_vulkan_buffer_memory (GST_MEMORY_CAST (buf_mem)),
FALSE);
g_return_val_if_fail (gst_is_vulkan_memory (GST_MEMORY_CAST (memory)), FALSE);
/* will we overrun the allocated data? */
gst_memory_get_sizes (GST_MEMORY_CAST (memory), NULL, &maxsize);
g_return_val_if_fail (memory->vk_offset + buf_mem->requirements.size <=
maxsize, FALSE);
g_mutex_lock (&buf_mem->lock);
if (buf_mem->vk_mem) {
/* the backing memory must not be mapped while (re)binding */
guint vk_mem_map_count = buf_mem->vk_mem->map_count;
if (vk_mem_map_count > 0) {
g_mutex_unlock (&buf_mem->lock);
g_return_val_if_fail (vk_mem_map_count == 0, FALSE);
}
gst_memory_unref (GST_MEMORY_CAST (buf_mem->vk_mem));
}
vkBindBufferMemory (buf_mem->device->device, buf_mem->buffer, memory->mem_ptr,
memory->vk_offset);
buf_mem->vk_mem =
(GstVulkanMemory *) gst_memory_ref (GST_MEMORY_CAST (memory));
g_mutex_unlock (&buf_mem->lock);
return TRUE;
}
G_DEFINE_TYPE (GstVulkanBufferMemoryAllocator,
gst_vulkan_buffer_memory_allocator, GST_TYPE_ALLOCATOR);

View file

@ -52,6 +52,7 @@ struct _GstVulkanBufferMemory
GstVulkanMemory *vk_mem;
VkMemoryRequirements requirements;
VkBufferUsageFlags usage;
GMutex lock;
gboolean wrapped;
@ -88,6 +89,12 @@ GstMemory * gst_vulkan_buffer_memory_alloc (GstVulkanDevice * devi
VkBufferUsageFlags usage,
VkMemoryPropertyFlags mem_prop_flags);
GstMemory * gst_vulkan_buffer_memory_alloc_bind (GstVulkanDevice * device,
VkFormat format,
gsize size,
VkBufferUsageFlags usage,
VkMemoryPropertyFlags mem_prop_flags);
GstMemory * gst_vulkan_buffer_memory_wrapped (GstVulkanDevice * device,
VkBuffer buffer,
VkFormat format,
@ -95,6 +102,9 @@ GstMemory * gst_vulkan_buffer_memory_wrapped (GstVulkanDevice * devi
gpointer user_data,
GDestroyNotify notify);
gboolean gst_vulkan_buffer_memory_bind (GstVulkanBufferMemory * buf_mem,
GstVulkanMemory * memory);
G_END_DECLS
#endif /* _VK_BUFFER_MEMORY_H_ */
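Callers that only need a bound buffer use the new convenience entry point; the explicit bind stays available for memory the caller manages itself. A hedged usage sketch (variable names are illustrative):

/* one call: create the VkBuffer, allocate device memory and bind them */
GstMemory *staging = gst_vulkan_buffer_memory_alloc_bind (device,
    VK_FORMAT_R8G8B8A8_UNORM, size,
    VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT,
    VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);

/* or: bind caller-provided device memory into an as-yet unbound buffer
 * memory; this fails if the memory is currently mapped or too small */
if (!gst_vulkan_buffer_memory_bind (buf_mem, dev_mem))
  GST_WARNING ("could not bind device memory");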

View file

@ -117,28 +117,6 @@ _view_create_info (VkImage image, VkFormat format, VkImageViewCreateInfo * info)
VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1);
}
static gboolean
_find_memory_type_index_with_type_properties (GstVulkanDevice * device,
guint32 typeBits, VkFlags properties, guint32 * typeIndex)
{
guint32 i;
/* Search memtypes to find first index with those properties */
for (i = 0; i < 32; i++) {
if ((typeBits & 1) == 1) {
/* Type is available, does it match user properties? */
if ((device->memory_properties.
memoryTypes[i].propertyFlags & properties) == properties) {
*typeIndex = i;
return TRUE;
}
}
typeBits >>= 1;
}
return FALSE;
}
static gboolean
_create_info_from_args (VkImageCreateInfo * info, VkFormat format, gsize width,
gsize height, VkImageTiling tiling, VkImageUsageFlags usage)
@ -166,8 +144,9 @@ _create_info_from_args (VkImageCreateInfo * info, VkFormat format, gsize width,
static void
_vk_image_mem_init (GstVulkanImageMemory * mem, GstAllocator * allocator,
GstMemory * parent, GstVulkanDevice * device, GstAllocationParams * params,
gsize size, gpointer user_data, GDestroyNotify notify)
GstMemory * parent, GstVulkanDevice * device, VkImageUsageFlags usage,
GstAllocationParams * params, gsize size, gpointer user_data,
GDestroyNotify notify)
{
gsize align = gst_memory_alignment, offset = 0, maxsize = size;
GstMemoryFlags flags = 0;
@ -184,6 +163,7 @@ _vk_image_mem_init (GstVulkanImageMemory * mem, GstAllocator * allocator,
mem->device = gst_object_ref (device);
mem->image_layout = VK_IMAGE_LAYOUT_UNDEFINED;
mem->usage = usage;
mem->wrapped = FALSE;
mem->notify = notify;
mem->user_data = user_data;
@ -205,7 +185,6 @@ _vk_image_mem_new_alloc (GstAllocator * allocator, GstMemory * parent,
GstAllocationParams params = { 0, };
VkImageViewCreateInfo view_info;
VkImageCreateInfo image_info;
guint32 memory_type_index;
VkPhysicalDevice gpu;
GError *error = NULL;
VkImage image;
@ -226,7 +205,7 @@ _vk_image_mem_new_alloc (GstAllocator * allocator, GstMemory * parent,
vkGetImageMemoryRequirements (device->device, image, &mem->requirements);
params.align = mem->requirements.alignment;
_vk_image_mem_init (mem, allocator, parent, device, &params,
_vk_image_mem_init (mem, allocator, parent, device, usage, &params,
mem->requirements.size, user_data, notify);
mem->create_info = image_info;
mem->image = image;
@ -234,29 +213,6 @@ _vk_image_mem_new_alloc (GstAllocator * allocator, GstMemory * parent,
vkGetPhysicalDeviceImageFormatProperties (gpu, format, VK_IMAGE_TYPE_2D,
tiling, usage, 0, &mem->format_properties);
if (!_find_memory_type_index_with_type_properties (device,
mem->requirements.memoryTypeBits, mem_prop_flags,
&memory_type_index)) {
GST_CAT_ERROR (GST_CAT_VULKAN_IMAGE_MEMORY,
"Could not find suitable memory type");
goto error;
}
mem->vk_mem = (GstVulkanMemory *)
gst_vulkan_memory_alloc (device, memory_type_index, &params,
mem->requirements.size, mem_prop_flags);
if (!mem->vk_mem) {
GST_CAT_ERROR (GST_CAT_VULKAN_IMAGE_MEMORY,
"Failed to allocate device memory");
goto error;
}
err =
vkBindImageMemory (device->device, mem->image, mem->vk_mem->mem_ptr,
0 /* offset */ );
if (gst_vulkan_error_to_g_error (err, &error, "vkBindImageMemory") < 0)
goto vk_error;
if (usage & (VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_STORAGE_BIT |
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)) {
_view_create_info (mem->image, format, &view_info);
@ -302,7 +258,7 @@ _vk_image_mem_new_wrapped (GstAllocator * allocator, GstMemory * parent,
vkGetImageMemoryRequirements (device->device, mem->image, &mem->requirements);
params.flags = GST_MEMORY_FLAG_NOT_MAPPABLE | GST_MEMORY_FLAG_READONLY;
_vk_image_mem_init (mem, allocator, parent, device, &params,
_vk_image_mem_init (mem, allocator, parent, device, usage, &params,
mem->requirements.size, user_data, notify);
mem->wrapped = TRUE;
@ -350,16 +306,21 @@ _vk_image_mem_map_full (GstVulkanImageMemory * mem, GstMapInfo * info,
GstMapInfo *vk_map_info;
/* FIXME: possible layout transformation needed */
g_mutex_lock (&mem->lock);
if (!mem->vk_mem)
if (!mem->vk_mem) {
g_mutex_unlock (&mem->lock);
return NULL;
}
vk_map_info = g_new0 (GstMapInfo, 1);
info->user_data[0] = vk_map_info;
if (!gst_memory_map ((GstMemory *) mem->vk_mem, vk_map_info, info->flags)) {
g_free (vk_map_info);
g_mutex_unlock (&mem->lock);
return NULL;
}
g_mutex_unlock (&mem->lock);
return vk_map_info->data;
}
@ -367,7 +328,9 @@ _vk_image_mem_map_full (GstVulkanImageMemory * mem, GstMapInfo * info,
static void
_vk_image_mem_unmap_full (GstVulkanImageMemory * mem, GstMapInfo * info)
{
g_mutex_lock (&mem->lock);
gst_memory_unmap ((GstMemory *) mem->vk_mem, info->user_data[0]);
g_mutex_unlock (&mem->lock);
g_free (info->user_data[0]);
}
@ -468,10 +431,6 @@ gst_vulkan_image_memory_set_layout (GstVulkanImageMemory * vk_mem,
/**
* gst_vulkan_image_memory_alloc:
* @device: a #GstVulkanDevice
* @memory_type_index: the Vulkan memory type index
* @params: a #GstAllocationParams
* @size: the size to allocate
*
* Allocates a new #GstVulkanImageMemory.
*
@ -490,6 +449,47 @@ gst_vulkan_image_memory_alloc (GstVulkanDevice * device, VkFormat format,
return (GstMemory *) mem;
}
GstMemory *
gst_vulkan_image_memory_alloc_bind (GstVulkanDevice * device, VkFormat format,
gsize width, gsize height, VkImageTiling tiling, VkImageUsageFlags usage,
VkMemoryPropertyFlags mem_prop_flags)
{
GstAllocationParams params = { 0, };
GstVulkanImageMemory *mem;
GstVulkanMemory *dev_mem;
guint32 type_idx;
mem =
(GstVulkanImageMemory *) gst_vulkan_image_memory_alloc (device, format,
width, height, tiling, usage, mem_prop_flags);
if (!mem)
return NULL;
if (!gst_vulkan_memory_find_memory_type_index_with_type_properties (device,
mem->requirements.memoryTypeBits, mem_prop_flags, &type_idx)) {
gst_memory_unref (GST_MEMORY_CAST (mem));
return NULL;
}
/* XXX: assumes alignment is a power of 2 */
params.align = mem->requirements.alignment - 1;
dev_mem = (GstVulkanMemory *) gst_vulkan_memory_alloc (device, type_idx,
&params, mem->requirements.size, mem_prop_flags);
if (!dev_mem) {
gst_memory_unref (GST_MEMORY_CAST (mem));
return NULL;
}
if (!gst_vulkan_image_memory_bind (mem, dev_mem)) {
gst_memory_unref (GST_MEMORY_CAST (dev_mem));
gst_memory_unref (GST_MEMORY_CAST (mem));
return NULL;
}
gst_memory_unref (GST_MEMORY_CAST (dev_mem));
return (GstMemory *) mem;
}
GstMemory *
gst_vulkan_image_memory_wrapped (GstVulkanDevice * device, VkImage image,
VkFormat format, gsize width, gsize height, VkImageTiling tiling,
@ -521,6 +521,41 @@ gst_vulkan_image_memory_get_height (GstVulkanImageMemory * image)
return image->create_info.extent.height;
}
gboolean
gst_vulkan_image_memory_bind (GstVulkanImageMemory * img_mem,
GstVulkanMemory * memory)
{
gsize maxsize;
g_return_val_if_fail (gst_is_vulkan_image_memory (GST_MEMORY_CAST (img_mem)),
FALSE);
g_return_val_if_fail (gst_is_vulkan_memory (GST_MEMORY_CAST (memory)), FALSE);
/* will we overrun the allocated data? */
gst_memory_get_sizes (GST_MEMORY_CAST (memory), NULL, &maxsize);
g_return_val_if_fail (memory->vk_offset + img_mem->requirements.size <=
maxsize, FALSE);
g_mutex_lock (&img_mem->lock);
if (img_mem->vk_mem) {
/* the backing memory must not be mapped while (re)binding */
guint vk_mem_map_count = img_mem->vk_mem->map_count;
if (vk_mem_map_count > 0) {
g_mutex_unlock (&img_mem->lock);
g_return_val_if_fail (vk_mem_map_count == 0, FALSE);
}
gst_memory_unref (GST_MEMORY_CAST (img_mem->vk_mem));
}
vkBindImageMemory (img_mem->device->device, img_mem->image, memory->mem_ptr,
memory->vk_offset);
img_mem->vk_mem =
(GstVulkanMemory *) gst_memory_ref (GST_MEMORY_CAST (memory));
g_mutex_unlock (&img_mem->lock);
return TRUE;
}
G_DEFINE_TYPE (GstVulkanImageMemoryAllocator, gst_vulkan_image_memory_allocator,
GST_TYPE_ALLOCATOR);

View file

@ -57,6 +57,7 @@ struct _GstVulkanImageMemory
VkImageCreateInfo create_info;
VkMemoryRequirements requirements;
VkImageFormatProperties format_properties;
VkImageUsageFlags usage;
GMutex lock;
gboolean wrapped;
@ -95,6 +96,14 @@ GstMemory * gst_vulkan_image_memory_alloc (GstVulkanDevice * devic
VkImageUsageFlags usage,
VkMemoryPropertyFlags mem_prop_flags);
GstMemory * gst_vulkan_image_memory_alloc_bind (GstVulkanDevice * device,
VkFormat format,
gsize width,
gsize height,
VkImageTiling tiling,
VkImageUsageFlags usage,
VkMemoryPropertyFlags mem_prop_flags);
GstMemory * gst_vulkan_image_memory_wrapped (GstVulkanDevice * device,
VkImage image,
VkFormat format,
@ -104,6 +113,10 @@ GstMemory * gst_vulkan_image_memory_wrapped (GstVulkanDevice * devic
VkImageUsageFlags usage,
gpointer user_data,
GDestroyNotify notify);
gboolean gst_vulkan_image_memory_bind (GstVulkanImageMemory * img_mem,
GstVulkanMemory * memory);
gboolean gst_vulkan_image_memory_set_layout (GstVulkanImageMemory * vk_mem,
VkImageLayout,
VkImageMemoryBarrier * barrier);
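The image side gains the same split; a bound image is still available in one call (sketch, parameter values illustrative):

GstMemory *img = gst_vulkan_image_memory_alloc_bind (device,
    VK_FORMAT_B8G8R8A8_UNORM, width, height, VK_IMAGE_TILING_OPTIMAL,
    VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
    VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);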

View file

@ -39,6 +39,8 @@ G_BEGIN_DECLS
{ a, b, c, d, e, f }
#define GST_VK_STRUCT_7(a, b, c, d, e, f, g) \
{ a, b, c, d, e, f, g }
#define GST_VK_STRUCT_8(a, b, c, d, e, f, g, h) \
{ a, b, c, d, e, f, g, h }
#define GST_VK_BUFFER_IMAGE_COPY_INIT GST_VK_STRUCT_6
#define GST_VK_BUFFER_IMAGE_COPY(info,bufferOffset_,bufferRowLength_,bufferImageHeight_,imageSubresourceLayers_,imageOffset_,imageExtent_) \

View file

@ -35,6 +35,9 @@
* Vulkan device memory.
*/
/* WARNING: while suballocation is allowed, nothing prevents aliasing which
* requires external synchronisation */
#define GST_CAT_DEFUALT GST_CAT_VULKAN_MEMORY
GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFUALT);
@ -110,6 +113,7 @@ _vk_mem_init (GstVulkanMemory * mem, GstAllocator * allocator,
mem->properties = mem_prop_flags;
mem->notify = notify;
mem->user_data = user_data;
mem->vk_offset = 0;
g_mutex_init (&mem->lock);
@ -128,7 +132,7 @@ _vk_mem_new (GstAllocator * allocator, GstMemory * parent,
VkMemoryPropertyFlags mem_props_flags, gpointer user_data,
GDestroyNotify notify)
{
GstVulkanMemory *mem = g_slice_new0 (GstVulkanMemory);
GstVulkanMemory *mem = g_new0 (GstVulkanMemory, 1);
GError *error = NULL;
VkResult err;
@ -160,7 +164,8 @@ _vk_mem_map_full (GstVulkanMemory * mem, GstMapInfo * info, gsize size)
return NULL;
}
err = vkMapMemory (mem->device->device, mem->mem_ptr, 0, size, 0, &data);
err = vkMapMemory (mem->device->device, mem->mem_ptr, mem->vk_offset,
size, 0, &data);
if (gst_vulkan_error_to_g_error (err, &error, "vkMapMemory") < 0) {
GST_CAT_ERROR (GST_CAT_VULKAN_MEMORY, "Failed to map device memory %s",
error->message);
@ -184,9 +189,30 @@ _vk_mem_copy (GstVulkanMemory * src, gssize offset, gssize size)
}
static GstMemory *
_vk_mem_share (GstVulkanMemory * mem, gssize offset, gssize size)
_vk_mem_share (GstVulkanMemory * mem, gssize offset, gsize size)
{
return NULL;
GstVulkanMemory *shared = g_new0 (GstVulkanMemory, 1);
GstVulkanMemory *parent = mem;
GstAllocationParams params = { 0, };
if (size == -1)
size = mem->mem.size - offset;
g_return_val_if_fail (size > 0, NULL);
/* walk up to the root memory; the sub-allocation is relative to it */
while (GST_MEMORY_CAST (parent)->parent)
parent = (GstVulkanMemory *) GST_MEMORY_CAST (parent)->parent;
params.flags = GST_MEMORY_FLAGS (mem);
params.align = GST_MEMORY_CAST (parent)->align;
_vk_mem_init (shared, _vulkan_memory_allocator, GST_MEMORY_CAST (mem),
parent->device, parent->alloc_info.memoryTypeIndex, &params, size,
parent->properties, NULL, NULL);
shared->mem_ptr = parent->mem_ptr;
shared->wrapped = TRUE;
shared->vk_offset = offset + mem->vk_offset;
return GST_MEMORY_CAST (shared);
}
static gboolean
@ -217,11 +243,35 @@ _vk_mem_free (GstAllocator * allocator, GstMemory * memory)
if (mem->notify)
mem->notify (mem->user_data);
vkFreeMemory (mem->device->device, mem->mem_ptr, NULL);
if (mem->mem_ptr && !mem->wrapped)
vkFreeMemory (mem->device->device, mem->mem_ptr, NULL);
gst_object_unref (mem->device);
}
gboolean
gst_vulkan_memory_find_memory_type_index_with_type_properties (GstVulkanDevice *
device, guint32 typeBits, VkMemoryPropertyFlags properties,
guint32 * typeIndex)
{
guint32 i;
/* Search memtypes to find first index with those properties */
for (i = 0; i < 32; i++) {
if ((typeBits & 1) == 1) {
/* Type is available, does it match user properties? */
if ((device->memory_properties.memoryTypes[i].
propertyFlags & properties) == properties) {
*typeIndex = i;
return TRUE;
}
}
typeBits >>= 1;
}
return FALSE;
}
/**
* gst_vulkan_memory_alloc:
* @device: a #GstVulkanDevice

View file

@ -51,6 +51,7 @@ struct _GstVulkanMemory
/* <protected> */
GMutex lock;
guint map_count;
/* <private> */
GDestroyNotify notify;
@ -58,6 +59,12 @@ struct _GstVulkanMemory
VkMemoryAllocateInfo alloc_info;
VkMemoryPropertyFlags properties;
/* we need our own offset because GstMemory's offset is applied to the
* mapped pointer, which must be avoided when suballocating. vk_offset is
* relative to the root memory */
guint64 vk_offset;
gboolean wrapped;
};
/**
@ -89,6 +96,11 @@ GstMemory * gst_vulkan_memory_alloc (GstVulkanDevice * device,
gsize size,
VkMemoryPropertyFlags mem_prop_flags);
gboolean gst_vulkan_memory_find_memory_type_index_with_type_properties (GstVulkanDevice * device,
guint32 typeBits,
VkMemoryPropertyFlags properties,
guint32 * typeIndex);
G_END_DECLS
#endif /* _GST_VULKAN_BASE_BUFFER_H_ */
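Because GstVulkanMemory can now be shared, a sub-range of one device allocation is usable on its own, with vk_offset keeping it relative to the root allocation. A minimal sketch, assuming type_idx names a host-visible memory type (helper name illustrative, not part of the patch); aliasing the same range from several bindings still needs external synchronisation, as the warning in the .c file notes:

static void
example_map_subrange (GstVulkanDevice * device, guint32 type_idx)
{
  GstAllocationParams params = { 0, };
  GstMemory *chunk, *sub;
  GstMapInfo map;

  chunk = gst_vulkan_memory_alloc (device, type_idx, &params, 4096,
      VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);

  /* _vk_mem_share () stores vk_offset = 1024, relative to the root memory */
  sub = gst_memory_share (chunk, 1024, 1024);

  if (gst_memory_map (sub, &map, GST_MAP_WRITE)) {
    /* vkMapMemory () is called with the sub-allocation's vk_offset */
    map.data[0] = 0;
    gst_memory_unmap (sub, &map);
  }

  gst_memory_unref (sub);
  gst_memory_unref (chunk);
}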

View file

@ -181,8 +181,8 @@ _vulkan_swapper_retrieve_surface_properties (GstVulkanSwapper * swapper,
supports_present =
gst_vulkan_window_get_presentation_support (swapper->window,
swapper->device, i);
if ((swapper->device->
queue_family_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
if ((swapper->device->queue_family_props[i].
queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
if (supports_present) {
/* found one that supports both */
graphics_queue = present_queue = i;
@ -568,8 +568,8 @@ _allocate_swapchain (GstVulkanSwapper * swapper, GstCaps * caps,
n_images_wanted = swapper->surf_props.maxImageCount;
}
if (swapper->surf_props.
supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
if (swapper->
surf_props.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
} else {
preTransform = swapper->surf_props.currentTransform;
@ -609,8 +609,8 @@ _allocate_swapchain (GstVulkanSwapper * swapper, GstCaps * caps,
"Incorrect usage flags available for the swap images");
return FALSE;
}
if ((swapper->
surf_props.supportedUsageFlags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
if ((swapper->surf_props.
supportedUsageFlags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT)
!= 0) {
usage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
} else {
@ -762,10 +762,12 @@ _build_render_buffer_cmd (GstVulkanSwapper * swapper, guint32 swap_idx,
return FALSE;
}
buf_mem =
(GstVulkanBufferMemory *) gst_vulkan_buffer_memory_alloc (swapper->device,
swap_mem->create_info.format, GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, 0) *
GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, 0),
size =
GST_VIDEO_FRAME_PLANE_STRIDE (&vframe,
0) * GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, 0);
buf_mem = (GstVulkanBufferMemory *)
gst_vulkan_buffer_memory_alloc_bind (swapper->device,
swap_mem->create_info.format, size,
VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT,
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT);
@ -807,9 +809,6 @@ _build_render_buffer_cmd (GstVulkanSwapper * swapper, guint32 swap_idx,
return FALSE;
}
size =
GST_VIDEO_FRAME_PLANE_STRIDE (&vframe,
0) * GST_VIDEO_FRAME_COMP_HEIGHT (&vframe, 0);
g_assert (buf_map_info.size >= size);
memcpy (buf_map_info.data, vframe.data[0], size);
gst_memory_unmap ((GstMemory *) buf_mem, &buf_map_info);