va: implement pooled allocators

1. Allocators don't implement the memory free() method, since all the
   memories implement dispose() returning FALSE.
2. The memory/miniobject dispose() acts as the memory release,
   enqueueing the released memory (see the sketch after this list).
3. A new allocator method, prepare_buffer(), queries the released
   memory queue and adds the required memories to the buffer.
4. Allocators gained a GCond to synchronize dispose() and
   prepare_buffer().
5. A new allocator method, flush(), actually frees the memories.
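The dispose-driven recycling of item 2 can be sketched in isolation.
What follows is a minimal, hedged illustration, not code from this
commit: it uses a process-global queue, mutex and cond instead of
per-allocator fields, plain system memory instead of VA surfaces, and
the hypothetical names mem_release(), prepare_buffer() and flush_pool().

/* build: gcc recycle.c $(pkg-config --cflags --libs gstreamer-1.0) */
#include <gst/gst.h>

static GstAtomicQueue *available_mems;
static GMutex lock;
static GCond cond;

/* the release point: revive the memory with an extra ref, queue it for
 * reuse and return FALSE so the miniobject's free() is never reached */
static gboolean
mem_release (GstMiniObject * obj)
{
  GstMemory *mem = GST_MEMORY_CAST (obj);

  g_mutex_lock (&lock);
  gst_atomic_queue_push (available_mems, gst_memory_ref (mem));
  g_cond_signal (&cond);
  g_mutex_unlock (&lock);

  return FALSE;
}

/* wait for a released memory and add it to the buffer */
static void
prepare_buffer (GstBuffer * buffer)
{
  GstMemory *mem;

  g_mutex_lock (&lock);
  while (gst_atomic_queue_length (available_mems) == 0)
    g_cond_wait (&cond, &lock);
  mem = gst_atomic_queue_pop (available_mems);
  g_mutex_unlock (&lock);

  gst_buffer_append_memory (buffer, mem);
}

/* clearing dispose lets the final unref free the memory for real */
static void
flush_pool (void)
{
  GstMemory *mem;

  while ((mem = gst_atomic_queue_pop (available_mems))) {
    GST_MINI_OBJECT_CAST (mem)->dispose = NULL;
    gst_memory_unref (mem);
  }
}

int
main (void)
{
  GstBuffer *buffer;
  GstMemory *mem;

  gst_init (NULL, NULL);
  available_mems = gst_atomic_queue_new (2);
  g_mutex_init (&lock);
  g_cond_init (&cond);

  mem = gst_allocator_alloc (NULL, 1024, NULL);
  GST_MINI_OBJECT_CAST (mem)->dispose = mem_release;

  gst_memory_unref (mem);       /* released, not freed: it gets queued */

  buffer = gst_buffer_new ();
  prepare_buffer (buffer);      /* the same memory is recycled here */
  g_assert (gst_buffer_n_memory (buffer) == 1);
  gst_buffer_unref (buffer);    /* queued again through dispose() */

  flush_pool ();                /* now the memory is really freed */
  gst_atomic_queue_unref (available_mems);

  return 0;
}

Note the extra ref taken inside mem_release(): at dispose time the
refcount has already hit zero, so the queue must revive the memory
before returning FALSE, exactly as the allocators below do with
gst_memory_ref (mem).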

Meanwhile, the bufferpool will:

1. Remove all the memories at reset_buffer().
2. Implement acquire_buffer(), calling the allocator's prepare_buffer().
3. Implement flush_start(), calling the allocator's flush().
4. Disable start(): it pre-allocates buffers, but it also calls our
   reset_buffer(), which drops the memories; the buffers are ditched
   later, something we don't want. Disabling it avoids buffer
   pre-allocation (see the usage sketch after this list).
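For completeness, a hedged sketch of how an element could drive such a
pool; gst_va_pool_new() is assumed to be this module's constructor, and
caps, size and allocator stand in for whatever the caller negotiated:

static gboolean
run_pool_cycle (GstCaps * caps, guint size, GstAllocator * allocator)
{
  GstBufferPool *pool;
  GstStructure *config;
  GstBuffer *buffer = NULL;

  pool = gst_va_pool_new ();
  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, 1, 0);
  gst_buffer_pool_config_set_allocator (config, allocator, NULL);
  if (!gst_buffer_pool_set_config (pool, config))
    goto bail;

  /* start() is NULL, so activation pre-allocates nothing */
  if (!gst_buffer_pool_set_active (pool, TRUE))
    goto bail;

  /* acquire_buffer() returns a newly allocated buffer, or refills an
   * empty pooled one via the allocator's prepare_buffer() */
  if (gst_buffer_pool_acquire_buffer (pool, &buffer, NULL) != GST_FLOW_OK)
    goto bail;

  /* on release, reset_buffer() strips the memories, whose dispose()
   * re-queues them in the allocator instead of freeing them */
  gst_buffer_unref (buffer);

  gst_buffer_pool_set_active (pool, FALSE);
  gst_object_unref (pool);
  return TRUE;

bail:
  gst_object_unref (pool);
  return FALSE;
}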

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/1626>
Víctor Manuel Jáquez Leal, 2020-09-30 19:35:14 +02:00
(committed by GStreamer Merge Bot)
parent d6f9cfc159
commit 734e2a74c4
3 changed files with 240 additions and 26 deletions

sys/va/gstvaallocator.c

@@ -43,6 +43,8 @@ struct _GstVaDmabufAllocator
GstVaDisplay *display;
GstMemoryMapFunction parent_map;
GCond buffer_cond;
};
static void _init_debug_category (void);
@@ -368,41 +370,36 @@ gst_va_dmabuf_mem_map (GstMemory * gmem, gsize maxsize, GstMapFlags flags)
return self->parent_map (gmem, maxsize, flags);
}
static void
gst_va_dmabuf_allocator_finalize (GObject * object)
{
GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (object);
g_cond_clear (&self->buffer_cond);
G_OBJECT_CLASS (dmabuf_parent_class)->finalize (object);
}
static void
gst_va_dmabuf_allocator_dispose (GObject * object)
{
GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (object);
gst_va_dmabuf_allocator_flush (GST_ALLOCATOR (object));
gst_atomic_queue_unref (self->available_mems);
gst_clear_object (&self->display);
G_OBJECT_CLASS (dmabuf_parent_class)->dispose (object);
}
static void
gst_va_dmabuf_allocator_class_init (GstVaDmabufAllocatorClass * klass)
{
GObjectClass *object_class = G_OBJECT_CLASS (klass);
object_class->dispose = gst_va_dmabuf_allocator_dispose;
object_class->finalize = gst_va_dmabuf_allocator_finalize;
}
static void
@@ -468,13 +465,18 @@ gst_va_dmabuf_memory_release (GstMiniObject * mini_object)
{
GstMemory *mem = GST_MEMORY_CAST (mini_object);
GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (mem->allocator);
GST_OBJECT_LOCK (self);
GST_LOG ("releasing %p", mem);
gst_atomic_queue_push (self->available_mems, gst_memory_ref (mem));
g_cond_signal (&self->buffer_cond);
GST_OBJECT_UNLOCK (self);
/* don't call mini_object's free */
return FALSE;
}
/* creates an exported VASurface and adds it as @buffer's memories
@@ -578,6 +580,69 @@ failed:
}
}
gboolean
gst_va_dmabuf_allocator_prepare_buffer (GstAllocator * allocator,
GstBuffer * buffer)
{
GstMemory *pmem, *mem[GST_VIDEO_MAX_PLANES] = { 0, };
GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (allocator);
VASurfaceID surface, psurface;
gint j, idx = 1;
GST_OBJECT_LOCK (self);
/* if available mems, use them */
if (gst_atomic_queue_length (self->available_mems) == 0)
g_cond_wait (&self->buffer_cond, GST_OBJECT_GET_LOCK (self));
mem[0] = gst_atomic_queue_pop (self->available_mems);
surface = gst_va_memory_get_surface (mem[0], NULL);
do {
pmem = gst_atomic_queue_peek (self->available_mems);
if (!pmem)
break;
psurface = gst_va_memory_get_surface (pmem, NULL);
if (psurface != surface)
break;
mem[idx++] = gst_atomic_queue_pop (self->available_mems);
} while (TRUE);
GST_OBJECT_UNLOCK (self);
/* append them in reverse order */
for (j = idx - 1; j >= 0; j--)
gst_buffer_append_memory (buffer, mem[j]);
GST_TRACE_OBJECT (self, "Prepared surface %#x in buffer %p", surface, buffer);
return TRUE;
}
void
gst_va_dmabuf_allocator_flush (GstAllocator * allocator)
{
GstMemory *mem;
GstVaBufferSurface *buf;
GstVaDmabufAllocator *self = GST_VA_DMABUF_ALLOCATOR (allocator);
while ((mem = gst_atomic_queue_pop (self->available_mems))) {
/* destroy the surface */
buf = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
gst_va_buffer_surface_quark ());
if (buf && g_atomic_int_dec_and_test (&buf->ref_count)) {
GST_LOG_OBJECT (self, "Destroying surface %#x", buf->surface);
_destroy_surfaces (self->display, &buf->surface, 1);
g_slice_free (GstVaBufferSurface, buf);
}
GST_MINI_OBJECT_CAST (mem)->dispose = NULL;
gst_memory_unref (mem);
}
}
gboolean
gst_va_dmabuf_try (GstAllocator * allocator, GstVaAllocationParams * params)
{
@@ -672,9 +737,14 @@ struct _GstVaAllocator
{
GstAllocator parent;
/* queue for disposed surfaces */
GstAtomicQueue *available_mems;
GstVaDisplay *display;
gboolean use_derived;
GArray *surface_formats;
GCond buffer_cond;
};
typedef struct _GstVaMemory GstVaMemory;
@@ -701,11 +771,24 @@ G_DEFINE_TYPE_WITH_CODE (GstVaAllocator, gst_va_allocator, GST_TYPE_ALLOCATOR,
static gboolean _va_unmap (GstVaMemory * mem);
static void
gst_va_allocator_finalize (GObject * object)
{
GstVaAllocator *self = GST_VA_ALLOCATOR (object);
g_cond_clear (&self->buffer_cond);
G_OBJECT_CLASS (gst_va_allocator_parent_class)->finalize (object);
}
static void
gst_va_allocator_dispose (GObject * object)
{
GstVaAllocator *self = GST_VA_ALLOCATOR (object);
gst_va_allocator_flush (GST_ALLOCATOR (object));
gst_atomic_queue_unref (self->available_mems);
gst_clear_object (&self->display);
g_clear_pointer (&self->surface_formats, g_array_unref);
@@ -742,6 +825,7 @@ gst_va_allocator_class_init (GstVaAllocatorClass * klass)
GObjectClass *object_class = G_OBJECT_CLASS (klass);
object_class->dispose = gst_va_allocator_dispose;
object_class->finalize = gst_va_allocator_finalize;
allocator_class->free = _va_free;
}
@@ -963,6 +1047,8 @@ gst_va_allocator_init (GstVaAllocator * self)
{
GstAllocator *allocator = GST_ALLOCATOR (self);
self->available_mems = gst_atomic_queue_new (2);
allocator->mem_type = GST_ALLOCATOR_VASURFACE;
allocator->mem_map = (GstMemoryMapFunction) _va_map;
allocator->mem_unmap = (GstMemoryUnmapFunction) _va_unmap;
@@ -970,9 +1056,29 @@ gst_va_allocator_init (GstVaAllocator * self)
self->use_derived = TRUE;
g_cond_init (&self->buffer_cond);
GST_OBJECT_FLAG_SET (self, GST_ALLOCATOR_FLAG_CUSTOM_ALLOC);
}
static gboolean
gst_va_memory_release (GstMiniObject * mini_object)
{
GstMemory *mem = GST_MEMORY_CAST (mini_object);
GstVaAllocator *self = GST_VA_ALLOCATOR (mem->allocator);
GST_OBJECT_LOCK (self);
GST_LOG ("releasing %p", mem);
gst_atomic_queue_push (self->available_mems, gst_memory_ref (mem));
g_cond_signal (&self->buffer_cond);
GST_OBJECT_UNLOCK (self);
/* don't call mini_object's free */
return FALSE;
}
GstMemory *
gst_va_allocator_alloc (GstAllocator * allocator,
GstVaAllocationParams * params)
@@ -1026,6 +1132,8 @@ gst_va_allocator_alloc (GstAllocator * allocator,
_reset_mem (mem, allocator, GST_VIDEO_INFO_SIZE (&params->info));
GST_MINI_OBJECT (mem)->dispose = gst_va_memory_release;
GST_LOG_OBJECT (self, "Created surface %#x [%dx%d]", mem->surface,
GST_VIDEO_INFO_WIDTH (&mem->info), GST_VIDEO_INFO_HEIGHT (&mem->info));
@@ -1047,6 +1155,51 @@ gst_va_allocator_new (GstVaDisplay * display, GArray * surface_formats)
return GST_ALLOCATOR (self);
}
gboolean
gst_va_allocator_prepare_buffer (GstAllocator * allocator, GstBuffer * buffer)
{
GstMemory *mem;
GstVaAllocator *self = GST_VA_ALLOCATOR (allocator);
VASurfaceID surface;
GST_OBJECT_LOCK (self);
/* if available mems, use them */
if (gst_atomic_queue_length (self->available_mems) == 0)
g_cond_wait (&self->buffer_cond, GST_OBJECT_GET_LOCK (self));
mem = gst_atomic_queue_pop (self->available_mems);
GST_OBJECT_UNLOCK (self);
surface = gst_va_memory_get_surface (mem, NULL);
gst_buffer_append_memory (buffer, mem);
GST_TRACE_OBJECT (self, "Prepared surface %#x in buffer %p", surface, buffer);
return TRUE;
}
void
gst_va_allocator_flush (GstAllocator * allocator)
{
GstMemory *mem;
GstVaBufferSurface *buf;
GstVaAllocator *self = GST_VA_ALLOCATOR (allocator);
while ((mem = gst_atomic_queue_pop (self->available_mems))) {
/* destroy the surface */
buf = gst_mini_object_get_qdata (GST_MINI_OBJECT (mem),
gst_va_buffer_surface_quark ());
if (buf && g_atomic_int_dec_and_test (&buf->ref_count)) {
GST_LOG_OBJECT (self, "Destroying surface %#x", buf->surface);
_destroy_surfaces (self->display, &buf->surface, 1);
g_slice_free (GstVaBufferSurface, buf);
}
GST_MINI_OBJECT_CAST (mem)->dispose = NULL;
gst_memory_unref (mem);
}
}
gboolean
gst_va_allocator_try (GstAllocator * allocator, GstVaAllocationParams * params)
{

sys/va/gstvaallocator.h

@@ -42,6 +42,9 @@ GstAllocator * gst_va_dmabuf_allocator_new (GstVaDisplay * displa
gboolean gst_va_dmabuf_allocator_setup_buffer (GstAllocator * allocator,
GstBuffer * buffer,
GstVaAllocationParams * params);
gboolean gst_va_dmabuf_allocator_prepare_buffer (GstAllocator * allocator,
GstBuffer * buffer);
void gst_va_dmabuf_allocator_flush (GstAllocator * allocator);
gboolean gst_va_dmabuf_try (GstAllocator * allocator,
GstVaAllocationParams * params);
@@ -64,6 +67,9 @@ GstAllocator * gst_va_allocator_new (GstVaDisplay * displa
GArray * surface_formats);
GstMemory * gst_va_allocator_alloc (GstAllocator * allocator,
GstVaAllocationParams * params);
gboolean gst_va_allocator_prepare_buffer (GstAllocator * allocator,
GstBuffer * buffer);
void gst_va_allocator_flush (GstAllocator * allocator);
gboolean gst_va_allocator_try (GstAllocator * allocator,
GstVaAllocationParams * params);

sys/va/gstvapool.c

@@ -142,8 +142,11 @@ gst_va_pool_set_config (GstBufferPool * pool, GstStructure * config)
vpool->usage_hint = usage_hint;
vpool->video_align = video_align;
/* with pooled allocators, bufferpool->release_buffer() is cheated
 * because the memories are removed from the buffer at
 * reset_buffer(), so the buffer is an empty holder with size 0
 * while it is released. */
gst_buffer_pool_config_set_params (config, caps, 0, min_buffers, max_buffers);
return GST_BUFFER_POOL_CLASS (parent_class)->set_config (pool, config);
@@ -231,6 +234,54 @@ no_memory:
}
}
static void
gst_va_pool_reset_buffer (GstBufferPool * pool, GstBuffer * buffer)
{
/* Clear all the memories; only the GstBuffer objects are pooled */
gst_buffer_remove_all_memory (buffer);
GST_BUFFER_POOL_CLASS (parent_class)->reset_buffer (pool, buffer);
GST_BUFFER_FLAGS (buffer) = 0;
}
static GstFlowReturn
gst_va_pool_acquire_buffer (GstBufferPool * pool, GstBuffer ** buffer,
GstBufferPoolAcquireParams * params)
{
GstFlowReturn ret;
GstVaPool *vpool = GST_VA_POOL (pool);
ret = GST_BUFFER_POOL_CLASS (parent_class)->acquire_buffer (pool, buffer,
params);
if (ret != GST_FLOW_OK)
return ret;
/* if buffer is new, return it */
if (gst_buffer_n_memory (*buffer) > 0)
return GST_FLOW_OK;
if (GST_IS_VA_DMABUF_ALLOCATOR (vpool->allocator)) {
if (gst_va_dmabuf_allocator_prepare_buffer (vpool->allocator, *buffer))
return GST_FLOW_OK;
} else if (GST_IS_VA_ALLOCATOR (vpool->allocator)) {
if (gst_va_allocator_prepare_buffer (vpool->allocator, *buffer))
return GST_FLOW_OK;
}
gst_buffer_replace (buffer, NULL);
return GST_FLOW_ERROR;
}
static void
gst_va_pool_flush_start (GstBufferPool * pool)
{
GstVaPool *vpool = GST_VA_POOL (pool);
if (GST_IS_VA_DMABUF_ALLOCATOR (vpool->allocator))
gst_va_dmabuf_allocator_flush (vpool->allocator);
else if (GST_IS_VA_ALLOCATOR (vpool->allocator))
gst_va_allocator_flush (vpool->allocator);
}
static void
gst_va_pool_dispose (GObject * object)
{
@@ -254,6 +305,10 @@ gst_va_pool_class_init (GstVaPoolClass * klass)
gstbufferpool_class->get_options = gst_va_pool_get_options;
gstbufferpool_class->set_config = gst_va_pool_set_config;
gstbufferpool_class->alloc_buffer = gst_va_pool_alloc;
gstbufferpool_class->reset_buffer = gst_va_pool_reset_buffer;
gstbufferpool_class->acquire_buffer = gst_va_pool_acquire_buffer;
gstbufferpool_class->flush_start = gst_va_pool_flush_start;
gstbufferpool_class->start = NULL;
}
static void