Mirror of https://gitlab.freedesktop.org/gstreamer/gstreamer.git
miniobject: avoid race when recycling buffers
Avoid a race where a miniobject is recycled and quickly freed, which causes g_type_free_instance() to be called twice on the same object. Ref the object before calling the finalize method and check afterwards whether we still need to free it. Also add a unit test for this case.
Fixes #601587
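In outline, the change reorders the refcount handling around the subclass finalize call. The sketch below is a hypothetical, self-contained model using plain GLib atomics, not GStreamer code: Obj, pool_slot, obj_unref(), obj_free_old() and obj_free_new() are invented for illustration, and only the ordering of the ref / dec-and-test around finalize mirrors the patch in the diff that follows.

#include <glib.h>
#include <stdio.h>

typedef struct
{
  gint refcount;
  gboolean recycle;             /* like MyRecycleBuffer::pool != NULL in the test below */
} Obj;

static Obj *pool_slot;          /* one-slot stand-in for the test's buffer pool */

static void obj_free_new (Obj * obj);

static void
obj_unref (Obj * obj)
{
  if (g_atomic_int_dec_and_test (&obj->refcount))
    obj_free_new (obj);
}

static void
obj_finalize (Obj * obj)
{
  if (obj->recycle) {
    /* resurrect: hand out a new reference from inside finalize */
    g_atomic_int_inc (&obj->refcount);
    pool_slot = obj;
  }
}

/* Pre-patch ordering: finalize first, then free if the refcount is still 0.
 * Race window: another thread can drain the pool and unref between
 * obj_finalize() and the check, so both threads see 0 and both "free". */
static void
obj_free_old (Obj * obj)
{
  obj_finalize (obj);
  if (g_atomic_int_get (&obj->refcount) == 0)
    printf ("old flow: freeing %p\n", (void *) obj);
}

/* Patched ordering: take a guard reference before finalize, dec-and-test
 * afterwards; only the decrement that reaches 0 frees the instance. */
static void
obj_free_new (Obj * obj)
{
  g_atomic_int_inc (&obj->refcount);    /* 0 -> 1, like gst_mini_object_ref() */
  obj_finalize (obj);
  if (g_atomic_int_dec_and_test (&obj->refcount))
    printf ("new flow: freeing %p\n", (void *) obj);
  else
    printf ("new flow: resurrected, pool keeps %p alive\n", (void *) obj);
}

int
main (void)
{
  Obj a = { 1, TRUE };          /* one reference held by a producer */
  Obj b = { 0, FALSE };

  obj_unref (&a);               /* 1 -> 0: finalize recycles it into the pool */
  a.recycle = FALSE;            /* like my_recycle_buffer_destroy() in the test */
  obj_unref (pool_slot);        /* drop the pool's reference: now really freed */

  obj_free_old (&b);            /* old flow, shown in the harmless case only */
  return 0;
}

With the old ordering, the consumer's unref and the post-finalize check can both observe a refcount of 0, so g_type_free_instance() ends up running twice; the guard reference makes exactly one dec-and-test responsible for freeing.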
commit 73f2d464b7 (parent 4d17d331bf)
2 changed files with 188 additions and 7 deletions
gst/gstminiobject.c
@@ -303,8 +303,9 @@ GstMiniObject *
 gst_mini_object_ref (GstMiniObject * mini_object)
 {
   g_return_val_if_fail (mini_object != NULL, NULL);
-  /* we cannot assert that the refcount > 0 since a bufferalloc
-   * function might resurrect an object
+  /* we can't assert that the refcount > 0 since the _free functions
+   * increments the refcount from 0 to 1 again to allow resurecting
+   * the object
    g_return_val_if_fail (mini_object->refcount > 0, NULL);
    */
 #ifdef DEBUG_REFCOUNT
@@ -326,12 +327,17 @@ gst_mini_object_free (GstMiniObject * mini_object)
 {
   GstMiniObjectClass *mo_class;
 
+  /* At this point, the refcount of the object is 0. We increase the refcount
+   * here because if a subclass recycles the object and gives out a new
+   * reference we don't want to free the instance anymore. */
+  gst_mini_object_ref (mini_object);
+
   mo_class = GST_MINI_OBJECT_GET_CLASS (mini_object);
   mo_class->finalize (mini_object);
 
-  /* if the refcount is still 0 we can really free the
-   * object, else the finalize method recycled the object */
-  if (g_atomic_int_get (&mini_object->refcount) == 0) {
+  /* decrement the refcount again, if the subclass recycled the object we don't
+   * want to free the instance anymore */
+  if (G_LIKELY (g_atomic_int_dec_and_test (&mini_object->refcount))) {
 #ifndef GST_DISABLE_TRACE
     gst_alloc_trace_free (_gst_mini_object_trace, mini_object);
 #endif
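The subclass side of the contract is exercised by the new unit test below: a finalize implementation that resurrects the zero-refcount object by handing it to a pool. Condensed from the test's own code (it reuses the test's MyRecycleBuffer/MyBufferPool helpers, so it is a reading aid rather than standalone code), with the refcounts as they evolve under the patched gst_mini_object_free():

static void
my_recycle_buffer_finalize (GstMiniObject * mini_object)
{
  MyRecycleBuffer *self = MY_RECYCLE_BUFFER_CAST (mini_object);

  /* refcount is 1 here: the guard reference taken by gst_mini_object_free() */
  if (self->pool != NULL) {
    /* gst_buffer_ref() inside the pool makes it 2; after _free()'s
     * dec-and-test one reference remains and the pool owns the buffer */
    my_buffer_pool_add (self->pool, GST_BUFFER_CAST (self));
  } else {
    /* detached by my_recycle_buffer_destroy(): chain up, and _free()'s
     * dec-and-test then really releases the instance */
    GST_MINI_OBJECT_CLASS (my_recycle_buffer_parent_class)->finalize
        (mini_object);
  }
}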
tests/check/gst/gstminiobject.c
@@ -175,6 +175,180 @@ GST_START_TEST (test_unref_threaded)
 
 GST_END_TEST;
 
+/* ======== recycle test ======== */
+
+static gint recycle_buffer_count = 10;
+
+#define MY_TYPE_RECYCLE_BUFFER (my_recycle_buffer_get_type ())
+
+#define MY_IS_RECYCLE_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), \
+    MY_TYPE_RECYCLE_BUFFER))
+#define MY_RECYCLE_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), \
+    MY_TYPE_RECYCLE_BUFFER, MyRecycleBuffer))
+#define MY_RECYCLE_BUFFER_CAST(obj) ((MyRecycleBuffer *) (obj))
+
+typedef struct _MyBufferPool MyBufferPool;
+typedef struct _MyRecycleBuffer MyRecycleBuffer;
+typedef struct _MyRecycleBufferClass MyRecycleBufferClass;
+
+struct _MyBufferPool
+{
+  GSList *buffers;
+
+  volatile gboolean is_closed;
+};
+
+struct _MyRecycleBuffer
+{
+  GstBuffer buffer;
+
+  MyBufferPool *pool;
+};
+
+struct _MyRecycleBufferClass
+{
+  GstBufferClass parent_class;
+};
+
+static void my_recycle_buffer_destroy (MyRecycleBuffer * buf);
+
+static MyBufferPool *
+my_buffer_pool_new (void)
+{
+  return g_new0 (MyBufferPool, 1);
+}
+
+static void
+my_buffer_pool_free (MyBufferPool * self)
+{
+  while (self->buffers != NULL) {
+    my_recycle_buffer_destroy (self->buffers->data);
+    self->buffers = g_slist_delete_link (self->buffers, self->buffers);
+  }
+
+  g_free (self);
+}
+
+static void
+my_buffer_pool_add (MyBufferPool * self, GstBuffer * buf)
+{
+  g_mutex_lock (mutex);
+  self->buffers = g_slist_prepend (self->buffers, gst_buffer_ref (buf));
+  g_mutex_unlock (mutex);
+}
+
+static GstBuffer *
+my_buffer_pool_drain_one (MyBufferPool * self)
+{
+  GstBuffer *buf = NULL;
+
+  g_mutex_lock (mutex);
+  if (self->buffers != NULL) {
+    buf = self->buffers->data;
+    self->buffers = g_slist_delete_link (self->buffers, self->buffers);
+  }
+  g_mutex_unlock (mutex);
+
+  return buf;
+}
+
+G_DEFINE_TYPE (MyRecycleBuffer, my_recycle_buffer, GST_TYPE_BUFFER);
+
+static void my_recycle_buffer_finalize (GstMiniObject * mini_object);
+
+static void
+my_recycle_buffer_class_init (MyRecycleBufferClass * klass)
+{
+  GstMiniObjectClass *miniobject_class = GST_MINI_OBJECT_CLASS (klass);
+
+  miniobject_class->finalize = my_recycle_buffer_finalize;
+}
+
+static void
+my_recycle_buffer_init (MyRecycleBuffer * self)
+{
+}
+
+static void
+my_recycle_buffer_finalize (GstMiniObject * mini_object)
+{
+  MyRecycleBuffer *self = MY_RECYCLE_BUFFER_CAST (mini_object);
+
+  if (self->pool != NULL) {
+    my_buffer_pool_add (self->pool, GST_BUFFER_CAST (self));
+    g_usleep (G_USEC_PER_SEC / 100);
+  } else {
+    GST_MINI_OBJECT_CLASS (my_recycle_buffer_parent_class)->finalize
+        (mini_object);
+  }
+}
+
+static GstBuffer *
+my_recycle_buffer_new (MyBufferPool * pool)
+{
+  MyRecycleBuffer *buf;
+
+  buf = MY_RECYCLE_BUFFER (gst_mini_object_new (MY_TYPE_RECYCLE_BUFFER));
+  buf->pool = pool;
+
+  return GST_BUFFER_CAST (buf);
+}
+
+static void
+my_recycle_buffer_destroy (MyRecycleBuffer * buf)
+{
+  buf->pool = NULL;
+  gst_buffer_unref (GST_BUFFER_CAST (buf));
+}
+
+static void
+thread_buffer_producer (MyBufferPool * pool)
+{
+  int j;
+
+  THREAD_START ();
+
+  for (j = 0; j < recycle_buffer_count; ++j) {
+    GstBuffer *buf = my_recycle_buffer_new (pool);
+    gst_buffer_unref (buf);
+  }
+
+  pool->is_closed = TRUE;
+}
+
+static void
+thread_buffer_consumer (MyBufferPool * pool)
+{
+  THREAD_START ();
+
+  do {
+    GstBuffer *buf;
+
+    buf = my_buffer_pool_drain_one (pool);
+    if (buf != NULL)
+      my_recycle_buffer_destroy (MY_RECYCLE_BUFFER_CAST (buf));
+
+    THREAD_SWITCH ();
+  }
+  while (!pool->is_closed);
+}
+
+GST_START_TEST (test_recycle_threaded)
+{
+  MyBufferPool *pool;
+
+  pool = my_buffer_pool_new ();
+
+  MAIN_START_THREADS (1, thread_buffer_producer, pool);
+  MAIN_START_THREADS (1, thread_buffer_consumer, pool);
+
+  MAIN_STOP_THREADS ();
+
+  my_buffer_pool_free (pool);
+}
+
+GST_END_TEST;
+
 /* ======== value collection test ======== */
 typedef struct _MyFoo
 {
@@ -286,6 +460,7 @@ gst_mini_object_suite (void)
   tcase_add_test (tc_chain, test_make_writable);
   tcase_add_test (tc_chain, test_ref_threaded);
   tcase_add_test (tc_chain, test_unref_threaded);
+  tcase_add_test (tc_chain, test_recycle_threaded);
   tcase_add_test (tc_chain, test_value_collection);
   return s;
 }