videoaggregator: Switch to a GstVideoAggregatorConvertPad subclass

This moves all the conversion-related code to a single place, reduces
code duplication inside compositor, and makes the glmixer code less
awkward. It's also the same pattern as used by GstAudioAggregator.
Author: Sebastian Dröge
Date:   2018-05-06 15:21:24 +02:00
commit  ea5de0d757
parent  f777c66dcc

7 changed files with 370 additions and 368 deletions
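
As context for review, here is a minimal sketch of the pattern this commit introduces, reconstructed from the diff below: an element-specific pad now derives from GstVideoAggregatorConvertPad and only overrides the create_conversion_info() vfunc, instead of implementing set_info() together with its own GstVideoConverter bookkeeping. All My*/my_* names are hypothetical; the real in-tree user after this change is compositor (see its hunks further down).

/* Hypothetical pad subclass illustrating the new base class; the header
 * path is assumed to be the one shipped by gst-plugins-bad at the time
 * of this change. */
#include <gst/video/gstvideoaggregator.h>

typedef struct _MyMixerPad
{
  GstVideoAggregatorConvertPad parent;
} MyMixerPad;

typedef struct _MyMixerPadClass
{
  GstVideoAggregatorConvertPadClass parent_class;
} MyMixerPadClass;

G_DEFINE_TYPE (MyMixerPad, my_mixer_pad, GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD);

static void
my_mixer_pad_create_conversion_info (GstVideoAggregatorConvertPad * pad,
    GstVideoAggregator * vagg, GstVideoInfo * conversion_info)
{
  /* Chain up so the base class decides on format/colorimetry/chroma-site,
   * then adjust only the fields this element cares about. The converter
   * itself is created lazily by the base class in prepare_frame(). */
  GST_VIDEO_AGGREGATOR_CONVERT_PAD_CLASS (my_mixer_pad_parent_class)
      ->create_conversion_info (pad, vagg, conversion_info);
  if (!conversion_info->finfo)
    return;

  /* e.g. override width/height here, as compositor does below */
}

static void
my_mixer_pad_class_init (MyMixerPadClass * klass)
{
  GstVideoAggregatorConvertPadClass *convert_pad_class =
      (GstVideoAggregatorConvertPadClass *) klass;

  convert_pad_class->create_conversion_info =
      GST_DEBUG_FUNCPTR (my_mixer_pad_create_conversion_info);
}

static void
my_mixer_pad_init (MyMixerPad * pad)
{
}

Whenever a pad property that affects the conversion changes (compositor does this for its width and height properties below), the subclass calls gst_video_aggregator_convert_pad_update_conversion_info(); the base class then rebuilds the GstVideoConverter lazily in its prepare_frame() implementation.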


@@ -73,7 +73,6 @@ gst_gl_base_mixer_pad_class_init (GstGLBaseMixerPadClass * klass)
   gobject_class->set_property = gst_gl_base_mixer_pad_set_property;
   gobject_class->get_property = gst_gl_base_mixer_pad_get_property;
-  vaggpad_class->set_info = NULL;
   vaggpad_class->prepare_frame = NULL;
   vaggpad_class->clean_frame = NULL;
 }


@@ -75,7 +75,6 @@ gst_gl_mixer_pad_class_init (GstGLMixerPadClass * klass)
   gobject_class->set_property = gst_gl_mixer_pad_set_property;
   gobject_class->get_property = gst_gl_mixer_pad_get_property;
-  vaggpad_class->set_info = NULL;
   vaggpad_class->prepare_frame = gst_gl_mixer_pad_prepare_frame;
   vaggpad_class->clean_frame = gst_gl_mixer_pad_clean_frame;
 }


@@ -361,7 +361,7 @@ gst_iqa_class_init (GstIqaClass * klass)
   gst_element_class_add_static_pad_template_with_gtype (gstelement_class,
       &src_factory, GST_TYPE_AGGREGATOR_PAD);
   gst_element_class_add_static_pad_template_with_gtype (gstelement_class,
-      &sink_factory, GST_TYPE_VIDEO_AGGREGATOR_PAD);
+      &sink_factory, GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD);
   gobject_class->set_property = _set_property;
   gobject_class->get_property = _get_property;


@@ -76,13 +76,6 @@ struct _GstVideoAggregatorPadPrivate
    * colorspace format */
   gboolean needs_alpha;
-  /* Converter, if NULL no conversion is done */
-  GstVideoConverter *convert;
-  /* caps used for conversion if needed */
-  GstVideoInfo conversion_info;
-  GstBuffer *converted_buffer;
   GstClockTime start_time;
   GstClockTime end_time;
@@ -160,75 +153,6 @@ _flush_pad (GstAggregatorPad * aggpad, GstAggregator * aggregator)
   return GST_FLOW_OK;
 }
-static gboolean
-gst_video_aggregator_pad_set_info (GstVideoAggregatorPad * pad,
-    GstVideoAggregator * vagg G_GNUC_UNUSED,
-    GstVideoInfo * current_info, GstVideoInfo * wanted_info)
-{
-  gchar *colorimetry, *best_colorimetry;
-  const gchar *chroma, *best_chroma;
-  if (!current_info->finfo)
-    return TRUE;
-  if (GST_VIDEO_INFO_FORMAT (current_info) == GST_VIDEO_FORMAT_UNKNOWN)
-    return TRUE;
-  if (pad->priv->convert)
-    gst_video_converter_free (pad->priv->convert);
-  pad->priv->convert = NULL;
-  colorimetry = gst_video_colorimetry_to_string (&(current_info->colorimetry));
-  chroma = gst_video_chroma_to_string (current_info->chroma_site);
-  best_colorimetry =
-      gst_video_colorimetry_to_string (&(wanted_info->colorimetry));
-  best_chroma = gst_video_chroma_to_string (wanted_info->chroma_site);
-  if (GST_VIDEO_INFO_FORMAT (wanted_info) !=
-      GST_VIDEO_INFO_FORMAT (current_info)
-      || g_strcmp0 (colorimetry, best_colorimetry)
-      || g_strcmp0 (chroma, best_chroma)) {
-    GstVideoInfo tmp_info;
-    /* Initialize with the wanted video format and our original width and
-     * height as we don't want to rescale. Then copy over the wanted
-     * colorimetry, and chroma-site and our current pixel-aspect-ratio
-     * and other relevant fields.
-     */
-    gst_video_info_set_format (&tmp_info, GST_VIDEO_INFO_FORMAT (wanted_info),
-        current_info->width, current_info->height);
-    tmp_info.chroma_site = wanted_info->chroma_site;
-    tmp_info.colorimetry = wanted_info->colorimetry;
-    tmp_info.par_n = current_info->par_n;
-    tmp_info.par_d = current_info->par_d;
-    tmp_info.fps_n = current_info->fps_n;
-    tmp_info.fps_d = current_info->fps_d;
-    tmp_info.flags = current_info->flags;
-    tmp_info.interlace_mode = current_info->interlace_mode;
-    GST_DEBUG_OBJECT (pad, "This pad will be converted from %d to %d",
-        GST_VIDEO_INFO_FORMAT (current_info),
-        GST_VIDEO_INFO_FORMAT (&tmp_info));
-    pad->priv->convert =
-        gst_video_converter_new (current_info, &tmp_info, NULL);
-    pad->priv->conversion_info = tmp_info;
-    if (!pad->priv->convert) {
-      g_free (colorimetry);
-      g_free (best_colorimetry);
-      GST_WARNING_OBJECT (pad, "No path found for conversion");
-      return FALSE;
-    }
-  } else {
-    GST_DEBUG_OBJECT (pad, "This pad will not need conversion");
-  }
-  g_free (colorimetry);
-  g_free (best_colorimetry);
-  return TRUE;
-}
 static gboolean
 gst_video_aggregator_pad_skip_buffer (GstAggregatorPad * aggpad,
     GstAggregator * agg, GstBuffer * buffer)
@@ -252,63 +176,16 @@ gst_video_aggregator_pad_skip_buffer (GstAggregatorPad * aggpad,
   return ret;
 }
-static void
-gst_video_aggregator_pad_finalize (GObject * o)
-{
-  GstVideoAggregatorPad *vaggpad = GST_VIDEO_AGGREGATOR_PAD (o);
-  if (vaggpad->priv->convert)
-    gst_video_converter_free (vaggpad->priv->convert);
-  vaggpad->priv->convert = NULL;
-  G_OBJECT_CLASS (gst_video_aggregator_pad_parent_class)->finalize (o);
-}
 static gboolean
 gst_video_aggregator_pad_prepare_frame (GstVideoAggregatorPad * pad,
     GstVideoAggregator * vagg, GstBuffer * buffer,
     GstVideoFrame * prepared_frame)
 {
-  GstVideoFrame frame;
-  if (!pad->priv->buffer)
-    return TRUE;
-  if (!gst_video_frame_map (&frame, &pad->info, pad->priv->buffer,
-          GST_MAP_READ)) {
+  if (!gst_video_frame_map (prepared_frame, &pad->info, buffer, GST_MAP_READ)) {
     GST_WARNING_OBJECT (vagg, "Could not map input buffer");
     return FALSE;
   }
-  if (pad->priv->convert) {
-    GstVideoFrame converted_frame;
-    GstBuffer *converted_buf = NULL;
-    static GstAllocationParams params = { 0, 15, 0, 0, };
-    gint converted_size;
-    guint outsize;
-    /* We wait until here to set the conversion infos, in case vagg->info changed */
-    converted_size = pad->priv->conversion_info.size;
-    outsize = GST_VIDEO_INFO_SIZE (&vagg->info);
-    converted_size = converted_size > outsize ? converted_size : outsize;
-    converted_buf = gst_buffer_new_allocate (NULL, converted_size, &params);
-    if (!gst_video_frame_map (&converted_frame, &(pad->priv->conversion_info),
-            converted_buf, GST_MAP_READWRITE)) {
-      GST_WARNING_OBJECT (vagg, "Could not map converted frame");
-      gst_video_frame_unmap (&frame);
-      return FALSE;
-    }
-    gst_video_converter_frame (pad->priv->convert, &frame, &converted_frame);
-    pad->priv->converted_buffer = converted_buf;
-    gst_video_frame_unmap (&frame);
-    *prepared_frame = converted_frame;
-  } else {
-    *prepared_frame = frame;
-  }
   return TRUE;
 }
@@ -320,11 +197,6 @@ gst_video_aggregator_pad_clean_frame (GstVideoAggregatorPad * pad,
     gst_video_frame_unmap (prepared_frame);
     memset (prepared_frame, 0, sizeof (GstVideoFrame));
   }
-  if (pad->priv->converted_buffer) {
-    gst_buffer_unref (pad->priv->converted_buffer);
-    pad->priv->converted_buffer = NULL;
-  }
 }
 static void
@@ -335,7 +207,6 @@ gst_video_aggregator_pad_class_init (GstVideoAggregatorPadClass * klass)
   gobject_class->set_property = gst_video_aggregator_pad_set_property;
   gobject_class->get_property = gst_video_aggregator_pad_get_property;
-  gobject_class->finalize = gst_video_aggregator_pad_finalize;
   g_object_class_install_property (gobject_class, PROP_PAD_ZORDER,
       g_param_spec_uint ("zorder", "Z-Order", "Z Order of the picture",
@@ -352,7 +223,6 @@ gst_video_aggregator_pad_class_init (GstVideoAggregatorPadClass * klass)
   aggpadclass->flush = GST_DEBUG_FUNCPTR (_flush_pad);
   aggpadclass->skip_buffer =
       GST_DEBUG_FUNCPTR (gst_video_aggregator_pad_skip_buffer);
-  klass->set_info = GST_DEBUG_FUNCPTR (gst_video_aggregator_pad_set_info);
   klass->prepare_frame =
       GST_DEBUG_FUNCPTR (gst_video_aggregator_pad_prepare_frame);
   klass->clean_frame = GST_DEBUG_FUNCPTR (gst_video_aggregator_pad_clean_frame);
@@ -367,10 +237,7 @@ gst_video_aggregator_pad_init (GstVideoAggregatorPad * vaggpad)
   vaggpad->priv->zorder = DEFAULT_PAD_ZORDER;
   vaggpad->priv->repeat_after_eos = DEFAULT_PAD_REPEAT_AFTER_EOS;
-  vaggpad->priv->converted_buffer = NULL;
   memset (&vaggpad->priv->prepared_frame, 0, sizeof (GstVideoFrame));
-  vaggpad->priv->convert = NULL;
 }
 /**
@@ -465,6 +332,260 @@ gst_video_aggregator_pad_set_needs_alpha (GstVideoAggregatorPad * pad,
   }
 }
+/****************************************
+ * GstVideoAggregatorConvertPad implementation *
+ ****************************************/
+struct _GstVideoAggregatorConvertPadPrivate
+{
+  /* Converter, if NULL no conversion is done */
+  GstVideoConverter *convert;
+  /* caps used for conversion if needed */
+  GstVideoInfo conversion_info;
+  GstBuffer *converted_buffer;
+  gboolean converter_config_changed;
+};
+G_DEFINE_TYPE (GstVideoAggregatorConvertPad, gst_video_aggregator_convert_pad,
+    GST_TYPE_VIDEO_AGGREGATOR_PAD);
+static void
+gst_video_aggregator_convert_pad_finalize (GObject * o)
+{
+  GstVideoAggregatorConvertPad *vaggpad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (o);
+  if (vaggpad->priv->convert)
+    gst_video_converter_free (vaggpad->priv->convert);
+  vaggpad->priv->convert = NULL;
+  G_OBJECT_CLASS (gst_video_aggregator_pad_parent_class)->finalize (o);
+}
+static void
+gst_video_aggregator_convert_pad_update_conversion_info_internal
+    (GstVideoAggregatorPad * vpad)
+{
+  GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);
+  pad->priv->converter_config_changed = TRUE;
+}
+static gboolean
+gst_video_aggregator_convert_pad_prepare_frame (GstVideoAggregatorPad * vpad,
+    GstVideoAggregator * vagg, GstBuffer * buffer,
+    GstVideoFrame * prepared_frame)
+{
+  GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);
+  GstVideoFrame frame;
+  /* Update/create converter as needed */
+  if (pad->priv->converter_config_changed) {
+    GstVideoAggregatorConvertPadClass *klass =
+        GST_VIDEO_AGGREGATOR_CONVERT_PAD_GET_CLASS (pad);
+    GstVideoInfo conversion_info;
+    gst_video_info_init (&conversion_info);
+    klass->create_conversion_info (pad, vagg, &conversion_info);
+    if (conversion_info.finfo == NULL)
+      return FALSE;
+    pad->priv->converter_config_changed = FALSE;
+    if (!pad->priv->conversion_info.finfo
+        || !gst_video_info_is_equal (&conversion_info,
+            &pad->priv->conversion_info)) {
+      pad->priv->conversion_info = conversion_info;
+      if (pad->priv->convert)
+        gst_video_converter_free (pad->priv->convert);
+      pad->priv->convert = NULL;
+      if (!gst_video_info_is_equal (&vpad->info, &pad->priv->conversion_info)) {
+        pad->priv->convert =
+            gst_video_converter_new (&vpad->info, &pad->priv->conversion_info,
+            NULL);
+        if (!pad->priv->convert) {
+          GST_WARNING_OBJECT (pad, "No path found for conversion");
+          return FALSE;
+        }
+        GST_DEBUG_OBJECT (pad, "This pad will be converted from %d to %d",
+            GST_VIDEO_INFO_FORMAT (&vpad->info),
+            GST_VIDEO_INFO_FORMAT (&pad->priv->conversion_info));
+      } else {
+        GST_DEBUG_OBJECT (pad, "This pad will not need conversion");
+      }
+    }
+  }
+  if (!gst_video_frame_map (&frame, &vpad->info, buffer, GST_MAP_READ)) {
+    GST_WARNING_OBJECT (vagg, "Could not map input buffer");
+    return FALSE;
+  }
+  if (pad->priv->convert) {
+    GstVideoFrame converted_frame;
+    GstBuffer *converted_buf = NULL;
+    static GstAllocationParams params = { 0, 15, 0, 0, };
+    gint converted_size;
+    guint outsize;
+    /* We wait until here to set the conversion infos, in case vagg->info changed */
+    converted_size = pad->priv->conversion_info.size;
+    outsize = GST_VIDEO_INFO_SIZE (&vagg->info);
+    converted_size = converted_size > outsize ? converted_size : outsize;
+    converted_buf = gst_buffer_new_allocate (NULL, converted_size, &params);
+    if (!gst_video_frame_map (&converted_frame, &(pad->priv->conversion_info),
+            converted_buf, GST_MAP_READWRITE)) {
+      GST_WARNING_OBJECT (vagg, "Could not map converted frame");
+      gst_video_frame_unmap (&frame);
+      return FALSE;
+    }
+    gst_video_converter_frame (pad->priv->convert, &frame, &converted_frame);
+    pad->priv->converted_buffer = converted_buf;
+    gst_video_frame_unmap (&frame);
+    *prepared_frame = converted_frame;
+  } else {
+    *prepared_frame = frame;
+  }
+  return TRUE;
+}
+static void
+gst_video_aggregator_convert_pad_clean_frame (GstVideoAggregatorPad * vpad,
+    GstVideoAggregator * vagg, GstVideoFrame * prepared_frame)
+{
+  GstVideoAggregatorConvertPad *pad = GST_VIDEO_AGGREGATOR_CONVERT_PAD (vpad);
+  if (prepared_frame->buffer) {
+    gst_video_frame_unmap (prepared_frame);
+    memset (prepared_frame, 0, sizeof (GstVideoFrame));
+  }
+  if (pad->priv->converted_buffer) {
+    gst_buffer_unref (pad->priv->converted_buffer);
+    pad->priv->converted_buffer = NULL;
+  }
+}
+static void
+gst_video_aggregator_convert_pad_create_conversion_info
+    (GstVideoAggregatorConvertPad * pad, GstVideoAggregator * agg,
+    GstVideoInfo * convert_info)
+{
+  GstVideoAggregatorPad *vpad = GST_VIDEO_AGGREGATOR_PAD (pad);
+  gchar *colorimetry, *best_colorimetry;
+  const gchar *chroma, *best_chroma;
+  g_return_if_fail (GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad));
+  g_return_if_fail (convert_info != NULL);
+  if (!vpad->info.finfo
+      || GST_VIDEO_INFO_FORMAT (&vpad->info) == GST_VIDEO_FORMAT_UNKNOWN) {
+    return;
+  }
+  if (!agg->info.finfo
+      || GST_VIDEO_INFO_FORMAT (&agg->info) == GST_VIDEO_FORMAT_UNKNOWN) {
+    return;
+  }
+  colorimetry = gst_video_colorimetry_to_string (&vpad->info.colorimetry);
+  chroma = gst_video_chroma_to_string (vpad->info.chroma_site);
+  best_colorimetry = gst_video_colorimetry_to_string (&agg->info.colorimetry);
+  best_chroma = gst_video_chroma_to_string (agg->info.chroma_site);
+  if (GST_VIDEO_INFO_FORMAT (&agg->info) != GST_VIDEO_INFO_FORMAT (&vpad->info)
+      || g_strcmp0 (colorimetry, best_colorimetry)
+      || g_strcmp0 (chroma, best_chroma)) {
+    GstVideoInfo tmp_info;
+    /* Initialize with the wanted video format and our original width and
+     * height as we don't want to rescale. Then copy over the wanted
+     * colorimetry, and chroma-site and our current pixel-aspect-ratio
+     * and other relevant fields.
+     */
+    gst_video_info_set_format (&tmp_info, GST_VIDEO_INFO_FORMAT (&agg->info),
+        vpad->info.width, vpad->info.height);
+    tmp_info.chroma_site = agg->info.chroma_site;
+    tmp_info.colorimetry = agg->info.colorimetry;
+    tmp_info.par_n = vpad->info.par_n;
+    tmp_info.par_d = vpad->info.par_d;
+    tmp_info.fps_n = vpad->info.fps_n;
+    tmp_info.fps_d = vpad->info.fps_d;
+    tmp_info.flags = vpad->info.flags;
+    tmp_info.interlace_mode = vpad->info.interlace_mode;
+    *convert_info = tmp_info;
+  } else {
+    *convert_info = vpad->info;
+  }
+  g_free (colorimetry);
+  g_free (best_colorimetry);
+}
+static void
+gst_video_aggregator_convert_pad_class_init (GstVideoAggregatorConvertPadClass *
+    klass)
+{
+  GObjectClass *gobject_class = (GObjectClass *) klass;
+  GstVideoAggregatorPadClass *vaggpadclass =
+      (GstVideoAggregatorPadClass *) klass;
+  gobject_class->finalize = gst_video_aggregator_convert_pad_finalize;
+  g_type_class_add_private (klass,
+      sizeof (GstVideoAggregatorConvertPadPrivate));
+  vaggpadclass->update_conversion_info =
+      GST_DEBUG_FUNCPTR
+      (gst_video_aggregator_convert_pad_update_conversion_info_internal);
+  vaggpadclass->prepare_frame =
+      GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_prepare_frame);
+  vaggpadclass->clean_frame =
+      GST_DEBUG_FUNCPTR (gst_video_aggregator_convert_pad_clean_frame);
+  klass->create_conversion_info =
+      gst_video_aggregator_convert_pad_create_conversion_info;
+}
+static void
+gst_video_aggregator_convert_pad_init (GstVideoAggregatorConvertPad * vaggpad)
+{
+  vaggpad->priv =
+      G_TYPE_INSTANCE_GET_PRIVATE (vaggpad,
+      GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD,
+      GstVideoAggregatorConvertPadPrivate);
+  vaggpad->priv->converted_buffer = NULL;
+  vaggpad->priv->convert = NULL;
+  vaggpad->priv->converter_config_changed = FALSE;
+}
+/**
+ * gst_video_aggregator_convert_pad_update_conversion_info:
+ * @pad: a #GstVideoAggregatorPad
+ *
+ * Requests the pad to check and update the converter before the next usage to
+ * update for any changes that have happened.
+ *
+ */
+void gst_video_aggregator_convert_pad_update_conversion_info
+    (GstVideoAggregatorConvertPad * pad)
+{
+  g_return_if_fail (GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD (pad));
+  pad->priv->converter_config_changed = TRUE;
+}
 /**************************************
  * GstVideoAggregator implementation *
  **************************************/
@@ -818,9 +939,8 @@ gst_video_aggregator_default_negotiated_src_caps (GstAggregator * agg,
     GstVideoAggregatorPadClass *vaggpad_klass =
         GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (pad);
-    if (vaggpad_klass->set_info
-        && !vaggpad_klass->set_info (pad, vagg, &pad->info, &vagg->info)) {
-      return FALSE;
+    if (vaggpad_klass->update_conversion_info) {
+      vaggpad_klass->update_conversion_info (pad);
     }
   }


@@ -42,7 +42,7 @@ typedef struct _GstVideoAggregatorPrivate GstVideoAggregatorPrivate;
   (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_AGGREGATOR_PAD, GstVideoAggregatorPad))
 #define GST_VIDEO_AGGREGATOR_PAD_CAST(obj) ((GstVideoAggregatorPad *)(obj))
 #define GST_VIDEO_AGGREGATOR_PAD_CLASS(klass) \
-  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_COMPOSITOR_PAD, GstVideoAggregatorPadClass))
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_AGGREGATOR_PAD, GstVideoAggregatorPadClass))
 #define GST_IS_VIDEO_AGGREGATOR_PAD(obj) \
   (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_AGGREGATOR_PAD))
 #define GST_IS_VIDEO_AGGREGATOR_PAD_CLASS(klass) \
@@ -89,10 +89,7 @@ struct _GstVideoAggregatorPad
 struct _GstVideoAggregatorPadClass
 {
   GstAggregatorPadClass parent_class;
-  gboolean (*set_info) (GstVideoAggregatorPad * pad,
-      GstVideoAggregator * videoaggregator,
-      GstVideoInfo * current_info,
-      GstVideoInfo * wanted_info);
+  void (*update_conversion_info) (GstVideoAggregatorPad * pad);
   gboolean (*prepare_frame) (GstVideoAggregatorPad * pad,
       GstVideoAggregator * videoaggregator,
@@ -121,6 +118,59 @@ GstVideoFrame * gst_video_aggregator_pad_get_prepared_frame (GstVideoAggregatorP
 GST_VIDEO_BAD_API
 void gst_video_aggregator_pad_set_needs_alpha (GstVideoAggregatorPad *pad, gboolean needs_alpha);
+/****************************
+ * GstVideoAggregatorPad Structs *
+ ***************************/
+#define GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD (gst_video_aggregator_convert_pad_get_type())
+#define GST_VIDEO_AGGREGATOR_CONVERT_PAD(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD, GstVideoAggregatorConvertPad))
+#define GST_VIDEO_AGGREGATOR_CONVERT_PAD_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD, GstVideoAggregatorConvertPadClass))
+#define GST_VIDEO_AGGREGATOR_CONVERT_PAD_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj),GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD, GstVideoAggregatorConvertPadClass))
+#define GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD))
+#define GST_IS_VIDEO_AGGREGATOR_CONVERT_PAD_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD))
+typedef struct _GstVideoAggregatorConvertPad GstVideoAggregatorConvertPad;
+typedef struct _GstVideoAggregatorConvertPadClass GstVideoAggregatorConvertPadClass;
+typedef struct _GstVideoAggregatorConvertPadPrivate GstVideoAggregatorConvertPadPrivate;
+/**
+ * GstVideoAggregatorConvertPad:
+ *
+ * An implementation of GstPad that can be used with #GstVideoAggregator.
+ *
+ * See #GstVideoAggregator for more details.
+ */
+struct _GstVideoAggregatorConvertPad
+{
+  /*< private >*/
+  GstVideoAggregatorPad parent;
+  GstVideoAggregatorConvertPadPrivate *priv;
+  gpointer _gst_reserved[GST_PADDING];
+};
+/**
+ * GstVideoAggregatorConvertPadClass:
+ *
+ */
+struct _GstVideoAggregatorConvertPadClass
+{
+  GstVideoAggregatorPadClass parent_class;
+  void (*create_conversion_info) (GstVideoAggregatorConvertPad *pad, GstVideoAggregator *agg, GstVideoInfo *conversion_info);
+  /*< private >*/
+  gpointer _gst_reserved[GST_PADDING];
+};
+GST_VIDEO_BAD_API
+GType gst_video_aggregator_convert_pad_get_type (void);
+GST_VIDEO_BAD_API
+void gst_video_aggregator_convert_pad_update_conversion_info (GstVideoAggregatorConvertPad * pad);
 #define GST_TYPE_VIDEO_AGGREGATOR (gst_video_aggregator_get_type())
 #define GST_VIDEO_AGGREGATOR(obj) \
   (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VIDEO_AGGREGATOR, GstVideoAggregator))


@@ -141,7 +141,7 @@ enum
 };
 G_DEFINE_TYPE (GstCompositorPad, gst_compositor_pad,
-    GST_TYPE_VIDEO_AGGREGATOR_PAD);
+    GST_TYPE_VIDEO_AGGREGATOR_CONVERT_PAD);
 static void
 gst_compositor_pad_get_property (GObject * object, guint prop_id,
@@ -158,9 +158,13 @@ gst_compositor_pad_get_property (GObject * object, guint prop_id,
       break;
     case PROP_PAD_WIDTH:
       g_value_set_int (value, pad->width);
+      gst_video_aggregator_convert_pad_update_conversion_info
+          (GST_VIDEO_AGGREGATOR_CONVERT_PAD (pad));
       break;
     case PROP_PAD_HEIGHT:
       g_value_set_int (value, pad->height);
+      gst_video_aggregator_convert_pad_update_conversion_info
+          (GST_VIDEO_AGGREGATOR_CONVERT_PAD (pad));
       break;
     case PROP_PAD_ALPHA:
       g_value_set_double (value, pad->alpha);
@@ -255,95 +259,6 @@ _mixer_pad_get_output_size (GstCompositor * comp,
   *height = pad_height;
 }
-static gboolean
-gst_compositor_pad_set_info (GstVideoAggregatorPad * pad,
-    GstVideoAggregator * vagg G_GNUC_UNUSED,
-    GstVideoInfo * current_info, GstVideoInfo * wanted_info)
-{
-  GstCompositor *comp = GST_COMPOSITOR (vagg);
-  GstCompositorPad *cpad = GST_COMPOSITOR_PAD (pad);
-  gchar *colorimetry, *best_colorimetry;
-  const gchar *chroma, *best_chroma;
-  gint width, height;
-  if (!current_info->finfo)
-    return TRUE;
-  if (GST_VIDEO_INFO_FORMAT (current_info) == GST_VIDEO_FORMAT_UNKNOWN)
-    return TRUE;
-  if (cpad->convert)
-    gst_video_converter_free (cpad->convert);
-  cpad->convert = NULL;
-  if (GST_VIDEO_INFO_MULTIVIEW_MODE (current_info) !=
-      GST_VIDEO_MULTIVIEW_MODE_NONE
-      && GST_VIDEO_INFO_MULTIVIEW_MODE (current_info) !=
-      GST_VIDEO_MULTIVIEW_MODE_MONO) {
-    GST_FIXME_OBJECT (pad, "Multiview support is not implemented yet");
-    return FALSE;
-  }
-  colorimetry = gst_video_colorimetry_to_string (&(current_info->colorimetry));
-  chroma = gst_video_chroma_to_string (current_info->chroma_site);
-  best_colorimetry =
-      gst_video_colorimetry_to_string (&(wanted_info->colorimetry));
-  best_chroma = gst_video_chroma_to_string (wanted_info->chroma_site);
-  _mixer_pad_get_output_size (comp, cpad, GST_VIDEO_INFO_PAR_N (&vagg->info),
-      GST_VIDEO_INFO_PAR_D (&vagg->info), &width, &height);
-  if (GST_VIDEO_INFO_FORMAT (wanted_info) !=
-      GST_VIDEO_INFO_FORMAT (current_info)
-      || g_strcmp0 (colorimetry, best_colorimetry)
-      || g_strcmp0 (chroma, best_chroma)
-      || width != current_info->width || height != current_info->height) {
-    GstVideoInfo tmp_info;
-    /* Initialize with the wanted video format and our original width and
-     * height as we don't want to rescale. Then copy over the wanted
-     * colorimetry, and chroma-site and our current pixel-aspect-ratio
-     * and other relevant fields.
-     */
-    gst_video_info_set_format (&tmp_info, GST_VIDEO_INFO_FORMAT (wanted_info),
-        width, height);
-    tmp_info.chroma_site = wanted_info->chroma_site;
-    tmp_info.colorimetry = wanted_info->colorimetry;
-    tmp_info.par_n = wanted_info->par_n;
-    tmp_info.par_d = wanted_info->par_d;
-    tmp_info.fps_n = current_info->fps_n;
-    tmp_info.fps_d = current_info->fps_d;
-    tmp_info.flags = current_info->flags;
-    tmp_info.interlace_mode = current_info->interlace_mode;
-    GST_DEBUG_OBJECT (pad, "This pad will be converted from format %s to %s, "
-        "colorimetry %s to %s, chroma-site %s to %s, "
-        "width/height %d/%d to %d/%d",
-        current_info->finfo->name, tmp_info.finfo->name,
-        colorimetry, best_colorimetry,
-        chroma, best_chroma,
-        current_info->width, current_info->height, width, height);
-    cpad->convert = gst_video_converter_new (current_info, &tmp_info, NULL);
-    cpad->conversion_info = tmp_info;
-    if (!cpad->convert) {
-      g_free (colorimetry);
-      g_free (best_colorimetry);
-      GST_WARNING_OBJECT (pad, "No path found for conversion");
-      return FALSE;
-    }
-  } else {
-    cpad->conversion_info = *current_info;
-    GST_DEBUG_OBJECT (pad, "This pad will not need conversion");
-  }
-  g_free (colorimetry);
-  g_free (best_colorimetry);
-  return TRUE;
-}
 /* Test whether rectangle2 contains rectangle 1 (geometrically) */
 static gboolean
 is_rectangle_contained (GstVideoRectangle rect1, GstVideoRectangle rect2)
@@ -382,9 +297,6 @@ gst_compositor_pad_prepare_frame (GstVideoAggregatorPad * pad,
 {
   GstCompositor *comp = GST_COMPOSITOR (vagg);
   GstCompositorPad *cpad = GST_COMPOSITOR_PAD (pad);
-  guint outsize;
-  GstVideoFrame frame;
-  static GstAllocationParams params = { 0, 15, 0, 0, };
   gint width, height;
   gboolean frame_obscured = FALSE;
   GList *l;
@@ -407,70 +319,6 @@ gst_compositor_pad_prepare_frame (GstVideoAggregatorPad * pad,
   _mixer_pad_get_output_size (comp, cpad, GST_VIDEO_INFO_PAR_N (&vagg->info),
       GST_VIDEO_INFO_PAR_D (&vagg->info), &width, &height);
-  /* The only thing that can change here is the width
-   * and height, otherwise set_info would've been called */
-  if (GST_VIDEO_INFO_WIDTH (&cpad->conversion_info) != width ||
-      GST_VIDEO_INFO_HEIGHT (&cpad->conversion_info) != height) {
-    gchar *colorimetry, *wanted_colorimetry;
-    const gchar *chroma, *wanted_chroma;
-    /* We might end up with no converter afterwards if
-     * the only reason for conversion was a different
-     * width or height
-     */
-    if (cpad->convert)
-      gst_video_converter_free (cpad->convert);
-    cpad->convert = NULL;
-    colorimetry = gst_video_colorimetry_to_string (&pad->info.colorimetry);
-    chroma = gst_video_chroma_to_string (pad->info.chroma_site);
-    wanted_colorimetry =
-        gst_video_colorimetry_to_string (&cpad->conversion_info.colorimetry);
-    wanted_chroma =
-        gst_video_chroma_to_string (cpad->conversion_info.chroma_site);
-    if (GST_VIDEO_INFO_FORMAT (&pad->info) !=
-        GST_VIDEO_INFO_FORMAT (&cpad->conversion_info)
-        || g_strcmp0 (colorimetry, wanted_colorimetry)
-        || g_strcmp0 (chroma, wanted_chroma)
-        || width != GST_VIDEO_INFO_WIDTH (&pad->info)
-        || height != GST_VIDEO_INFO_HEIGHT (&pad->info)) {
-      GstVideoInfo tmp_info;
-      gst_video_info_set_format (&tmp_info, cpad->conversion_info.finfo->format,
-          width, height);
-      tmp_info.chroma_site = cpad->conversion_info.chroma_site;
-      tmp_info.colorimetry = cpad->conversion_info.colorimetry;
-      tmp_info.par_n = vagg->info.par_n;
-      tmp_info.par_d = vagg->info.par_d;
-      tmp_info.fps_n = cpad->conversion_info.fps_n;
-      tmp_info.fps_d = cpad->conversion_info.fps_d;
-      tmp_info.flags = cpad->conversion_info.flags;
-      tmp_info.interlace_mode = cpad->conversion_info.interlace_mode;
-      GST_DEBUG_OBJECT (pad, "This pad will be converted from %d to %d",
-          GST_VIDEO_INFO_FORMAT (&pad->info),
-          GST_VIDEO_INFO_FORMAT (&tmp_info));
-      cpad->convert = gst_video_converter_new (&pad->info, &tmp_info, NULL);
-      cpad->conversion_info = tmp_info;
-      if (!cpad->convert) {
-        GST_WARNING_OBJECT (pad, "No path found for conversion");
-        g_free (colorimetry);
-        g_free (wanted_colorimetry);
-        return FALSE;
-      }
-    } else {
-      GST_VIDEO_INFO_WIDTH (&cpad->conversion_info) = width;
-      GST_VIDEO_INFO_HEIGHT (&cpad->conversion_info) = height;
-    }
-    g_free (colorimetry);
-    g_free (wanted_colorimetry);
-  }
   if (cpad->alpha == 0.0) {
     GST_DEBUG_OBJECT (vagg, "Pad has alpha 0.0, not converting frame");
     goto done;
@@ -536,37 +384,10 @@ gst_compositor_pad_prepare_frame (GstVideoAggregatorPad * pad,
   if (frame_obscured)
     goto done;
-  if (!gst_video_frame_map (&frame, &pad->info, buffer, GST_MAP_READ)) {
-    GST_WARNING_OBJECT (vagg, "Could not map input buffer");
-    return FALSE;
-  }
-  if (cpad->convert) {
-    gint converted_size;
-    GstVideoFrame converted_frame;
-    GstBuffer *converted_buf = NULL;
-    /* We wait until here to set the conversion infos, in case vagg->info changed */
-    converted_size = GST_VIDEO_INFO_SIZE (&cpad->conversion_info);
-    outsize = GST_VIDEO_INFO_SIZE (&vagg->info);
-    converted_size = converted_size > outsize ? converted_size : outsize;
-    converted_buf = gst_buffer_new_allocate (NULL, converted_size, &params);
-    if (!gst_video_frame_map (&converted_frame, &(cpad->conversion_info),
-            converted_buf, GST_MAP_READWRITE)) {
-      GST_WARNING_OBJECT (vagg, "Could not map converted frame");
-      gst_video_frame_unmap (&frame);
-      return FALSE;
-    }
-    gst_video_converter_frame (cpad->convert, &frame, &converted_frame);
-    cpad->converted_buffer = converted_buf;
-    gst_video_frame_unmap (&frame);
-    *prepared_frame = converted_frame;
-  } else {
-    *prepared_frame = frame;
-  }
+  return
+      GST_VIDEO_AGGREGATOR_PAD_CLASS
+      (gst_compositor_pad_parent_class)->prepare_frame (pad, vagg, buffer,
+      prepared_frame);
 done:
@@ -574,32 +395,46 @@
 }
 static void
-gst_compositor_pad_clean_frame (GstVideoAggregatorPad * pad,
-    GstVideoAggregator * vagg, GstVideoFrame * prepared_frame)
+gst_compositor_pad_create_conversion_info (GstVideoAggregatorConvertPad * pad,
+    GstVideoAggregator * vagg, GstVideoInfo * conversion_info)
 {
+  GstCompositor *comp = GST_COMPOSITOR (vagg);
   GstCompositorPad *cpad = GST_COMPOSITOR_PAD (pad);
-  if (prepared_frame->buffer) {
-    gst_video_frame_unmap (prepared_frame);
-    memset (prepared_frame, 0, sizeof (GstVideoFrame));
+  gint width, height;
+  GST_VIDEO_AGGREGATOR_CONVERT_PAD_CLASS
+      (gst_compositor_pad_parent_class)->create_conversion_info (pad, vagg,
+      conversion_info);
+  if (!conversion_info->finfo)
+    return;
+  _mixer_pad_get_output_size (comp, cpad, GST_VIDEO_INFO_PAR_N (&vagg->info),
+      GST_VIDEO_INFO_PAR_D (&vagg->info), &width, &height);
+  /* The only thing that can change here is the width
+   * and height, otherwise set_info would've been called */
+  if (GST_VIDEO_INFO_WIDTH (conversion_info) != width ||
+      GST_VIDEO_INFO_HEIGHT (conversion_info) != height) {
+    GstVideoInfo tmp_info;
+    /* Initialize with the wanted video format and our original width and
+     * height as we don't want to rescale. Then copy over the wanted
+     * colorimetry, and chroma-site and our current pixel-aspect-ratio
+     * and other relevant fields.
+     */
+    gst_video_info_set_format (&tmp_info,
+        GST_VIDEO_INFO_FORMAT (conversion_info), width, height);
+    tmp_info.chroma_site = conversion_info->chroma_site;
+    tmp_info.colorimetry = conversion_info->colorimetry;
+    tmp_info.par_n = conversion_info->par_n;
+    tmp_info.par_d = conversion_info->par_d;
+    tmp_info.fps_n = conversion_info->fps_n;
+    tmp_info.fps_d = conversion_info->fps_d;
+    tmp_info.flags = conversion_info->flags;
+    tmp_info.interlace_mode = conversion_info->interlace_mode;
+    *conversion_info = tmp_info;
   }
-  if (cpad->converted_buffer) {
-    gst_buffer_unref (cpad->converted_buffer);
-    cpad->converted_buffer = NULL;
-  }
-}
-static void
-gst_compositor_pad_finalize (GObject * object)
-{
-  GstCompositorPad *pad = GST_COMPOSITOR_PAD (object);
-  if (pad->convert)
-    gst_video_converter_free (pad->convert);
-  pad->convert = NULL;
-  G_OBJECT_CLASS (gst_compositor_pad_parent_class)->finalize (object);
 }
 static void
@@ -608,10 +443,11 @@ gst_compositor_pad_class_init (GstCompositorPadClass * klass)
   GObjectClass *gobject_class = (GObjectClass *) klass;
   GstVideoAggregatorPadClass *vaggpadclass =
       (GstVideoAggregatorPadClass *) klass;
+  GstVideoAggregatorConvertPadClass *vaggcpadclass =
+      (GstVideoAggregatorConvertPadClass *) klass;
   gobject_class->set_property = gst_compositor_pad_set_property;
   gobject_class->get_property = gst_compositor_pad_get_property;
-  gobject_class->finalize = gst_compositor_pad_finalize;
   g_object_class_install_property (gobject_class, PROP_PAD_XPOS,
       g_param_spec_int ("xpos", "X Position", "X Position of the picture",
@@ -640,11 +476,11 @@ gst_compositor_pad_class_init (GstCompositorPadClass * klass)
           -1.0, 1.0, DEFAULT_PAD_CROSSFADE_RATIO,
           G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE | G_PARAM_STATIC_STRINGS));
-  vaggpadclass->set_info = GST_DEBUG_FUNCPTR (gst_compositor_pad_set_info);
   vaggpadclass->prepare_frame =
       GST_DEBUG_FUNCPTR (gst_compositor_pad_prepare_frame);
-  vaggpadclass->clean_frame =
-      GST_DEBUG_FUNCPTR (gst_compositor_pad_clean_frame);
+  vaggcpadclass->create_conversion_info =
+      GST_DEBUG_FUNCPTR (gst_compositor_pad_create_conversion_info);
 }
 static void
@@ -1052,6 +888,8 @@ gst_compositor_crossfade_frames (GstCompositor * self, GstVideoFrame * outframe)
   for (l = GST_ELEMENT (self)->sinkpads; l; l = l->next) {
     GstVideoAggregatorPad *pad = l->data;
     GstCompositorPad *compo_pad = GST_COMPOSITOR_PAD (pad);
+    GstVideoAggregatorPadClass *pad_class =
+        GST_VIDEO_AGGREGATOR_PAD_GET_CLASS (compo_pad);
     GstVideoFrame *prepared_frame =
         gst_video_aggregator_pad_get_prepared_frame (pad);
@@ -1084,16 +922,16 @@ gst_compositor_crossfade_frames (GstCompositor * self, GstVideoFrame * outframe)
           COMPOSITOR_BLEND_MODE_ADDITIVE);
       /* Replace frame with current frame */
-      gst_compositor_pad_clean_frame (npad, vagg, next_prepared_frame);
+      pad_class->clean_frame (npad, vagg, next_prepared_frame);
       if (!all_crossfading)
         *next_prepared_frame = nframe;
       next_compo_pad->crossfaded = TRUE;
       /* Frame is now consumed, clean it up */
-      gst_compositor_pad_clean_frame (pad, vagg, prepared_frame);
+      pad_class->clean_frame (pad, vagg, prepared_frame);
     } else {
       GST_LOG_OBJECT (self, "Simply fading out as no following pad found");
-      gst_compositor_pad_clean_frame (pad, vagg, prepared_frame);
+      pad_class->clean_frame (pad, vagg, prepared_frame);
       if (!all_crossfading)
         *prepared_frame = nframe;
       compo_pad->crossfaded = TRUE;


@@ -46,7 +46,7 @@ typedef struct _GstCompositorPadClass GstCompositorPadClass;
  */
 struct _GstCompositorPad
 {
-  GstVideoAggregatorPad parent;
+  GstVideoAggregatorConvertPad parent;
   /* properties */
   gint xpos, ypos;
@@ -54,16 +54,12 @@ struct _GstCompositorPad
   gdouble alpha;
   gdouble crossfade;
-  GstVideoConverter *convert;
-  GstVideoInfo conversion_info;
-  GstBuffer *converted_buffer;
   gboolean crossfaded;
 };
 struct _GstCompositorPadClass
 {
-  GstVideoAggregatorPadClass parent_class;
+  GstVideoAggregatorConvertPadClass parent_class;
 };
 GType gst_compositor_pad_get_type (void);