aggregator: add simple support for caps handling

Modelled on the videoaggregator caps handling, as that is the most mature
caps-handling implementation among the aggregator-based elements.

https://bugzilla.gnome.org/show_bug.cgi?id=776931
Authored by Matthew Waters on 2017-05-20 14:24:57 +02:00; committed by Olivier Crête
parent 2a60a9f66f
commit 719498601f
7 changed files with 166 additions and 302 deletions
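For orientation (this sketch is not part of the commit): after this change an aggregator subclass negotiates its source caps through the GstAggregatorClass vfuncs used throughout the diff below. The GstMyMixer type and its have_input_info field are hypothetical, and the GObject boilerplate (instance/class structs, G_DEFINE_TYPE, parent_class) is omitted.

#include <gst/base/gstaggregator.h>

/* Hypothetical subclass: decide the output caps, then chain up once they
 * have been negotiated so the base class can apply them on the src pad. */
static GstFlowReturn
my_mixer_update_src_caps (GstAggregator * agg, GstCaps * caps, GstCaps ** ret)
{
  GstMyMixer *self = (GstMyMixer *) agg;

  /* Not enough information from the sink pads yet: ask aggregator to try
   * again later instead of failing negotiation outright. */
  if (!self->have_input_info)
    return GST_AGGREGATOR_FLOW_NEED_DATA;

  /* Hand back the (possibly adjusted) proposed caps. */
  *ret = gst_caps_ref (caps);
  return GST_FLOW_OK;
}

static gboolean
my_mixer_negotiated_src_caps (GstAggregator * agg, GstCaps * caps)
{
  /* ... configure the element for the fixated output caps ... */

  /* Chain up so the parent class (e.g. GstVideoAggregator) can set the
   * caps on the src pad and update the latency. */
  return GST_AGGREGATOR_CLASS (parent_class)->negotiated_src_caps (agg, caps);
}

static void
my_mixer_class_init (GstMyMixerClass * klass)
{
  GstAggregatorClass *agg_class = (GstAggregatorClass *) klass;

  agg_class->update_src_caps = my_mixer_update_src_caps;
  agg_class->negotiated_src_caps = my_mixer_negotiated_src_caps;
}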

View file

@@ -108,11 +108,14 @@ gst_gl_base_mixer_pad_set_property (GObject * object, guint prop_id,
 }
 
 static gboolean
-_negotiated_caps (GstVideoAggregator * vagg, GstCaps * caps)
+_negotiated_caps (GstAggregator * agg, GstCaps * caps)
 {
-  GstGLBaseMixer *mix = GST_GL_BASE_MIXER (vagg);
+  GstGLBaseMixer *mix = GST_GL_BASE_MIXER (agg);
 
-  return gst_gl_base_mixer_do_bufferpool (mix, caps);
+  if (!gst_gl_base_mixer_do_bufferpool (mix, caps))
+    return FALSE;
+
+  return GST_AGGREGATOR_CLASS (parent_class)->negotiated_src_caps (agg, caps);
 }
 
 static gboolean
@@ -324,9 +327,6 @@ gst_gl_base_mixer_class_init (GstGLBaseMixerClass * klass)
 {
   GObjectClass *gobject_class;
   GstElementClass *element_class;
-  GstVideoAggregatorClass *videoaggregator_class =
-      (GstVideoAggregatorClass *) klass;
   GstAggregatorClass *agg_class = (GstAggregatorClass *) klass;
 
   GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "glmixer", 0, "opengl mixer");
@@ -350,8 +350,7 @@ gst_gl_base_mixer_class_init (GstGLBaseMixerClass * klass)
   agg_class->src_activate = gst_gl_base_mixer_src_activate_mode;
   agg_class->stop = gst_gl_base_mixer_stop;
   agg_class->start = gst_gl_base_mixer_start;
-
-  videoaggregator_class->negotiated_caps = _negotiated_caps;
+  agg_class->negotiated_src_caps = _negotiated_caps;
 
   klass->propose_allocation = _default_propose_allocation;

View file

@@ -96,16 +96,16 @@ gst_gl_mixer_pad_set_property (GObject * object, guint prop_id,
 }
 
 static gboolean
-_negotiated_caps (GstVideoAggregator * vagg, GstCaps * caps)
+_negotiated_caps (GstAggregator * agg, GstCaps * caps)
 {
-  GstGLMixer *mix = GST_GL_MIXER (vagg);
+  GstGLMixer *mix = GST_GL_MIXER (agg);
   gboolean ret;
 
   mix->priv->negotiated = TRUE;
 
   gst_caps_replace (&mix->out_caps, caps);
 
-  ret = GST_VIDEO_AGGREGATOR_CLASS (parent_class)->negotiated_caps (vagg, caps);
+  ret = GST_AGGREGATOR_CLASS (parent_class)->negotiated_src_caps (agg, caps);
 
   return ret;
 }
@@ -215,29 +215,6 @@ gst_gl_mixer_pad_sink_acceptcaps (GstPad * pad, GstGLMixer * mix,
   return ret;
 }
 
-/* copies the given caps */
-static GstCaps *
-_update_caps (GstVideoAggregator * vagg, GstCaps * caps, GstCaps * filter)
-{
-  GstCaps *tmp;
-  guint i, n;
-
-  if (filter) {
-    tmp = gst_caps_intersect (caps, filter);
-    tmp = gst_caps_make_writable (tmp);
-  } else {
-    tmp = gst_caps_copy (caps);
-  }
-
-  n = gst_caps_get_size (tmp);
-  for (i = 0; i < n; i++) {
-    gst_caps_set_features (tmp, i,
-        gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY));
-  }
-
-  return tmp;
-}
-
 static GstCaps *
 gst_gl_mixer_pad_sink_getcaps (GstPad * pad, GstGLMixer * mix, GstCaps * filter)
 {
@@ -391,11 +368,10 @@ gst_gl_mixer_class_init (GstGLMixerClass * klass)
   agg_class->src_query = gst_gl_mixer_src_query;
   agg_class->stop = gst_gl_mixer_stop;
   agg_class->start = gst_gl_mixer_start;
+  agg_class->negotiated_src_caps = _negotiated_caps;
 
   videoaggregator_class->aggregate_frames = gst_gl_mixer_aggregate_frames;
   videoaggregator_class->get_output_buffer = gst_gl_mixer_get_output_buffer;
-  videoaggregator_class->negotiated_caps = _negotiated_caps;
-  videoaggregator_class->update_caps = _update_caps;
   videoaggregator_class->find_best_format = _find_best_format;
 
   mix_class->propose_allocation = gst_gl_mixer_propose_allocation;

View file

@@ -81,10 +81,8 @@ gst_gl_stereo_mix_pad_init (GstGLStereoMixPad * pad)
 #define gst_gl_stereo_mix_parent_class parent_class
 G_DEFINE_TYPE (GstGLStereoMix, gst_gl_stereo_mix, GST_TYPE_GL_MIXER);
 
-static GstCaps *_update_caps (GstVideoAggregator * vagg, GstCaps * caps,
-    GstCaps * filter);
-static gboolean _negotiated_caps (GstVideoAggregator * videoaggregator,
-    GstCaps * caps);
+static GstCaps *_update_caps (GstVideoAggregator * vagg, GstCaps * caps);
+static gboolean _negotiated_caps (GstAggregator * aggregator, GstCaps * caps);
 gboolean gst_gl_stereo_mix_make_output (GstGLStereoMix * mix);
 static gboolean gst_gl_stereo_mix_process_frames (GstGLStereoMix * mixer);
@@ -188,10 +186,10 @@ gst_gl_stereo_mix_class_init (GstGLStereoMixClass * klass)
   agg_class->stop = gst_gl_stereo_mix_stop;
   agg_class->start = gst_gl_stereo_mix_start;
   agg_class->src_query = gst_gl_stereo_mix_src_query;
+  agg_class->negotiated_src_caps = _negotiated_caps;
 
   videoaggregator_class->aggregate_frames = gst_gl_stereo_mix_aggregate_frames;
   videoaggregator_class->update_caps = _update_caps;
-  videoaggregator_class->negotiated_caps = _negotiated_caps;
   videoaggregator_class->get_output_buffer =
       gst_gl_stereo_mix_get_output_buffer;
@@ -470,7 +468,7 @@ get_converted_caps (GstGLStereoMix * mix, GstCaps * caps)
 
 /* Return the possible output caps based on inputs and downstream prefs */
 static GstCaps *
-_update_caps (GstVideoAggregator * vagg, GstCaps * caps, GstCaps * filter)
+_update_caps (GstVideoAggregator * vagg, GstCaps * caps)
 {
   GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
   GList *l;
@@ -563,16 +561,16 @@ _update_caps (GstVideoAggregator * vagg, GstCaps * caps, GstCaps * filter)
 
 /* Called after videoaggregator fixates our caps */
 static gboolean
-_negotiated_caps (GstVideoAggregator * vagg, GstCaps * caps)
+_negotiated_caps (GstAggregator * agg, GstCaps * caps)
 {
+  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
   GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg);
   GstCaps *in_caps;
 
   GST_LOG_OBJECT (mix, "Configured output caps %" GST_PTR_FORMAT, caps);
 
-  if (GST_VIDEO_AGGREGATOR_CLASS (parent_class)->negotiated_caps)
-    if (!GST_VIDEO_AGGREGATOR_CLASS (parent_class)->negotiated_caps (vagg,
-            caps))
+  if (GST_AGGREGATOR_CLASS (parent_class)->negotiated_src_caps)
+    if (!GST_AGGREGATOR_CLASS (parent_class)->negotiated_src_caps (agg, caps))
       return FALSE;
 
   /* Update the glview_convert output */

View file

@@ -453,9 +453,8 @@ static void gst_gl_video_mixer_set_property (GObject * object, guint prop_id,
 static void gst_gl_video_mixer_get_property (GObject * object, guint prop_id,
     GValue * value, GParamSpec * pspec);
 
-static GstCaps *_update_caps (GstVideoAggregator * vagg, GstCaps * caps,
-    GstCaps * filter);
-static GstCaps *_fixate_caps (GstVideoAggregator * vagg, GstCaps * caps);
+static GstCaps *_update_caps (GstVideoAggregator * vagg, GstCaps * caps);
+static GstCaps *_fixate_caps (GstAggregator * agg, GstCaps * caps);
 static gboolean gst_gl_video_mixer_propose_allocation (GstGLBaseMixer *
     base_mix, GstGLBaseMixerPad * base_pad, GstQuery * decide_query,
     GstQuery * query);
@@ -874,9 +873,9 @@ gst_gl_video_mixer_class_init (GstGLVideoMixerClass * klass)
       gst_gl_video_mixer_process_textures;
 
   vagg_class->update_caps = _update_caps;
-  vagg_class->fixate_caps = _fixate_caps;
 
   agg_class->sinkpads_type = GST_TYPE_GL_VIDEO_MIXER_PAD;
+  agg_class->fixate_src_caps = _fixate_caps;
 
   mix_class->propose_allocation = gst_gl_video_mixer_propose_allocation;
@@ -986,7 +985,7 @@ _mixer_pad_get_output_size (GstGLVideoMixer * mix,
 }
 
 static GstCaps *
-_update_caps (GstVideoAggregator * vagg, GstCaps * caps, GstCaps * filter)
+_update_caps (GstVideoAggregator * vagg, GstCaps * caps)
 {
   GstCaps *ret;
   GList *l;
@@ -1014,18 +1013,15 @@ _update_caps (GstVideoAggregator * vagg, GstCaps * caps, GstCaps * filter)
 
   GST_OBJECT_UNLOCK (vagg);
 
-  if (filter) {
-    ret = gst_caps_intersect (caps, filter);
-  } else {
-    ret = gst_caps_ref (caps);
-  }
+  ret = gst_caps_ref (caps);
 
   return ret;
 }
 
 static GstCaps *
-_fixate_caps (GstVideoAggregator * vagg, GstCaps * caps)
+_fixate_caps (GstAggregator * agg, GstCaps * caps)
 {
+  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
   GstGLVideoMixer *mix = GST_GL_VIDEO_MIXER (vagg);
   gint best_width = 0, best_height = 0;
   gint best_fps_n = 0, best_fps_d = 0;

View file

@@ -535,60 +535,11 @@ gst_video_aggregator_find_best_format (GstVideoAggregator * vagg,
   g_hash_table_unref (formats_table);
 }
 
-/* WITH GST_VIDEO_AGGREGATOR_LOCK TAKEN */
-static gboolean
-gst_video_aggregator_src_setcaps (GstVideoAggregator * vagg, GstCaps * caps)
-{
-  GstAggregator *agg = GST_AGGREGATOR (vagg);
-  gboolean ret = FALSE;
-  GstVideoInfo info;
-
-  GstPad *pad = GST_AGGREGATOR (vagg)->srcpad;
-
-  GST_INFO_OBJECT (pad, "set src caps: %" GST_PTR_FORMAT, caps);
-
-  if (!gst_video_info_from_caps (&info, caps))
-    goto done;
-
-  ret = TRUE;
-
-  if (GST_VIDEO_INFO_FPS_N (&vagg->info) != GST_VIDEO_INFO_FPS_N (&info) ||
-      GST_VIDEO_INFO_FPS_D (&vagg->info) != GST_VIDEO_INFO_FPS_D (&info)) {
-    if (agg->segment.position != -1) {
-      vagg->priv->nframes = 0;
-      /* The timestamp offset will be updated based on the
-       * segment position the next time we aggregate */
-      GST_DEBUG_OBJECT (vagg,
-          "Resetting frame counter because of framerate change");
-    }
-    gst_video_aggregator_reset_qos (vagg);
-  }
-
-  vagg->info = info;
-
-  if (vagg->priv->current_caps == NULL ||
-      gst_caps_is_equal (caps, vagg->priv->current_caps) == FALSE) {
-    GstClockTime latency;
-
-    gst_caps_replace (&vagg->priv->current_caps, caps);
-    GST_VIDEO_AGGREGATOR_UNLOCK (vagg);
-
-    gst_aggregator_set_src_caps (agg, caps);
-    latency = gst_util_uint64_scale (GST_SECOND,
-        GST_VIDEO_INFO_FPS_D (&info), GST_VIDEO_INFO_FPS_N (&info));
-    gst_aggregator_set_latency (agg, latency, latency);
-
-    GST_VIDEO_AGGREGATOR_LOCK (vagg);
-  }
-
-done:
-  return ret;
-}
-
 static GstCaps *
-gst_video_aggregator_default_fixate_caps (GstVideoAggregator * vagg,
+gst_video_aggregator_default_fixate_src_caps (GstAggregator * agg,
     GstCaps * caps)
 {
+  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
   gint best_width = -1, best_height = -1;
   gint best_fps_n = -1, best_fps_d = -1;
   gdouble best_fps = -1.;
@@ -634,6 +585,7 @@ gst_video_aggregator_default_fixate_caps (GstVideoAggregator * vagg,
     best_fps = 25.0;
   }
 
+  caps = gst_caps_make_writable (caps);
   s = gst_caps_get_structure (caps, 0);
   gst_structure_fixate_field_nearest_int (s, "width", best_width);
   gst_structure_fixate_field_nearest_int (s, "height", best_height);
@@ -648,7 +600,7 @@ gst_video_aggregator_default_fixate_caps (GstVideoAggregator * vagg,
 
 static GstCaps *
 gst_video_aggregator_default_update_caps (GstVideoAggregator * vagg,
-    GstCaps * caps, GstCaps * filter)
+    GstCaps * caps)
 {
   GstVideoAggregatorClass *vagg_klass = GST_VIDEO_AGGREGATOR_GET_CLASS (vagg);
   GstCaps *ret, *best_format_caps;
@@ -682,38 +634,59 @@ gst_video_aggregator_default_update_caps (GstVideoAggregator * vagg,
       gst_video_chroma_to_string (best_info.chroma_site), NULL);
 
   ret = gst_caps_merge (best_format_caps, gst_caps_ref (caps));
 
-  if (filter) {
-    GstCaps *tmp;
-
-    tmp = gst_caps_intersect (ret, filter);
-    gst_caps_unref (ret);
-    ret = tmp;
-  }
-
   return ret;
 }
 
-/* WITH GST_VIDEO_AGGREGATOR_LOCK TAKEN */
-static gboolean
-gst_video_aggregator_update_src_caps (GstVideoAggregator * vagg)
+static GstFlowReturn
+gst_video_aggregator_default_update_src_caps (GstAggregator * agg,
+    GstCaps * caps, GstCaps ** ret)
 {
-  GstVideoAggregatorClass *vagg_klass = GST_VIDEO_AGGREGATOR_GET_CLASS (vagg);
-  GstVideoAggregatorPadClass *vaggpad_klass = g_type_class_peek
-      (GST_AGGREGATOR_GET_CLASS (vagg)->sinkpads_type);
-  GstAggregator *agg = GST_AGGREGATOR (vagg);
-  gboolean ret = TRUE, at_least_one_pad_configured = FALSE;
-  gboolean at_least_one_alpha = FALSE;
-  GstCaps *downstream_caps;
+  GstVideoAggregatorClass *vagg_klass = GST_VIDEO_AGGREGATOR_GET_CLASS (agg);
+  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
+  gboolean at_least_one_pad_configured = FALSE;
   GList *l;
 
-  downstream_caps = gst_pad_get_allowed_caps (agg->srcpad);
-
-  if (!downstream_caps || gst_caps_is_empty (downstream_caps)) {
-    GST_INFO_OBJECT (vagg, "No downstream caps found %"
-        GST_PTR_FORMAT, downstream_caps);
-    if (downstream_caps)
-      gst_caps_unref (downstream_caps);
-    return FALSE;
+  GST_OBJECT_LOCK (vagg);
+  for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
+    GstVideoAggregatorPad *mpad = l->data;
+
+    if (GST_VIDEO_INFO_WIDTH (&mpad->info) == 0
+        || GST_VIDEO_INFO_HEIGHT (&mpad->info) == 0)
+      continue;
+
+    at_least_one_pad_configured = TRUE;
   }
+  GST_OBJECT_UNLOCK (vagg);
+
+  if (!at_least_one_pad_configured) {
+    /* We couldn't decide the output video info because the sinkpads don't have
+     * all the caps yet, so we mark the pad as needing a reconfigure. This
+     * allows aggregate() to skip ahead a bit and try again later. */
+    GST_DEBUG_OBJECT (vagg, "Couldn't decide output video info");
+    gst_pad_mark_reconfigure (agg->srcpad);
+
+    return GST_AGGREGATOR_FLOW_NEED_DATA;
+  }
+
+  g_assert (vagg_klass->update_caps);
+
+  *ret = vagg_klass->update_caps (vagg, caps);
+
+  return GST_FLOW_OK;
+}
+
+static gboolean
+gst_video_aggregator_default_negotiated_src_caps (GstAggregator * agg,
+    GstCaps * caps)
+{
+  GstVideoAggregatorPadClass *vaggpad_klass = g_type_class_peek
+      (GST_AGGREGATOR_GET_CLASS (agg)->sinkpads_type);
+  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
+  gboolean at_least_one_alpha = FALSE;
+  const GstVideoFormatInfo *finfo;
+  GstVideoInfo info;
+  GList *l;
+
+  GST_INFO_OBJECT (agg->srcpad, "set src caps: %" GST_PTR_FORMAT, caps);
 
   GST_OBJECT_LOCK (vagg);
   for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) {
@@ -725,72 +698,34 @@ gst_video_aggregator_update_src_caps (GstVideoAggregator * vagg)
     if (mpad->info.finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)
       at_least_one_alpha = TRUE;
-
-    at_least_one_pad_configured = TRUE;
   }
   GST_OBJECT_UNLOCK (vagg);
 
-  if (at_least_one_pad_configured) {
-    GstCaps *caps, *peercaps;
-
-    peercaps = gst_pad_peer_query_caps (agg->srcpad, NULL);
-
-    g_assert (vagg_klass->update_caps);
-    GST_DEBUG_OBJECT (vagg, "updating caps from %" GST_PTR_FORMAT,
-        downstream_caps);
-    GST_DEBUG_OBJECT (vagg, " with filter %" GST_PTR_FORMAT, peercaps);
-    if (!(caps = vagg_klass->update_caps (vagg, downstream_caps, peercaps)) ||
-        gst_caps_is_empty (caps)) {
-      GST_WARNING_OBJECT (vagg, "Subclass failed to update provided caps");
-      gst_caps_unref (downstream_caps);
-      if (peercaps)
-        gst_caps_unref (peercaps);
-      ret = FALSE;
-      goto done;
+  if (!gst_video_info_from_caps (&info, caps))
+    return FALSE;
+
+  if (GST_VIDEO_INFO_FPS_N (&vagg->info) != GST_VIDEO_INFO_FPS_N (&info) ||
+      GST_VIDEO_INFO_FPS_D (&vagg->info) != GST_VIDEO_INFO_FPS_D (&info)) {
+    if (agg->segment.position != -1) {
+      vagg->priv->nframes = 0;
+      /* The timestamp offset will be updated based on the
+       * segment position the next time we aggregate */
+      GST_DEBUG_OBJECT (vagg,
+          "Resetting frame counter because of framerate change");
     }
-    GST_DEBUG_OBJECT (vagg, " to %" GST_PTR_FORMAT, caps);
-    gst_caps_unref (downstream_caps);
-    if (peercaps)
-      gst_caps_unref (peercaps);
-
-    if (!gst_caps_is_fixed (caps)) {
-      g_assert (vagg_klass->fixate_caps);
-
-      caps = gst_caps_make_writable (caps);
-      GST_DEBUG_OBJECT (vagg, "fixate caps from %" GST_PTR_FORMAT, caps);
-      if (!(caps = vagg_klass->fixate_caps (vagg, caps))) {
-        GST_WARNING_OBJECT (vagg, "Subclass failed to fixate provided caps");
-        ret = FALSE;
-        goto done;
-      }
-      GST_DEBUG_OBJECT (vagg, " to %" GST_PTR_FORMAT, caps);
+    gst_video_aggregator_reset_qos (vagg);
   }
 
-    {
-      const GstVideoFormatInfo *finfo;
-      const gchar *v_format_str;
-      GstVideoFormat v_format;
-      GstStructure *s;
-
-      s = gst_caps_get_structure (caps, 0);
-      v_format_str = gst_structure_get_string (s, "format");
-      g_return_val_if_fail (v_format_str != NULL, FALSE);
-      v_format = gst_video_format_from_string (v_format_str);
-      g_return_val_if_fail (v_format != GST_VIDEO_FORMAT_UNKNOWN, FALSE);
-      finfo = gst_video_format_get_info (v_format);
-      g_return_val_if_fail (finfo != NULL, FALSE);
+  vagg->info = info;
+
+  finfo = vagg->info.finfo;
 
   if (at_least_one_alpha && !(finfo->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)) {
     GST_ELEMENT_ERROR (vagg, CORE, NEGOTIATION,
         ("At least one of the input pads contains alpha, but configured caps don't support alpha."),
         ("Either convert your inputs to not contain alpha or add a videoconvert after the aggregator"));
-    ret = FALSE;
-    goto done;
+    return FALSE;
   }
-    }
-
-    gst_video_info_from_caps (&vagg->info, caps);
 
   if (vaggpad_klass->set_info) {
     /* Then browse the sinks once more, setting or unsetting conversion if needed */
@@ -803,23 +738,19 @@ gst_video_aggregator_update_src_caps (GstVideoAggregator * vagg)
     }
   }
 
-    if (gst_video_aggregator_src_setcaps (vagg, caps)) {
-      if (vagg_klass->negotiated_caps)
-        ret =
-            GST_VIDEO_AGGREGATOR_GET_CLASS (vagg)->negotiated_caps (vagg, caps);
-    }
-    gst_caps_unref (caps);
-  } else {
-    /* We couldn't decide the output video info because the sinkpads don't have
-     * all the caps yet, so we mark the pad as needing a reconfigure. This
-     * allows aggregate() to skip ahead a bit and try again later. */
-    GST_DEBUG_OBJECT (vagg, "Couldn't decide output video info");
-    gst_pad_mark_reconfigure (agg->srcpad);
-    ret = FALSE;
+  if (vagg->priv->current_caps == NULL ||
+      gst_caps_is_equal (caps, vagg->priv->current_caps) == FALSE) {
+    GstClockTime latency;
+
+    gst_caps_replace (&vagg->priv->current_caps, caps);
+
+    gst_aggregator_set_src_caps (agg, caps);
+    latency = gst_util_uint64_scale (GST_SECOND,
+        GST_VIDEO_INFO_FPS_D (&vagg->info), GST_VIDEO_INFO_FPS_N (&vagg->info));
+    gst_aggregator_set_latency (agg, latency, latency);
   }
 
-done:
-  return ret;
+  return TRUE;
 }
 
 static gboolean
@@ -1101,7 +1032,6 @@ gst_video_aggregator_reset (GstVideoAggregator * vagg)
   GST_OBJECT_UNLOCK (vagg);
 }
 
-#define GST_FLOW_NEEDS_DATA GST_FLOW_CUSTOM_ERROR
 static gint
 gst_video_aggregator_fill_queues (GstVideoAggregator * vagg,
     GstClockTime output_start_running_time,
@@ -1309,7 +1239,7 @@ gst_video_aggregator_fill_queues (GstVideoAggregator * vagg,
       gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg));
 
   if (need_more_data)
-    return GST_FLOW_NEEDS_DATA;
+    return GST_AGGREGATOR_FLOW_NEED_DATA;
   if (eos)
     return GST_FLOW_EOS;
@@ -1471,27 +1401,14 @@ gst_video_aggregator_get_next_time (GstAggregator * agg)
   return next_time;
 }
 
-static GstFlowReturn
-gst_video_aggregator_check_reconfigure (GstVideoAggregator * vagg,
-    gboolean timeout)
+static void
+gst_video_aggregator_advance_on_timeout (GstVideoAggregator * vagg)
 {
-  GstAggregator *agg = (GstAggregator *) vagg;
-
-  if (GST_VIDEO_INFO_FORMAT (&vagg->info) == GST_VIDEO_FORMAT_UNKNOWN
-      || gst_pad_check_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg))) {
-    gboolean ret;
-
-  restart:
-    ret = gst_video_aggregator_update_src_caps (vagg);
-    if (!ret) {
-      gst_pad_mark_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg));
-      if (timeout) {
-        guint64 frame_duration;
-        gint fps_d, fps_n;
+  GstAggregator *agg = GST_AGGREGATOR (vagg);
+  guint64 frame_duration;
+  gint fps_d, fps_n;
 
-        GST_DEBUG_OBJECT (vagg,
-            "Got timeout before receiving any caps, don't output anything");
-
-        if (agg->segment.position == -1) {
-          if (agg->segment.rate > 0.0)
-            agg->segment.position = agg->segment.start;
+  GST_OBJECT_LOCK (agg);
+  if (agg->segment.position == -1) {
+    if (agg->segment.rate > 0.0)
+      agg->segment.position = agg->segment.start;
@@ -1513,24 +1430,7 @@ gst_video_aggregator_check_reconfigure (GstVideoAggregator * vagg,
     else
       agg->segment.position = 0;
 
     vagg->priv->nframes++;
-        return GST_FLOW_NEEDS_DATA;
-      } else {
-        if (GST_PAD_IS_FLUSHING (GST_AGGREGATOR_SRC_PAD (vagg)))
-          return GST_FLOW_FLUSHING;
-        else
-          return GST_FLOW_NOT_NEGOTIATED;
-      }
-    } else {
-      /* It is possible that during gst_video_aggregator_update_src_caps()
-       * we got a caps change on one of the sink pads, in which case we need
-       * to redo the negotiation
-       * - https://bugzilla.gnome.org/show_bug.cgi?id=755782 */
-      if (gst_pad_check_reconfigure (GST_AGGREGATOR_SRC_PAD (vagg)))
-        goto restart;
-    }
-  }
-
-  return GST_FLOW_OK;
+  GST_OBJECT_UNLOCK (agg);
 }
 
 static GstFlowReturn
@@ -1546,10 +1446,10 @@ gst_video_aggregator_aggregate (GstAggregator * agg, gboolean timeout)
   GST_VIDEO_AGGREGATOR_LOCK (vagg);
 
 restart:
-  flow_ret = gst_video_aggregator_check_reconfigure (vagg, timeout);
-  if (flow_ret != GST_FLOW_OK) {
-    if (flow_ret == GST_FLOW_NEEDS_DATA)
-      flow_ret = GST_FLOW_OK;
+  if (GST_VIDEO_INFO_FORMAT (&vagg->info) == GST_VIDEO_FORMAT_UNKNOWN) {
+    if (timeout)
+      gst_video_aggregator_advance_on_timeout (vagg);
+    flow_ret = GST_AGGREGATOR_FLOW_NEED_DATA;
     goto unlock_and_return;
   }
@@ -1591,9 +1491,8 @@ restart:
         output_end_running_time);
   }
 
-  if (flow_ret == GST_FLOW_NEEDS_DATA && !timeout) {
+  if (flow_ret == GST_AGGREGATOR_FLOW_NEED_DATA && !timeout) {
     GST_DEBUG_OBJECT (vagg, "Need more data for decisions");
-    flow_ret = GST_FLOW_OK;
     goto unlock_and_return;
   } else if (flow_ret == GST_FLOW_EOS) {
     GST_DEBUG_OBJECT (vagg, "All sinkpads are EOS -- forwarding");
@@ -2187,11 +2086,14 @@ gst_video_aggregator_class_init (GstVideoAggregatorClass * klass)
   agg_class->src_event = gst_video_aggregator_src_event;
   agg_class->src_query = gst_video_aggregator_src_query;
   agg_class->get_next_time = gst_video_aggregator_get_next_time;
+  agg_class->update_src_caps = gst_video_aggregator_default_update_src_caps;
+  agg_class->fixate_src_caps = gst_video_aggregator_default_fixate_src_caps;
+  agg_class->negotiated_src_caps =
+      gst_video_aggregator_default_negotiated_src_caps;
 
   klass->find_best_format = gst_video_aggregator_find_best_format;
   klass->get_output_buffer = gst_video_aggregator_get_output_buffer;
   klass->update_caps = gst_video_aggregator_default_update_caps;
-  klass->fixate_caps = gst_video_aggregator_default_fixate_caps;
 
   /* Register the pad class */
   g_type_class_ref (GST_TYPE_VIDEO_AGGREGATOR_PAD);

View file

@@ -73,9 +73,6 @@ struct _GstVideoAggregator
  * @update_caps:      Optional.
  *                    Lets subclasses update the #GstCaps representing
  *                    the src pad caps before usage. Return %NULL to indicate failure.
- * @fixate_caps:      Fixate and return the src pad caps provided. The function takes
- *                    ownership of @caps and returns a fixated version of
- *                    @caps. @caps is not guaranteed to be writable.
  * @aggregate_frames: Lets subclasses aggregate frames that are ready. Subclasses
  *                    should iterate the GstElement.sinkpads and use the already
  *                    mapped #GstVideoFrame from GstVideoAggregatorPad.aggregated_frame
@@ -97,16 +94,11 @@ struct _GstVideoAggregatorClass
   /*< public >*/
   GstCaps *     (*update_caps)       (GstVideoAggregator * videoaggregator,
-                                      GstCaps * caps,
-                                      GstCaps * filter_caps);
-  GstCaps *     (*fixate_caps)       (GstVideoAggregator * videoaggregator,
                                       GstCaps * caps);
   GstFlowReturn (*aggregate_frames)  (GstVideoAggregator * videoaggregator,
                                       GstBuffer * outbuffer);
   GstFlowReturn (*get_output_buffer) (GstVideoAggregator * videoaggregator,
                                       GstBuffer ** outbuffer);
-  gboolean      (*negotiated_caps)   (GstVideoAggregator * videoaggregator,
-                                      GstCaps * caps);
   void          (*find_best_format)  (GstVideoAggregator * vagg,
                                       GstCaps * downstream_caps,
                                       GstVideoInfo * best_info,
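The header change above reduces GstVideoAggregatorClass to a single-argument update_caps, with fixation and the post-negotiation hook now provided through GstAggregatorClass. A hypothetical subclass (names are illustrative, not part of this commit; includes and GObject boilerplate omitted) would now override it like this:

/* Return a (possibly adjusted) copy of the proposed src caps; the filter
 * caps argument from the old signature is gone. */
static GstCaps *
my_vagg_update_caps (GstVideoAggregator * vagg, GstCaps * caps)
{
  return gst_caps_ref (caps);
}

static void
my_vagg_class_init (MyVaggClass * klass)
{
  GstVideoAggregatorClass *vagg_class = (GstVideoAggregatorClass *) klass;

  vagg_class->update_caps = my_vagg_update_caps;
}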

View file

@@ -870,8 +870,9 @@ set_functions (GstCompositor * self, GstVideoInfo * info)
 }
 
 static GstCaps *
-_fixate_caps (GstVideoAggregator * vagg, GstCaps * caps)
+_fixate_caps (GstAggregator * agg, GstCaps * caps)
 {
+  GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (agg);
   GList *l;
   gint best_width = -1, best_height = -1;
   gint best_fps_n = -1, best_fps_d = -1;
@@ -945,21 +946,21 @@ _fixate_caps (GstVideoAggregator * vagg, GstCaps * caps)
 }
 
 static gboolean
-_negotiated_caps (GstVideoAggregator * vagg, GstCaps * caps)
+_negotiated_caps (GstAggregator * agg, GstCaps * caps)
 {
   GstVideoInfo v_info;
 
-  GST_DEBUG_OBJECT (vagg, "Negotiated caps %" GST_PTR_FORMAT, caps);
+  GST_DEBUG_OBJECT (agg, "Negotiated caps %" GST_PTR_FORMAT, caps);
 
   if (!gst_video_info_from_caps (&v_info, caps))
     return FALSE;
 
-  if (!set_functions (GST_COMPOSITOR (vagg), &v_info)) {
-    GST_ERROR_OBJECT (vagg, "Failed to setup vfuncs");
+  if (!set_functions (GST_COMPOSITOR (agg), &v_info)) {
+    GST_ERROR_OBJECT (agg, "Failed to setup vfuncs");
     return FALSE;
   }
 
-  return TRUE;
+  return GST_AGGREGATOR_CLASS (parent_class)->negotiated_src_caps (agg, caps);
 }
 
 static GstFlowReturn
@@ -1090,8 +1091,8 @@ gst_compositor_class_init (GstCompositorClass * klass)
   agg_class->sinkpads_type = GST_TYPE_COMPOSITOR_PAD;
   agg_class->sink_query = _sink_query;
 
-  videoaggregator_class->fixate_caps = _fixate_caps;
-  videoaggregator_class->negotiated_caps = _negotiated_caps;
+  agg_class->fixate_src_caps = _fixate_caps;
+  agg_class->negotiated_src_caps = _negotiated_caps;
   videoaggregator_class->aggregate_frames = gst_compositor_aggregate_frames;
 
   g_object_class_install_property (gobject_class, PROP_BACKGROUND,