audioaggregator: Sync property values to output timestamp

This is what videoaggregator has already been doing since 2019, and it
makes sense: the properties need to be updated at every output frame
based on the output time, because they may change even when the input
frame does not. See:

6a8c15f3bd

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/3851>
Author: Nirbheek Chauhan, 2023-01-31 05:37:52 -08:00 (committed by GStreamer Marge Bot)
parent 865227b750
commit 1913ff18b1
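
For context: the pad properties in question are typically driven by a GstControlSource bound to an aggregator sink pad, for example a timed volume fade on an audiomixer pad. Below is a minimal sketch of such a setup. It is illustrative only; the pipeline, the "sink_0" pad name and the two-second fade are assumptions, not part of this commit. With this change, the bound values are sampled once per output buffer, at the output buffer's stream time, instead of at each input buffer's timestamp.

#include <gst/gst.h>
#include <gst/controller/controller.h>

int
main (int argc, char **argv)
{
  GstElement *pipeline, *mix;
  GstPad *sinkpad;
  GstControlSource *cs;
  GMainLoop *loop;

  gst_init (&argc, &argv);

  /* Hypothetical pipeline: a test tone through the audioaggregator-based
   * audiomixer element. */
  pipeline = gst_parse_launch
      ("audiotestsrc is-live=true ! audiomixer name=mix ! autoaudiosink", NULL);
  mix = gst_bin_get_by_name (GST_BIN (pipeline), "mix");
  sinkpad = gst_element_get_static_pad (mix, "sink_0");

  /* Attach a linear control source to the pad's controllable "volume"
   * property and program a fade from 1.0 to silence over two seconds. */
  cs = gst_interpolation_control_source_new ();
  g_object_set (cs, "mode", GST_INTERPOLATION_MODE_LINEAR, NULL);
  gst_object_add_control_binding (GST_OBJECT (sinkpad),
      gst_direct_control_binding_new_absolute (GST_OBJECT (sinkpad),
          "volume", cs));
  gst_timed_value_control_source_set (GST_TIMED_VALUE_CONTROL_SOURCE (cs),
      0, 1.0);
  gst_timed_value_control_source_set (GST_TIMED_VALUE_CONTROL_SOURCE (cs),
      2 * GST_SECOND, 0.0);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (loop);

  return 0;
}

Before this commit, sync_pad_values() sampled those bindings at the PTS of each sink pad's pending input buffer; the diff below replaces that with a single stream time derived from the output buffer.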


@@ -521,7 +521,6 @@ static GstBuffer *gst_audio_aggregator_do_clip (GstAggregator * agg,
     GstAggregatorPad * bpad, GstBuffer * buffer);
 static GstFlowReturn gst_audio_aggregator_aggregate (GstAggregator * agg,
     gboolean timeout);
-static gboolean sync_pad_values (GstElement * aagg, GstPad * pad, gpointer ud);
 static gboolean gst_audio_aggregator_negotiated_src_caps (GstAggregator * agg,
     GstCaps * caps);
 static GstFlowReturn
@@ -2150,30 +2149,6 @@ gst_audio_aggregator_create_output_buffer (GstAudioAggregator * aagg,
   return outbuf;
 }
 
-static gboolean
-sync_pad_values (GstElement * aagg, GstPad * pad, gpointer user_data)
-{
-  GstAudioAggregatorPad *aapad = GST_AUDIO_AGGREGATOR_PAD (pad);
-  GstAggregatorPad *bpad = GST_AGGREGATOR_PAD_CAST (pad);
-  GstClockTime timestamp, stream_time;
-
-  if (aapad->priv->buffer == NULL)
-    return TRUE;
-
-  timestamp = GST_BUFFER_PTS (aapad->priv->buffer);
-  GST_OBJECT_LOCK (bpad);
-  stream_time = gst_segment_to_stream_time (&bpad->segment, GST_FORMAT_TIME,
-      timestamp);
-  GST_OBJECT_UNLOCK (bpad);
-
-  /* sync object properties on stream time */
-  /* TODO: Ideally we would want to do that on every sample */
-  if (GST_CLOCK_TIME_IS_VALID (stream_time))
-    gst_object_sync_values (GST_OBJECT_CAST (pad), stream_time);
-
-  return TRUE;
-}
-
 static GstSample *
 gst_audio_aggregator_peek_next_sample (GstAggregator * agg,
     GstAggregatorPad * aggpad)
@@ -2201,6 +2176,18 @@ gst_audio_aggregator_peek_next_sample (GstAggregator * agg,
   return sample;
 }
 
+static gboolean
+sync_pad_values (GstElement * aagg, GstPad * pad, gpointer user_data)
+{
+  gint64 *outbuf_stream_time = user_data;
+
+  /* sync object properties on stream time */
+  if (GST_CLOCK_TIME_IS_VALID (*outbuf_stream_time))
+    gst_object_sync_values (GST_OBJECT_CAST (pad), *outbuf_stream_time);
+
+  return TRUE;
+}
+
 static GstFlowReturn
 gst_audio_aggregator_aggregate (GstAggregator * agg, gboolean timeout)
 {
@@ -2244,13 +2231,11 @@ gst_audio_aggregator_aggregate (GstAggregator * agg, gboolean timeout)
   guint blocksize;
   GstAudioAggregatorPad *srcpad = GST_AUDIO_AGGREGATOR_PAD (agg->srcpad);
   GstSegment *agg_segment = &GST_AGGREGATOR_PAD (agg->srcpad)->segment;
+  GstClockTime outbuf_stream_time;
 
   element = GST_ELEMENT (agg);
   aagg = GST_AUDIO_AGGREGATOR (agg);
 
-  /* Sync pad properties to the stream time */
-  gst_element_foreach_sink_pad (element, sync_pad_values, NULL);
-
   GST_AUDIO_AGGREGATOR_LOCK (aagg);
   GST_OBJECT_LOCK (agg);
 
@@ -2479,8 +2464,20 @@ gst_audio_aggregator_aggregate (GstAggregator * agg, gboolean timeout)
         aagg->priv->selected_samples_info);
   }
 
-  GST_OBJECT_LOCK (agg);
+  /* Calculate the stream time of the output buffer using its PTS. See below
+   * near gst_aggregator_finish_buffer() for the calculation for that. */
+  if (agg_segment->rate > 0.0) {
+    outbuf_stream_time = gst_segment_to_stream_time (agg_segment,
+        GST_FORMAT_TIME, agg_segment->position);
+  } else {
+    outbuf_stream_time = gst_segment_to_stream_time (agg_segment,
+        GST_FORMAT_TIME, next_timestamp);
+  }
 
+  /* Sync pad properties to the stream time */
+  gst_element_foreach_sink_pad (element, sync_pad_values, &outbuf_stream_time);
 
+  GST_OBJECT_LOCK (agg);
   // mix_buffer() will shortly release the object lock so we need to
   // ensure that the pad list stays valid.
   n_sinkpads = element->numsinkpads;
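
As a stand-alone illustration of the conversion used above: gst_segment_to_stream_time() maps a position in the source pad's segment to stream time, which is the time axis control bindings are evaluated against. The helper name and segment values below are made up for this sketch; the real code uses the aggregator source pad's segment as shown in the hunk above.

#include <gst/gst.h>

/* Mirrors the calculation in the patch: for forward playback the segment
 * position (the output buffer PTS) is converted, for reverse playback
 * next_timestamp is used instead. */
static GstClockTime
output_stream_time (const GstSegment * segment, GstClockTime position,
    GstClockTime next_timestamp)
{
  if (segment->rate > 0.0)
    return gst_segment_to_stream_time (segment, GST_FORMAT_TIME, position);

  return gst_segment_to_stream_time (segment, GST_FORMAT_TIME, next_timestamp);
}

int
main (void)
{
  GstSegment segment;

  gst_init (NULL, NULL);

  gst_segment_init (&segment, GST_FORMAT_TIME);
  segment.start = 2 * GST_SECOND;       /* segment starts at 2s ... */
  segment.time = 10 * GST_SECOND;       /* ... exposed as stream time 10s */
  segment.position = 3 * GST_SECOND;    /* PTS of the next output buffer */

  /* 1s past the segment start, so stream time is 11s; that is the value
   * handed to gst_object_sync_values() for every sink pad. */
  g_print ("%" GST_TIME_FORMAT "\n",
      GST_TIME_ARGS (output_stream_time (&segment, segment.position,
              GST_CLOCK_TIME_NONE)));

  return 0;
}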