ahcsrc: Fix latency reporting

It was wrongly reporting the min/max latency as the shortest and
longest possible frame duration. This is not how latency works in
GStreamer.

Fix this by reporting the minimum latency as the longest possible
duration of one frame. As we don't know how many buffers the stack can
accumulate, simply report the maximum latency as the same value (the
usual default behaviour).
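
As an illustration (not part of the commit): if the slowest rate the
camera supports is 15 fps, the longest frame duration is 1/15 s, about
66.7 ms, and that single-frame duration is now reported as both the
minimum and the maximum latency.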

Author: Nicolas Dufresne
Date:   2016-01-08 16:16:09 -05:00
Commit: 09dbc5b298
Parent: 9a53d79876

@@ -2492,15 +2492,14 @@ gst_ahc_src_query (GstBaseSrc * bsrc, GstQuery * query)
   switch (GST_QUERY_TYPE (query)) {
     case GST_QUERY_LATENCY:{
-      GstClockTime min, max;
+      GstClockTime min;
 
-      gst_query_parse_latency (query, NULL, &min, &max);
-      min = gst_util_uint64_scale (GST_SECOND, 1000, self->fps_max);
-      max = gst_util_uint64_scale (GST_SECOND, 1000, self->fps_min);
+      /* Allow 1 frame of latency, based on the longest frame duration */
+      gst_query_parse_latency (query, NULL, &min, NULL);
+      min = gst_util_uint64_scale (GST_SECOND, 1000, self->fps_min);
       GST_DEBUG_OBJECT (self,
-          "Reporting latency min: %" GST_TIME_FORMAT " max: %" GST_TIME_FORMAT,
-          GST_TIME_ARGS (min), GST_TIME_ARGS (max));
-      gst_query_set_latency (query, TRUE, min, max);
+          "Reporting latency min: %" GST_TIME_FORMAT, GST_TIME_ARGS (min));
+      gst_query_set_latency (query, TRUE, min, min);
       return TRUE;
       break;
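
For context, here is a minimal sketch of what the resulting latency
handling looks like in a hypothetical, simplified source element. It
assumes the frame-rate range is stored in fps_min/fps_max as frames per
second scaled by 1000 (as the Android camera API expresses fps ranges),
which is why GST_SECOND is multiplied by 1000 before dividing; names
and structure here are illustrative, not the actual ahcsrc code.

    #include <gst/gst.h>
    #include <gst/base/gstbasesrc.h>

    /* Hypothetical, simplified element; only the fields used below. */
    typedef struct {
      GstBaseSrc parent;
      gint fps_min;             /* slowest rate, frames per second * 1000 */
      gint fps_max;             /* fastest rate, frames per second * 1000 */
    } ExampleCameraSrc;

    static gboolean
    example_camera_src_query (GstBaseSrc * bsrc, GstQuery * query)
    {
      ExampleCameraSrc *self = (ExampleCameraSrc *) bsrc;

      switch (GST_QUERY_TYPE (query)) {
        case GST_QUERY_LATENCY:{
          GstClockTime min;

          /* Minimum latency is one frame at the slowest rate, i.e. the
           * longest possible frame duration: GST_SECOND * 1000 / fps_min. */
          min = gst_util_uint64_scale (GST_SECOND, 1000, self->fps_min);

          /* We don't know how many buffers the stack can accumulate, so
           * report the same value as the maximum latency. */
          gst_query_set_latency (query, TRUE, min, min);
          return TRUE;
        }
        default:
          /* A real element would chain up to the parent class here. */
          return FALSE;
      }
    }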