avidemux: tweak KEY_UNIT SNAP seek handling

Previously, seeking to a position y that lies (strictly) within a keyframe
would land on that keyframe with both SNAP_BEFORE and SNAP_AFTER;
the latter is now adjusted to really snap to the next keyframe.
Mark Nauwelaerts 2016-12-29 11:26:33 +01:00
parent 3362dceb25
commit 494f4beb40
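
For context, SNAP_BEFORE/SNAP_AFTER are seek flags the application supplies alongside KEY_UNIT. A minimal sketch of triggering the forward-snapping case this commit adjusts (the helper name and pipeline handle are illustrative, not part of the patch):

#include <gst/gst.h>

/* Hypothetical helper: flush-seek a pipeline so the demuxer snaps to the
 * keyframe at or after @target. With this change, avidemux resolves
 * KEY_UNIT + SNAP_AFTER to the next keyframe even when @target falls
 * strictly inside a keyframe. */
static gboolean
seek_snap_after (GstElement * pipeline, GstClockTime target)
{
  return gst_element_seek (pipeline, 1.0, GST_FORMAT_TIME,
      GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_KEY_UNIT | GST_SEEK_FLAG_SNAP_AFTER,
      GST_SEEK_TYPE_SET, target, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
}

e.g. seek_snap_after (pipeline, 10 * GST_SECOND); with GST_SEEK_FLAG_SNAP_BEFORE instead, avidemux keeps resolving to the keyframe at or before the requested position, as it already did before this change.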


--- a/gst/avi/gstavidemux.c
+++ b/gst/avi/gstavidemux.c
@@ -72,8 +72,8 @@ static GstStaticPadTemplate sink_templ = GST_STATIC_PAD_TEMPLATE ("sink",
 
 #ifndef GST_DISABLE_GST_DEBUG
 static const char *const snap_types[2][2] = {
-  {"any", "before"},
-  {"after", "nearest"},
+  {"any", "after"},
+  {"before", "nearest"},
 };
 #endif
 
@@ -2608,8 +2608,9 @@ gst_avi_demux_index_entry_search (GstAviIndexEntry * entry, guint64 * total)
  * @avi: Avi object
  * @stream: the stream
  * @time: a time position
+ * @next: whether to look for entry before or after @time
  *
- * Finds the index entry which time is less or equal than the requested time.
+ * Finds the index entry which time is less/more or equal than the requested time.
  * Try to avoid binary search when we can convert the time to an index
  * position directly (for example for video frames with a fixed duration).
  *
@@ -2617,7 +2618,7 @@ gst_avi_demux_index_entry_search (GstAviIndexEntry * entry, guint64 * total)
  */
 static guint
 gst_avi_demux_index_for_time (GstAviDemux * avi,
-    GstAviStream * stream, guint64 time)
+    GstAviStream * stream, guint64 time, gboolean next)
 {
   guint index = -1;
   guint64 total;
@@ -2638,6 +2639,14 @@ gst_avi_demux_index_for_time (GstAviDemux * avi,
       total = avi_stream_convert_time_to_frames_unchecked (stream, time);
     } else {
       index = avi_stream_convert_time_to_frames_unchecked (stream, time);
+      /* this entry typically undershoots the target time,
+       * so check a bit more if next needed */
+      if (next) {
+        GstClockTime itime =
+            avi_stream_convert_frames_to_time_unchecked (stream, index);
+        if (itime < time && index + 1 < stream->idx_n)
+          index++;
+      }
     }
   } else if (stream->strh->type == GST_RIFF_FCC_auds) {
     /* constant rate stream */
@@ -2655,7 +2664,7 @@ gst_avi_demux_index_for_time (GstAviDemux * avi,
     entry = gst_util_array_binary_search (stream->index,
         stream->idx_n, sizeof (GstAviIndexEntry),
         (GCompareDataFunc) gst_avi_demux_index_entry_search,
-        GST_SEARCH_MODE_BEFORE, &total, NULL);
+        next ? GST_SEARCH_MODE_AFTER : GST_SEARCH_MODE_BEFORE, &total, NULL);
 
     if (entry == NULL) {
       GST_LOG_OBJECT (avi, "not found, assume index 0");
@@ -4447,6 +4456,7 @@ gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment,
   gboolean keyframe, before, after;
   guint i, index;
   GstAviStream *stream;
+  gboolean next;
 
   seek_time = segment->position;
   keyframe = ! !(flags & GST_SEEK_FLAG_KEY_UNIT);
@@ -4461,20 +4471,18 @@ gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment,
    * which is mostly correct... */
   stream = &avi->stream[avi->main_stream];
 
+  next = after && !before;
+  if (segment->rate < 0)
+    next = !next;
+
   /* get the entry index for the requested position */
-  index = gst_avi_demux_index_for_time (avi, stream, seek_time);
+  index = gst_avi_demux_index_for_time (avi, stream, seek_time, next);
   GST_DEBUG_OBJECT (avi, "Got entry %u", index);
   if (index == -1)
     return FALSE;
 
   /* check if we are already on a keyframe */
   if (!ENTRY_IS_KEYFRAME (&stream->index[index])) {
-    gboolean next;
-
-    next = after && !before;
-    if (segment->rate < 0)
-      next = !next;
-
     if (next) {
       GST_DEBUG_OBJECT (avi, "not keyframe, searching forward");
       /* now go to the next keyframe, this is where we should start
@@ -4518,7 +4526,7 @@ gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment,
       continue;
 
     /* get the entry index for the requested position */
-    index = gst_avi_demux_index_for_time (avi, ostream, seek_time);
+    index = gst_avi_demux_index_for_time (avi, ostream, seek_time, FALSE);
     if (index == -1)
       continue;
 
@@ -4686,7 +4694,7 @@ avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad, GstEvent * event)
   GstSeekFlags flags;
   GstSeekType cur_type = GST_SEEK_TYPE_NONE, stop_type;
   gint64 cur, stop;
-  gboolean keyframe, before, after;
+  gboolean keyframe, before, after, next;
   GstAviStream *stream;
   guint index;
   guint n, str_num;
@@ -4746,8 +4754,12 @@ avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad, GstEvent * event)
   str_num = avi->main_stream;
   stream = &avi->stream[str_num];
 
+  next = after && !before;
+  if (seeksegment.rate < 0)
+    next = !next;
+
   /* get the entry index for the requested position */
-  index = gst_avi_demux_index_for_time (avi, stream, cur);
+  index = gst_avi_demux_index_for_time (avi, stream, cur, next);
   GST_DEBUG_OBJECT (avi, "str %u: Found entry %u for %" GST_TIME_FORMAT,
       str_num, index, GST_TIME_ARGS (cur));
   if (index == -1)
@@ -4755,12 +4767,6 @@ avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad, GstEvent * event)
 
   /* check if we are already on a keyframe */
   if (!ENTRY_IS_KEYFRAME (&stream->index[index])) {
-    gboolean next;
-
-    next = after && !before;
-    if (seeksegment.rate < 0)
-      next = !next;
-
     if (next) {
       GST_DEBUG_OBJECT (avi, "Entry is not a keyframe - searching forward");
       /* now go to the next keyframe, this is where we should start
@@ -4799,7 +4805,7 @@ avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad, GstEvent * event)
       continue;
 
     /* get the entry index for the requested position */
-    idx = gst_avi_demux_index_for_time (avi, str, cur);
+    idx = gst_avi_demux_index_for_time (avi, str, cur, FALSE);
     GST_DEBUG_OBJECT (avi, "str %u: Found entry %u for %" GST_TIME_FORMAT, n,
         idx, GST_TIME_ARGS (cur));
     if (idx == -1)
@@ -4807,7 +4813,7 @@ avi_demux_handle_seek_push (GstAviDemux * avi, GstPad * pad, GstEvent * event)
 
     /* check if we are already on a keyframe */
     if (!ENTRY_IS_KEYFRAME (&str->index[idx])) {
-      if (after && !before) {
+      if (next) {
        GST_DEBUG_OBJECT (avi, "Entry is not a keyframe - searching forward");
        /* now go to the next keyframe, this is where we should start
         * decoding from. */
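
The decision the patch hoists out of the keyframe check (so it can also steer the index lookup) boils down to a small predicate. A standalone sketch, assuming before/after come from GST_SEEK_FLAG_SNAP_BEFORE/GST_SEEK_FLAG_SNAP_AFTER as elsewhere in the element (the helper name is illustrative):

#include <gst/gst.h>

/* Sketch only: snap forward when SNAP_AFTER is requested without
 * SNAP_BEFORE, and invert the direction for reverse playback. */
static gboolean
snap_forward (GstSeekFlags flags, gdouble rate)
{
  gboolean before = ! !(flags & GST_SEEK_FLAG_SNAP_BEFORE);
  gboolean after = ! !(flags & GST_SEEK_FLAG_SNAP_AFTER);
  gboolean next = after && !before;

  if (rate < 0)
    next = !next;

  return next;
}

The main stream's index lookup is then done with this next value, while the secondary streams keep passing FALSE so they land on the entry at or before the chosen time.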