avi: more cleanups

Remove some duplicate counters.
Be smarter when updating the current timestamp and offset in the stream
because we can reuse previously calculated values when we simply go forward
one step.
Correctly set metadata on outgoing buffers.
Wim Taymans 2009-09-22 14:44:42 +02:00 committed by Wim Taymans
parent 0d70fe30a8
commit c199b1d039
2 changed files with 155 additions and 100 deletions
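
The core of the "reuse previously calculated values" cleanup described above is the new fast path in gst_avi_demux_advance(): when a stream moves exactly one entry forward, the end timestamp/offset computed for the previous buffer become the start values of the next one, so only the new end values need to be recalculated. Below is a minimal, self-contained C sketch of that pattern; the Toy* types, toy_* names and the 1:1 total-to-time conversion are illustrative assumptions, not the real GstAviStream/GstAviIndexEntry API.

/* Toy sketch of the "reuse previous values on a one-step advance" idea.
 * Types, names and the 1:1 conversion are made up for illustration only. */
#include <stdint.h>
#include <stdio.h>

typedef struct
{
  uint64_t total;               /* bytes/frames accumulated before this entry */
  uint64_t size;                /* size of this entry */
} ToyIndexEntry;

typedef struct
{
  ToyIndexEntry *index;
  unsigned int n_entries;
  unsigned int current_entry;
  uint64_t current_timestamp;   /* start of the current entry */
  uint64_t current_ts_end;      /* end of the current entry */
} ToyStream;

/* Full computation of both edges of one entry; stands in for what
 * gst_avi_demux_get_buffer_info() does in the patch. */
static void
toy_get_buffer_info (ToyStream * s, unsigned int entry_n,
    uint64_t * ts, uint64_t * ts_end)
{
  ToyIndexEntry *e = &s->index[entry_n];

  if (ts)
    *ts = e->total;             /* pretend totals convert 1:1 to time */
  if (ts_end)
    *ts_end = e->total + e->size;
}

/* Advance to the next entry. new_entry is computed separately so that a
 * discontinuous jump (reverse playback in the real code) would take the
 * full-update branch instead of the cheap one. */
static void
toy_advance (ToyStream * s)
{
  unsigned int old_entry = s->current_entry;
  unsigned int new_entry = old_entry + 1;

  if (new_entry >= s->n_entries)
    return;                     /* EOS in the real code */

  s->current_entry = new_entry;
  if (new_entry == old_entry + 1) {
    /* simple forward step: reuse the previously calculated end values */
    s->current_timestamp = s->current_ts_end;
    toy_get_buffer_info (s, new_entry, NULL, &s->current_ts_end);
  } else {
    /* discontinuous move: recompute everything */
    toy_get_buffer_info (s, new_entry,
        &s->current_timestamp, &s->current_ts_end);
  }
}

int
main (void)
{
  ToyIndexEntry idx[] = { {0, 10}, {10, 20}, {30, 5} };
  ToyStream s = { idx, 3, 0, 0, 10 };

  toy_advance (&s);
  printf ("entry %u: ts %llu..%llu\n", s.current_entry,
      (unsigned long long) s.current_timestamp,
      (unsigned long long) s.current_ts_end);
  return 0;
}

In the actual patch the same split appears in gst_avi_demux_advance(): the one-step case only refreshes current_ts_end/current_offset_end, while a jump does a full gst_avi_demux_get_buffer_info() update and marks the stream DISCONT.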


@@ -448,37 +448,37 @@ gst_avi_demux_handle_src_query (GstPad * pad, GstQuery * query)
       gint64 pos = 0;
 
       GST_DEBUG ("pos query for stream %d: frames %d, bytes %" G_GUINT64_FORMAT,
-          stream->num, stream->current_frame, stream->current_byte);
+          stream->num, stream->current_entry, stream->current_total);
 
       if (stream->strh->type == GST_RIFF_FCC_auds) {
         if (stream->is_vbr) {
           /* VBR */
-          pos = gst_util_uint64_scale ((gint64) stream->current_frame *
+          pos = gst_util_uint64_scale ((gint64) stream->current_entry *
               stream->strh->scale, GST_SECOND, (guint64) stream->strh->rate);
           GST_DEBUG_OBJECT (avi, "VBR convert frame %u, time %"
-              GST_TIME_FORMAT, stream->current_frame, GST_TIME_ARGS (pos));
+              GST_TIME_FORMAT, stream->current_entry, GST_TIME_ARGS (pos));
         } else if (stream->strf.auds->av_bps != 0) {
           /* CBR */
-          pos = gst_util_uint64_scale (stream->current_byte, GST_SECOND,
+          pos = gst_util_uint64_scale (stream->current_total, GST_SECOND,
              (guint64) stream->strf.auds->av_bps);
          GST_DEBUG_OBJECT (avi,
              "CBR convert bytes %" G_GUINT64_FORMAT ", time %" GST_TIME_FORMAT,
-              stream->current_byte, GST_TIME_ARGS (pos));
+              stream->current_total, GST_TIME_ARGS (pos));
        } else if (stream->idx_n != 0 && stream->total_bytes != 0) {
          /* calculate timestamps based on percentage of length */
          guint64 xlen = avi->avih->us_frame *
              avi->avih->tot_frames * GST_USECOND;
 
          if (stream->is_vbr) {
-            pos = gst_util_uint64_scale (xlen, stream->current_frame,
+            pos = gst_util_uint64_scale (xlen, stream->current_entry,
                stream->idx_n);
            GST_DEBUG_OBJECT (avi, "VBR perc convert frame %u, time %"
-                GST_TIME_FORMAT, stream->current_frame, GST_TIME_ARGS (pos));
+                GST_TIME_FORMAT, stream->current_entry, GST_TIME_ARGS (pos));
          } else {
-            pos = gst_util_uint64_scale (xlen, stream->current_byte,
+            pos = gst_util_uint64_scale (xlen, stream->current_total,
                stream->total_bytes);
            GST_DEBUG_OBJECT (avi, "CBR perc convert bytes %" G_GUINT64_FORMAT
-                ", time %" GST_TIME_FORMAT, stream->current_byte,
+                ", time %" GST_TIME_FORMAT, stream->current_total,
                GST_TIME_ARGS (pos));
          }
        } else {
@@ -487,10 +487,10 @@ gst_avi_demux_handle_src_query (GstPad * pad, GstQuery * query)
        }
      } else {
        if (stream->strh->rate != 0) {
-          pos = gst_util_uint64_scale ((guint64) stream->current_frame *
+          pos = gst_util_uint64_scale ((guint64) stream->current_entry *
              stream->strh->scale, GST_SECOND, (guint64) stream->strh->rate);
        } else {
-          pos = stream->current_frame * avi->avih->us_frame * GST_USECOND;
+          pos = stream->current_entry * avi->avih->us_frame * GST_USECOND;
        }
      }
      if (res) {
@@ -1755,8 +1755,8 @@ gst_avi_demux_parse_stream (GstAviDemux * avi, GstBuffer * buf)
   stream->total_bytes = 0;
   stream->idx_n = 0;
   stream->total_blocks = 0;
-  stream->current_frame = 0;
-  stream->current_byte = 0;
+  stream->current_entry = 0;
+  stream->current_total = 0;
   gst_pad_set_element_private (pad, stream);
   avi->num_streams++;
   gst_pad_set_caps (pad, caps);
@@ -1975,55 +1975,61 @@ gst_avi_demux_index_for_time (GstAviDemux * avi,
       index = entry - stream->index;
       GST_LOG_OBJECT (avi, "found at %u", index);
     }
+  } else {
+    GST_LOG_OBJECT (avi, "converted time to index %u", index);
   }
 
   return index;
 }
 
 static void
-gst_avi_demux_get_entry_info (GstAviDemux * avi, GstAviStream * stream,
-    guint entry_n, GstClockTime * timestamp, GstClockTime * duration,
-    guint64 * offset, guint64 * size, gboolean * keyframe)
+gst_avi_demux_get_buffer_info (GstAviDemux * avi, GstAviStream * stream,
+    guint entry_n, GstClockTime * timestamp, GstClockTime * ts_end,
+    guint64 * offset, guint64 * offset_end)
 {
   GstAviIndexEntry *entry;
-  GstClockTime next_ts = 0, ts = 0;
 
   entry = &stream->index[entry_n];
 
   if (stream->is_vbr) {
     /* VBR stream next timestamp */
     if (stream->strh->type == GST_RIFF_FCC_auds) {
-      if (timestamp || duration)
-        ts = avi_stream_convert_frames_to_time_unchecked (stream, entry->total);
-      if (duration)
-        next_ts = avi_stream_convert_frames_to_time_unchecked (stream,
-            entry->total + entry->size);
+      if (timestamp)
+        *timestamp =
+            avi_stream_convert_frames_to_time_unchecked (stream, entry->total);
+      if (ts_end)
+        *ts_end = avi_stream_convert_frames_to_time_unchecked (stream,
+            entry->total + entry->size);
     } else {
-      if (timestamp || duration)
-        ts = avi_stream_convert_frames_to_time_unchecked (stream, entry_n);
-      if (duration)
-        next_ts = avi_stream_convert_frames_to_time_unchecked (stream,
-            entry_n + 1);
+      if (timestamp)
+        *timestamp =
+            avi_stream_convert_frames_to_time_unchecked (stream, entry_n);
+      if (ts_end)
+        *ts_end = avi_stream_convert_frames_to_time_unchecked (stream,
+            entry_n + 1);
     }
   } else {
     /* constant rate stream */
-    if (timestamp || duration)
-      ts = avi_stream_convert_bytes_to_time_unchecked (stream, entry->total);
-    if (duration)
-      next_ts = avi_stream_convert_bytes_to_time_unchecked (stream,
-          entry->total + entry->size);
+    if (timestamp)
+      *timestamp =
+          avi_stream_convert_bytes_to_time_unchecked (stream, entry->total);
+    if (ts_end)
+      *ts_end = avi_stream_convert_bytes_to_time_unchecked (stream,
+          entry->total + entry->size);
   }
-  if (timestamp)
-    *timestamp = ts;
-  if (duration)
-    *duration = next_ts - ts;
-
-  if (offset)
-    *offset = entry->offset;
-  if (size)
-    *size = entry->size;
-  if (keyframe)
-    *keyframe = ENTRY_IS_KEYFRAME (entry);
+
+  if (stream->strh->type == GST_RIFF_FCC_vids) {
+    /* video offsets are the frame number */
+    if (offset)
+      *offset = entry_n;
+    if (offset_end)
+      *offset_end = entry_n + 1;
+  } else {
+    /* no offsets for audio */
+    if (offset)
+      *offset = -1;
+    if (offset_end)
+      *offset_end = -1;
+  }
 }
@@ -3853,7 +3859,7 @@ pull_range_failed:
   }
 }
 
-/* move a stream to an offset */
+/* move a stream to @index */
 static void
 gst_avi_demux_move_stream (GstAviDemux * avi, GstAviStream * stream,
     GstSegment * segment, guint index)
@@ -3868,6 +3874,8 @@ gst_avi_demux_move_stream (GstAviDemux * avi, GstAviStream * stream,
    * are enough decoded frames to fill the segment. */
   next_key = gst_avi_demux_index_next (avi, stream, index, TRUE);
 
+  /* FIXME, we go back to 0, we should look at segment.start. We will however
+   * stop earlier when the see the timestamp < segment.start */
   stream->start_entry = 0;
   stream->step_entry = index;
   stream->current_entry = index;
@@ -3881,9 +3889,23 @@ gst_avi_demux_move_stream (GstAviDemux * avi, GstAviStream * stream,
     stream->stop_entry = gst_avi_demux_index_last (avi, stream);
   }
 
   if (stream->current_entry != index) {
+    GST_DEBUG_OBJECT (avi, "Move DISCONT from %u to %u",
+        stream->current_entry, index);
     stream->current_entry = index;
     stream->discont = TRUE;
   }
+
+  /* update the buffer info */
+  gst_avi_demux_get_buffer_info (avi, stream, index,
+      &stream->current_timestamp, &stream->current_ts_end,
+      &stream->current_offset, &stream->current_offset_end);
+
+  GST_DEBUG_OBJECT (avi, "Moved to %u, ts %" GST_TIME_FORMAT
+      ", ts_end %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
+      ", off_end %" G_GUINT64_FORMAT, index,
+      GST_TIME_ARGS (stream->current_timestamp),
+      GST_TIME_ARGS (stream->current_ts_end), stream->current_offset,
+      stream->current_offset_end);
 }
 
 /*
@@ -3897,26 +3919,23 @@ gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment)
   guint i, index;
   GstAviStream *stream;
   GstClockTime timestamp, duration;
-  gboolean kentry;
 
   seek_time = segment->last_stop;
   keyframe = !!(segment->flags & GST_SEEK_FLAG_KEY_UNIT);
 
+  GST_DEBUG_OBJECT (avi, "seek to: %" GST_TIME_FORMAT
+      " keyframe seeking:%d", GST_TIME_ARGS (seek_time), keyframe);
+
   /* FIXME, this code assumes the main stream with keyframes is stream 0,
    * which is mostly correct... */
   stream = &avi->stream[0];
 
   /* get the entry index for the requested position */
   index = gst_avi_demux_index_for_time (avi, stream, seek_time);
-
-  /* take a look at the entry info */
-  gst_avi_demux_get_entry_info (avi, stream, index,
-      NULL, NULL, NULL, NULL, &kentry);
-
   GST_DEBUG_OBJECT (avi, "Got entry %u", index);
 
   /* check if we are already on a keyframe */
-  if (!kentry) {
+  if (!ENTRY_IS_KEYFRAME (&stream->index[index])) {
     GST_DEBUG_OBJECT (avi, "not keyframe, searching back");
     /* now go to the previous keyframe, this is where we should start
      * decoding from. */
@@ -3925,8 +3944,8 @@ gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment)
   }
 
   /* take a look at the final entry */
-  gst_avi_demux_get_entry_info (avi, stream, index,
-      &timestamp, &duration, NULL, NULL, NULL);
+  gst_avi_demux_get_buffer_info (avi, stream, index,
+      &timestamp, &duration, NULL, NULL);
 
   GST_DEBUG_OBJECT (avi,
       "Got keyframe entry %d [ts:%" GST_TIME_FORMAT
@@ -3939,7 +3958,11 @@ gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment)
     seek_time = timestamp;
   }
 
-  /* move the main stream */
+  /* the seek time is also the last_stop and stream time */
+  segment->last_stop = seek_time;
+  segment->time = seek_time;
+
+  /* move the main stream to this position */
   gst_avi_demux_move_stream (avi, stream, segment, index);
 
   /* now set DISCONT and align the other streams */
@@ -3953,20 +3976,13 @@ gst_avi_demux_do_seek (GstAviDemux * avi, GstSegment * segment)
     /* get the entry index for the requested position */
     index = gst_avi_demux_index_for_time (avi, ostream, seek_time);
 
-    gst_avi_demux_get_entry_info (avi, ostream, index,
-        NULL, NULL, NULL, NULL, &kentry);
-
-    if (!kentry) {
+    if (!ENTRY_IS_KEYFRAME (&ostream->index[index]))
       index = gst_avi_demux_index_prev (avi, ostream, index, TRUE);
-    }
 
     gst_avi_demux_move_stream (avi, ostream, segment, index);
   }
-
-  GST_DEBUG_OBJECT (avi, "seek: %" GST_TIME_FORMAT
-      " keyframe seeking:%d", GST_TIME_ARGS (seek_time), keyframe);
-
-  /* the seek time is also the last_stop and stream time */
-  segment->last_stop = seek_time;
-  segment->time = seek_time;
+  GST_DEBUG_OBJECT (avi, "done seek to: %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (seek_time));
 
   return TRUE;
 }
@@ -4442,14 +4458,14 @@ static GstFlowReturn
 gst_avi_demux_advance (GstAviDemux * avi, GstAviStream * stream,
     GstFlowReturn ret)
 {
-  guint i;
+  guint old_entry, new_entry;
 
-  /* move to the next entry */
-  stream->current_entry++;
-  stream->current_frame++;
+  old_entry = stream->current_entry;
+  /* move forwards */
+  new_entry = old_entry + 1;
 
   /* see if we reached the end */
-  if (stream->current_entry >= stream->stop_entry) {
+  if (new_entry >= stream->stop_entry) {
     if (avi->segment.rate < 0.0) {
       if (stream->step_entry == stream->start_entry) {
         /* we stepped all the way to the start, eos */
@@ -4460,31 +4476,52 @@ gst_avi_demux_advance (GstAviDemux * avi, GstAviStream * stream,
       stream->stop_entry = stream->step_entry;
       stream->step_entry = gst_avi_demux_index_prev (avi, stream,
          stream->stop_entry, TRUE);
-      stream->current_entry = stream->step_entry;
       GST_DEBUG_OBJECT (avi,
          "reverse playback jump: start %u, step %u, stop %u",
          stream->start_entry, stream->step_entry, stream->stop_entry);
-      /* mark DISCONT */
-      for (i = 0; i < avi->num_streams; i++) {
-        avi->stream[i].last_flow = GST_FLOW_OK;
-        avi->stream[i].discont = TRUE;
-      }
+      /* and start from the previous keyframe now */
+      new_entry = stream->step_entry;
    } else {
      /* EOS */
      GST_DEBUG_OBJECT (avi, "forward reached stop %u", stream->stop_entry);
      goto eos;
    }
  }
 
+  if (new_entry != old_entry) {
+    stream->current_entry = new_entry;
+    stream->current_total = stream->index[new_entry].total;
+
+    if (new_entry == old_entry + 1) {
+      GST_DEBUG_OBJECT (avi, "moved forwards from %u to %u",
+          old_entry, new_entry);
+      /* we simply moved one step forwards, reuse current info */
+      stream->current_timestamp = stream->current_ts_end;
+      stream->current_offset = stream->current_offset_end;
+      gst_avi_demux_get_buffer_info (avi, stream, new_entry,
+          NULL, &stream->current_ts_end, NULL, &stream->current_offset_end);
+    } else {
+      GST_DEBUG_OBJECT (avi, "DISCONT move from %u to %u", old_entry,
+          new_entry);
+      /* we moved DISCONT, full update */
+      gst_avi_demux_get_buffer_info (avi, stream, new_entry,
+          &stream->current_timestamp, &stream->current_ts_end,
+          &stream->current_offset, &stream->current_offset_end);
+      /* and MARK discont for this stream */
+      stream->last_flow = GST_FLOW_OK;
+      stream->discont = TRUE;
+    }
+  }
  return ret;
 
  /* ERROR */
eos:
  {
    GST_DEBUG_OBJECT (avi, "we are EOS");
-    /* setting current_time to -1 marks EOS */
-    stream->current_time = -1;
+    /* setting current_timestamp to -1 marks EOS */
+    stream->current_timestamp = -1;
    return GST_FLOW_UNEXPECTED;
  }
}
@@ -4493,25 +4530,27 @@ static GstFlowReturn
 gst_avi_demux_loop_data (GstAviDemux * avi)
 {
   GstFlowReturn ret = GST_FLOW_OK;
-  guint64 min_time;
   guint stream_num, i;
+  guint64 min_time;
   GstAviStream *stream;
   gboolean processed = FALSE;
   GstBuffer *buf;
   guint64 offset, size;
   GstClockTime timestamp, duration;
+  guint64 out_offset, out_offset_end;
   gboolean keyframe;
+  GstAviIndexEntry *entry;
 
   do {
-    min_time = G_MAXUINT64;
     /* first find the stream with the lowest current position, this is the one
      * we should push from next */
+    min_time = G_MAXUINT64;
     stream_num = -1;
     for (i = 0; i < avi->num_streams; i++) {
       guint64 position;
 
       stream = &avi->stream[i];
-      position = stream->current_time;
+      position = stream->current_timestamp;
 
       /* position of -1 is EOS */
       if (position != -1 && position < min_time) {
@@ -4537,20 +4576,27 @@ gst_avi_demux_loop_data (GstAviDemux * avi)
     }
 
     /* get the timing info for the entry */
-    gst_avi_demux_get_entry_info (avi, stream, stream->current_entry,
-        &timestamp, &duration, &offset, &size, &keyframe);
+    timestamp = stream->current_timestamp;
+    duration = stream->current_ts_end - timestamp;
+    out_offset = stream->current_offset;
+    out_offset_end = stream->current_offset_end;
+
+    /* get the entry data info */
+    entry = &stream->index[stream->current_entry];
+    offset = entry->offset;
+    size = entry->size;
+    keyframe = ENTRY_IS_KEYFRAME (entry);
 
     /* skip empty entries */
     if (size == 0) {
-      GST_DEBUG_OBJECT (avi, "Skipping entry %d (%d, %p)",
+      GST_DEBUG_OBJECT (avi, "Skipping entry %u (%u, %p)",
           stream->current_entry, size, stream->pad);
       goto next;
     }
 
     if (avi->segment.rate > 0.0) {
       /* only check this for fowards playback for now */
-      if (keyframe && GST_CLOCK_TIME_IS_VALID (timestamp)
-          && GST_CLOCK_TIME_IS_VALID (avi->segment.stop)
+      if (keyframe && GST_CLOCK_TIME_IS_VALID (avi->segment.stop)
           && (timestamp > avi->segment.stop)) {
         goto eos_stop;
       }
@@ -4580,16 +4626,17 @@ gst_avi_demux_loop_data (GstAviDemux * avi)
 
     GST_BUFFER_TIMESTAMP (buf) = timestamp;
     GST_BUFFER_DURATION (buf) = duration;
-    GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
-    GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
+    GST_BUFFER_OFFSET (buf) = out_offset;
+    GST_BUFFER_OFFSET_END (buf) = out_offset_end;
     gst_buffer_set_caps (buf, GST_PAD_CAPS (stream->pad));
 
-    GST_DEBUG_OBJECT (avi, "Pushing buffer of size %d, offset %"
-        G_GUINT64_FORMAT " and time %"
-        GST_TIME_FORMAT " on pad %s",
-        GST_BUFFER_SIZE (buf), GST_BUFFER_OFFSET (buf),
-        GST_TIME_ARGS (timestamp), GST_PAD_NAME (stream->pad));
+    GST_DEBUG_OBJECT (avi, "Pushing buffer of size %u, ts %"
+        GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT ", off %" G_GUINT64_FORMAT
+        ", off_end %" G_GUINT64_FORMAT " on pad %s",
+        GST_BUFFER_SIZE (buf), GST_TIME_ARGS (timestamp),
+        GST_TIME_ARGS (duration), GST_BUFFER_OFFSET (buf),
+        GST_BUFFER_OFFSET_END (buf), GST_PAD_NAME (stream->pad));
 
     /* update current position in the segment */
     gst_segment_set_last_stop (&avi->segment, GST_FORMAT_TIME, timestamp);
@@ -4605,7 +4652,7 @@ gst_avi_demux_loop_data (GstAviDemux * avi)
       /* mark as processed, we increment the frame and byte counters then
        * leave the while loop and return the GstFlowReturn */
       processed = TRUE;
-      GST_DEBUG_OBJECT (avi, "Processed buffer %d: %s", stream->current_entry,
+      GST_DEBUG_OBJECT (avi, "Processed buffer %u: %s", stream->current_entry,
          gst_flow_get_name (ret));
 
      if (avi->segment.rate < 0) {
@@ -4618,6 +4665,7 @@ gst_avi_demux_loop_data (GstAviDemux * avi)
      }
    }
 
  next:
+    /* move to next item */
    ret = gst_avi_demux_advance (avi, stream, ret);
 
    /* combine flows */
@@ -4791,8 +4839,8 @@ gst_avi_demux_stream_data (GstAviDemux * avi)
    if (G_UNLIKELY (format != GST_FORMAT_TIME))
      goto wrong_format;
 
-    stream->current_frame++;
-    stream->current_byte += size;
+    stream->current_entry++;
+    stream->current_total += size;
 
    /* invert the picture if needed */
    buf = gst_avi_demux_invert (stream, buf);
@@ -4803,10 +4851,13 @@ gst_avi_demux_stream_data (GstAviDemux * avi)
    GST_BUFFER_TIMESTAMP (buf) = next_ts;
    GST_BUFFER_DURATION (buf) = dur_ts - next_ts;
 
-    if (stream->strh->type == GST_RIFF_FCC_vids)
-      GST_BUFFER_OFFSET (buf) = stream->current_frame - 1;
-    else
+    if (stream->strh->type == GST_RIFF_FCC_vids) {
+      GST_BUFFER_OFFSET (buf) = stream->current_entry - 1;
+      GST_BUFFER_OFFSET_END (buf) = stream->current_entry;
+    } else {
      GST_BUFFER_OFFSET (buf) = GST_BUFFER_OFFSET_NONE;
+      GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_NONE;
+    }
 
    gst_buffer_set_caps (buf, GST_PAD_CAPS (stream->pad));
 
    GST_DEBUG_OBJECT (avi,


@@ -83,11 +83,15 @@ typedef struct {
  guint          start_entry;
  guint          step_entry;
  guint          stop_entry;
 
-  /* current position (byte, frame, time) and other status vars */
+  /* current index entry */
  guint          current_entry;
-  guint          current_frame;
-  guint64        current_byte;
-  guint64        current_time;
+  /* position (byte, frame, time) for current_entry */
+  guint          current_total;
+  GstClockTime   current_timestamp;
+  GstClockTime   current_ts_end;
+  guint64        current_offset;
+  guint64        current_offset_end;
 
  GstFlowReturn  last_flow;
  gboolean       discont;