asfdemux: Support reverse playback

Support reverse playback for the ASF format.
Parse packets and queue their payloads, then push
the payload buffers to the decoder in reverse order.
Video buffers are pushed in groups running from one
keyframe up to the next keyframe.

https://bugzilla.gnome.org/show_bug.cgi?id=757341
Ravi Kiran K N 2015-10-30 11:06:11 +05:30 committed by Nicolas Dufresne
parent 59f5866cd2
commit 9ee60482b2
3 changed files with 337 additions and 71 deletions
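The ordering the commit message describes can be pictured with a small standalone sketch (not part of the patch, and not the demuxer's real data structures — `Frame` and `push_group` are made up for illustration): keyframe-delimited groups are visited from the end of the stream backwards, and each group is pushed in decoding order with a discontinuity at the group boundary.

```c
#include <stdio.h>
#include <stdbool.h>

typedef struct {
  int ts;                       /* timestamp of the frame */
  bool keyframe;
} Frame;

/* Push one keyframe-delimited group [start, end) in decoding order. */
static void
push_group (const Frame * frames, int start, int end)
{
  for (int i = start; i < end; i++)
    printf ("push ts=%d%s\n", frames[i].ts, frames[i].keyframe ? " [KF]" : "");
  printf ("-- group boundary (DISCONT) --\n");
}

int
main (void)
{
  /* Frames in file order: two keyframe groups. */
  Frame frames[] = {
    { 1, true }, { 2, false }, { 3, false },    /* K1 P2 P3 */
    { 4, true }, { 5, false }, { 6, false },    /* K4 P5 P6 */
  };
  int end = 6;

  /* Walk backwards; whenever a keyframe is found, push its group
   * (keyframe up to the next keyframe) in decoding order. */
  for (int i = end - 1; i >= 0; i--) {
    if (frames[i].keyframe) {
      push_group (frames, i, end);
      end = i;
    }
  }
  return 0;
}
```

In the patch itself this bookkeeping is done per AsfStream with the new kf_pos and reverse_kf_ready fields, and it operates on ASF packets and payloads rather than on individual frames.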

gst/asfdemux/asfpacket.c

@@ -29,6 +29,8 @@
#include <gst/gstinfo.h>
#include <string.h>
#define GST_ASF_PAYLOAD_KF_COMPLETE(stream, payload) (stream->is_video && payload->keyframe && payload->buf_filled >= payload->mo_size)
/* we are unlikely to deal with lengths > 2GB here any time soon, so just
* return a signed int and use that for error reporting */
static inline gint
@@ -89,31 +91,72 @@ asf_packet_create_payload_buffer (AsfPacket * packet, const guint8 ** p_data,
}
static AsfPayload *
asf_payload_find_previous_fragment (AsfPayload * payload, AsfStream * stream)
asf_payload_search_payloads_queue (AsfPayload * payload, GArray * payload_list)
{
AsfPayload *ret;
AsfPayload *ret = NULL;
gint idx;
for (idx = payload_list->len - 1; idx >= 0; idx--) {
ret = &g_array_index (payload_list, AsfPayload, idx);
if (G_UNLIKELY (stream->payloads->len == 0)) {
GST_DEBUG ("No previous fragments to merge with for stream %u", stream->id);
return NULL;
}
ret =
&g_array_index (stream->payloads, AsfPayload, stream->payloads->len - 1);
if (G_UNLIKELY (ret->mo_size != payload->mo_size ||
ret->mo_number != payload->mo_number || ret->mo_offset != 0)) {
if (payload->mo_size != 0) {
GST_WARNING ("Previous fragment does not match continued fragment");
return NULL;
} else {
/* Warn about this case, but accept it anyway: files in the wild sometimes
* have continued packets where the subsequent fragments say that they're
* zero-sized. */
GST_WARNING ("Previous fragment found, but current fragment has "
"zero size, accepting anyway");
if (G_UNLIKELY (ret->mo_size == payload->mo_size &&
ret->mo_number == payload->mo_number)) {
return ret;
}
}
return NULL;
}
static AsfPayload *
asf_payload_find_previous_fragment (GstASFDemux * demux, AsfPayload * payload,
AsfStream * stream)
{
AsfPayload *ret = NULL;
if (GST_ASF_DEMUX_IS_REVERSE_PLAYBACK (demux->segment)) {
/* Search in queued payloads list */
ret = asf_payload_search_payloads_queue (payload, stream->payloads);
if (ret) {
GST_DEBUG
("previous fragments found in payloads queue for reverse playback : object ID %d",
ret->mo_number);
return ret;
}
/* Search in payloads 'to be queued' list */
ret = asf_payload_search_payloads_queue (payload, stream->payloads_rev);
if (ret) {
GST_DEBUG
("previous fragments found in temp payload queue for reverse playback : object ID %d",
ret->mo_number);
return ret;
}
} else {
if (G_UNLIKELY (stream->payloads->len == 0)) {
GST_DEBUG ("No previous fragments to merge with for stream %u",
stream->id);
return NULL;
}
ret =
&g_array_index (stream->payloads, AsfPayload,
stream->payloads->len - 1);
if (G_UNLIKELY (ret->mo_size != payload->mo_size ||
ret->mo_number != payload->mo_number || ret->mo_offset != 0)) {
if (payload->mo_size != 0) {
GST_WARNING ("Previous fragment does not match continued fragment");
return NULL;
} else {
/* Warn about this case, but accept it anyway: files in the wild sometimes
* have continued packets where the subsequent fragments say that they're
* zero-sized. */
GST_WARNING ("Previous fragment found, but current fragment has "
"zero size, accepting anyway");
}
}
}
#if 0
if (this_fragment->mo_offset + this_payload_len > first_fragment->mo_size) {
GST_WARNING ("Merged fragments would be bigger than the media object");
@@ -128,8 +171,8 @@ asf_payload_find_previous_fragment (AsfPayload * payload, AsfStream * stream)
* payload doesn't have a duration, maybe we can calculate a duration for it
* (if the previous timestamp is smaller etc. etc.) */
static void
gst_asf_payload_queue_for_stream (GstASFDemux * demux, AsfPayload * payload,
AsfStream * stream)
gst_asf_payload_queue_for_stream_forward (GstASFDemux * demux,
AsfPayload * payload, AsfStream * stream)
{
GST_DEBUG_OBJECT (demux, "Got payload for stream %d ts:%" GST_TIME_FORMAT,
stream->id, GST_TIME_ARGS (payload->ts));
@@ -193,6 +236,44 @@ gst_asf_payload_queue_for_stream (GstASFDemux * demux, AsfPayload * payload,
g_array_append_vals (stream->payloads, payload, 1);
}
static void
gst_asf_payload_queue_for_stream_reverse (GstASFDemux * demux,
AsfPayload * payload, AsfStream * stream)
{
GST_DEBUG_OBJECT (demux, "Got payload for stream %d ts:%" GST_TIME_FORMAT,
stream->id, GST_TIME_ARGS (payload->ts));
if (demux->multiple_payloads) {
/* store the payload in a temporary buffer until we have parsed all payloads in this packet */
g_array_append_vals (stream->payloads_rev, payload, 1);
} else {
if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (payload->ts))) {
g_array_append_vals (stream->payloads, payload, 1);
if (GST_ASF_PAYLOAD_KF_COMPLETE (stream, payload)) {
stream->kf_pos = stream->payloads->len - 1;
}
} else {
gst_buffer_unref (payload->buf);
}
}
}
static void
gst_asf_payload_queue_for_stream (GstASFDemux * demux, AsfPayload * payload,
AsfStream * stream)
{
GST_DEBUG_OBJECT (demux, "Got payload for stream %d ts:%" GST_TIME_FORMAT,
stream->id, GST_TIME_ARGS (payload->ts));
if (GST_ASF_DEMUX_IS_REVERSE_PLAYBACK (demux->segment)) {
gst_asf_payload_queue_for_stream_reverse (demux, payload, stream);
} else {
gst_asf_payload_queue_for_stream_forward (demux, payload, stream);
}
}
static void
asf_payload_parse_replicated_data_extensions (AsfStream * stream,
AsfPayload * payload)
@@ -357,6 +438,9 @@ gst_asf_demux_parse_payload (GstASFDemux * demux, AsfPacket * packet,
return TRUE;
}
if (!stream->is_video)
stream->kf_pos = 0;
if (G_UNLIKELY (!is_compressed)) {
GST_LOG_OBJECT (demux, "replicated data length: %u", payload.rep_data_len);
@@ -396,6 +480,38 @@ gst_asf_demux_parse_payload (GstASFDemux * demux, AsfPacket * packet,
payload_len);
payload.buf_filled = payload_len;
gst_asf_payload_queue_for_stream (demux, &payload, stream);
} else if (GST_ASF_DEMUX_IS_REVERSE_PLAYBACK (demux->segment)) {
/* Handle fragmented payloads for reverse playback */
AsfPayload *prev;
const guint8 *payload_data = *p_data;
prev = asf_payload_find_previous_fragment (demux, &payload, stream);
if (prev) {
gint idx;
AsfPayload *p;
gst_buffer_fill (prev->buf, payload.mo_offset,
payload_data, payload_len);
prev->buf_filled += payload_len;
if (payload.keyframe && payload.mo_offset == 0) {
stream->reverse_kf_ready = TRUE;
for (idx = stream->payloads->len - 1; idx >= 0; idx--) {
p = &g_array_index (stream->payloads, AsfPayload, idx);
if (p->mo_number == payload.mo_number) {
/* Mark position of KF for reverse play */
stream->kf_pos = idx;
}
}
}
} else {
payload.buf = gst_buffer_new_allocate (NULL, payload.mo_size, NULL); /* can we use (mo_size - offset) for size? */
gst_buffer_fill (payload.buf, payload.mo_offset,
payload_data, payload_len);
payload.buf_filled = payload.mo_size - (payload.mo_offset);
gst_asf_payload_queue_for_stream (demux, &payload, stream);
}
*p_data += payload_len;
*p_size -= payload_len;
} else {
const guint8 *payload_data = *p_data;
@@ -408,7 +524,8 @@ gst_asf_demux_parse_payload (GstASFDemux * demux, AsfPacket * packet,
if (payload.mo_offset != 0) {
AsfPayload *prev;
if ((prev = asf_payload_find_previous_fragment (&payload, stream))) {
if ((prev =
asf_payload_find_previous_fragment (demux, &payload, stream))) {
if (prev->buf == NULL || (payload.mo_size > 0
&& payload.mo_size != prev->mo_size)
|| payload.mo_offset >= gst_buffer_get_size (prev->buf)
@@ -587,9 +704,20 @@ gst_asf_demux_parse_packet (GstASFDemux * demux, GstBuffer * buf)
GST_LOG_OBJECT (demux, "padding : %u", packet.padding);
GST_LOG_OBJECT (demux, "send time : %" GST_TIME_FORMAT,
GST_TIME_ARGS (packet.send_time));
GST_LOG_OBJECT (demux, "duration : %" GST_TIME_FORMAT,
GST_TIME_ARGS (packet.duration));
if (GST_ASF_DEMUX_IS_REVERSE_PLAYBACK (demux->segment)
&& demux->seek_to_cur_pos == TRUE) {
/* For reverse playback, initially parse packets forward until we reach packet with 'seek' timestamp */
if (packet.send_time - demux->preroll > demux->segment.stop) {
demux->seek_to_cur_pos = FALSE;
}
ret = GST_ASF_DEMUX_PARSE_PACKET_ERROR_NONE;
goto done;
}
if (G_UNLIKELY (packet.padding == (guint) - 1 || size < packet.padding)) {
GST_WARNING_OBJECT (demux, "No padding, or padding bigger than buffer");
ret = GST_ASF_DEMUX_PARSE_PACKET_ERROR_RECOVERABLE;
@@ -617,6 +745,7 @@ gst_asf_demux_parse_packet (GstASFDemux * demux, GstBuffer * buf)
if (has_multiple_payloads) {
guint i, num, lentype;
demux->multiple_payloads = TRUE;
if (G_UNLIKELY (size < 1)) {
GST_WARNING_OBJECT (demux, "No room more in buffer");
@@ -643,8 +772,29 @@ gst_asf_demux_parse_packet (GstASFDemux * demux, GstBuffer * buf)
break;
}
}
if (GST_ASF_DEMUX_IS_REVERSE_PLAYBACK (demux->segment)) {
/* In reverse playback we have parsed the packet (with multiple payloads) and stored the payloads in a temporary queue.
Now add them to the stream's payload queue */
for (i = 0; i < demux->num_streams; i++) {
AsfStream *s = &demux->stream[i];
while (s->payloads_rev->len > 0) {
AsfPayload *p;
p = &g_array_index (s->payloads_rev, AsfPayload,
s->payloads_rev->len - 1);
g_array_append_vals (s->payloads, p, 1);
if (GST_ASF_PAYLOAD_KF_COMPLETE (s, p)) {
/* Mark position of KF for reverse play */
s->kf_pos = s->payloads->len - 1;
}
g_array_remove_index (s->payloads_rev, (s->payloads_rev->len - 1));
}
}
}
} else {
GST_LOG_OBJECT (demux, "Parsing single payload");
demux->multiple_payloads = FALSE;
if (G_UNLIKELY (!gst_asf_demux_parse_payload (demux, &packet, -1, &data,
&size))) {
GST_WARNING_OBJECT (demux, "Failed to parse payload");

gst/asfdemux/gstasfdemux.c

@@ -175,6 +175,21 @@ gst_asf_demux_free_stream (GstASFDemux * demux, AsfStream * stream)
g_array_free (stream->payloads, TRUE);
stream->payloads = NULL;
}
if (stream->payloads_rev) {
while (stream->payloads_rev->len > 0) {
AsfPayload *payload;
guint last;
last = stream->payloads_rev->len - 1;
payload = &g_array_index (stream->payloads_rev, AsfPayload, last);
gst_buffer_replace (&payload->buf, NULL);
g_array_remove_index (stream->payloads_rev, last);
}
g_array_free (stream->payloads_rev, TRUE);
stream->payloads_rev = NULL;
}
if (stream->ext_props.valid) {
g_free (stream->ext_props.payload_extensions);
stream->ext_props.payload_extensions = NULL;
@@ -631,6 +646,7 @@ gst_asf_demux_handle_seek_event (GstASFDemux * demux, GstEvent * event)
gboolean eos;
guint32 seqnum;
GstEvent *fevent;
gint i;
gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur,
&stop_type, &stop);
@@ -657,8 +673,11 @@ gst_asf_demux_handle_seek_event (GstASFDemux * demux, GstEvent * event)
}
if (G_UNLIKELY (rate <= 0.0)) {
GST_LOG_OBJECT (demux, "backward playback is not supported yet");
return FALSE;
GST_LOG_OBJECT (demux, "backward playback");
demux->seek_to_cur_pos = TRUE;
for (i = 0; i < demux->num_streams; i++) {
demux->stream[i].reverse_kf_ready = FALSE;
}
}
seqnum = gst_event_get_seqnum (event);
@@ -793,10 +812,17 @@ gst_asf_demux_handle_seek_event (GstASFDemux * demux, GstEvent * event)
GST_OBJECT_LOCK (demux);
demux->segment = segment;
demux->packet = packet;
if (GST_ASF_DEMUX_IS_REVERSE_PLAYBACK (demux->segment)) {
demux->packet = (gint64) gst_util_uint64_scale (demux->num_packets,
stop, demux->play_time);
} else {
demux->packet = packet;
}
demux->need_newsegment = TRUE;
demux->segment_seqnum = seqnum;
demux->speed_packets = speed_count;
demux->speed_packets =
GST_ASF_DEMUX_IS_REVERSE_PLAYBACK (demux->segment) ? 1 : speed_count;
gst_asf_demux_reset_stream_state_after_discont (demux);
GST_OBJECT_UNLOCK (demux);
@@ -1529,47 +1555,77 @@ gst_asf_demux_find_stream_with_complete_payload (GstASFDemux * demux)
AsfPayload *payload = NULL;
gint last_idx;
/* find last payload with timestamp */
for (last_idx = stream->payloads->len - 1;
last_idx >= 0 && (payload == NULL
|| !GST_CLOCK_TIME_IS_VALID (payload->ts)); --last_idx) {
payload = &g_array_index (stream->payloads, AsfPayload, last_idx);
}
if (GST_ASF_DEMUX_IS_REVERSE_PLAYBACK (demux->segment)) {
/* Reverse playback */
/* if this is first payload after seek we might need to update the segment */
if (GST_CLOCK_TIME_IS_VALID (payload->ts))
gst_asf_demux_check_segment_ts (demux, payload->ts);
if (G_UNLIKELY (GST_CLOCK_TIME_IS_VALID (payload->ts) &&
(payload->ts < demux->segment.start))) {
if (G_UNLIKELY (demux->keyunit_sync && payload->keyframe)) {
GST_DEBUG_OBJECT (stream->pad,
"Found keyframe, updating segment start to %" GST_TIME_FORMAT,
GST_TIME_ARGS (payload->ts));
demux->segment.start = payload->ts;
demux->segment.time = payload->ts;
if (stream->is_video) {
/* We have to push payloads from KF to the first frame we accumulated (reverse order) */
if (stream->reverse_kf_ready) {
payload =
&g_array_index (stream->payloads, AsfPayload, stream->kf_pos);
if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (payload->ts))) {
/* TODO : remove payload from the list? */
continue;
}
} else {
continue;
}
} else {
GST_DEBUG_OBJECT (stream->pad, "Last queued payload has timestamp %"
GST_TIME_FORMAT " which is before our segment start %"
GST_TIME_FORMAT ", not pushing yet", GST_TIME_ARGS (payload->ts),
GST_TIME_ARGS (demux->segment.start));
continue;
/* find first complete payload with timestamp */
for (j = stream->payloads->len - 1;
j >= 0 && (payload == NULL
|| !GST_CLOCK_TIME_IS_VALID (payload->ts)); --j) {
payload = &g_array_index (stream->payloads, AsfPayload, j);
}
/* If there's a complete payload queued for this stream */
if (!gst_asf_payload_is_complete (payload))
continue;
}
} else {
/* find last payload with timestamp */
for (last_idx = stream->payloads->len - 1;
last_idx >= 0 && (payload == NULL
|| !GST_CLOCK_TIME_IS_VALID (payload->ts)); --last_idx) {
payload = &g_array_index (stream->payloads, AsfPayload, last_idx);
}
/* if this is first payload after seek we might need to update the segment */
if (GST_CLOCK_TIME_IS_VALID (payload->ts))
gst_asf_demux_check_segment_ts (demux, payload->ts);
if (G_UNLIKELY (GST_CLOCK_TIME_IS_VALID (payload->ts) &&
(payload->ts < demux->segment.start))) {
if (G_UNLIKELY ((!demux->keyunit_sync) && payload->keyframe)) {
GST_DEBUG_OBJECT (stream->pad,
"Found keyframe, updating segment start to %" GST_TIME_FORMAT,
GST_TIME_ARGS (payload->ts));
demux->segment.start = payload->ts;
demux->segment.time = payload->ts;
} else {
GST_DEBUG_OBJECT (stream->pad, "Last queued payload has timestamp %"
GST_TIME_FORMAT " which is before our segment start %"
GST_TIME_FORMAT ", not pushing yet",
GST_TIME_ARGS (payload->ts),
GST_TIME_ARGS (demux->segment.start));
continue;
}
}
payload = NULL;
/* find first complete payload with timestamp */
for (j = 0;
j < stream->payloads->len && (payload == NULL
|| !GST_CLOCK_TIME_IS_VALID (payload->ts)); ++j) {
payload = &g_array_index (stream->payloads, AsfPayload, j);
}
/* Now see if there's a complete payload queued for this stream */
if (!gst_asf_payload_is_complete (payload))
continue;
}
/* Now see if there's a complete payload queued for this stream */
payload = NULL;
/* find first complete payload with timestamp */
for (j = 0;
j < stream->payloads->len && (payload == NULL
|| !GST_CLOCK_TIME_IS_VALID (payload->ts)); ++j) {
payload = &g_array_index (stream->payloads, AsfPayload, j);
}
if (!gst_asf_payload_is_complete (payload))
continue;
/* ... and whether its timestamp is lower than the current best */
if (best_stream == NULL || best_payload->ts > payload->ts) {
best_stream = stream;
@@ -1603,7 +1659,12 @@ gst_asf_demux_push_complete_payloads (GstASFDemux * demux, gboolean force)
&& !GST_CLOCK_TIME_IS_VALID (demux->segment_ts)))
return GST_FLOW_OK;
payload = &g_array_index (stream->payloads, AsfPayload, 0);
if (GST_ASF_DEMUX_IS_REVERSE_PLAYBACK (demux->segment) && stream->is_video
&& stream->payloads->len) {
payload = &g_array_index (stream->payloads, AsfPayload, stream->kf_pos);
} else {
payload = &g_array_index (stream->payloads, AsfPayload, 0);
}
/* do we need to send a newsegment event */
if ((G_UNLIKELY (demux->need_newsegment))) {
@@ -1620,8 +1681,9 @@ gst_asf_demux_push_complete_payloads (GstASFDemux * demux, gboolean force)
}
/* FIXME : only if ACCURATE ! */
if (G_LIKELY (demux->keyunit_sync
&& GST_CLOCK_TIME_IS_VALID (payload->ts))) {
if (G_LIKELY (!demux->keyunit_sync
&& (GST_CLOCK_TIME_IS_VALID (payload->ts)))
&& !GST_ASF_DEMUX_IS_REVERSE_PLAYBACK (demux->segment)) {
GST_DEBUG ("Adjusting newsegment start to %" GST_TIME_FORMAT,
GST_TIME_ARGS (payload->ts));
demux->segment.start = payload->ts;
@@ -1711,7 +1773,8 @@ gst_asf_demux_push_complete_payloads (GstASFDemux * demux, gboolean force)
* position reporting if a live src is playing not so live content
* (e.g. rtspsrc taking some time to fall back to tcp) */
timestamp = payload->ts;
if (GST_CLOCK_TIME_IS_VALID (timestamp)) {
if (GST_CLOCK_TIME_IS_VALID (timestamp)
&& !GST_ASF_DEMUX_IS_REVERSE_PLAYBACK (demux->segment)) {
timestamp += demux->in_gap;
/* Check if we're after the segment already, if so no need to push
@@ -1749,6 +1812,15 @@ gst_asf_demux_push_complete_payloads (GstASFDemux * demux, gboolean force)
GST_LOG_OBJECT (stream->pad, "pushing buffer, %" GST_PTR_FORMAT,
payload->buf);
if (GST_ASF_DEMUX_IS_REVERSE_PLAYBACK (demux->segment) && stream->is_video) {
if (stream->reverse_kf_ready == TRUE && stream->kf_pos == 0) {
GST_BUFFER_FLAG_SET (payload->buf, GST_BUFFER_FLAG_DISCONT);
}
} else if (GST_ASF_DEMUX_IS_REVERSE_PLAYBACK (demux->segment)) {
GST_BUFFER_FLAG_SET (payload->buf, GST_BUFFER_FLAG_DISCONT);
}
if (stream->active) {
if (G_UNLIKELY (stream->first_buffer)) {
if (stream->streamheader != NULL) {
@@ -1775,7 +1847,18 @@ gst_asf_demux_push_complete_payloads (GstASFDemux * demux, gboolean force)
ret = GST_FLOW_OK;
}
payload->buf = NULL;
g_array_remove_index (stream->payloads, 0);
if (GST_ASF_DEMUX_IS_REVERSE_PLAYBACK (demux->segment) && stream->is_video
&& stream->reverse_kf_ready) {
g_array_remove_index (stream->payloads, stream->kf_pos);
stream->kf_pos--;
if (stream->reverse_kf_ready == TRUE && stream->kf_pos < 0) {
stream->kf_pos = 0;
stream->reverse_kf_ready = FALSE;
}
} else {
g_array_remove_index (stream->payloads, 0);
}
/* Break out as soon as we have an issue */
if (G_UNLIKELY (ret != GST_FLOW_OK))
@@ -1895,13 +1978,31 @@ gst_asf_demux_loop (GstASFDemux * demux)
GST_INFO_OBJECT (demux, "Ignoring recoverable parse error");
gst_buffer_unref (buf);
++demux->packet;
if (GST_ASF_DEMUX_IS_REVERSE_PLAYBACK (demux->segment)
&& !demux->seek_to_cur_pos) {
--demux->packet;
if (demux->packet < 0) {
goto eos;
}
} else {
++demux->packet;
}
return;
}
flow = gst_asf_demux_push_complete_payloads (demux, FALSE);
++demux->packet;
if (GST_ASF_DEMUX_IS_REVERSE_PLAYBACK (demux->segment)
&& !demux->seek_to_cur_pos) {
--demux->packet;
if (demux->packet < 0) {
goto eos;
}
} else {
++demux->packet;
}
} else {
guint n;
@@ -2440,6 +2541,9 @@ gst_asf_demux_setup_pad (GstASFDemux * demux, GstPad * src_pad,
stream->payloads = g_array_new (FALSE, FALSE, sizeof (AsfPayload));
/* TODO: create this array during reverse play? */
stream->payloads_rev = g_array_new (FALSE, FALSE, sizeof (AsfPayload));
GST_INFO ("Created pad %s for stream %u with caps %" GST_PTR_FORMAT,
GST_PAD_NAME (src_pad), demux->num_streams, caps);
@@ -4571,6 +4675,7 @@ gst_asf_demux_change_state (GstElement * element, GstStateChange transition)
demux->index_offset = 0;
demux->base_offset = 0;
demux->flowcombiner = gst_flow_combiner_new ();
break;
}
default:

gst/asfdemux/gstasfdemux.h

@@ -133,6 +133,11 @@ typedef struct
guint8 par_y;
gboolean interlaced;
/* For reverse playback */
gboolean reverse_kf_ready; /* Found a complete KF payload */
GArray *payloads_rev; /* Temp queue for storing the multiple payloads of a packet */
gint kf_pos; /* KF position in the payload queue; payloads from this position onwards will be pushed */
/* extended stream properties (optional) */
AsfStreamExtProps ext_props;
@@ -145,6 +150,8 @@ typedef enum {
GST_ASF_DEMUX_STATE_INDEX
} GstASFDemuxState;
#define GST_ASF_DEMUX_IS_REVERSE_PLAYBACK(seg) (seg.rate < 0.0? TRUE:FALSE)
#define GST_ASF_DEMUX_NUM_VIDEO_PADS 16
#define GST_ASF_DEMUX_NUM_AUDIO_PADS 32
#define GST_ASF_DEMUX_NUM_STREAMS 32
@@ -226,6 +233,10 @@ struct _GstASFDemux {
GSList *other_streams; /* remember streams that are in header but have unknown type */
/* For reverse playback */
gboolean seek_to_cur_pos; /* Search packets till we reach 'seek' time */
gboolean multiple_payloads; /* Whether packet has multiple payloads */
/* parsing 3D */
GstASF3DMode asf_3D_mode;
};
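For completeness (not part of the patch): the new code paths are only taken when the segment rate is negative, i.e. when an application requests a reverse seek. A minimal sketch using the standard gst_element_seek() API is shown below; the playbin pipeline and the file URI are placeholders, and error handling is omitted.

```c
#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *pipeline;

  gst_init (&argc, &argv);

  /* placeholder URI — any ASF/WMV file will do */
  pipeline = gst_parse_launch ("playbin uri=file:///path/to/sample.wmv", NULL);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  /* wait for the state change to complete before seeking */
  gst_element_get_state (pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);

  /* rate = -1.0 makes segment.rate negative in asfdemux, so
   * GST_ASF_DEMUX_IS_REVERSE_PLAYBACK() is TRUE and packets are walked
   * backwards from the stop position */
  gst_element_seek (pipeline, -1.0, GST_FORMAT_TIME,
      GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE,
      GST_SEEK_TYPE_SET, 0,
      GST_SEEK_TYPE_SET, 10 * GST_SECOND);

  /* ... run a main loop, wait for EOS, etc. ... */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
```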