Partially port the mpegstream plugin to GStreamer 0.9.

Original commit message from CVS:
2005-11-14  Martin Soto  <martinsoto@users.sourceforge.net>

* configure.ac:
* gst/mpegstream/Makefile.am:
* gst/mpegstream/gstdvddemux.c:
* gst/mpegstream/gstdvddemux.h:
* gst/mpegstream/gstmpegdemux.c:
* gst/mpegstream/gstmpegdemux.h:
* gst/mpegstream/gstmpegpacketize.c:
* gst/mpegstream/gstmpegpacketize.h:
* gst/mpegstream/gstmpegparse.c:
* gst/mpegstream/gstmpegparse.h:
* gst/mpegstream/gstmpegstream.c:
* gst/mpegstream/gstrfc2250enc.c:
* gst/mpegstream/gstrfc2250enc.h:
Applied patch from Josef Zlomek <josef.zlomek@xeris.cz> to
partially port the mpegstream plugin to GStreamer 0.9.
Josef Zlomek 2005-11-14 21:20:21 +00:00 committed by Martin Soto
parent b54b0bbe32
commit d45697d6ed
14 changed files with 1329 additions and 589 deletions
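
The changes below follow the usual 0.8-to-0.9 porting pattern that recurs throughout the diff: the single GstData-based send_data() vmethod is split into send_buffer() and process_event(), pushes now return GstFlowReturn, gst_pad_push() carries only buffers while events go through gst_pad_push_event(), and explicit caps (gst_pad_set_explicit_caps()/gst_caps_free()) become fixed caps (gst_pad_set_caps()/gst_caps_unref()). A minimal sketch of the buffer/event split, using a hypothetical srcpad field (illustration only, not code from this commit):

/* GStreamer 0.8: buffers and events arrive through one vmethod. */
static void
send_data_0_8 (GstMPEGParse * parse, GstData * data, GstClockTime time)
{
  if (GST_IS_BUFFER (data))
    gst_pad_push (parse->srcpad, data);        /* 0.8 gst_pad_push() takes GstData */
  else
    gst_pad_event_default (parse->sinkpad, GST_EVENT (data));
}

/* GStreamer 0.9: buffers and events are handled separately and every
   push reports a GstFlowReturn. */
static GstFlowReturn
send_buffer_0_9 (GstMPEGParse * parse, GstBuffer * buffer, GstClockTime time)
{
  return gst_pad_push (parse->srcpad, buffer); /* buffers only */
}

static GstFlowReturn
process_event_0_9 (GstMPEGParse * parse, GstEvent * event, GstClockTime time)
{
  if (!gst_pad_push_event (parse->srcpad, event))
    return GST_FLOW_ERROR;
  return GST_FLOW_OK;
}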

ChangeLog

@ -1,3 +1,21 @@
2005-11-14 Martin Soto <martinsoto@users.sourceforge.net>
* configure.ac:
* gst/mpegstream/Makefile.am:
* gst/mpegstream/gstdvddemux.c:
* gst/mpegstream/gstdvddemux.h:
* gst/mpegstream/gstmpegdemux.c:
* gst/mpegstream/gstmpegdemux.h:
* gst/mpegstream/gstmpegpacketize.c:
* gst/mpegstream/gstmpegpacketize.h:
* gst/mpegstream/gstmpegparse.c:
* gst/mpegstream/gstmpegparse.h:
* gst/mpegstream/gstmpegstream.c:
* gst/mpegstream/gstrfc2250enc.c:
* gst/mpegstream/gstrfc2250enc.h:
Applied patch from Josef Zlomek <josef.zlomek@xeris.cz> to
partially port the mpegstream plugin to GStreamer 0.9.
2005-11-14 Andy Wingo <wingo@pobox.com>
* configure.ac (GST_PLUGIN_LDFLAGS): -no-undefined for better

configure.ac

@ -213,8 +213,9 @@ AC_SUBST(GST_PLUGIN_LDFLAGS)
dnl these are all the gst plug-ins, compilable without additional libs
GST_PLUGINS_ALL="\
dvdlpcmdec \
iec958 \
iec958 \
mpegaudioparse \
mpegstream \
realmedia \
"
@ -390,6 +391,7 @@ gst/Makefile
gst/dvdlpcmdec/Makefile
gst/iec958/Makefile
gst/mpegaudioparse/Makefile
gst/mpegstream/Makefile
gst/realmedia/Makefile
ext/Makefile
ext/a52dec/Makefile

gst/mpegstream/Makefile.am

@ -2,21 +2,20 @@
plugin_LTLIBRARIES = libgstmpegstream.la
libgstmpegstream_la_SOURCES = gstmpegstream.c \
gstmpegparse.c \
gstmpegdemux.c \
gstmpegparse.c \
gstmpegdemux.c \
gstdvddemux.c \
gstmpegpacketize.c \
gstrfc2250enc.c \
gstmpegclock.c
libgstmpegstream_la_CFLAGS = $(GST_CFLAGS)
libgstmpegstream_la_LIBADD =
# gstrfc2250enc.c
libgstmpegstream_la_CFLAGS = $(GST_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS)
libgstmpegstream_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-@GST_MAJORMINOR@
libgstmpegstream_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
noinst_HEADERS = gstmpegparse.h \
gstmpegdemux.h \
gstdvddemux.h \
gstmpegpacketize.h \
gstrfc2250enc.h \
gstmpegclock.h
EXTRA_DIST = README notes

gst/mpegstream/gstdvddemux.c

@ -27,10 +27,11 @@
#include "gstdvddemux.h"
/*
* Start the timestamp sequence at 2 seconds to allow for strange audio
* Move PTM discont back by 0.3 seconds to allow for strange audio
* timestamps when audio crosses a VOBU
*/
#define INITIAL_END_PTM (2 * GST_SECOND)
#define PTM_DISCONT_ADJUST (0.3 * GST_SECOND)
#define INITIAL_END_PTM (-1)
GST_DEBUG_CATEGORY_STATIC (gstdvddemux_debug);
#define GST_CAT_DEFAULT (gstdvddemux_debug)
@ -126,20 +127,23 @@ GST_STATIC_PAD_TEMPLATE ("current_subpicture",
GST_PAD_ALWAYS,
SUBPICTURE_CAPS);
static void gst_dvd_demux_class_init (GstDVDDemuxClass * klass);
static void gst_dvd_demux_base_init (GstDVDDemuxClass * klass);
static void gst_dvd_demux_init (GstDVDDemux * dvd_demux);
static void gst_dvd_demux_send_data (GstMPEGParse * mpeg_parse,
GstData * data, GstClockTime time);
static GstFlowReturn gst_dvd_demux_send_buffer (GstMPEGParse * mpeg_parse,
GstBuffer * buffer, GstClockTime time);
static GstFlowReturn gst_dvd_demux_process_event (GstMPEGParse * mpeg_parse,
GstEvent * event, GstClockTime time);
static void gst_dvd_demux_send_discont
(GstMPEGParse * mpeg_parse, GstClockTime time);
static void gst_dvd_demux_handle_discont
static GstFlowReturn gst_dvd_demux_handle_discont
(GstMPEGParse * mpeg_parse, GstEvent * event);
static gboolean gst_dvd_demux_handle_dvd_event
#if 0
static GstFlowReturn gst_dvd_demux_handle_dvd_event
(GstDVDDemux * dvd_demux, GstEvent * event);
#endif
static GstFlowReturn gst_dvd_demux_send_event
(GstMPEGParse * mpeg_parse, GstEvent * event, GstClockTime time);
static GstMPEGStream *gst_dvd_demux_get_video_stream
(GstMPEGDemux * mpeg_demux,
@ -161,13 +165,18 @@ static void gst_dvd_demux_send_subbuffer
GstMPEGStream * outstream,
GstBuffer * buffer, GstClockTime timestamp, guint offset, guint size);
#if 0
static void gst_dvd_demux_set_cur_audio
(GstDVDDemux * dvd_demux, gint stream_nr);
static void gst_dvd_demux_set_cur_subpicture
(GstDVDDemux * dvd_demux, gint stream_nr);
#endif
static void gst_dvd_demux_reset (GstDVDDemux * dvd_demux);
static void gst_dvd_demux_synchronise_pads (GstMPEGDemux * mpeg_demux,
GstClockTime threshold, GstClockTime new_ts);
static void gst_dvd_demux_sync_stream_to_time (GstMPEGDemux * mpeg_demux,
GstMPEGStream * stream, GstClockTime last_ts);
static GstStateChangeReturn gst_dvd_demux_change_state (GstElement * element,
GstStateChange transition);
@ -213,7 +222,8 @@ gst_dvd_demux_base_init (GstDVDDemuxClass * klass)
GstMPEGDemuxClass *demux_class = GST_MPEG_DEMUX_CLASS (klass);
GstMPEGParseClass *mpeg_parse_class = (GstMPEGParseClass *) klass;
mpeg_parse_class->send_data = gst_dvd_demux_send_data;
mpeg_parse_class->send_buffer = gst_dvd_demux_send_buffer;
mpeg_parse_class->process_event = gst_dvd_demux_process_event;
/* sink pad */
gst_element_class_add_pad_template (element_class,
@ -233,6 +243,7 @@ gst_dvd_demux_base_init (GstDVDDemuxClass * klass)
gst_element_class_add_pad_template (element_class, klass->cur_video_template);
gst_element_class_add_pad_template (element_class, klass->cur_audio_template);
gst_element_class_add_pad_template (element_class,
klass->subpicture_template);
gst_element_class_add_pad_template (element_class,
@ -257,13 +268,15 @@ gst_dvd_demux_class_init (GstDVDDemuxClass * klass)
gstelement_class->change_state = gst_dvd_demux_change_state;
mpeg_parse_class->send_discont = gst_dvd_demux_send_discont;
mpeg_parse_class->handle_discont = gst_dvd_demux_handle_discont;
mpeg_parse_class->send_event = gst_dvd_demux_send_event;
mpeg_demux_class->get_audio_stream = gst_dvd_demux_get_audio_stream;
mpeg_demux_class->get_video_stream = gst_dvd_demux_get_video_stream;
mpeg_demux_class->send_subbuffer = gst_dvd_demux_send_subbuffer;
mpeg_demux_class->process_private = gst_dvd_demux_process_private;
mpeg_demux_class->synchronise_pads = gst_dvd_demux_synchronise_pads;
mpeg_demux_class->sync_stream_to_time = gst_dvd_demux_sync_stream_to_time;
klass->get_subpicture_stream = gst_dvd_demux_get_subpicture_stream;
}
@ -275,8 +288,6 @@ gst_dvd_demux_init (GstDVDDemux * dvd_demux)
GstMPEGDemux *mpeg_demux = GST_MPEG_DEMUX (dvd_demux);
gint i;
GST_OBJECT_FLAG_SET (dvd_demux, GST_ELEMENT_EVENT_AWARE);
/* Create the pads for the current streams. */
dvd_demux->cur_video =
DEMUX_CLASS (dvd_demux)->new_output_pad (mpeg_demux, "current_video",
@ -297,54 +308,62 @@ gst_dvd_demux_init (GstDVDDemux * dvd_demux)
dvd_demux->cur_subpicture_nr = 0;
dvd_demux->last_end_ptm = INITIAL_END_PTM;
dvd_demux->just_flushed = FALSE;
dvd_demux->discont_time = GST_CLOCK_TIME_NONE;
for (i = 0; i < GST_DVD_DEMUX_NUM_SUBPICTURE_STREAMS; i++) {
dvd_demux->subpicture_stream[i] = NULL;
}
dvd_demux->langcodes = NULL;
dvd_demux->ignore_next_newmedia_discont = FALSE;
}
static void
gst_dvd_demux_send_data (GstMPEGParse * mpeg_parse, GstData * data,
static GstFlowReturn
gst_dvd_demux_send_buffer (GstMPEGParse * mpeg_parse, GstBuffer * buffer,
GstClockTime time)
{
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
static GstFlowReturn
gst_dvd_demux_process_event (GstMPEGParse * mpeg_parse, GstEvent * event,
GstClockTime time)
{
GstDVDDemux *dvd_demux = GST_DVD_DEMUX (mpeg_parse);
if (GST_IS_BUFFER (data)) {
gst_buffer_unref (GST_BUFFER (data));
} else {
GstEvent *event = GST_EVENT (data);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_ANY:
gst_dvd_demux_handle_dvd_event (dvd_demux, event);
break;
case GST_EVENT_FLUSH:
GST_DEBUG_OBJECT (dvd_demux, "flush received");
dvd_demux->just_flushed = TRUE;
/* Propagate the event normally. */
gst_pad_event_default (mpeg_parse->sinkpad, event);
break;
default:
gst_pad_event_default (mpeg_parse->sinkpad, event);
break;
}
switch (GST_EVENT_TYPE (event)) {
#if 0
/* FIXME: I do not know how to port GST_EVENT_ANY to gstreamer 0.9 */
case GST_EVENT_ANY:
if (!gst_dvd_demux_handle_dvd_event (dvd_demux, event))
return GST_FLOW_ERROR;
break;
#endif
case GST_EVENT_FILLER:
case GST_EVENT_NEWSEGMENT:
case GST_EVENT_FLUSH_START:
case GST_EVENT_FLUSH_STOP:
return PARSE_CLASS (dvd_demux)->send_event (mpeg_parse, event,
GST_CLOCK_TIME_NONE);
default:
/* Propagate the event normally. */
if (!gst_pad_event_default (mpeg_parse->sinkpad, event))
return GST_FLOW_ERROR;
break;
}
return GST_FLOW_OK;
}
#if 0
static gboolean
gst_dvd_demux_handle_dvd_event (GstDVDDemux * dvd_demux, GstEvent * event)
{
GstMPEGParse *mpeg_parse = GST_MPEG_PARSE (dvd_demux);
GstMPEGDemux *mpeg_demux = GST_MPEG_DEMUX (dvd_demux);
GstStructure *structure = event->event_data.structure.structure;
const GstStructure *structure = gst_event_get_structure (event);
const char *event_type = gst_structure_get_string (structure, "event");
g_return_val_if_fail (event != NULL, FALSE);
@ -358,6 +377,17 @@ gst_dvd_demux_handle_dvd_event (GstDVDDemux * dvd_demux, GstEvent * event)
}
#endif
if (!g_str_has_prefix (gst_structure_get_name (structure),
"application/x-gst")) {
/* This isn't a DVD event. */
if (GST_EVENT_TIMESTAMP (event) != GST_CLOCK_TIME_NONE) {
GST_EVENT_TIMESTAMP (event) += mpeg_demux->adjust;
}
gst_pad_event_default (mpeg_parse->sinkpad, event);
return TRUE;
}
if (strcmp (event_type, "dvd-audio-stream-change") == 0) {
gint stream_nr;
@ -385,134 +415,210 @@ gst_dvd_demux_handle_dvd_event (GstDVDDemux * dvd_demux, GstEvent * event)
}
else if (strcmp (event_type, "dvd-nav-packet") == 0) {
GstStructure *structure = event->event_data.structure.structure;
GstClockTimeDiff old_adjust = mpeg_demux->adjust;
GstClockTime start_ptm =
g_value_get_uint64 (gst_structure_get_value (structure, "start_ptm"));
GstClockTime end_ptm =
g_value_get_uint64 (gst_structure_get_value (structure, "end_ptm"));
GstClockTime cell_start =
g_value_get_uint64 (gst_structure_get_value (structure, "cell_start"));
GstClockTime pg_start =
g_value_get_uint64 (gst_structure_get_value (structure, "pg_start"));
if (start_ptm != dvd_demux->last_end_ptm) {
/* Set the adjust value to gap the discontinuity. */
mpeg_demux->adjust += GST_CLOCK_DIFF (dvd_demux->last_end_ptm, start_ptm);
GST_DEBUG_OBJECT (dvd_demux,
"PTM sequence discontinuity: from %0.3fs to "
"%0.3fs, new adjust %0.3fs",
"PTM sequence discontinuity: from %0.3fs to %0.3fs, cell_start %0.3fs, pg_start %0.3fs",
(double) dvd_demux->last_end_ptm / GST_SECOND,
(double) start_ptm / GST_SECOND,
(double) mpeg_demux->adjust / GST_SECOND);
(double) cell_start / GST_SECOND, (double) pg_start / GST_SECOND);
if (pg_start > start_ptm)
mpeg_demux->adjust = pg_start - start_ptm;
else
mpeg_demux->adjust = 0;
/* Disable mpeg_parse's timestamp adjustment in favour of the info
* from DVD nav packets.
* Timestamp adjustment is fairly evil, we would ideally use discont
* events instead. However, our current clocking has a pretty serious
* race condition: imagine that $pipeline is at time 30sec and $audio
* receives a discont to 0sec. Video processes its last buffer and
* calls _wait() on $timestamp, which is 30s - so we wait (hang) 30sec.
* This is unacceptable, obviously, and timestamp adjustment, no matter
* how evil, solves this.
* Before disabling this again, triple check that all .vob files in our
* website's /media/ directory work fine, especially bullet.vob and
* barrage.vob.
*/
#if 1
/* Try to prevent the mpegparse infrastructure from doing timestamp
adjustment. */
adjustment, and enable synchronising filler events. */
mpeg_parse->use_adjust = FALSE;
mpeg_parse->adjust = 0;
#endif
}
dvd_demux->last_end_ptm = end_ptm;
if (dvd_demux->just_flushed) {
/* Keep video/audio/subtitle pads within 1/2 sec of the SCR */
mpeg_demux->max_gap = 0.5 * GST_SECOND;
mpeg_demux->max_gap_tolerance = 0.05 * GST_SECOND;
}
/* Send a discont after a seek, or if PTM wrapping causes too large a gap */
if (mpeg_demux->just_flushed ||
ABS (GST_CLOCK_DIFF (dvd_demux->last_end_ptm + old_adjust,
start_ptm + mpeg_demux->adjust)) > PTM_DISCONT_ADJUST) {
/* The pipeline was just flushed, schedule a discontinuity with
the next sequence time. We don't do it here to reduce the
time gap between the discontinuity and the subsequent data
blocks. */
#if 1
dvd_demux->discont_time = start_ptm + mpeg_demux->adjust;
#else
dvd_demux->discont_time = start_ptm;
#endif
GST_DEBUG_OBJECT (dvd_demux, "Set discont time to %" G_GINT64_FORMAT,
dvd_demux->discont_time);
if (start_ptm > PTM_DISCONT_ADJUST)
dvd_demux->discont_time = start_ptm - PTM_DISCONT_ADJUST;
else
dvd_demux->discont_time = 0;
dvd_demux->just_flushed = FALSE;
GST_DEBUG_OBJECT (dvd_demux,
"Set mpeg discont time to %" G_GINT64_FORMAT ", adjust %"
G_GINT64_FORMAT, dvd_demux->discont_time, mpeg_demux->adjust);
}
dvd_demux->last_end_ptm = end_ptm;
gst_event_unref (event);
}
} else if (!strcmp (event_type, "dvd-lang-codes")) {
gint num_substreams = 0, num_audstreams = 0, n;
gchar *t;
else {
if (GST_EVENT_TIMESTAMP (event) != GST_CLOCK_TIME_NONE) {
GST_EVENT_TIMESTAMP (event) += mpeg_demux->adjust;
/* reset */
if (dvd_demux->langcodes)
gst_event_unref (dvd_demux->langcodes);
PARSE_CLASS (dvd_demux)->handle_discont (mpeg_parse,
gst_event_new_newsegment (TRUE, 1.0, GST_FORMAT_UNDEFINED, 0, 0, 0));
/* see what kind of streams we have */
dvd_demux->langcodes = event;
/* now create pads for each; first video */
n = 2;
DEMUX_CLASS (dvd_demux)->get_video_stream (mpeg_demux,
0, GST_MPEG_DEMUX_VIDEO_MPEG, &n);
/* audio */
for (n = 0;; n++) {
gint fmt, ifo = 0;
t = g_strdup_printf ("audio-%d-format", num_audstreams);
if (!gst_structure_get_int (structure, t, &fmt)) {
g_free (t);
break;
}
g_free (t);
switch (fmt) {
case 0x0: /* AC-3 */
fmt = GST_DVD_DEMUX_AUDIO_AC3;
break;
case 0x2:
case 0x3: /* MPEG */
fmt = GST_MPEG_DEMUX_AUDIO_MPEG;
break;
case 0x4:
fmt = GST_DVD_DEMUX_AUDIO_LPCM;
break;
case 0x6:
fmt = GST_DVD_DEMUX_AUDIO_DTS;
break;
default:
fmt = GST_MPEG_DEMUX_AUDIO_UNKNOWN;
break;
}
DEMUX_CLASS (dvd_demux)->get_audio_stream (mpeg_demux,
num_audstreams++, fmt, &ifo);
}
gst_pad_event_default (mpeg_parse->sinkpad, event);
/* subtitle */
for (;;) {
t = g_strdup_printf ("subtitle-%d-language", num_substreams);
if (!gst_structure_get_value (structure, t)) {
g_free (t);
break;
}
g_free (t);
CLASS (dvd_demux)->get_subpicture_stream (mpeg_demux,
num_substreams++, GST_DVD_DEMUX_SUBP_DVD, NULL);
}
GST_DEBUG_OBJECT (dvd_demux,
"Created 1 video stream, %d audio streams and %d subpicture streams "
"based on DVD lang codes event; now signalling no-more-pads",
num_audstreams, num_substreams);
/* we know this will be all */
gst_element_no_more_pads (GST_ELEMENT (dvd_demux));
dvd_demux->ignore_next_newmedia_discont = TRUE;
/* Try to prevent the mpegparse infrastructure from doing timestamp
adjustment, and enable synchronising filler events. */
mpeg_parse->use_adjust = FALSE;
mpeg_parse->adjust = 0;
/* Keep video/audio/subtitle pads within 1/2 sec of the SCR */
mpeg_demux->max_gap = 0.5 * GST_SECOND;
mpeg_demux->max_gap_tolerance = 0.05 * GST_SECOND;
} else {
GST_DEBUG_OBJECT (dvd_demux, "dvddemux Forwarding DVD event %s to all pads",
event_type);
PARSE_CLASS (dvd_demux)->send_event (mpeg_parse, event,
GST_CLOCK_TIME_NONE);
}
return TRUE;
}
#endif
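
To make the adjust/discont arithmetic in the dvd-nav-packet branch above concrete, here is a worked example with invented nav-packet times (not from any real disc):

/* Hypothetical values, for illustration only:
 *   last_end_ptm = 4.0 s, start_ptm = 10.0 s, pg_start = 12.5 s
 *
 * start_ptm != last_end_ptm, so a new PTM sequence begins.  Because
 * pg_start > start_ptm:
 *   mpeg_demux->adjust = pg_start - start_ptm = 2.5 s
 * The jump from 4.0 s to 10.0 s is far larger than PTM_DISCONT_ADJUST
 * (0.3 s), so a discontinuity is scheduled; since start_ptm exceeds
 * PTM_DISCONT_ADJUST:
 *   dvd_demux->discont_time = start_ptm - PTM_DISCONT_ADJUST = 9.7 s
 * i.e. the pending newsegment is placed 0.3 s before the new sequence
 * starts, to absorb audio that straddles the VOBU boundary (see the
 * PTM_DISCONT_ADJUST comment at the top of the file). */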
static void
gst_dvd_demux_send_discont (GstMPEGParse * mpeg_parse, GstClockTime time)
static GstFlowReturn
gst_dvd_demux_send_event (GstMPEGParse * mpeg_parse, GstEvent * event,
GstClockTime ts)
{
GstDVDDemux *dvd_demux = GST_DVD_DEMUX (mpeg_parse);
GstEvent *discont;
gint i;
GST_DEBUG_OBJECT (dvd_demux, "sending discontinuity: %0.3fs",
(double) time / GST_SECOND);
GST_MPEG_PARSE_CLASS (parent_class)->send_discont (mpeg_parse, time);
discont = gst_event_new_discontinuous (FALSE, GST_FORMAT_TIME, time, NULL);
if (!discont) {
GST_ELEMENT_ERROR (GST_ELEMENT (dvd_demux),
RESOURCE, FAILED, (NULL), ("Allocation failed"));
return;
}
for (i = 0; i < GST_DVD_DEMUX_NUM_SUBPICTURE_STREAMS; i++) {
if (dvd_demux->subpicture_stream[i] &&
GST_PAD_IS_USABLE (dvd_demux->subpicture_stream[i]->pad)) {
gst_event_ref (discont);
gst_pad_push (dvd_demux->subpicture_stream[i]->pad, GST_DATA (discont));
if (dvd_demux->subpicture_stream[i]) {
if (GST_PAD_IS_USABLE (dvd_demux->subpicture_stream[i]->pad)) {
gst_event_ref (event);
gst_pad_push_event (dvd_demux->subpicture_stream[i]->pad, event);
}
if (GST_CLOCK_TIME_IS_VALID (ts))
dvd_demux->subpicture_stream[i]->cur_ts = ts;
}
}
/* Distribute the event to the "current" pads. */
if (GST_PAD_IS_USABLE (dvd_demux->cur_video)) {
gst_event_ref (discont);
gst_pad_push (dvd_demux->cur_video, GST_DATA (discont));
gst_event_ref (event);
gst_pad_push_event (dvd_demux->cur_video, event);
}
if (GST_PAD_IS_USABLE (dvd_demux->cur_audio)) {
gst_event_ref (discont);
gst_pad_push (dvd_demux->cur_audio, GST_DATA (discont));
gst_event_ref (event);
gst_pad_push_event (dvd_demux->cur_audio, event);
}
if (GST_PAD_IS_USABLE (dvd_demux->cur_subpicture)) {
gst_event_ref (discont);
gst_pad_push (dvd_demux->cur_subpicture, GST_DATA (discont));
gst_event_ref (event);
gst_pad_push_event (dvd_demux->cur_subpicture, event);
}
gst_event_unref (discont);
GST_MPEG_PARSE_CLASS (parent_class)->send_event (mpeg_parse, event, ts);
return GST_FLOW_OK;
}
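
The refcounting in gst_dvd_demux_send_event() above relies on gst_pad_push_event() taking ownership of the event: an extra reference is taken before each additional push, and the original reference is finally handed to the parent class implementation. The same fan-out pattern in isolation (hypothetical pads, sketch only):

/* Fan one event out to several consumers: each push eats one reference. */
gst_event_ref (event);
gst_pad_push_event (pad_a, event);   /* consumes the extra ref */
gst_event_ref (event);
gst_pad_push_event (pad_b, event);   /* consumes the extra ref */
gst_pad_push_event (pad_c, event);   /* consumes the original ref */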
static void
static GstFlowReturn
gst_dvd_demux_handle_discont (GstMPEGParse * mpeg_parse, GstEvent * event)
{
#if 0
GstDVDDemux *dvd_demux = GST_DVD_DEMUX (mpeg_parse);
if (GST_EVENT_DISCONT_NEW_MEDIA (event)) {
gst_dvd_demux_reset (dvd_demux);
/* HACK */
if (dvd_demux->ignore_next_newmedia_discont)
GST_EVENT_DISCONT_NEW_MEDIA (event) = FALSE;
else
gst_dvd_demux_reset (dvd_demux);
}
#endif
/* let parent handle and forward discont */
if (GST_MPEG_PARSE_CLASS (parent_class)->handle_discont != NULL)
GST_MPEG_PARSE_CLASS (parent_class)->handle_discont (mpeg_parse, event);
return GST_FLOW_OK;
}
static GstMPEGStream *
@ -531,15 +637,16 @@ gst_dvd_demux_get_video_stream (GstMPEGDemux * mpeg_demux,
"mpegversion", G_TYPE_INT, mpeg_version,
"systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
if (!gst_pad_set_explicit_caps (dvd_demux->cur_video, caps)) {
if (!gst_pad_set_caps (dvd_demux->cur_video, caps)) {
GST_ELEMENT_ERROR (GST_ELEMENT (mpeg_demux),
CORE, NEGOTIATION, (NULL), ("failed to set caps"));
} else {
dvd_demux->mpeg_version = mpeg_version;
}
gst_caps_free (caps);
gst_caps_unref (caps);
}
dvd_demux->mpeg_version = mpeg_version;
return str;
}
@ -553,12 +660,14 @@ gst_dvd_demux_get_audio_stream (GstMPEGDemux * mpeg_demux,
GstDVDLPCMStream *lpcm_str = NULL;
gboolean add_pad = FALSE;
GstCaps *caps;
const gchar *codec = NULL, *lang_code = NULL;
g_return_val_if_fail (stream_nr < GST_MPEG_DEMUX_NUM_AUDIO_STREAMS, NULL);
g_return_val_if_fail (type > GST_MPEG_DEMUX_AUDIO_UNKNOWN &&
type < GST_DVD_DEMUX_AUDIO_LAST, NULL);
if (type < GST_MPEG_DEMUX_AUDIO_LAST) {
/* FIXME: language codes on MPEG audio streams */
return parent_class->get_audio_stream (mpeg_demux, stream_nr, type, info);
}
@ -567,6 +676,14 @@ gst_dvd_demux_get_audio_stream (GstMPEGDemux * mpeg_demux,
}
str = mpeg_demux->audio_stream[stream_nr];
/* If the stream type is changing, recreate the pad */
if (str && str->type != type) {
gst_element_remove_pad (GST_ELEMENT (mpeg_demux), str->pad);
g_free (str);
str = mpeg_demux->audio_stream[stream_nr] = NULL;
}
if (str == NULL) {
gchar *name;
@ -584,11 +701,8 @@ gst_dvd_demux_get_audio_stream (GstMPEGDemux * mpeg_demux,
str->type = GST_MPEG_DEMUX_AUDIO_UNKNOWN;
g_free (name);
add_pad = TRUE;
mpeg_demux->audio_stream[stream_nr] = str;
} else {
/* This stream may have been created by a derived class, reset the
size. */
/* Stream size may have changed, reset it. */
if (type != GST_DVD_DEMUX_AUDIO_LPCM) {
str = g_renew (GstMPEGStream, str, 1);
} else {
@ -597,6 +711,8 @@ gst_dvd_demux_get_audio_stream (GstMPEGDemux * mpeg_demux,
}
}
mpeg_demux->audio_stream[stream_nr] = str;
if (type != str->type ||
(type == GST_DVD_DEMUX_AUDIO_LPCM &&
sample_info != lpcm_str->sample_info)) {
@ -650,15 +766,17 @@ gst_dvd_demux_get_audio_stream (GstMPEGDemux * mpeg_demux,
lpcm_str->dynamic_range = dynamic_range;
lpcm_str->mute = mute;
lpcm_str->emphasis = emphasis;
codec = "LPCM audio";
break;
case GST_DVD_DEMUX_AUDIO_AC3:
caps = gst_caps_new_simple ("audio/x-ac3", NULL);
codec = "AC-3 audio";
break;
case GST_DVD_DEMUX_AUDIO_DTS:
caps = gst_caps_new_simple ("audio/x-dts", NULL);
codec = "DTS audio";
break;
default:
@ -666,15 +784,51 @@ gst_dvd_demux_get_audio_stream (GstMPEGDemux * mpeg_demux,
break;
}
gst_pad_set_explicit_caps (str->pad, caps);
if (!gst_pad_set_caps (str->pad, caps)) {
GST_ELEMENT_ERROR (GST_ELEMENT (mpeg_demux),
CORE, NEGOTIATION, (NULL), ("failed to set caps on pad %s:%s",
gst_element_get_name (dvd_demux), gst_pad_get_name (str->pad)));
}
if (str->number == dvd_demux->cur_audio_nr) {
/* This is the current audio stream. Use the same caps. */
gst_pad_set_explicit_caps (dvd_demux->cur_audio, gst_caps_copy (caps));
if (!gst_pad_set_caps (dvd_demux->cur_audio, gst_caps_copy (caps))) {
GST_ELEMENT_ERROR (GST_ELEMENT (mpeg_demux),
CORE, NEGOTIATION, (NULL), ("failed to set caps on pad %s:%s",
gst_element_get_name (dvd_demux),
gst_pad_get_name (dvd_demux->cur_audio)));
}
}
if (add_pad)
if (add_pad) {
if (dvd_demux->langcodes) {
gchar *t;
t = g_strdup_printf ("audio-%d-language", stream_nr);
lang_code =
gst_structure_get_string (gst_event_get_structure (dvd_demux->
langcodes), t);
g_free (t);
}
gst_element_add_pad (GST_ELEMENT (mpeg_demux), str->pad);
if (codec || lang_code) {
GstTagList *list = gst_tag_list_new ();
if (codec) {
gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
GST_TAG_AUDIO_CODEC, codec, NULL);
}
if (lang_code) {
gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
GST_TAG_LANGUAGE_CODE, lang_code, NULL);
}
gst_element_found_tags_for_pad (GST_ELEMENT (mpeg_demux),
str->pad, list);
}
}
str->type = type;
}
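
The language-code lookup above reads per-stream fields out of the dvd-lang-codes event stored in dvd_demux->langcodes. A hypothetical structure, to show how the lookup and the tags fit together (field names as used by the code, values invented):

/* Hypothetical stored dvd-lang-codes structure:
 *   event=(string)dvd-lang-codes,
 *   audio-0-format=(int)0,  audio-0-language=(string)en,
 *   audio-1-format=(int)4,  audio-1-language=(string)de,
 *   subtitle-0-language=(string)en
 *
 * For stream_nr = 1 the code builds the key "audio-1-language", reads
 * "de" and tags the newly added pad with GST_TAG_LANGUAGE_CODE = "de";
 * the codec tag comes from the caps switch above (format 0x4 maps to
 * GST_DVD_DEMUX_AUDIO_LPCM, hence GST_TAG_AUDIO_CODEC = "LPCM audio"). */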
@ -691,6 +845,7 @@ gst_dvd_demux_get_subpicture_stream (GstMPEGDemux * mpeg_demux,
gchar *name;
GstCaps *caps;
gboolean add_pad = FALSE;
const gchar *lang_code = NULL;
g_return_val_if_fail (stream_nr < GST_DVD_DEMUX_NUM_SUBPICTURE_STREAMS, NULL);
g_return_val_if_fail (type > GST_DVD_DEMUX_SUBP_UNKNOWN &&
@ -718,16 +873,45 @@ gst_dvd_demux_get_subpicture_stream (GstMPEGDemux * mpeg_demux,
if (str->type != GST_DVD_DEMUX_SUBP_DVD) {
/* We need to set new caps for this pad. */
caps = gst_caps_new_simple ("video/x-dvd-subpicture", NULL);
gst_pad_set_explicit_caps (str->pad, caps);
if (!gst_pad_set_caps (str->pad, caps)) {
GST_ELEMENT_ERROR (GST_ELEMENT (mpeg_demux),
CORE, NEGOTIATION, (NULL), ("failed to set caps on pad %s:%s",
gst_element_get_name (dvd_demux), gst_pad_get_name (str->pad)));
}
if (str->number == dvd_demux->cur_subpicture_nr) {
/* This is the current subpicture stream. Use the same caps. */
gst_pad_set_explicit_caps (dvd_demux->cur_subpicture, caps);
if (!gst_pad_set_caps (dvd_demux->cur_subpicture, caps)) {
GST_ELEMENT_ERROR (GST_ELEMENT (mpeg_demux),
CORE, NEGOTIATION, (NULL), ("failed to set caps on pad %s:%s",
gst_element_get_name (dvd_demux), gst_pad_get_name (str->pad)));
}
}
gst_caps_free (caps);
if (add_pad)
gst_caps_unref (caps);
if (add_pad) {
gst_element_add_pad (GST_ELEMENT (mpeg_demux), str->pad);
if (dvd_demux->langcodes) {
gchar *t;
t = g_strdup_printf ("subtitle-%d-language", stream_nr);
lang_code =
gst_structure_get_string (gst_event_get_structure (dvd_demux->
langcodes), t);
g_free (t);
if (lang_code) {
GstTagList *list = gst_tag_list_new ();
gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
GST_TAG_LANGUAGE_CODE, lang_code, NULL);
gst_element_found_tags_for_pad (GST_ELEMENT (mpeg_demux),
str->pad, list);
}
}
}
str->type = GST_DVD_DEMUX_SUBP_DVD;
}
@ -885,7 +1069,6 @@ gst_dvd_demux_process_private (GstMPEGDemux * mpeg_demux,
}
}
static void
gst_dvd_demux_send_subbuffer (GstMPEGDemux * mpeg_demux,
GstMPEGStream * outstream, GstBuffer * buffer,
@ -909,6 +1092,8 @@ gst_dvd_demux_send_subbuffer (GstMPEGDemux * mpeg_demux,
dvd_demux->discont_time = GST_CLOCK_TIME_NONE;
}
dvd_demux->ignore_next_newmedia_discont = FALSE;
/* You never know what happens to a buffer when you send it. Just
in case, we keep a reference to the buffer during the execution
of this function. */
@ -952,19 +1137,19 @@ gst_dvd_demux_send_subbuffer (GstMPEGDemux * mpeg_demux,
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET (buffer) + offset;
gst_pad_push (outpad, GST_DATA (outbuf));
gst_pad_push (outpad, outbuf);
}
gst_buffer_unref (buffer);
}
#if 0
static void
gst_dvd_demux_set_cur_audio (GstDVDDemux * dvd_demux, gint stream_nr)
{
GstMPEGDemux *mpeg_demux = GST_MPEG_DEMUX (dvd_demux);
GstMPEGStream *str;
const GstCaps *caps;
GstCaps *caps;
g_return_if_fail (stream_nr >= -1 &&
stream_nr < GST_MPEG_DEMUX_NUM_AUDIO_STREAMS);
@ -980,14 +1165,13 @@ gst_dvd_demux_set_cur_audio (GstDVDDemux * dvd_demux, gint stream_nr)
str = mpeg_demux->audio_stream[stream_nr];
if (str != NULL) {
/* (Re)set the caps in the "current" pad. */
caps = GST_RPAD_EXPLICIT_CAPS (str->pad);
caps = GST_PAD_CAPS (str->pad);
if (caps != NULL) {
gst_pad_set_explicit_caps (dvd_demux->cur_audio, caps);
gst_pad_set_caps (dvd_demux->cur_audio, caps);
}
}
}
static void
gst_dvd_demux_set_cur_subpicture (GstDVDDemux * dvd_demux, gint stream_nr)
{
@ -1010,29 +1194,35 @@ gst_dvd_demux_set_cur_subpicture (GstDVDDemux * dvd_demux, gint stream_nr)
GstCaps *caps = NULL;
/* (Re)set the caps in the "current" pad. */
caps = GST_RPAD_EXPLICIT_CAPS (str->pad);
gst_pad_set_explicit_caps (dvd_demux->cur_subpicture, caps);
caps = GST_PAD_CAPS (str->pad);
gst_pad_set_caps (dvd_demux->cur_subpicture, caps);
}
}
#endif
static void
gst_dvd_demux_reset (GstDVDDemux * dvd_demux)
{
int i;
//GstMPEGDemux *mpeg_demux = GST_MPEG_DEMUX (dvd_demux);
GST_INFO ("Resetting the dvd demuxer");
for (i = 0; i < GST_DVD_DEMUX_NUM_SUBPICTURE_STREAMS; i++) {
if (dvd_demux->subpicture_stream[i]) {
if (GST_PAD_IS_USABLE (dvd_demux->subpicture_stream[i]->pad)) {
gst_pad_push_event (dvd_demux->subpicture_stream[i]->pad,
gst_event_new_eos ());
}
gst_element_remove_pad (GST_ELEMENT (dvd_demux),
dvd_demux->subpicture_stream[i]->pad);
g_free (dvd_demux->subpicture_stream[i]);
dvd_demux->subpicture_stream[i] = NULL;
}
dvd_demux->subpicture_time[i] = 0;
}
gst_pad_set_explicit_caps (dvd_demux->cur_video, NULL);
gst_pad_set_explicit_caps (dvd_demux->cur_audio, NULL);
gst_pad_set_explicit_caps (dvd_demux->cur_subpicture, NULL);
gst_pad_set_caps (dvd_demux->cur_video, NULL);
gst_pad_set_caps (dvd_demux->cur_audio, NULL);
gst_pad_set_caps (dvd_demux->cur_subpicture, NULL);
dvd_demux->cur_video_nr = 0;
dvd_demux->cur_audio_nr = 0;
@ -1040,8 +1230,84 @@ gst_dvd_demux_reset (GstDVDDemux * dvd_demux)
dvd_demux->mpeg_version = 0;
dvd_demux->last_end_ptm = INITIAL_END_PTM;
dvd_demux->just_flushed = FALSE;
dvd_demux->discont_time = GST_CLOCK_TIME_NONE;
#if 0
/* Reset max_gap handling */
mpeg_demux->max_gap = GST_CLOCK_TIME_NONE;
mpeg_demux->max_gap_tolerance = GST_CLOCK_TIME_NONE;
#endif
}
static void
gst_dvd_demux_synchronise_pads (GstMPEGDemux * mpeg_demux,
GstClockTime threshold, GstClockTime new_ts)
{
GstDVDDemux *dvd_demux = GST_DVD_DEMUX (mpeg_demux);
int i;
parent_class->synchronise_pads (mpeg_demux, threshold, new_ts);
for (i = 0; i < GST_DVD_DEMUX_NUM_SUBPICTURE_STREAMS; i++) {
if (dvd_demux->subpicture_stream[i]
&& (dvd_demux->subpicture_stream[i]->cur_ts < threshold)) {
DEMUX_CLASS (mpeg_demux)->sync_stream_to_time (mpeg_demux,
dvd_demux->subpicture_stream[i], new_ts);
dvd_demux->subpicture_stream[i]->cur_ts = new_ts;
}
}
}
static void
gst_dvd_demux_sync_stream_to_time (GstMPEGDemux * mpeg_demux,
GstMPEGStream * stream, GstClockTime last_ts)
{
GstDVDDemux *dvd_demux = GST_DVD_DEMUX (mpeg_demux);
#if 0
GstClockTime start_ts;
GstEvent *filler = NULL;
GstFormat fmt = GST_FORMAT_TIME;
#endif
GstPad *outpad = NULL;
gint cur_nr = 0;
parent_class->sync_stream_to_time (mpeg_demux, stream, last_ts);
switch (GST_MPEG_DEMUX_STREAM_KIND (stream->type)) {
case GST_MPEG_DEMUX_STREAM_VIDEO:
outpad = dvd_demux->cur_video;
cur_nr = dvd_demux->cur_video_nr;
break;
case GST_MPEG_DEMUX_STREAM_AUDIO:
outpad = dvd_demux->cur_audio;
cur_nr = dvd_demux->cur_audio_nr;
break;
case GST_DVD_DEMUX_STREAM_SUBPICTURE:
outpad = dvd_demux->cur_subpicture;
cur_nr = dvd_demux->cur_subpicture_nr;
break;
}
#if 0
/* FIXME: fillers in 0.9 aren't specified properly yet */
if ((outpad != NULL) && (cur_nr == stream->number)) {
if (GST_PAD_PEER (stream->pad)
&& gst_pad_query_position (GST_PAD_PEER (stream->pad), &fmt,
(gint64 *) & start_ts)) {
if (start_ts < last_ts)
filler =
gst_event_new_filler_stamped (start_ts, GST_CLOCK_DIFF (last_ts,
start_ts));
} else
filler = gst_event_new_filler_stamped (last_ts, GST_CLOCK_TIME_NONE);
if (filler) {
if (gst_pad_push_event (stream->pad, filler) != GST_FLOW_OK)
gst_event_unref (filler);
}
}
#endif
}
static GstStateChangeReturn
@ -1052,6 +1318,11 @@ gst_dvd_demux_change_state (GstElement * element, GstStateChange transition)
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
gst_dvd_demux_reset (dvd_demux);
if (dvd_demux->langcodes) {
gst_event_unref (dvd_demux->langcodes);
dvd_demux->langcodes = NULL;
}
dvd_demux->ignore_next_newmedia_discont = FALSE;
break;
default:
break;

gst/mpegstream/gstdvddemux.h

@ -91,18 +91,17 @@ struct _GstDVDLPCMStream {
struct _GstDVDDemux {
GstMPEGDemux parent;
GstPad *cur_video; /* Current video stream pad. */
GstPad *cur_audio; /* Current audio stream pad. */
GstPad *cur_subpicture; /* Current subpicture stream pad. */
GstPad *cur_video; /* Current video stream pad. */
GstPad *cur_audio; /* Current audio stream pad. */
GstPad *cur_subpicture; /* Current subpicture stream pad. */
gint cur_video_nr; /* Current video stream number. */
gint cur_audio_nr; /* Current audio stream number. */
gint cur_subpicture_nr; /* Current subpicture stream number. */
GstClockTime last_end_ptm; /* End presentation time of the las nav packet
GstClockTime last_end_ptm; /* End presentation time of the last nav packet
event received. */
gboolean just_flushed; /* The element just received a flush event. */
GstClockTime discont_time; /* If different from GST_CLOCK_TIME_NONE, a
discontinuous event should be sent with the
given time, before sending the next data
@ -112,8 +111,10 @@ struct _GstDVDDemux {
GstMPEGStream *subpicture_stream[GST_DVD_DEMUX_NUM_SUBPICTURE_STREAMS];
/* Subpicture output streams. */
GstClockTime subpicture_time[GST_DVD_DEMUX_NUM_SUBPICTURE_STREAMS];
/* Last timestamp for buffer on each stream */
GstEvent *langcodes;
gboolean ignore_next_newmedia_discont;
};

gst/mpegstream/gstmpegdemux.c

@ -27,9 +27,7 @@
GST_DEBUG_CATEGORY_STATIC (gstmpegdemux_debug);
#define GST_CAT_DEFAULT (gstmpegdemux_debug)
GST_DEBUG_CATEGORY_EXTERN (GST_CAT_SEEK);
#define PARSE_CLASS(o) GST_MPEG_PARSE_CLASS (G_OBJECT_GET_CLASS (o))
#define CLASS(o) GST_MPEG_DEMUX_CLASS (G_OBJECT_GET_CLASS (o))
/* elementfactory information */
@ -89,13 +87,16 @@ static void gst_mpeg_demux_base_init (GstMPEGDemuxClass * klass);
static void gst_mpeg_demux_class_init (GstMPEGDemuxClass * klass);
static void gst_mpeg_demux_init (GstMPEGDemux * mpeg_demux);
static void gst_mpeg_demux_send_data (GstMPEGParse * mpeg_parse,
GstData * data, GstClockTime time);
static void gst_mpeg_demux_send_discont (GstMPEGParse * mpeg_parse,
static GstFlowReturn gst_mpeg_demux_send_buffer (GstMPEGParse * mpeg_parse,
GstBuffer * buffer, GstClockTime time);
static GstFlowReturn gst_mpeg_demux_process_event (GstMPEGParse * mpeg_parse,
GstEvent * event, GstClockTime time);
static GstFlowReturn gst_mpeg_demux_send_discont (GstMPEGParse * mpeg_parse,
GstClockTime time);
static void gst_mpeg_demux_handle_discont (GstMPEGParse * mpeg_parse,
static GstFlowReturn gst_mpeg_demux_handle_discont (GstMPEGParse * mpeg_parse,
GstEvent * event);
static GstFlowReturn gst_mpeg_demux_send_event (GstMPEGParse * mpeg_parse,
GstEvent * event, GstClockTime time);
static GstPad *gst_mpeg_demux_new_output_pad (GstMPEGDemux * mpeg_demux,
const gchar * name, GstPadTemplate * temp);
@ -125,6 +126,11 @@ static void gst_mpeg_demux_send_subbuffer (GstMPEGDemux * mpeg_demux,
static void gst_mpeg_demux_process_private (GstMPEGDemux * mpeg_demux,
GstBuffer * buffer,
guint stream_nr, GstClockTime timestamp, guint headerlen, guint datalen);
static void gst_mpeg_demux_synchronise_pads (GstMPEGDemux * mpeg_demux,
GstClockTime threshold, GstClockTime new_ts);
#if 0
static void gst_mpeg_demux_sync_stream_to_time (GstMPEGDemux * mpeg_demux,
GstMPEGStream * stream, GstClockTime last_ts);
const GstFormat *gst_mpeg_demux_get_src_formats (GstPad * pad);
@ -133,8 +139,13 @@ static gboolean normal_seek (GstPad * pad, GstEvent * event, gint64 * offset);
static gboolean gst_mpeg_demux_handle_src_event (GstPad * pad,
GstEvent * event);
#endif
static void gst_mpeg_demux_reset (GstMPEGDemux * mpeg_demux);
#if 0
static gboolean gst_mpeg_demux_handle_src_query (GstPad * pad,
GstQueryType type, GstFormat * format, gint64 * value);
#endif
static GstStateChangeReturn gst_mpeg_demux_change_state (GstElement * element,
GstStateChange transition);
@ -198,7 +209,7 @@ gst_mpeg_demux_class_init (GstMPEGDemuxClass * klass)
GstElementClass *gstelement_class;
GstMPEGParseClass *mpeg_parse_class;
parent_class = g_type_class_ref (GST_TYPE_MPEG_PARSE);
parent_class = g_type_class_peek_parent (klass);
gstelement_class = (GstElementClass *) klass;
mpeg_parse_class = (GstMPEGParseClass *) klass;
@ -211,9 +222,11 @@ gst_mpeg_demux_class_init (GstMPEGDemuxClass * klass)
mpeg_parse_class->parse_syshead = gst_mpeg_demux_parse_syshead;
mpeg_parse_class->parse_packet = gst_mpeg_demux_parse_packet;
mpeg_parse_class->parse_pes = gst_mpeg_demux_parse_pes;
mpeg_parse_class->send_data = gst_mpeg_demux_send_data;
mpeg_parse_class->send_buffer = gst_mpeg_demux_send_buffer;
mpeg_parse_class->process_event = gst_mpeg_demux_process_event;
mpeg_parse_class->send_discont = gst_mpeg_demux_send_discont;
mpeg_parse_class->handle_discont = gst_mpeg_demux_handle_discont;
mpeg_parse_class->send_event = gst_mpeg_demux_send_event;
klass->new_output_pad = gst_mpeg_demux_new_output_pad;
klass->init_stream = gst_mpeg_demux_init_stream;
@ -222,6 +235,10 @@ gst_mpeg_demux_class_init (GstMPEGDemuxClass * klass)
klass->get_private_stream = gst_mpeg_demux_get_private_stream;
klass->send_subbuffer = gst_mpeg_demux_send_subbuffer;
klass->process_private = gst_mpeg_demux_process_private;
klass->synchronise_pads = gst_mpeg_demux_synchronise_pads;
#if 0
klass->sync_stream_to_time = gst_mpeg_demux_sync_stream_to_time;
#endif
/* we have our own sink pad template, but don't use it in subclasses */
gst_element_class_add_pad_template (gstelement_class,
@ -245,83 +262,130 @@ gst_mpeg_demux_init (GstMPEGDemux * mpeg_demux)
}
mpeg_demux->adjust = 0;
GST_OBJECT_FLAG_SET (mpeg_demux, GST_ELEMENT_EVENT_AWARE);
mpeg_demux->max_gap = GST_CLOCK_TIME_NONE;
mpeg_demux->max_gap_tolerance = GST_CLOCK_TIME_NONE;
mpeg_demux->just_flushed = FALSE;
}
static void
gst_mpeg_demux_send_data (GstMPEGParse * mpeg_parse, GstData * data,
static GstFlowReturn
gst_mpeg_demux_send_buffer (GstMPEGParse * mpeg_parse, GstBuffer * buffer,
GstClockTime time)
{
/* GstMPEGDemux *mpeg_demux = GST_MPEG_DEMUX (mpeg_parse); */
if (GST_IS_BUFFER (data)) {
gst_buffer_unref (GST_BUFFER (data));
} else {
GstEvent *event = GST_EVENT (data);
switch (GST_EVENT_TYPE (event)) {
default:
gst_pad_event_default (mpeg_parse->sinkpad, event);
break;
}
}
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
static void
static GstFlowReturn
gst_mpeg_demux_process_event (GstMPEGParse * mpeg_parse, GstEvent * event,
GstClockTime time)
{
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FILLER:
case GST_EVENT_NEWSEGMENT:
case GST_EVENT_FLUSH_START:
case GST_EVENT_FLUSH_STOP:
return PARSE_CLASS (mpeg_parse)->send_event (mpeg_parse, event,
GST_CLOCK_TIME_NONE);
default:
/* Propagate the event normally. */
if (!gst_pad_event_default (mpeg_parse->sinkpad, event))
return GST_FLOW_ERROR;
break;
}
return GST_FLOW_OK;
}
static GstFlowReturn
gst_mpeg_demux_send_discont (GstMPEGParse * mpeg_parse, GstClockTime time)
{
GstMPEGDemux *mpeg_demux = GST_MPEG_DEMUX (mpeg_parse);
GstEvent *discont;
if (GST_CLOCK_TIME_IS_VALID (time))
time += mpeg_demux->adjust;
if (!mpeg_demux->just_flushed) {
GST_DEBUG_OBJECT (mpeg_parse, "Discont without flush, ts = %llu", time);
/* Add padding to the end to make sure all streams end at the same timestamp */
CLASS (mpeg_demux)->synchronise_pads (mpeg_demux,
mpeg_parse->current_ts + mpeg_demux->adjust + (GST_SECOND / 20),
mpeg_parse->current_ts + mpeg_demux->adjust + (GST_SECOND / 20));
} else {
GST_DEBUG_OBJECT (mpeg_parse, "Discont after flush, ts = %llu", time);
}
mpeg_demux->just_flushed = FALSE;
return parent_class->send_discont (mpeg_parse, time);
}
static GstFlowReturn
gst_mpeg_demux_send_event (GstMPEGParse * mpeg_parse, GstEvent * event,
GstClockTime time)
{
/*
* Distribute the event to all active pads
*/
GstMPEGDemux *mpeg_demux = GST_MPEG_DEMUX (mpeg_parse);
gint i;
GST_DEBUG_OBJECT (mpeg_demux, "discont %" G_GUINT64_FORMAT, time);
discont = gst_event_new_discontinuous (FALSE, GST_FORMAT_TIME, time, NULL);
if (!discont) {
GST_ELEMENT_ERROR (GST_ELEMENT (mpeg_demux),
RESOURCE, FAILED, (NULL), ("Allocation failed"));
return;
if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_START) {
GST_DEBUG_OBJECT (mpeg_demux, "Sending flush event");
mpeg_demux->just_flushed = TRUE;
}
for (i = 0; i < GST_MPEG_DEMUX_NUM_VIDEO_STREAMS; i++) {
if (mpeg_demux->video_stream[i] &&
GST_PAD_IS_USABLE (mpeg_demux->video_stream[i]->pad)) {
gst_event_ref (discont);
gst_pad_push (mpeg_demux->video_stream[i]->pad, GST_DATA (discont));
if (mpeg_demux->video_stream[i]) {
if (GST_PAD_IS_USABLE (mpeg_demux->video_stream[i]->pad)) {
gst_event_ref (event);
gst_pad_push_event (mpeg_demux->video_stream[i]->pad, event);
}
if (GST_CLOCK_TIME_IS_VALID (time))
mpeg_demux->video_stream[i]->cur_ts = time;
}
}
for (i = 0; i < GST_MPEG_DEMUX_NUM_AUDIO_STREAMS; i++) {
if (mpeg_demux->audio_stream[i] &&
GST_PAD_IS_USABLE (mpeg_demux->audio_stream[i]->pad)) {
gst_event_ref (discont);
gst_pad_push (mpeg_demux->audio_stream[i]->pad, GST_DATA (discont));
if (mpeg_demux->audio_stream[i]) {
if (GST_PAD_IS_USABLE (mpeg_demux->audio_stream[i]->pad)) {
gst_event_ref (event);
gst_pad_push_event (mpeg_demux->audio_stream[i]->pad, event);
}
if (GST_CLOCK_TIME_IS_VALID (time))
mpeg_demux->audio_stream[i]->cur_ts = time;
}
}
for (i = 0; i < GST_MPEG_DEMUX_NUM_PRIVATE_STREAMS; i++) {
if (mpeg_demux->private_stream[i] &&
GST_PAD_IS_USABLE (mpeg_demux->private_stream[i]->pad)) {
gst_event_ref (discont);
gst_pad_push (mpeg_demux->private_stream[i]->pad, GST_DATA (discont));
if (mpeg_demux->private_stream[i]) {
if (GST_PAD_IS_USABLE (mpeg_demux->private_stream[i]->pad)) {
gst_event_ref (event);
gst_pad_push_event (mpeg_demux->private_stream[i]->pad, event);
}
if (GST_CLOCK_TIME_IS_VALID (time))
mpeg_demux->private_stream[i]->cur_ts = time;
}
}
gst_event_unref (discont);
return parent_class->send_event (mpeg_parse, event, time);
}
static void
static GstFlowReturn
gst_mpeg_demux_handle_discont (GstMPEGParse * mpeg_parse, GstEvent * event)
{
GstFlowReturn result = GST_FLOW_OK;
#if 0
GstMPEGDemux *mpeg_demux = GST_MPEG_DEMUX (mpeg_parse);
if (GST_EVENT_DISCONT_NEW_MEDIA (event)) {
gst_mpeg_demux_reset (mpeg_demux);
}
#endif
if (parent_class->handle_discont != NULL)
parent_class->handle_discont (mpeg_parse, event);
result = parent_class->handle_discont (mpeg_parse, event);
return result;
}
static gint
@ -330,11 +394,11 @@ _demux_get_writer_id (GstIndex * index, GstPad * pad)
gint id;
if (!gst_index_get_writer_id (index, GST_OBJECT (pad), &id)) {
GST_CAT_WARNING_OBJECT (GST_CAT_SEEK, index,
GST_WARNING_OBJECT (index,
"can't get index id for %s:%s", GST_DEBUG_PAD_NAME (pad));
return -1;
} else {
GST_CAT_LOG_OBJECT (GST_CAT_SEEK, index,
GST_LOG_OBJECT (index,
"got index id %d for %s:%s", id, GST_DEBUG_PAD_NAME (pad));
return id;
}
@ -348,13 +412,12 @@ gst_mpeg_demux_new_output_pad (GstMPEGDemux * mpeg_demux,
pad = gst_pad_new_from_template (temp, name);
gst_pad_set_formats_function (pad, gst_mpeg_demux_get_src_formats);
gst_pad_set_convert_function (pad, gst_mpeg_parse_convert_src);
gst_pad_set_event_mask_function (pad, gst_mpeg_parse_get_src_event_masks);
#if 0
gst_pad_set_event_function (pad, gst_mpeg_demux_handle_src_event);
gst_pad_set_query_type_function (pad, gst_mpeg_parse_get_src_query_types);
gst_pad_set_query_function (pad, gst_mpeg_parse_handle_src_query);
gst_pad_use_explicit_caps (pad);
gst_pad_set_query_function (pad, gst_mpeg_demux_handle_src_query);
#endif
gst_pad_use_fixed_caps (pad);
return pad;
}
@ -373,6 +436,9 @@ gst_mpeg_demux_init_stream (GstMPEGDemux * mpeg_demux,
if (mpeg_demux->index) {
str->index_id = _demux_get_writer_id (mpeg_demux->index, str->pad);
}
str->cur_ts = 0;
str->scr_offs = 0;
}
static GstMPEGStream *
@ -412,22 +478,33 @@ gst_mpeg_demux_get_video_stream (GstMPEGDemux * mpeg_demux,
}
if (set_caps || video_str->mpeg_version != mpeg_version) {
gchar *codec;
GstTagList *list;
/* We need to set new caps for this pad. */
caps = gst_caps_new_simple ("video/mpeg",
"mpegversion", G_TYPE_INT, mpeg_version,
"systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
if (!gst_pad_set_explicit_caps (str->pad, caps)) {
if (!gst_pad_set_caps (str->pad, caps)) {
GST_ELEMENT_ERROR (GST_ELEMENT (mpeg_demux),
CORE, NEGOTIATION, (NULL), ("failed to set caps"));
gst_caps_free (caps);
gst_caps_unref (caps);
gst_element_add_pad (GST_ELEMENT (mpeg_demux), str->pad);
return str;
}
gst_caps_free (caps);
gst_caps_unref (caps);
gst_element_add_pad (GST_ELEMENT (mpeg_demux), str->pad);
/* Store the current values. */
video_str->mpeg_version = mpeg_version;
/* set stream metadata */
codec = g_strdup_printf ("MPEG-%d video", mpeg_version);
list = gst_tag_list_new ();
gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
GST_TAG_VIDEO_CODEC, codec, NULL);
g_free (codec);
gst_element_found_tags_for_pad (GST_ELEMENT (mpeg_demux), str->pad, list);
}
return str;
@ -447,6 +524,11 @@ gst_mpeg_demux_get_audio_stream (GstMPEGDemux * mpeg_demux,
type < GST_MPEG_DEMUX_AUDIO_LAST, NULL);
str = mpeg_demux->audio_stream[stream_nr];
if (str && str->type != type) {
gst_element_remove_pad (GST_ELEMENT (mpeg_demux), str->pad);
g_free (str);
str = mpeg_demux->audio_stream[stream_nr] = NULL;
}
if (str == NULL) {
str = g_new0 (GstMPEGStream, 1);
@ -467,18 +549,26 @@ gst_mpeg_demux_get_audio_stream (GstMPEGDemux * mpeg_demux,
}
if (set_caps) {
GstTagList *list;
/* We need to set new caps for this pad. */
caps = gst_caps_new_simple ("audio/mpeg",
"mpegversion", G_TYPE_INT, 1, NULL);
if (!gst_pad_set_explicit_caps (str->pad, caps)) {
if (!gst_pad_set_caps (str->pad, caps)) {
GST_ELEMENT_ERROR (GST_ELEMENT (mpeg_demux),
CORE, NEGOTIATION, (NULL), ("failed to set caps"));
gst_caps_free (caps);
gst_caps_unref (caps);
gst_element_add_pad (GST_ELEMENT (mpeg_demux), str->pad);
return str;
}
gst_caps_free (caps);
gst_caps_unref (caps);
gst_element_add_pad (GST_ELEMENT (mpeg_demux), str->pad);
/* stream metadata */
list = gst_tag_list_new ();
gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
GST_TAG_AUDIO_CODEC, "MPEG-1 audio", NULL);
gst_element_found_tags_for_pad (GST_ELEMENT (mpeg_demux), str->pad, list);
}
return str;
@ -744,6 +834,11 @@ done:
if (pts != -1) {
pts += mpeg_parse->adjust;
timestamp = MPEGTIME_TO_GSTTIME (pts) + mpeg_demux->adjust;
/* this apparently happens for some input where headers are
* rewritten to make time start at zero... */
if ((gint64) timestamp < 0)
timestamp = 0;
} else {
timestamp = GST_CLOCK_TIME_NONE;
}
@ -837,10 +932,8 @@ gst_mpeg_demux_parse_pes (GstMPEGParse * mpeg_parse, GstBuffer * buffer)
MPEGTIME_TO_GSTTIME (pts + mpeg_parse->adjust) + mpeg_demux->adjust;
GST_DEBUG_OBJECT (mpeg_demux,
"0x%02x (% " G_GINT64_FORMAT ") PTS = %" G_GUINT64_FORMAT
" (adjusted = %" G_GINT64_FORMAT ")", id, pts,
MPEGTIME_TO_GSTTIME (pts),
MPEGTIME_TO_GSTTIME (pts + mpeg_parse->adjust) + mpeg_demux->adjust);
"0x%02x (% " G_GINT64_FORMAT ") PTS = %" G_GUINT64_FORMAT, id, pts,
MPEGTIME_TO_GSTTIME (pts));
} else {
timestamp = GST_CLOCK_TIME_NONE;
}
@ -921,27 +1014,54 @@ gst_mpeg_demux_send_subbuffer (GstMPEGDemux * mpeg_demux,
GstMPEGStream * outstream, GstBuffer * buffer,
GstClockTime timestamp, guint offset, guint size)
{
GstMPEGParse *mpeg_parse = GST_MPEG_PARSE (mpeg_demux);
GstBuffer *outbuf;
if (timestamp != GST_CLOCK_TIME_NONE && mpeg_demux->index != NULL) {
/* Register a new index position. */
gst_index_add_association (mpeg_demux->index,
outstream->index_id, 0,
GST_FORMAT_BYTES,
GST_BUFFER_OFFSET (buffer), GST_FORMAT_TIME, timestamp, 0);
mpeg_demux->just_flushed = FALSE;
if (timestamp != GST_CLOCK_TIME_NONE) {
outstream->cur_ts = timestamp;
outstream->scr_offs =
GST_CLOCK_DIFF (timestamp, mpeg_parse->current_ts + mpeg_demux->adjust);
if (outstream->scr_offs < 0)
outstream->scr_offs = 0;
if (mpeg_demux->index != NULL) {
/* Register a new index position. */
gst_index_add_association (mpeg_demux->index,
outstream->index_id, 0,
GST_FORMAT_BYTES,
GST_BUFFER_OFFSET (buffer), GST_FORMAT_TIME, timestamp, 0);
}
} else {
outstream->cur_ts =
mpeg_parse->current_ts + mpeg_demux->adjust + outstream->scr_offs;
}
if (!GST_PAD_IS_USABLE (outstream->pad) || (size == 0)) {
return;
}
GST_DEBUG_OBJECT (mpeg_demux, "Creating subbuffer size %d", size);
GST_DEBUG_OBJECT (mpeg_demux, "Creating subbuffer size %d, time=%"
GST_TIME_FORMAT, size, GST_TIME_ARGS (timestamp));
outbuf = gst_buffer_create_sub (buffer, offset, size);
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
GST_BUFFER_OFFSET (outbuf) = GST_BUFFER_OFFSET (buffer) + offset;
gst_pad_push (outstream->pad, GST_DATA (outbuf));
gst_pad_push (outstream->pad, outbuf);
if (GST_CLOCK_TIME_IS_VALID (mpeg_demux->max_gap) &&
GST_CLOCK_TIME_IS_VALID (mpeg_parse->current_ts) &&
(mpeg_parse->current_ts + mpeg_demux->adjust > mpeg_demux->max_gap)) {
GstClockTime threshold =
GST_CLOCK_DIFF (mpeg_parse->current_ts + mpeg_demux->adjust,
mpeg_demux->max_gap);
CLASS (mpeg_demux)->synchronise_pads (mpeg_demux, threshold,
mpeg_parse->current_ts + mpeg_demux->adjust -
mpeg_demux->max_gap_tolerance);
}
}
static void
@ -957,6 +1077,79 @@ gst_mpeg_demux_process_private (GstMPEGDemux * mpeg_demux,
timestamp, headerlen + 4, datalen);
}
static void
gst_mpeg_demux_synchronise_pads (GstMPEGDemux * mpeg_demux,
GstClockTime threshold, GstClockTime new_ts)
{
/*
* Send a filler event to any pad with cur_ts < threshold to catch it up
*/
gint i;
for (i = 0; i < GST_MPEG_DEMUX_NUM_VIDEO_STREAMS; i++)
if (mpeg_demux->video_stream[i]
&& mpeg_demux->video_stream[i]->cur_ts < threshold) {
CLASS (mpeg_demux)->sync_stream_to_time (mpeg_demux,
mpeg_demux->video_stream[i], new_ts);
mpeg_demux->video_stream[i]->cur_ts = new_ts;
}
for (i = 0; i < GST_MPEG_DEMUX_NUM_AUDIO_STREAMS; i++)
if (mpeg_demux->audio_stream[i]
&& mpeg_demux->audio_stream[i]->cur_ts < threshold) {
CLASS (mpeg_demux)->sync_stream_to_time (mpeg_demux,
mpeg_demux->audio_stream[i], new_ts);
mpeg_demux->audio_stream[i]->cur_ts = new_ts;
}
for (i = 0; i < GST_MPEG_DEMUX_NUM_PRIVATE_STREAMS; i++)
if (mpeg_demux->private_stream[i]
&& mpeg_demux->private_stream[i]->cur_ts < threshold) {
CLASS (mpeg_demux)->sync_stream_to_time (mpeg_demux,
mpeg_demux->private_stream[i], new_ts);
mpeg_demux->private_stream[i]->cur_ts = new_ts;
}
}
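
Given the max_gap comments in gstmpegdemux.h and the 0.5 s / 0.05 s values the DVD demuxer sets, the intended behaviour of synchronise_pads() is roughly this (hypothetical timeline, not from the commit):

/* Suppose max_gap = 0.5 s and max_gap_tolerance = 0.05 s, and the SCR
 * (current_ts + adjust) has reached 10.0 s while a subpicture pad last
 * saw data at cur_ts = 9.3 s.  That pad is more than max_gap behind the
 * SCR, so it falls below the threshold and sync_stream_to_time() is
 * expected to catch it up (with a filler event, once fillers are
 * specified for 0.9) to new_ts = 10.0 s - max_gap_tolerance = 9.95 s,
 * keeping all pads within half a second of the system clock reference. */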
#if 0
/* Send a filler event on the indicated pad to catch it up to
* last_ts. Query the pad for current time, and use that time
* to set the duration of the filler event, otherwise we use
* the last timestamp of the stream and rely on the sinks
* to absorb any overlap with the decoded data.
*/
static void
gst_mpeg_demux_sync_stream_to_time (GstMPEGDemux * mpeg_demux,
GstMPEGStream * stream, GstClockTime last_ts)
{
GstClockTime start_ts;
GstEvent *filler = NULL;
GstFormat fmt = GST_FORMAT_TIME;
if (!GST_PAD_PEER (stream->pad)
|| !gst_pad_query (GST_PAD_PEER (stream->pad), GST_QUERY_POSITION, &fmt,
(gint64 *) & start_ts)) {
start_ts = stream->cur_ts;
}
if (start_ts < last_ts) {
filler = gst_event_new_filler_stamped (start_ts, GST_CLOCK_DIFF (last_ts,
start_ts));
}
if (filler) {
if (GST_PAD_IS_USABLE (stream->pad)) {
GST_LOG ("Advancing %s from %llu by %lld to %llu (diff %lld)",
gst_pad_get_name (stream->pad), stream->cur_ts,
gst_event_filler_get_duration (filler), last_ts,
GST_CLOCK_DIFF (last_ts, stream->cur_ts));
gst_pad_push (stream->pad, filler);
} else
gst_event_unref (filler);
}
}
const GstFormat *
gst_mpeg_demux_get_src_formats (GstPad * pad)
{
@ -1036,7 +1229,7 @@ gst_mpeg_demux_handle_src_event (GstPad * pad, GstEvent * event)
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_SEEK:
{
guint64 desired_offset;
gint64 desired_offset;
if (mpeg_demux->index)
res = index_seek (pad, event, &desired_offset);
@ -1060,6 +1253,7 @@ gst_mpeg_demux_handle_src_event (GstPad * pad, GstEvent * event)
if (out && GST_PAD_IS_USABLE (out))
return gst_pad_send_event (out, event);
}
/* fall-through */
default:
gst_event_unref (event);
break;
@ -1067,6 +1261,25 @@ gst_mpeg_demux_handle_src_event (GstPad * pad, GstEvent * event)
return res;
}
static gboolean
gst_mpeg_demux_handle_src_query (GstPad * pad, GstQueryType type,
GstFormat * format, gint64 * value)
{
gboolean res;
res = gst_mpeg_parse_handle_src_query (pad, type, format, value);
if (res && (type == GST_QUERY_POSITION) && (format)
&& (*format == GST_FORMAT_TIME)) {
GstMPEGDemux *mpeg_demux = GST_MPEG_DEMUX (gst_pad_get_parent (pad));
*value += mpeg_demux->adjust;
}
return res;
}
#endif
static void
gst_mpeg_demux_reset (GstMPEGDemux * mpeg_demux)
{
@ -1081,6 +1294,10 @@ gst_mpeg_demux_reset (GstMPEGDemux * mpeg_demux)
/* check get_audio/video_stream because it can be derivated */
for (i = 0; i < GST_MPEG_DEMUX_NUM_VIDEO_STREAMS; i++)
if (mpeg_demux->video_stream[i]) {
if (GST_PAD_IS_USABLE (mpeg_demux->video_stream[i]->pad)) {
gst_pad_push_event (mpeg_demux->video_stream[i]->pad,
gst_event_new_eos ());
}
gst_element_remove_pad (GST_ELEMENT (mpeg_demux),
mpeg_demux->video_stream[i]->pad);
g_free (mpeg_demux->video_stream[i]);
@ -1088,6 +1305,10 @@ gst_mpeg_demux_reset (GstMPEGDemux * mpeg_demux)
}
for (i = 0; i < GST_MPEG_DEMUX_NUM_AUDIO_STREAMS; i++)
if (mpeg_demux->audio_stream[i]) {
if (GST_PAD_IS_USABLE (mpeg_demux->audio_stream[i]->pad)) {
gst_pad_push_event (mpeg_demux->audio_stream[i]->pad,
gst_event_new_eos ());
}
gst_element_remove_pad (GST_ELEMENT (mpeg_demux),
mpeg_demux->audio_stream[i]->pad);
g_free (mpeg_demux->audio_stream[i]);
@ -1095,6 +1316,10 @@ gst_mpeg_demux_reset (GstMPEGDemux * mpeg_demux)
}
for (i = 0; i < GST_MPEG_DEMUX_NUM_PRIVATE_STREAMS; i++)
if (mpeg_demux->private_stream[i]) {
if (GST_PAD_IS_USABLE (mpeg_demux->private_stream[i]->pad)) {
gst_pad_push_event (mpeg_demux->private_stream[i]->pad,
gst_event_new_eos ());
}
gst_element_remove_pad (GST_ELEMENT (mpeg_demux),
mpeg_demux->private_stream[i]->pad);
g_free (mpeg_demux->private_stream[i]);
@ -1116,8 +1341,15 @@ gst_mpeg_demux_reset (GstMPEGDemux * mpeg_demux)
mpeg_demux->index = NULL;
mpeg_demux->adjust = 0;
/*
* Don't adjust things that are only for subclass use
* - if they changed it, they can reset it.
*
* mpeg_demux->adjust = 0;
* mpeg_demux->max_gap = GST_CLOCK_TIME_NONE;
* mpeg_demux->max_gap_tolerance = GST_CLOCK_TIME_NONE;
* mpeg_demux->just_flushed = FALSE;
*/
}
static GstStateChangeReturn

gst/mpegstream/gstmpegdemux.h

@ -93,6 +93,8 @@ struct _GstMPEGStream {
GstPad *pad;
gint index_id;
gint size_bound;
GstClockTime cur_ts;        /* Most recent timestamp on this stream. */
GstClockTimeDiff scr_offs;  /* Offset of the stream's timestamps from the SCR. */
};
/* Extended structure to hold additional information for video
@ -127,9 +129,21 @@ struct _GstMPEGDemux {
GstMPEGStream *audio_stream[GST_MPEG_DEMUX_NUM_AUDIO_STREAMS];
GstMPEGStream *private_stream[GST_MPEG_DEMUX_NUM_PRIVATE_STREAMS];
GstClockTimeDiff adjust; /* Added to all PTS timestamps. This element
GstClockTimeDiff adjust; /* Added to all PTS timestamps. This element
always keeps this value at 0, but it is
there for the benefit of subclasses. */
GstClockTime max_gap; /* Maximum timestamp difference to allow
* between pads before using a filler to catch up
*/
GstClockTime max_gap_tolerance; /* When catching a pad up, how far behind
to make it
*/
GstClockTime max_ts; /* Highest timestamp of all pads */
GstPad *max_pad; /* Pad with highest timestamp */
gboolean just_flushed;
};
struct _GstMPEGDemuxClass {
@ -178,6 +192,14 @@ struct _GstMPEGDemuxClass {
guint stream_nr,
GstClockTime timestamp,
guint headerlen, guint datalen);
void (*synchronise_pads) (GstMPEGDemux *mpeg_demux,
GstClockTime threshold,
GstClockTime new_ts);
void (*sync_stream_to_time) (GstMPEGDemux *mpeg_demux,
GstMPEGStream *stream,
GstClockTime last_ts);
};
GType gst_mpeg_demux_get_type (void);

gst/mpegstream/gstmpegpacketize.c

@ -21,24 +21,31 @@
#include "config.h"
#endif
#include <string.h>
/*#define GST_DEBUG_ENABLED */
#include "gstmpegpacketize.h"
GstMPEGPacketize *
gst_mpeg_packetize_new (GstPad * pad, GstMPEGPacketizeType type)
gst_mpeg_packetize_new (GstPad * srcpad, GstMPEGPacketizeType type)
{
GstMPEGPacketize *new;
g_return_val_if_fail (pad != NULL, NULL);
g_return_val_if_fail (GST_IS_PAD (pad), NULL);
g_return_val_if_fail (srcpad != NULL, NULL);
g_return_val_if_fail (GST_IS_PAD (srcpad), NULL);
g_return_val_if_fail (GST_PAD_IS_SRC (srcpad), NULL);
gst_object_ref (GST_OBJECT (srcpad));
new = g_malloc (sizeof (GstMPEGPacketize));
gst_object_ref (GST_OBJECT (pad));
new->resync = TRUE;
new->id = 0;
new->pad = pad;
new->bs = gst_bytestream_new (pad);
new->srcpad = srcpad;
new->cache_head = 0;
new->cache_tail = 0;
new->cache_size = 0x4000;
new->cache = g_malloc (new->cache_size);
new->cache_byte_pos = 0;
new->MPEG2 = FALSE;
new->type = type;
@ -50,25 +57,118 @@ gst_mpeg_packetize_destroy (GstMPEGPacketize * packetize)
{
g_return_if_fail (packetize != NULL);
gst_bytestream_destroy (packetize->bs);
gst_object_unref (GST_OBJECT (packetize->pad));
gst_object_unref (GST_OBJECT (packetize->srcpad));
g_free (packetize->cache);
g_free (packetize);
}
static GstData *
parse_packhead (GstMPEGPacketize * packetize)
guint64
gst_mpeg_packetize_tell (GstMPEGPacketize * packetize)
{
gint length = 8 + 4;
return packetize->cache_byte_pos + packetize->cache_head;
}
gboolean
gst_mpeg_packetize_put (GstMPEGPacketize * packetize, GstBuffer * buf)
{
int cache_len = packetize->cache_tail - packetize->cache_head;
if (cache_len + GST_BUFFER_SIZE (buf) > packetize->cache_size) {
/* the buffer does not fit into the cache so grow the cache */
guint8 *new_cache;
/* get the new size of the cache */
do {
packetize->cache_size *= 2;
} while (cache_len + GST_BUFFER_SIZE (buf) > packetize->cache_size);
/* allocate new cache - do not realloc to avoid copying data twice */
new_cache = g_malloc (packetize->cache_size);
if (new_cache == NULL)
return FALSE;
/* copy the data to the beginning of the new cache and update the cache info */
memcpy (new_cache, packetize->cache + packetize->cache_head, cache_len);
g_free (packetize->cache);
packetize->cache = new_cache;
packetize->cache_byte_pos += packetize->cache_head;
packetize->cache_head = 0;
packetize->cache_tail = cache_len;
} else if (packetize->cache_tail + GST_BUFFER_SIZE (buf) >
packetize->cache_size) {
/* the buffer does not fit into the end of the cache so move the cache data
to the beginning of the cache */
memmove (packetize->cache, packetize->cache + packetize->cache_head,
packetize->cache_tail - packetize->cache_head);
packetize->cache_byte_pos += packetize->cache_head;
packetize->cache_tail -= packetize->cache_head;
packetize->cache_head = 0;
}
/* copy the buffer to the cache */
memcpy (packetize->cache + packetize->cache_tail, GST_BUFFER_DATA (buf),
GST_BUFFER_SIZE (buf));
packetize->cache_tail += GST_BUFFER_SIZE (buf);
gst_buffer_unref (buf);
return TRUE;
}
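
A worked example of the cache growth above (sizes invented):

/* cache_size starts at 0x4000 (16 KiB).  If 10 KiB are already cached
 * (cache_tail - cache_head) and a 12 KiB buffer arrives, 22 KiB does
 * not fit, so the loop doubles cache_size to 0x8000 (32 KiB).  The
 * cached 10 KiB are copied to the start of the new allocation,
 * cache_byte_pos is advanced by the old cache_head so that
 * gst_mpeg_packetize_tell() keeps reporting the same absolute position,
 * cache_head becomes 0 and cache_tail 10 KiB, and the new buffer is
 * appended, leaving cache_tail at 22 KiB. */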
static guint
peek_cache (GstMPEGPacketize * packetize, guint length, guint8 ** buf)
{
*buf = packetize->cache + packetize->cache_head;
if (packetize->cache_tail - packetize->cache_head < length)
return packetize->cache_tail - packetize->cache_head;
return length;
}
static void
skip_cache (GstMPEGPacketize * packetize, guint length)
{
g_assert (packetize->cache_tail - packetize->cache_head >= length);
packetize->cache_head += length;
}
static GstFlowReturn
read_cache (GstMPEGPacketize * packetize, guint length, GstBuffer ** outbuf)
{
if (packetize->cache_tail - packetize->cache_head < length)
return GST_FLOW_RESEND;
if (length == 0)
return GST_FLOW_RESEND;
*outbuf = gst_buffer_new_and_alloc (length);
if (*outbuf == NULL)
return GST_FLOW_ERROR;
memcpy (GST_BUFFER_DATA (*outbuf), packetize->cache + packetize->cache_head,
length);
packetize->cache_head += length;
return GST_FLOW_OK;
}
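
These helpers establish the convention the parse_* functions below rely on: peek_cache() hands back however many bytes are currently cached, and read_cache() returns GST_FLOW_RESEND when the cache does not yet hold a complete unit, so the caller simply waits for more input from gst_mpeg_packetize_put(). A minimal sketch of that calling pattern (hypothetical caller, illustration only):

/* Feed data in, retry parsing on GST_FLOW_RESEND. */
GstFlowReturn ret;
GstBuffer *packet = NULL;

gst_mpeg_packetize_put (packetize, input_buffer);  /* cache takes ownership */
ret = parse_packhead (packetize, &packet);
if (ret == GST_FLOW_RESEND) {
  /* not enough bytes cached yet: try again after the next put() */
} else if (ret == GST_FLOW_OK) {
  /* packet now holds the pack header read out of the cache */
}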
static GstFlowReturn
parse_packhead (GstMPEGPacketize * packetize, GstBuffer ** outbuf)
{
guint length = 8 + 4;
guint8 *buf;
GstBuffer *outbuf;
guint32 got_bytes;
guint got_bytes;
GST_DEBUG ("packetize: in parse_packhead");
got_bytes = gst_bytestream_peek_bytes (packetize->bs, &buf, length);
*outbuf = NULL;
got_bytes = peek_cache (packetize, length, &buf);
if (got_bytes < length)
return NULL;
return GST_FLOW_RESEND;
buf += 4;
@ -79,74 +179,55 @@ parse_packhead (GstMPEGPacketize * packetize)
GST_DEBUG ("packetize::parse_packhead setting mpeg2");
packetize->MPEG2 = TRUE;
length += 2;
got_bytes = gst_bytestream_peek_bytes (packetize->bs, &buf, length);
got_bytes = peek_cache (packetize, length, &buf);
if (got_bytes < length)
return NULL;
return GST_FLOW_RESEND;
} else {
GST_DEBUG ("packetize::parse_packhead setting mpeg1");
packetize->MPEG2 = FALSE;
}
got_bytes = gst_bytestream_read (packetize->bs, &outbuf, length);
if (got_bytes < length)
return NULL;
return GST_DATA (outbuf);
return read_cache (packetize, length, outbuf);
}
static GstData *
parse_end (GstMPEGPacketize * packetize)
static GstFlowReturn
parse_end (GstMPEGPacketize * packetize, GstBuffer ** outbuf)
{
guint32 got_bytes;
GstBuffer *outbuf;
got_bytes = gst_bytestream_read (packetize->bs, &outbuf, 4);
if (got_bytes < 4)
return NULL;
return GST_DATA (outbuf);
return read_cache (packetize, 4, outbuf);
}
static inline GstData *
parse_generic (GstMPEGPacketize * packetize)
static GstFlowReturn
parse_generic (GstMPEGPacketize * packetize, GstBuffer ** outbuf)
{
GstByteStream *bs = packetize->bs;
guchar *buf;
GstBuffer *outbuf;
guint32 got_bytes;
gint16 length = 6;
guint length = 6;
guint got_bytes;
GST_DEBUG ("packetize: in parse_generic");
got_bytes = gst_bytestream_peek_bytes (bs, (guint8 **) & buf, length);
if (got_bytes < 6)
return NULL;
got_bytes = peek_cache (packetize, length, &buf);
if (got_bytes < length)
return GST_FLOW_RESEND;
buf += 4;
length += GST_READ_UINT16_BE (buf);
GST_DEBUG ("packetize: header_length %d", length);
got_bytes = gst_bytestream_read (packetize->bs, &outbuf, length);
if (got_bytes < length)
return NULL;
return GST_DATA (outbuf);
return read_cache (packetize, length, outbuf);
}
static inline GstData *
parse_chunk (GstMPEGPacketize * packetize)
static GstFlowReturn
parse_chunk (GstMPEGPacketize * packetize, GstBuffer ** outbuf)
{
GstByteStream *bs = packetize->bs;
guchar *buf;
gint offset;
guint32 code;
gint chunksize;
GstBuffer *outbuf = NULL;
guint chunksize;
chunksize = gst_bytestream_peek_bytes (bs, (guint8 **) & buf, 4096);
chunksize = peek_cache (packetize, 4096, &buf);
if (chunksize == 0)
return NULL;
return GST_FLOW_RESEND;
offset = 4;
@ -160,33 +241,29 @@ parse_chunk (GstMPEGPacketize * packetize)
GST_DEBUG (" code = %08x", code);
if (offset == chunksize) {
chunksize =
gst_bytestream_peek_bytes (bs, (guint8 **) & buf, offset + 4096);
chunksize = peek_cache (packetize, offset + 4096, &buf);
if (chunksize == 0)
return NULL;
return GST_FLOW_RESEND;
chunksize += offset;
}
}
if (offset > 4) {
chunksize = gst_bytestream_read (bs, &outbuf, offset - 4);
if (chunksize == 0)
return NULL;
return read_cache (packetize, offset - 4, outbuf);
}
return GST_DATA (outbuf);
return GST_FLOW_RESEND;
}
/* FIXME mmx-ify me */
static inline gboolean
static gboolean
find_start_code (GstMPEGPacketize * packetize)
{
GstByteStream *bs = packetize->bs;
guchar *buf;
guint8 *buf;
gint offset;
guint32 code;
gint chunksize;
chunksize = gst_bytestream_peek_bytes (bs, (guint8 **) & buf, 4096);
chunksize = peek_cache (packetize, 4096, &buf);
if (chunksize < 5)
return FALSE;
@ -202,9 +279,9 @@ find_start_code (GstMPEGPacketize * packetize)
GST_DEBUG (" code = %08x %p %08x", code, buf, chunksize);
if (offset == chunksize) {
gst_bytestream_flush_fast (bs, offset);
skip_cache (packetize, offset);
chunksize = gst_bytestream_peek_bytes (bs, (guint8 **) & buf, 4096);
chunksize = peek_cache (packetize, 4096, &buf);
if (chunksize == 0)
return FALSE;
@ -213,86 +290,75 @@ find_start_code (GstMPEGPacketize * packetize)
}
packetize->id = code & 0xff;
if (offset > 4) {
gst_bytestream_flush_fast (bs, offset - 4);
skip_cache (packetize, offset - 4);
}
return TRUE;
}
GstData *
gst_mpeg_packetize_read (GstMPEGPacketize * packetize)
GstFlowReturn
gst_mpeg_packetize_read (GstMPEGPacketize * packetize, GstBuffer ** outbuf)
{
gboolean got_event = FALSE;
GstData *outbuf = NULL;
g_return_val_if_fail (packetize != NULL, GST_FLOW_ERROR);
g_return_val_if_fail (packetize != NULL, NULL);
*outbuf = NULL;
while (outbuf == NULL) {
while (*outbuf == NULL) {
if (!find_start_code (packetize))
got_event = TRUE;
else {
GST_DEBUG ("packetize: have chunk 0x%02X", packetize->id);
if (packetize->type == GST_MPEG_PACKETIZE_SYSTEM) {
if (packetize->resync) {
if (packetize->id != PACK_START_CODE) {
gst_bytestream_flush_fast (packetize->bs, 4);
continue;
return GST_FLOW_RESEND;
GST_DEBUG ("packetize: have chunk 0x%02X", packetize->id);
if (packetize->type == GST_MPEG_PACKETIZE_SYSTEM) {
if (packetize->resync) {
if (packetize->id != PACK_START_CODE) {
skip_cache (packetize, 4);
continue;
}
packetize->resync = FALSE;
}
switch (packetize->id) {
case PACK_START_CODE:
return parse_packhead (packetize, outbuf);
case SYS_HEADER_START_CODE:
return parse_generic (packetize, outbuf);
case ISO11172_END_START_CODE:
return parse_end (packetize, outbuf);
default:
if (packetize->MPEG2 && ((packetize->id < 0xBD)
|| (packetize->id > 0xFE))) {
skip_cache (packetize, 4);
g_warning ("packetize: ******** unknown id 0x%02X", packetize->id);
} else {
return parse_generic (packetize, outbuf);
}
packetize->resync = FALSE;
}
switch (packetize->id) {
case PACK_START_CODE:
outbuf = parse_packhead (packetize);
if (!outbuf)
got_event = TRUE;
break;
case SYS_HEADER_START_CODE:
outbuf = parse_generic (packetize);
if (!outbuf)
got_event = TRUE;
break;
case ISO11172_END_START_CODE:
outbuf = parse_end (packetize);
if (!outbuf)
got_event = TRUE;
break;
default:
if (packetize->MPEG2 && ((packetize->id < 0xBD)
|| (packetize->id > 0xFE))) {
gst_bytestream_flush (packetize->bs, 4);
g_warning ("packetize: ******** unknown id 0x%02X",
packetize->id);
} else {
outbuf = parse_generic (packetize);
if (!outbuf)
got_event = TRUE;
}
}
} else if (packetize->type == GST_MPEG_PACKETIZE_VIDEO) {
outbuf = parse_chunk (packetize);
} else {
g_assert_not_reached ();
}
}
if (got_event) {
guint32 remaining;
GstEvent *event;
gint etype;
gst_bytestream_get_status (packetize->bs, &remaining, &event);
etype = event ? GST_EVENT_TYPE (event) : GST_EVENT_EOS;
switch (etype) {
case GST_EVENT_DISCONTINUOUS:
GST_DEBUG ("packetize: discont\n");
gst_bytestream_flush_fast (packetize->bs, remaining);
break;
}
return GST_DATA (event);
} else if (packetize->type == GST_MPEG_PACKETIZE_VIDEO) {
return parse_chunk (packetize, outbuf);
} else {
g_assert_not_reached ();
}
}
return outbuf;
#if 0
/* TODO: flush cache when newsegment is received */
if (got_event) {
guint32 remaining;
GstEvent *event;
gint etype;
gst_bytestream_get_status (packetize->bs, &remaining, &event);
etype = event ? GST_EVENT_TYPE (event) : GST_EVENT_EOS;
switch (etype) {
case GST_EVENT_NEWSEGMENT:
GST_DEBUG ("packetize: discont\n");
gst_bytestream_flush_fast (packetize->bs, remaining);
break;
}
return GST_MINI_OBJECT (event);
}
#endif
g_assert_not_reached ();
}

View file

@ -23,7 +23,6 @@
#include <gst/gst.h>
#include <gst/bytestream/bytestream.h>
G_BEGIN_DECLS
@ -55,18 +54,25 @@ struct _GstMPEGPacketize {
/* current parse state */
guchar id;
GstPad *pad;
GstByteStream *bs;
GstPad *srcpad;
GstMPEGPacketizeType type;
guint8 *cache; /* cache for incoming data */
guint cache_size; /* allocated size of the cache */
guint cache_head; /* position of the beginning of the data */
guint cache_tail; /* position of the end of the data in the cache */
guint64 cache_byte_pos; /* byte position of the cache in the MPEG stream */
gboolean MPEG2;
gboolean resync;
};
GstMPEGPacketize* gst_mpeg_packetize_new (GstPad *pad, GstMPEGPacketizeType type);
void gst_mpeg_packetize_destroy (GstMPEGPacketize *packetize);
GstMPEGPacketize* gst_mpeg_packetize_new (GstPad *pad, GstMPEGPacketizeType type);
void gst_mpeg_packetize_destroy (GstMPEGPacketize *packetize);
GstData* gst_mpeg_packetize_read (GstMPEGPacketize *packetize);
guint64 gst_mpeg_packetize_tell (GstMPEGPacketize *packetize);
gboolean gst_mpeg_packetize_put (GstMPEGPacketize *packetize, GstBuffer * buf);
GstFlowReturn gst_mpeg_packetize_read (GstMPEGPacketize *packetize, GstBuffer ** outbuf);
G_END_DECLS
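Illustrative sketch only, not part of the patch: the loop below shows how the cache-based API declared above is meant to be driven from a 0.9 chain function, with GST_FLOW_RESEND acting as a "need more data" indication (compare gst_mpeg_parse_chain further down). The example_drive_packetize name is invented for the example.

static GstFlowReturn
example_drive_packetize (GstMPEGPacketize * packetize, GstBuffer * inbuf)
{
  GstBuffer *chunk = NULL;
  GstFlowReturn ret;

  /* the packetizer takes ownership of the buffer on success */
  if (!gst_mpeg_packetize_put (packetize, inbuf)) {
    gst_buffer_unref (inbuf);
    return GST_FLOW_ERROR;
  }

  /* drain complete chunks until the cache runs out of data */
  while ((ret = gst_mpeg_packetize_read (packetize, &chunk)) == GST_FLOW_OK) {
    /* ... handle the pack header / system header / PES chunk here ... */
    gst_buffer_unref (chunk);
    chunk = NULL;
  }

  /* GST_FLOW_RESEND just means "wait for the next input buffer" */
  return (ret == GST_FLOW_RESEND) ? GST_FLOW_OK : ret;
}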

View file

@ -52,12 +52,12 @@ static GstElementDetails mpeg_parse_details = {
#define CLASS(o) GST_MPEG_PARSE_CLASS (G_OBJECT_GET_CLASS (o))
#define DEFAULT_MAX_DISCONT 45000
#define DEFAULT_MAX_DISCONT 120000
/* GstMPEGParse signals and args */
enum
{
/* FILL ME */
SIGNAL_REACHED_OFFSET,
LAST_SIGNAL
};
@ -66,7 +66,9 @@ enum
ARG_0,
ARG_SYNC,
ARG_MAX_DISCONT,
ARG_DO_ADJUST
ARG_DO_ADJUST,
ARG_BYTE_OFFSET,
ARG_TIME_OFFSET
/* FILL ME */
};
@ -95,17 +97,23 @@ static void gst_mpeg_parse_set_clock (GstElement * element, GstClock * clock);
static gboolean gst_mpeg_parse_parse_packhead (GstMPEGParse * mpeg_parse,
GstBuffer * buffer);
static void gst_mpeg_parse_handle_discont (GstMPEGParse * mpeg_parse,
static void gst_mpeg_parse_reset (GstMPEGParse * mpeg_parse);
static GstFlowReturn gst_mpeg_parse_handle_discont (GstMPEGParse * mpeg_parse,
GstEvent * event);
static void gst_mpeg_parse_send_data (GstMPEGParse * mpeg_parse, GstData * data,
GstClockTime time);
static void gst_mpeg_parse_send_discont (GstMPEGParse * mpeg_parse,
static GstFlowReturn gst_mpeg_parse_send_buffer (GstMPEGParse * mpeg_parse,
GstBuffer * buffer, GstClockTime time);
static GstFlowReturn gst_mpeg_parse_process_event (GstMPEGParse * mpeg_parse,
GstEvent * event, GstClockTime time);
static GstFlowReturn gst_mpeg_parse_send_discont (GstMPEGParse * mpeg_parse,
GstClockTime time);
static GstFlowReturn gst_mpeg_parse_send_event (GstMPEGParse * mpeg_parse,
GstEvent * event, GstClockTime time);
static void gst_mpeg_parse_new_pad (GstElement * element, GstPad * pad);
static void gst_mpeg_parse_pad_added (GstElement * element, GstPad * pad);
static void gst_mpeg_parse_loop (GstElement * element);
static gboolean gst_mpeg_parse_event (GstPad * pad, GstEvent * event);
static GstFlowReturn gst_mpeg_parse_chain (GstPad * pad, GstBuffer * buf);
static void gst_mpeg_parse_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
@ -114,11 +122,9 @@ static void gst_mpeg_parse_set_property (GObject * object, guint prop_id,
static void gst_mpeg_parse_set_index (GstElement * element, GstIndex * index);
static GstIndex *gst_mpeg_parse_get_index (GstElement * element);
static gboolean gst_mpeg_parse_release_locks (GstElement * element);
static GstElementClass *parent_class = NULL;
/*static guint gst_mpeg_parse_signals[LAST_SIGNAL] = { 0 };*/
static guint gst_mpeg_parse_signals[LAST_SIGNAL] = { 0 };
GType
gst_mpeg_parse_get_type (void)
@ -165,7 +171,39 @@ gst_mpeg_parse_class_init (GstMPEGParseClass * klass)
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
parent_class = g_type_class_peek_parent (klass);
gst_mpeg_parse_signals[SIGNAL_REACHED_OFFSET] =
g_signal_new ("reached-offset", G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_FIRST, G_STRUCT_OFFSET (GstMPEGParseClass, reached_offset),
NULL, NULL, gst_marshal_VOID__VOID, G_TYPE_NONE, 0);
gobject_class->get_property = gst_mpeg_parse_get_property;
gobject_class->set_property = gst_mpeg_parse_set_property;
gstelement_class->pad_added = gst_mpeg_parse_pad_added;
gstelement_class->change_state = gst_mpeg_parse_change_state;
gstelement_class->set_clock = gst_mpeg_parse_set_clock;
gstelement_class->get_index = gst_mpeg_parse_get_index;
gstelement_class->set_index = gst_mpeg_parse_set_index;
klass->parse_packhead = gst_mpeg_parse_parse_packhead;
klass->parse_syshead = NULL;
klass->parse_packet = NULL;
klass->parse_pes = NULL;
klass->handle_discont = gst_mpeg_parse_handle_discont;
klass->send_buffer = gst_mpeg_parse_send_buffer;
klass->process_event = gst_mpeg_parse_process_event;
klass->send_discont = gst_mpeg_parse_send_discont;
klass->send_event = gst_mpeg_parse_send_event;
/* FIXME: this is a hack. We add the pad templates here instead
* of in the base_init function, since the derived class (mpegdemux)
* uses different pads. IMO, this is wrong. */
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&sink_factory));
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_SYNC,
g_param_spec_boolean ("sync", "Sync", "Synchronize on the stream SCR",
@ -179,32 +217,14 @@ gst_mpeg_parse_class_init (GstMPEGParseClass * klass)
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_DO_ADJUST,
g_param_spec_boolean ("adjust", "adjust", "Adjust timestamps to "
"smooth discontinuities", TRUE, G_PARAM_READWRITE));
gobject_class->get_property = gst_mpeg_parse_get_property;
gobject_class->set_property = gst_mpeg_parse_set_property;
gstelement_class->new_pad = gst_mpeg_parse_new_pad;
gstelement_class->change_state = gst_mpeg_parse_change_state;
gstelement_class->set_clock = gst_mpeg_parse_set_clock;
gstelement_class->get_index = gst_mpeg_parse_get_index;
gstelement_class->set_index = gst_mpeg_parse_set_index;
gstelement_class->release_locks = gst_mpeg_parse_release_locks;
klass->parse_packhead = gst_mpeg_parse_parse_packhead;
klass->parse_syshead = NULL;
klass->parse_packet = NULL;
klass->parse_pes = NULL;
klass->handle_discont = gst_mpeg_parse_handle_discont;
klass->send_data = gst_mpeg_parse_send_data;
klass->send_discont = gst_mpeg_parse_send_discont;
/* FIXME: this is a hack. We add the pad templates here instead
* of in the base_init function, since the derived class (mpegdemux)
* uses different pads. IMO, this is wrong. */
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&sink_factory));
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BYTE_OFFSET,
g_param_spec_uint64 ("byte-offset", "Byte Offset",
"Emit reached-offset signal when the byte offset is reached.",
0, G_MAXUINT64, G_MAXUINT64, G_PARAM_READWRITE));
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_TIME_OFFSET,
g_param_spec_uint64 ("time-offset", "Time Offset",
"Time offset in the stream.",
0, G_MAXUINT64, G_MAXUINT64, G_PARAM_READABLE));
}
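Hypothetical usage of the byte-offset property and reached-offset signal registered above (not part of the patch; the mpegparse variable and the callback name are placeholders):

static void
on_reached_offset (GstElement * mpegparse, gpointer user_data)
{
  guint64 ts = 0;

  /* time-offset exposes the timestamp corresponding to the current SCR */
  g_object_get (mpegparse, "time-offset", &ts, NULL);
  g_print ("reached requested byte offset at %" G_GUINT64_FORMAT " ns\n", ts);
}

/* ... after creating the element ... */
g_signal_connect (mpegparse, "reached-offset",
    G_CALLBACK (on_reached_offset), NULL);
g_object_set (mpegparse, "byte-offset", (guint64) 4 * 1024 * 1024, NULL);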
static void
@ -216,30 +236,25 @@ gst_mpeg_parse_init (GstMPEGParse * mpeg_parse)
templ = gst_element_class_get_pad_template (klass, "sink");
mpeg_parse->sinkpad = gst_pad_new_from_template (templ, "sink");
gst_element_add_pad (GST_ELEMENT (mpeg_parse), mpeg_parse->sinkpad);
gst_pad_set_formats_function (mpeg_parse->sinkpad,
gst_mpeg_parse_get_src_formats);
gst_pad_set_convert_function (mpeg_parse->sinkpad,
gst_mpeg_parse_convert_src);
if ((templ = gst_element_class_get_pad_template (klass, "src"))) {
mpeg_parse->srcpad = gst_pad_new_from_template (templ, "src");
gst_element_add_pad (GST_ELEMENT (mpeg_parse), mpeg_parse->srcpad);
gst_pad_set_formats_function (mpeg_parse->srcpad,
gst_mpeg_parse_get_src_formats);
gst_pad_set_convert_function (mpeg_parse->srcpad,
gst_mpeg_parse_convert_src);
gst_pad_set_event_mask_function (mpeg_parse->srcpad,
gst_mpeg_parse_get_src_event_masks);
gst_pad_set_event_function (mpeg_parse->srcpad,
gst_mpeg_parse_handle_src_event);
GST_DEBUG_FUNCPTR (gst_mpeg_parse_handle_src_event));
#if 0
gst_pad_set_query_type_function (mpeg_parse->srcpad,
gst_mpeg_parse_get_src_query_types);
gst_pad_set_query_function (mpeg_parse->srcpad,
gst_mpeg_parse_handle_src_query);
gst_pad_use_explicit_caps (mpeg_parse->srcpad);
#endif
gst_pad_use_fixed_caps (mpeg_parse->srcpad);
}
gst_element_set_loop_function (GST_ELEMENT (mpeg_parse), gst_mpeg_parse_loop);
gst_pad_set_event_function (mpeg_parse->sinkpad,
GST_DEBUG_FUNCPTR (gst_mpeg_parse_event));
gst_pad_set_chain_function (mpeg_parse->sinkpad,
GST_DEBUG_FUNCPTR (gst_mpeg_parse_chain));
mpeg_parse->packetize = NULL;
mpeg_parse->sync = FALSE;
@ -249,7 +264,9 @@ gst_mpeg_parse_init (GstMPEGParse * mpeg_parse)
mpeg_parse->do_adjust = TRUE;
mpeg_parse->use_adjust = TRUE;
GST_OBJECT_FLAG_SET (mpeg_parse, GST_ELEMENT_EVENT_AWARE);
mpeg_parse->byte_offset = G_MAXUINT64;
gst_mpeg_parse_reset (mpeg_parse);
}
static void
@ -289,7 +306,9 @@ gst_mpeg_parse_update_streaminfo (GstMPEGParse * mpeg_parse)
static void
gst_mpeg_parse_reset (GstMPEGParse * mpeg_parse)
{
GST_DEBUG ("Resetting mpeg_parse");
mpeg_parse->current_scr = 0;
mpeg_parse->current_ts = 0;
mpeg_parse->bytes_since_scr = 0;
mpeg_parse->avg_bitrate_time = 0;
mpeg_parse->avg_bitrate_bytes = 0;
@ -307,86 +326,123 @@ gst_mpeg_parse_reset (GstMPEGParse * mpeg_parse)
mpeg_parse->scr_pending = FALSE;
}
static void
static GstFlowReturn
gst_mpeg_parse_handle_discont (GstMPEGParse * mpeg_parse, GstEvent * event)
{
GstClockTime time;
g_return_if_fail (GST_EVENT_TYPE (event) == GST_EVENT_DISCONTINUOUS);
GstFlowReturn result = GST_FLOW_OK;
GstFormat format;
gint64 time;
#if 0
if (GST_EVENT_DISCONT_NEW_MEDIA (event)) {
gst_mpeg_parse_reset (mpeg_parse);
}
#endif
if (gst_event_discont_get_value (event, GST_FORMAT_TIME, &time)
&& (GST_CLOCK_TIME_IS_VALID (time))) {
gst_event_parse_newsegment (event, NULL, NULL, &format, &time, NULL, NULL);
if (format == GST_FORMAT_TIME && (GST_CLOCK_TIME_IS_VALID (time))) {
GST_DEBUG_OBJECT (mpeg_parse, "forwarding discontinuity, time: %0.3fs",
(double) time / GST_SECOND);
if (CLASS (mpeg_parse)->send_discont)
CLASS (mpeg_parse)->send_discont (mpeg_parse, time);
result = CLASS (mpeg_parse)->send_discont (mpeg_parse, time);
} else {
/* Use the next SCR to send a discontinuous event. */
GST_DEBUG_OBJECT (mpeg_parse, "Using next SCR to send discont");
mpeg_parse->discont_pending = TRUE;
mpeg_parse->scr_pending = TRUE;
}
mpeg_parse->packetize->resync = TRUE;
gst_event_unref (event);
return result;
}
static void
gst_mpeg_parse_send_data (GstMPEGParse * mpeg_parse, GstData * data,
static GstFlowReturn
gst_mpeg_parse_send_buffer (GstMPEGParse * mpeg_parse, GstBuffer * buffer,
GstClockTime time)
{
if (GST_IS_EVENT (data)) {
GstEvent *event = GST_EVENT (data);
GstFlowReturn result = GST_FLOW_OK;
switch (GST_EVENT_TYPE (event)) {
default:
gst_pad_event_default (mpeg_parse->sinkpad, event);
break;
if (!gst_caps_is_fixed (gst_pad_get_caps (mpeg_parse->srcpad))) {
gboolean mpeg2 = GST_MPEG_PACKETIZE_IS_MPEG2 (mpeg_parse->packetize);
GstCaps *caps;
caps = gst_caps_new_simple ("video/mpeg",
"mpegversion", G_TYPE_INT, (mpeg2 ? 2 : 1),
"systemstream", G_TYPE_BOOLEAN, TRUE,
"parsed", G_TYPE_BOOLEAN, TRUE, NULL);
if (!gst_pad_set_caps (mpeg_parse->srcpad, caps)) {
GST_ELEMENT_ERROR (GST_ELEMENT (mpeg_parse),
CORE, NEGOTIATION, (NULL), ("failed to set caps"));
return GST_FLOW_ERROR;
}
} else {
if (!gst_pad_is_negotiated (mpeg_parse->srcpad)) {
gboolean mpeg2 = GST_MPEG_PACKETIZE_IS_MPEG2 (mpeg_parse->packetize);
GstCaps *caps;
caps = gst_caps_new_simple ("video/mpeg",
"mpegversion", G_TYPE_INT, (mpeg2 ? 2 : 1),
"systemstream", G_TYPE_BOOLEAN, TRUE,
"parsed", G_TYPE_BOOLEAN, TRUE, NULL);
if (!gst_pad_set_explicit_caps (mpeg_parse->srcpad, caps)) {
GST_ELEMENT_ERROR (GST_ELEMENT (mpeg_parse),
CORE, NEGOTIATION, (NULL), ("failed to set caps"));
return;
}
}
GST_BUFFER_TIMESTAMP (data) = time;
GST_DEBUG ("current_scr %" G_GINT64_FORMAT, time);
if (GST_PAD_IS_USABLE (mpeg_parse->srcpad))
gst_pad_push (mpeg_parse->srcpad, GST_DATA (data));
else
gst_data_unref (data);
}
GST_BUFFER_TIMESTAMP (buffer) = time;
GST_DEBUG ("current_scr %" G_GINT64_FORMAT, time);
if (GST_PAD_IS_USABLE (mpeg_parse->srcpad))
result = gst_pad_push (mpeg_parse->srcpad, buffer);
else
gst_buffer_unref (buffer);
return result;
}
static void
static GstFlowReturn
gst_mpeg_parse_process_event (GstMPEGParse * mpeg_parse, GstEvent * event,
GstClockTime time)
{
switch (GST_EVENT_TYPE (event)) {
default:
if (!gst_pad_event_default (mpeg_parse->sinkpad, event))
return GST_FLOW_ERROR;
break;
}
return GST_FLOW_OK;
}
static GstFlowReturn
gst_mpeg_parse_send_discont (GstMPEGParse * mpeg_parse, GstClockTime time)
{
GstFlowReturn result = GST_FLOW_OK;
GstEvent *event;
if (GST_PAD_IS_USABLE (mpeg_parse->srcpad)) {
event = gst_event_new_discontinuous (FALSE, GST_FORMAT_TIME, time, NULL);
gst_pad_push (mpeg_parse->srcpad, GST_DATA (event));
event = gst_event_new_newsegment (FALSE, 1.0, GST_FORMAT_TIME, time,
GST_CLOCK_TIME_NONE, (gint64) 0);
if (!event) {
GST_ELEMENT_ERROR (GST_ELEMENT (mpeg_parse),
RESOURCE, FAILED, (NULL), ("Allocation failed"));
return GST_FLOW_ERROR;
}
if (CLASS (mpeg_parse)->send_event)
result = CLASS (mpeg_parse)->send_event (mpeg_parse, event, time);
return result;
}
static GstFlowReturn
gst_mpeg_parse_send_event (GstMPEGParse * mpeg_parse, GstEvent * event,
GstClockTime time)
{
GstFlowReturn result = GST_FLOW_OK;
if (GST_PAD_IS_USABLE (mpeg_parse->srcpad))
result = gst_pad_push_event (mpeg_parse->srcpad, event);
else
gst_event_unref (event);
return result;
}
static void
gst_mpeg_parse_new_pad (GstElement * element, GstPad * pad)
gst_mpeg_parse_pad_added (GstElement * element, GstPad * pad)
{
GstMPEGParse *mpeg_parse;
@ -400,13 +456,14 @@ gst_mpeg_parse_new_pad (GstElement * element, GstPad * pad)
* packets, including setting base time before defining streams or
* even adding streams halfway through a stream. */
if (!mpeg_parse->scr_pending) {
GstEvent *event = gst_event_new_discontinuous (FALSE,
GstEvent *event = gst_event_new_newsegment (FALSE, 1.0,
GST_FORMAT_TIME,
(guint64) MPEGTIME_TO_GSTTIME (mpeg_parse->current_scr +
mpeg_parse->adjust),
GST_FORMAT_UNDEFINED);
GST_CLOCK_TIME_NONE,
(gint64) 0);
gst_pad_push (pad, GST_DATA (event));
gst_pad_push_event (pad, event);
}
}
@ -414,9 +471,10 @@ static gboolean
gst_mpeg_parse_parse_packhead (GstMPEGParse * mpeg_parse, GstBuffer * buffer)
{
guint8 *buf;
guint64 prev_scr, scr;
guint64 prev_scr, scr, diff;
guint32 scr1, scr2;
guint32 new_rate;
guint64 offset;
buf = GST_BUFFER_DATA (buffer);
buf += 4;
@ -461,6 +519,8 @@ gst_mpeg_parse_parse_packhead (GstMPEGParse * mpeg_parse, GstBuffer * buffer)
prev_scr = mpeg_parse->current_scr;
mpeg_parse->current_scr = scr;
mpeg_parse->current_ts = MPEGTIME_TO_GSTTIME (mpeg_parse->current_scr +
mpeg_parse->adjust);
mpeg_parse->scr_pending = FALSE;
if (mpeg_parse->next_scr == MP_INVALID_SCR) {
@ -470,13 +530,13 @@ gst_mpeg_parse_parse_packhead (GstMPEGParse * mpeg_parse, GstBuffer * buffer)
if ((mpeg_parse->first_scr == MP_INVALID_SCR) ||
(mpeg_parse->current_scr < mpeg_parse->first_scr)) {
mpeg_parse->first_scr = mpeg_parse->current_scr;
mpeg_parse->first_scr_pos = gst_bytestream_tell (mpeg_parse->packetize->bs);
mpeg_parse->first_scr_pos = gst_mpeg_packetize_tell (mpeg_parse->packetize);
}
if ((mpeg_parse->last_scr == MP_INVALID_SCR) ||
(mpeg_parse->current_scr > mpeg_parse->last_scr)) {
mpeg_parse->last_scr = mpeg_parse->current_scr;
mpeg_parse->last_scr_pos = gst_bytestream_tell (mpeg_parse->packetize->bs);
mpeg_parse->last_scr_pos = gst_mpeg_packetize_tell (mpeg_parse->packetize);
}
GST_LOG_OBJECT (mpeg_parse,
@ -493,8 +553,14 @@ gst_mpeg_parse_parse_packhead (GstMPEGParse * mpeg_parse, GstBuffer * buffer)
MPEGTIME_TO_GSTTIME (mpeg_parse->current_scr) -
MPEGTIME_TO_GSTTIME (mpeg_parse->next_scr));
if (ABS ((gint64) mpeg_parse->next_scr - (gint64) (scr)) >
mpeg_parse->max_discont) {
/* watch out for integer overflows... */
if (mpeg_parse->next_scr > scr) {
diff = mpeg_parse->next_scr - scr;
} else {
diff = scr - mpeg_parse->next_scr;
}
if (diff > mpeg_parse->max_discont) {
GST_DEBUG ("discontinuity detected; expected: %" G_GUINT64_FORMAT " got: %"
G_GUINT64_FORMAT " adjusted:%" G_GINT64_FORMAT " adjust:%"
G_GINT64_FORMAT, mpeg_parse->next_scr, mpeg_parse->current_scr,
@ -511,6 +577,15 @@ gst_mpeg_parse_parse_packhead (GstMPEGParse * mpeg_parse, GstBuffer * buffer)
}
}
mpeg_parse->current_ts = MPEGTIME_TO_GSTTIME (mpeg_parse->current_scr +
mpeg_parse->adjust);
offset = gst_mpeg_packetize_tell (mpeg_parse->packetize);
if (offset > mpeg_parse->byte_offset) {
/* we have reached the wanted offset so emit the signal. */
g_signal_emit (G_OBJECT (mpeg_parse),
gst_mpeg_parse_signals[SIGNAL_REACHED_OFFSET], 0);
}
if (mpeg_parse->index && GST_INDEX_IS_WRITABLE (mpeg_parse->index)) {
/* update index if any */
gst_index_add_association (mpeg_parse->index, mpeg_parse->index_id,
@ -519,7 +594,7 @@ gst_mpeg_parse_parse_packhead (GstMPEGParse * mpeg_parse, GstBuffer * buffer)
GST_FORMAT_TIME, MPEGTIME_TO_GSTTIME (mpeg_parse->current_scr), 0);
}
if (mpeg_parse->current_scr > prev_scr) {
if ((mpeg_parse->current_scr > prev_scr) && (diff < mpeg_parse->max_discont)) {
mpeg_parse->avg_bitrate_time +=
MPEGTIME_TO_GSTTIME (mpeg_parse->current_scr - prev_scr);
mpeg_parse->avg_bitrate_bytes += mpeg_parse->bytes_since_scr;
@ -539,12 +614,6 @@ gst_mpeg_parse_parse_packhead (GstMPEGParse * mpeg_parse, GstBuffer * buffer)
mpeg_parse->bytes_since_scr / 1024.0);
}
if (mpeg_parse->avg_bitrate_bytes > MP_MAX_VALID_BSS) {
mpeg_parse->avg_bitrate_bytes = 0;
mpeg_parse->avg_bitrate_time = 0;
}
mpeg_parse->bytes_since_scr = 0;
if (mpeg_parse->avg_bitrate_bytes) {
GST_DEBUG ("stream avg is %1.3fMbs, calculated over %1.3fkB",
(float) (mpeg_parse->avg_bitrate_bytes) * 8 * GST_SECOND
@ -552,27 +621,68 @@ gst_mpeg_parse_parse_packhead (GstMPEGParse * mpeg_parse, GstBuffer * buffer)
mpeg_parse->avg_bitrate_bytes / 1024.0);
}
if (mpeg_parse->avg_bitrate_bytes > MP_MAX_VALID_BSS) {
mpeg_parse->avg_bitrate_bytes = 0;
mpeg_parse->avg_bitrate_time = 0;
}
mpeg_parse->bytes_since_scr = 0;
return TRUE;
}
static void
gst_mpeg_parse_loop (GstElement * element)
static gboolean
gst_mpeg_parse_event (GstPad * pad, GstEvent * event)
{
GstMPEGParse *mpeg_parse = GST_MPEG_PARSE (element);
GstData *data;
GstFlowReturn ret = GST_FLOW_OK;
GstMPEGParse *mpeg_parse = GST_MPEG_PARSE (gst_pad_get_parent (pad));
GstClockTime time;
time = MPEGTIME_TO_GSTTIME (mpeg_parse->current_scr);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_NEWSEGMENT:
if (CLASS (mpeg_parse)->handle_discont)
ret = CLASS (mpeg_parse)->handle_discont (mpeg_parse, event);
break;
default:
if (CLASS (mpeg_parse)->process_event)
ret = CLASS (mpeg_parse)->process_event (mpeg_parse, event, time);
else
gst_event_unref (event);
break;
}
gst_object_unref (mpeg_parse);
return ret == GST_FLOW_OK;
}
static GstFlowReturn
gst_mpeg_parse_chain (GstPad * pad, GstBuffer * buffer)
{
GstMPEGParse *mpeg_parse = GST_MPEG_PARSE (gst_pad_get_parent (pad));
GstFlowReturn result = GST_FLOW_ERROR;
guint id;
gboolean mpeg2;
GstClockTime time;
guint64 size;
data = gst_mpeg_packetize_read (mpeg_parse->packetize);
if (!data)
return;
if (!gst_mpeg_packetize_put (mpeg_parse->packetize, buffer)) {
gst_buffer_unref (buffer);
goto done;
}
id = GST_MPEG_PACKETIZE_ID (mpeg_parse->packetize);
mpeg2 = GST_MPEG_PACKETIZE_IS_MPEG2 (mpeg_parse->packetize);
while (1) {
result = gst_mpeg_packetize_read (mpeg_parse->packetize, &buffer);
if (result == GST_FLOW_RESEND) {
/* there was not enough data in the packetizer cache */
result = GST_FLOW_OK;
goto done;
}
if (result != GST_FLOW_OK)
goto done;
if (GST_IS_BUFFER (data)) {
GstBuffer *buffer = GST_BUFFER (data);
id = GST_MPEG_PACKETIZE_ID (mpeg_parse->packetize);
mpeg2 = GST_MPEG_PACKETIZE_IS_MPEG2 (mpeg_parse->packetize);
GST_LOG_OBJECT (mpeg_parse, "have chunk 0x%02X", id);
@ -604,78 +714,73 @@ gst_mpeg_parse_loop (GstElement * element)
}
}
}
}
time = MPEGTIME_TO_GSTTIME (mpeg_parse->current_scr);
if (GST_IS_EVENT (data)) {
GstEvent *event = GST_EVENT (data);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_DISCONTINUOUS:
if (CLASS (mpeg_parse)->handle_discont)
CLASS (mpeg_parse)->handle_discont (mpeg_parse, event);
return;
default:
break;
}
if (CLASS (mpeg_parse)->send_data)
CLASS (mpeg_parse)->send_data (mpeg_parse, data, time);
else
gst_event_unref (event);
} else {
guint64 size;
time = MPEGTIME_TO_GSTTIME (mpeg_parse->current_scr);
/* we're not sending data as long as no new SCR was found */
if (mpeg_parse->discont_pending) {
if (!mpeg_parse->scr_pending) {
#if 0
if (mpeg_parse->clock && mpeg_parse->sync) {
gst_element_set_time (GST_ELEMENT (mpeg_parse),
MPEGTIME_TO_GSTTIME (mpeg_parse->current_scr));
}
#endif
if (CLASS (mpeg_parse)->send_discont) {
CLASS (mpeg_parse)->send_discont (mpeg_parse,
result = CLASS (mpeg_parse)->send_discont (mpeg_parse,
MPEGTIME_TO_GSTTIME (mpeg_parse->current_scr +
mpeg_parse->adjust));
if (result != GST_FLOW_OK) {
gst_buffer_unref (buffer);
goto done;
}
}
mpeg_parse->discont_pending = FALSE;
} else {
GST_DEBUG ("waiting for SCR");
gst_buffer_unref (GST_BUFFER (data));
return;
gst_buffer_unref (buffer);
result = GST_FLOW_OK;
goto done;
}
}
size = GST_BUFFER_SIZE (data);
size = GST_BUFFER_SIZE (buffer);
mpeg_parse->bytes_since_scr += size;
if (!GST_PAD_CAPS (mpeg_parse->sinkpad)) {
gboolean mpeg2 = GST_MPEG_PACKETIZE_IS_MPEG2 (mpeg_parse->packetize);
if (gst_pad_try_set_caps (mpeg_parse->sinkpad,
if (!gst_pad_set_caps (mpeg_parse->sinkpad,
gst_caps_new_simple ("video/mpeg",
"mpegversion", G_TYPE_INT, (mpeg2 ? 2 : 1),
"systemstream", G_TYPE_BOOLEAN, TRUE,
"parsed", G_TYPE_BOOLEAN, TRUE, NULL)) < 0) {
GST_ELEMENT_ERROR (mpeg_parse, CORE, NEGOTIATION, (NULL), (NULL));
return;
gst_buffer_unref (buffer);
result = GST_FLOW_ERROR;
goto done;
}
}
if (CLASS (mpeg_parse)->send_data)
CLASS (mpeg_parse)->send_data (mpeg_parse, data, time);
if (CLASS (mpeg_parse)->send_buffer)
result = CLASS (mpeg_parse)->send_buffer (mpeg_parse, buffer, time);
#if 0
if (mpeg_parse->clock && mpeg_parse->sync && !mpeg_parse->discont_pending) {
GST_DEBUG ("syncing mpegparse");
gst_element_wait (GST_ELEMENT (mpeg_parse), time);
}
#endif
if (mpeg_parse->current_scr != MP_INVALID_SCR) {
guint64 scr, bss, br;
scr = mpeg_parse->current_scr;
bss = mpeg_parse->bytes_since_scr;
br = mpeg_parse->mux_rate;
if (mpeg_parse->scr_rate != 0)
br = mpeg_parse->scr_rate;
else
br = mpeg_parse->mux_rate;
if (br) {
if (GST_MPEG_PACKETIZE_IS_MPEG2 (mpeg_parse->packetize)) {
@ -699,9 +804,19 @@ gst_mpeg_parse_loop (GstElement * element)
", total since SCR: %" G_GINT64_FORMAT ", br: %" G_GINT64_FORMAT
", next SCR: %" G_GINT64_FORMAT, size, bss, br, mpeg_parse->next_scr);
}
if (result != GST_FLOW_OK) {
gst_buffer_unref (buffer);
goto done;
}
}
done:
gst_object_unref (mpeg_parse);
return result;
}
#if 0
const GstFormat *
gst_mpeg_parse_get_src_formats (GstPad * pad)
{
@ -733,10 +848,15 @@ gst_mpeg_parse_get_rate (GstMPEGParse * mpeg_parse, gint64 * rate)
&&
gst_pad_query (GST_PAD_PEER (mpeg_parse->sinkpad),
GST_QUERY_TOTAL, &bytes_format, &total_bytes)
&& total_time != 0) {
*rate = GST_SECOND * total_bytes / total_time;
return TRUE;
&& total_time != 0 && total_bytes != 0) {
/* Use the funny calculation to avoid overflow of 64 bits */
*rate =
((total_bytes * GST_USECOND) / total_time) * (GST_SECOND / GST_USECOND);
if (*rate > 0)
return TRUE;
}
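/* Editor's note (not part of the patch): with GST_SECOND being 10^9 ns, the
 * old GST_SECOND * total_bytes product can overflow 64-bit arithmetic once a
 * stream grows into the multi-gigabyte range.  Scaling by GST_USECOND (10^3)
 * before the division and multiplying by GST_SECOND / GST_USECOND (10^6)
 * afterwards keeps the intermediate product in range, at the cost of a little
 * precision in the computed byte rate. */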
*rate = 0;
if ((mpeg_parse->first_scr != MP_INVALID_SCR) &&
(mpeg_parse->last_scr != MP_INVALID_SCR) &&
@ -746,31 +866,30 @@ gst_mpeg_parse_get_rate (GstMPEGParse * mpeg_parse, gint64 * rate)
GST_SECOND * (mpeg_parse->last_scr_pos -
mpeg_parse->first_scr_pos) / MPEGTIME_TO_GSTTIME (mpeg_parse->last_scr -
mpeg_parse->first_scr);
if (*rate != 0) {
/*
* check if we need to update scr_rate
*/
if ((mpeg_parse->scr_rate == 0) ||
(((double) (ABS (mpeg_parse->scr_rate -
*rate)) / mpeg_parse->scr_rate)
>= MP_SCR_RATE_HYST)) {
mpeg_parse->scr_rate = *rate;
return TRUE;
}
}
if (mpeg_parse->scr_rate != 0) {
*rate = mpeg_parse->scr_rate;
}
if (*rate == 0 && mpeg_parse->avg_bitrate_time != 0
&& mpeg_parse->avg_bitrate_bytes > MP_MIN_VALID_BSS) {
*rate =
GST_SECOND * mpeg_parse->avg_bitrate_bytes /
mpeg_parse->avg_bitrate_time;
}
if (*rate != 0) {
/*
* check if we need to update scr_rate
*/
if ((mpeg_parse->scr_rate == 0) ||
(((double) (ABS (mpeg_parse->scr_rate - *rate)) / mpeg_parse->scr_rate)
>= MP_SCR_RATE_HYST)) {
mpeg_parse->scr_rate = *rate;
return TRUE;
}
}
if (mpeg_parse->avg_bitrate_time != 0 && mpeg_parse->avg_bitrate_bytes != 0) {
*rate =
GST_SECOND * mpeg_parse->avg_bitrate_bytes /
mpeg_parse->avg_bitrate_time;
if (*rate != 0) {
return TRUE;
}
if (mpeg_parse->scr_rate != 0) {
*rate = mpeg_parse->scr_rate;
return TRUE;
}
if (mpeg_parse->mux_rate != 0) {
@ -829,6 +948,7 @@ gst_mpeg_parse_convert_src (GstPad * pad, GstFormat src_format,
return res;
}
#if 0
const GstQueryType *
gst_mpeg_parse_get_src_query_types (GstPad * pad)
{
@ -840,6 +960,7 @@ gst_mpeg_parse_get_src_query_types (GstPad * pad)
return types;
}
#endif
gboolean
gst_mpeg_parse_handle_src_query (GstPad * pad, GstQueryType type,
@ -867,7 +988,6 @@ gst_mpeg_parse_handle_src_query (GstPad * pad, GstQueryType type,
res = TRUE;
break;
}
/* Otherwise fallthrough */
default:
src_format = GST_FORMAT_BYTES;
@ -923,7 +1043,7 @@ gst_mpeg_parse_get_src_event_masks (GstPad * pad)
}
static gboolean
index_seek (GstPad * pad, GstEvent * event, guint64 * offset, gint64 * scr)
index_seek (GstPad * pad, GstEvent * event, gint64 * offset, gint64 * scr)
{
GstIndexEntry *entry;
GstMPEGParse *mpeg_parse = GST_MPEG_PARSE (gst_pad_get_parent (pad));
@ -951,7 +1071,7 @@ index_seek (GstPad * pad, GstEvent * event, guint64 * offset, gint64 * scr)
}
static gboolean
normal_seek (GstPad * pad, GstEvent * event, guint64 * offset, gint64 * scr)
normal_seek (GstPad * pad, GstEvent * event, gint64 * offset, gint64 * scr)
{
gboolean res;
GstFormat format;
@ -973,18 +1093,24 @@ normal_seek (GstPad * pad, GstEvent * event, guint64 * offset, gint64 * scr)
return res;
}
#endif
gboolean
gst_mpeg_parse_handle_src_event (GstPad * pad, GstEvent * event)
{
gboolean res = FALSE;
#if 0
GstMPEGParse *mpeg_parse = GST_MPEG_PARSE (gst_pad_get_parent (pad));
#endif
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_SEEK:
{
guint64 desired_offset;
guint64 expected_scr;
#if 0
/* FIXME: port the seeking to gstreamer 0.9 */
gint64 desired_offset;
gint64 expected_scr = 0;
/* first try to use the index if we have one */
if (mpeg_parse->index)
@ -996,9 +1122,10 @@ gst_mpeg_parse_handle_src_event (GstPad * pad, GstEvent * event)
if (!res)
break;
GST_DEBUG ("sending seek to %" G_GINT64_FORMAT " expected SCR: %"
G_GUINT64_FORMAT " (%" G_GUINT64_FORMAT ")", desired_offset,
expected_scr, MPEGTIME_TO_GSTTIME (expected_scr));
GST_DEBUG ("from pad %s: sending seek to %" G_GINT64_FORMAT
" expected SCR: %" G_GUINT64_FORMAT " (%" G_GUINT64_FORMAT ")",
gst_object_get_name (GST_OBJECT (pad)), desired_offset, expected_scr,
MPEGTIME_TO_GSTTIME (expected_scr));
if (gst_bytestream_seek (mpeg_parse->packetize->bs, desired_offset,
GST_SEEK_METHOD_SET)) {
@ -1006,9 +1133,11 @@ gst_mpeg_parse_handle_src_event (GstPad * pad, GstEvent * event)
mpeg_parse->scr_pending = TRUE;
mpeg_parse->next_scr = expected_scr;
mpeg_parse->current_scr = MP_INVALID_SCR;
mpeg_parse->current_ts = GST_CLOCK_TIME_NONE;
mpeg_parse->adjust = 0;
res = TRUE;
}
#endif
break;
}
default:
@ -1027,7 +1156,7 @@ gst_mpeg_parse_change_state (GstElement * element, GstStateChange transition)
case GST_STATE_CHANGE_READY_TO_PAUSED:
if (!mpeg_parse->packetize) {
mpeg_parse->packetize =
gst_mpeg_packetize_new (mpeg_parse->sinkpad,
gst_mpeg_packetize_new (mpeg_parse->srcpad,
GST_MPEG_PACKETIZE_SYSTEM);
}
/* initialize parser state */
@ -1065,6 +1194,12 @@ gst_mpeg_parse_get_property (GObject * object, guint prop_id, GValue * value,
case ARG_DO_ADJUST:
g_value_set_boolean (value, mpeg_parse->do_adjust);
break;
case ARG_BYTE_OFFSET:
g_value_set_uint64 (value, mpeg_parse->byte_offset);
break;
case ARG_TIME_OFFSET:
g_value_set_uint64 (value, mpeg_parse->current_ts);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@ -1090,6 +1225,9 @@ gst_mpeg_parse_set_property (GObject * object, guint prop_id,
mpeg_parse->do_adjust = g_value_get_boolean (value);
mpeg_parse->adjust = 0;
break;
case ARG_BYTE_OFFSET:
mpeg_parse->byte_offset = g_value_get_uint64 (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@ -1120,21 +1258,6 @@ gst_mpeg_parse_get_index (GstElement * element)
return mpeg_parse->index;
}
static gboolean
gst_mpeg_parse_release_locks (GstElement * element)
{
GstMPEGParse *mpeg_parse;
mpeg_parse = GST_MPEG_PARSE (element);
if (mpeg_parse->id) {
/* FIXME */
//gst_clock_id_unlock (mpeg_parse->id);
}
return TRUE;
}
gboolean
gst_mpeg_parse_plugin_init (GstPlugin * plugin)
{

View file

@ -22,7 +22,6 @@
#define __MPEG_PARSE_H__
#include <gst/gst.h>
#include <gst/bytestream/bytestream.h>
#include "gstmpegpacketize.h"
G_BEGIN_DECLS
@ -80,6 +79,8 @@ G_BEGIN_DECLS
guint64 next_scr; /* Expected next SCR. */
guint64 bytes_since_scr; /* Bytes since current_scr */
GstClockTime current_ts; /* Current TS corresponding to SCR */
gboolean do_adjust; /* If false, send discont events on SCR
* jumps
*/
@ -98,6 +99,8 @@ G_BEGIN_DECLS
GstIndex *index;
gint index_id;
guint64 byte_offset;
};
struct _GstMPEGParseClass
@ -111,11 +114,16 @@ G_BEGIN_DECLS
gboolean (*parse_pes) (GstMPEGParse * parse, GstBuffer * buffer);
/* process events */
void (*handle_discont) (GstMPEGParse * parse, GstEvent * event);
GstFlowReturn (*handle_discont) (GstMPEGParse * parse, GstEvent * event);
/* optional method to send out the data */
void (*send_data) (GstMPEGParse * parse, GstData * data, GstClockTime time);
void (*send_discont) (GstMPEGParse * parse, GstClockTime time);
GstFlowReturn (*send_buffer) (GstMPEGParse * parse, GstBuffer * buffer, GstClockTime time);
GstFlowReturn (*process_event) (GstMPEGParse * parse, GstEvent * event, GstClockTime time);
GstFlowReturn (*send_discont) (GstMPEGParse * parse, GstClockTime time);
GstFlowReturn (*send_event) (GstMPEGParse * parse, GstEvent *event, GstClockTime time);
/* signals */
void (*reached_offset) (GstMPEGParse *mpeg_parse, GstClockTime timeval);
};
GType gst_mpeg_parse_get_type (void);
@ -126,7 +134,6 @@ G_BEGIN_DECLS
gboolean gst_mpeg_parse_convert_src (GstPad * pad, GstFormat src_format,
gint64 src_value, GstFormat * dest_format, gint64 * dest_value);
const GstEventMask *gst_mpeg_parse_get_src_event_masks (GstPad * pad);
gboolean gst_mpeg_parse_handle_src_event (GstPad * pad, GstEvent * event);
const GstQueryType *gst_mpeg_parse_get_src_query_types (GstPad * pad);
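A minimal sketch, not from the patch, of how a subclass (for instance the demuxer) is expected to plug into the reworked vmethods now that they return GstFlowReturn; MyDemuxClass and the my_demux_* names are placeholders.

static GstFlowReturn
my_demux_send_buffer (GstMPEGParse * parse, GstBuffer * buffer,
    GstClockTime time)
{
  /* a real demuxer would route the buffer to one of its stream pads here */
  GST_BUFFER_TIMESTAMP (buffer) = time;
  return gst_pad_push (parse->srcpad, buffer);
}

static void
my_demux_class_init (MyDemuxClass * klass)
{
  GstMPEGParseClass *parse_class = GST_MPEG_PARSE_CLASS (klass);

  /* output vmethods now report flow errors back to the base class */
  parse_class->send_buffer = my_demux_send_buffer;
}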

View file

@ -34,13 +34,8 @@ plugin_init (GstPlugin * plugin)
* stack again and the first _init will be called more than once
* and wtay wants to use dlclose at some point in the future */
if (!gst_library_load ("gstbytestream"))
return FALSE;
if (!gst_mpeg_parse_plugin_init (plugin) ||
!gst_mpeg_demux_plugin_init (plugin) ||
!gst_dvd_demux_plugin_init (plugin) ||
!gst_rfc2250_enc_plugin_init (plugin))
if (!gst_mpeg_parse_plugin_init (plugin) ||
!gst_mpeg_demux_plugin_init (plugin) ||
!gst_dvd_demux_plugin_init (plugin) /*||
!gst_rfc2250_enc_plugin_init (plugin) */ )
return FALSE;
return TRUE;

View file

@ -160,8 +160,6 @@ gst_rfc2250_enc_init (GstRFC2250Enc * rfc2250_enc)
/* zero counters (should be done at RUNNING?) */
rfc2250_enc->bit_rate = 0;
rfc2250_enc->MTU = 3048;
GST_OBJECT_FLAG_SET (rfc2250_enc, GST_ELEMENT_EVENT_AWARE);
}
static void
@ -328,6 +326,7 @@ gst_rfc2250_enc_get_property (GObject * object, guint prop_id, GValue * value,
{
GstRFC2250Enc *rfc2250_enc;
/* it's not null if we got it, but it might not be ours */
rfc2250_enc = GST_RFC2250_ENC (object);
switch (prop_id) {

View file

@ -23,7 +23,6 @@
#include <gst/gst.h>
#include <gst/bytestream/bytestream.h>
#include "gstmpegpacketize.h"
G_BEGIN_DECLS