Port mpeg1videoparse to 0.10 and give it rank SECONDARY-1, so that it's below existing decoders.

Original commit message from CVS:
* configure.ac:
* gst/mpeg1videoparse/Makefile.am:
* gst/mpeg1videoparse/gstmp1videoparse.c:
* gst/mpeg1videoparse/gstmp1videoparse.h:
* gst/mpeg1videoparse/mp1videoparse.vcproj:
* gst/mpegvideoparse/Makefile.am:
* gst/mpegvideoparse/mpegpacketiser.c: (mpeg_packetiser_init),
(mpeg_packetiser_free), (mpeg_packetiser_add_buf),
(mpeg_packetiser_flush), (mpeg_find_start_code),
(get_next_free_block), (complete_current_block),
(append_to_current_block), (start_new_block), (handle_packet),
(collect_packets), (mpeg_packetiser_handle_eos),
(mpeg_packetiser_get_block), (mpeg_packetiser_next_block):
* gst/mpegvideoparse/mpegpacketiser.h:
* gst/mpegvideoparse/mpegvideoparse.c: (mpegvideoparse_get_type),
(gst_mpegvideoparse_base_init), (gst_mpegvideoparse_class_init),
(mpv_parse_reset), (gst_mpegvideoparse_init),
(gst_mpegvideoparse_dispose), (set_par_from_dar),
(set_fps_from_code), (mpegvideoparse_parse_seq),
(gst_mpegvideoparse_time_code), (gst_mpegvideoparse_flush),
(mpegvideoparse_drain_avail), (gst_mpegvideoparse_chain),
(mpv_parse_sink_event), (gst_mpegvideoparse_change_state),
(plugin_init):
* gst/mpegvideoparse/mpegvideoparse.h:
* gst/mpegvideoparse/mpegvideoparse.vcproj:
Port mpeg1videoparse to 0.10 and give it rank SECONDARY-1, so
that it's below existing decoders.
Rename it to mpegvideoparse to reflect that it handles MPEG-1 and
MPEG-2 now.
Re-write the parsing code so that it collects packets differently
and timestamps Picture packets correctly.
Add a list of FIXMEs at the top.
This commit is contained in:
Jan Schmidt 2007-03-15 20:48:08 +00:00
parent 03ab530205
commit 1f7755917a
11 changed files with 1321 additions and 680 deletions

View file

@ -1,3 +1,41 @@
2007-03-15 Jan Schmidt <thaytan@mad.scientist.com>
* configure.ac:
* gst/mpeg1videoparse/Makefile.am:
* gst/mpeg1videoparse/gstmp1videoparse.c:
* gst/mpeg1videoparse/gstmp1videoparse.h:
* gst/mpeg1videoparse/mp1videoparse.vcproj:
* gst/mpegvideoparse/Makefile.am:
* gst/mpegvideoparse/mpegpacketiser.c: (mpeg_packetiser_init),
(mpeg_packetiser_free), (mpeg_packetiser_add_buf),
(mpeg_packetiser_flush), (mpeg_find_start_code),
(get_next_free_block), (complete_current_block),
(append_to_current_block), (start_new_block), (handle_packet),
(collect_packets), (mpeg_packetiser_handle_eos),
(mpeg_packetiser_get_block), (mpeg_packetiser_next_block):
* gst/mpegvideoparse/mpegpacketiser.h:
* gst/mpegvideoparse/mpegvideoparse.c: (mpegvideoparse_get_type),
(gst_mpegvideoparse_base_init), (gst_mpegvideoparse_class_init),
(mpv_parse_reset), (gst_mpegvideoparse_init),
(gst_mpegvideoparse_dispose), (set_par_from_dar),
(set_fps_from_code), (mpegvideoparse_parse_seq),
(gst_mpegvideoparse_time_code), (gst_mpegvideoparse_flush),
(mpegvideoparse_drain_avail), (gst_mpegvideoparse_chain),
(mpv_parse_sink_event), (gst_mpegvideoparse_change_state),
(plugin_init):
* gst/mpegvideoparse/mpegvideoparse.h:
* gst/mpegvideoparse/mpegvideoparse.vcproj:
Port mpeg1videoparse to 0.10 and give it rank SECONDARY-1, so
that it's below existing decoders.
Rename it to mpegvideoparse to reflect that it handles MPEG-1 and
MPEG-2 now.
Re-write the parsing code so that it collects packets differently
and timestamps Picture packets correctly.
Add a list of FIXMEs at the top.
2007-03-14 Stefan Kost <ensonic@users.sf.net>
* tests/icles/equalizer-test.c: (equalizer_set_band_value),

View file

@ -88,6 +88,7 @@ GST_PLUGINS_ALL="\
interleave \
librfb \
modplug \
mpegvideoparse \
multifile \
mve \
nsf \
@ -1061,6 +1062,7 @@ gst/interleave/Makefile
gst/librfb/Makefile
gst/modplug/Makefile
gst/modplug/libmodplug/Makefile
gst/mpegvideoparse/Makefile
gst/multifile/Makefile
gst/mve/Makefile
gst/nsf/Makefile

View file

@ -1,9 +0,0 @@
# Build rules for the 0.8-era mpeg1videoparse plugin (removed by this commit).
plugin_LTLIBRARIES = libgstmp1videoparse.la
libgstmp1videoparse_la_SOURCES = gstmp1videoparse.c
libgstmp1videoparse_la_CFLAGS = $(GST_CFLAGS)
# NOTE(review): LIBADD is empty, so the module is not explicitly linked
# against any GStreamer libraries here -- verify $(GST_LIBS) comes in
# some other way (the replacement Makefile below adds it explicitly).
libgstmp1videoparse_la_LIBADD =
libgstmp1videoparse_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
# Internal header, not installed
noinst_HEADERS = gstmp1videoparse.h

View file

@ -1,585 +0,0 @@
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
/*#define GST_DEBUG_ENABLED */
#include "gstmp1videoparse.h"
/* Start codes. */
#define SEQ_START_CODE 0x000001b3
#define GOP_START_CODE 0x000001b8
#define PICTURE_START_CODE 0x00000100
#define SLICE_MIN_START_CODE 0x00000101
#define SLICE_MAX_START_CODE 0x000001af
#define EXT_START_CODE 0x000001b5
#define USER_START_CODE 0x000001b2
#define SEQUENCE_ERROR_CODE 0x000001b4
#define SEQ_END_CODE 0x000001b7
/* elementfactory information */
static const GstElementDetails mpeg1videoparse_details =
GST_ELEMENT_DETAILS ("MPEG-1 video parser",
"Codec/Parser/Video",
"Parses and frames MPEG 1 video streams, provides seek",
"Wim Taymans <wim.taymans@chello.be>");
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/mpeg, "
"mpegversion = (int) 1, "
"systemstream = (boolean) false, "
"width = (int) [ 16, 4096 ], "
"height = (int) [ 16, 4096 ], "
"pixel_width = (int) [ 1, 255 ], "
"pixel_height = (int) [ 1, 255 ], " "framerate = (double) [ 0, MAX ]")
);
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/mpeg, "
"mpegversion = (int) 1, " "systemstream = (boolean) false")
);
/* Mp1VideoParse signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
{
ARG_0
/* FILL ME */
};
static void gst_mp1videoparse_class_init (Mp1VideoParseClass * klass);
static void gst_mp1videoparse_base_init (Mp1VideoParseClass * klass);
static void gst_mp1videoparse_init (Mp1VideoParse * mp1videoparse);
static void gst_mp1videoparse_chain (GstPad * pad, GstData * _data);
static void gst_mp1videoparse_real_chain (Mp1VideoParse * mp1videoparse,
GstBuffer * buf, GstPad * outpad);
static void gst_mp1videoparse_flush (Mp1VideoParse * mp1videoparse);
static GstStateChangeReturn
gst_mp1videoparse_change_state (GstElement * element,
GstStateChange transition);
static GstElementClass *parent_class = NULL;
/*static guint gst_mp1videoparse_signals[LAST_SIGNAL] = { 0 }; */
/* Return the GType for the MPEG-1 video parser element, registering it
 * on first use and caching it thereafter. */
GType
mp1videoparse_get_type (void)
{
  static GType type = 0;

  if (type == 0) {
    static const GTypeInfo info = {
      sizeof (Mp1VideoParseClass),
      (GBaseInitFunc) gst_mp1videoparse_base_init,
      NULL,
      (GClassInitFunc) gst_mp1videoparse_class_init,
      NULL,
      NULL,
      sizeof (Mp1VideoParse),
      0,
      (GInstanceInitFunc) gst_mp1videoparse_init,
    };

    type = g_type_register_static (GST_TYPE_ELEMENT, "Mp1VideoParse",
        &info, 0);
  }

  return type;
}
/* base_init: attach the static pad templates and the element details
 * to the element class. */
static void
gst_mp1videoparse_base_init (Mp1VideoParseClass * klass)
{
  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);

  gst_element_class_add_pad_template (gstelement_class,
      gst_static_pad_template_get (&src_factory));
  gst_element_class_add_pad_template (gstelement_class,
      gst_static_pad_template_get (&sink_factory));
  gst_element_class_set_details (gstelement_class, &mpeg1videoparse_details);
}
/* class_init: hook up the state-change virtual method and remember the
 * parent class for chaining. */
static void
gst_mp1videoparse_class_init (Mp1VideoParseClass * klass)
{
  GstElementClass *element_class = (GstElementClass *) klass;

  parent_class = g_type_class_peek_parent (klass);
  element_class->change_state = gst_mp1videoparse_change_state;
}
/* Instance init: create the sink and source pads and reset all parsing
 * state to "nothing seen yet". */
static void
gst_mp1videoparse_init (Mp1VideoParse * mp1videoparse)
{
  mp1videoparse->sinkpad =
      gst_pad_new_from_template (gst_static_pad_template_get (&sink_factory),
      "sink");
  gst_element_add_pad (GST_ELEMENT (mp1videoparse), mp1videoparse->sinkpad);
  gst_pad_set_chain_function (mp1videoparse->sinkpad, gst_mp1videoparse_chain);
  mp1videoparse->srcpad =
      gst_pad_new_from_template (gst_static_pad_template_get (&src_factory),
      "src");
  gst_element_add_pad (GST_ELEMENT (mp1videoparse), mp1videoparse->srcpad);
  /* Caps are set explicitly once a sequence header has been parsed */
  gst_pad_use_explicit_caps (mp1videoparse->srcpad);
  mp1videoparse->partialbuf = NULL;
  mp1videoparse->need_resync = FALSE;
  /* First pushed buffer is preceded by a discont event */
  mp1videoparse->need_discont = TRUE;
  mp1videoparse->last_pts = GST_CLOCK_TIME_NONE;
  mp1videoparse->picture_in_buffer = 0;
  /* -1 / 0. mark width, height, fps and aspect ratio as "unknown" */
  mp1videoparse->width = mp1videoparse->height = -1;
  mp1videoparse->fps = mp1videoparse->asr = 0.;
}
/* Parse an MPEG-1 sequence header (the bytes following the 0x000001B3
 * start code in 'buf') and, if any stream property changed, set new
 * caps on the source pad. */
static void
mp1videoparse_parse_seq (Mp1VideoParse * mp1videoparse, GstBuffer * buf)
{
  gint width, height, asr_idx, fps_idx;
  /* Aspect ratios indexed by the 4-bit aspect_ratio_information field
   * (index 0 is invalid; made static const so the tables are not
   * rebuilt on every call). */
  static const gfloat asr_table[] = { 0., 1.,
    0.6735, 0.7031, 0.7615, 0.8055, 0.8437,
    0.8935, 0.9157, 0.9815, 1.0255, 1.0695,
    1.0950, 1.1575, 1.2015
  };
  /* Frame rates indexed by the 4-bit frame_rate_code field */
  static const gfloat fps_table[] = { 0., 24. / 1.001, 24., 25.,
    30. / 1.001, 30.,
    50., 60. / 1.001, 60.
  };
  guint32 n = GST_READ_UINT32_BE (GST_BUFFER_DATA (buf));

  /* First 32 bits of the header: 12 bits width, 12 bits height,
   * 4 bits aspect code, 4 bits frame-rate code */
  width = (n & 0xfff00000) >> 20;
  height = (n & 0x000fff00) >> 8;
  asr_idx = (n & 0x000000f0) >> 4;
  fps_idx = (n & 0x0000000f) >> 0;

  if (fps_idx >= 9 || fps_idx <= 0)
    fps_idx = 3;                /* fall back to 25 fps for invalid codes */
  if (asr_idx >= 15 || asr_idx <= 0)
    asr_idx = 1;                /* no aspect ratio: assume square pixels */

  if (asr_table[asr_idx] != mp1videoparse->asr ||
      fps_table[fps_idx] != mp1videoparse->fps ||
      width != mp1videoparse->width || height != mp1videoparse->height) {
    GstCaps *caps;
    gint p_w, p_h;

    mp1videoparse->asr = asr_table[asr_idx];
    mp1videoparse->fps = fps_table[fps_idx];
    mp1videoparse->width = width;
    mp1videoparse->height = height;

    /* Approximate the float aspect value with an integer pixel w/h pair */
    p_w = (asr_table[asr_idx] < 1.0) ? (100 / asr_table[asr_idx]) : 1;
    p_h = (asr_table[asr_idx] > 1.0) ? (100 * asr_table[asr_idx]) : 1;

    caps = gst_caps_new_simple ("video/mpeg",
        "systemstream", G_TYPE_BOOLEAN, FALSE,
        "mpegversion", G_TYPE_INT, 1,
        "width", G_TYPE_INT, width,
        "height", G_TYPE_INT, height,
        "framerate", G_TYPE_DOUBLE, fps_table[fps_idx],
        "pixel_width", G_TYPE_INT, p_w, "pixel_height", G_TYPE_INT, p_h, NULL);

    /* FIX: GST_PTR_FORMAT is a printf format *modifier* and must be
     * preceded by '%' to form a valid conversion specifier; the old
     * code concatenated it bare into the format string. */
    GST_DEBUG ("New mpeg1videoparse caps: %" GST_PTR_FORMAT, caps);

    gst_pad_set_explicit_caps (mp1videoparse->srcpad, caps);
  }
}
/* Return TRUE if 'head' is a start code at which framing may resume.
 * A sequence start code is additionally parsed to update the caps. */
static gboolean
mp1videoparse_valid_sync (Mp1VideoParse * mp1videoparse, guint32 head,
    GstBuffer * buf)
{
  if (head == SEQ_START_CODE) {
    GstBuffer *seq_payload;

    /* Hand the header payload (after the 4-byte start code) to the
     * sequence parser */
    seq_payload = gst_buffer_create_sub (buf, 4, GST_BUFFER_SIZE (buf) - 4);
    mp1videoparse_parse_seq (mp1videoparse, seq_payload);
    gst_buffer_unref (seq_payload);
    return TRUE;
  }

  if (head == GOP_START_CODE || head == PICTURE_START_CODE ||
      head == USER_START_CODE || head == EXT_START_CODE)
    return TRUE;

  /* Any slice start code is also a valid sync point */
  return (head >= SLICE_MIN_START_CODE && head <= SLICE_MAX_START_CODE);
}
/* Scan 'buf' for the first sequence or GOP start code and return the
 * byte offset where its 00 00 01 prefix begins, or -1 if none found. */
static gint
mp1videoparse_find_next_gop (Mp1VideoParse * mp1videoparse, GstBuffer * buf)
{
  guchar *bytes = GST_BUFFER_DATA (buf);
  gulong len = GST_BUFFER_SIZE (buf);
  gulong pos;
  gint zeros = 0;
  gboolean seen_marker = FALSE;

  for (pos = 0; pos < len; pos++) {
    guchar b = bytes[pos];

    if (b == 0) {
      /* Part of a potential 00 00 01 prefix */
      zeros++;
    } else if (b == 1 && zeros >= 2) {
      /* Completed a start-code prefix; next byte is the code value */
      zeros = 0;
      seen_marker = TRUE;
    } else if (seen_marker) {
      if (b == (SEQ_START_CODE & 0xff) || b == (GOP_START_CODE & 0xff))
        return (gint) pos - 3;
      zeros = 0;
      seen_marker = FALSE;
    } else {
      zeros = 0;
    }
  }
  return -1;
}
/* Convert the 32 bits at 'gop' (the GOP header time_code field) into a
 * GStreamer timestamp in nanoseconds, using 'fps' for the frame part. */
static guint64
gst_mp1videoparse_time_code (guchar * gop, gfloat fps)
{
  guint32 tc = GST_READ_UINT32_BE (gop);

  return ((((tc & 0xfc000000) >> 26) * 3600 * GST_SECOND) +     /* hours */
      (((tc & 0x03f00000) >> 20) * 60 * GST_SECOND) +   /* minutes */
      (((tc & 0x0007e000) >> 13) * GST_SECOND) +        /* seconds */
      (((tc & 0x00001f80) >> 7) * GST_SECOND / fps));   /* frames */
}
/* Drop any partially accumulated picture data and arrange to
 * resynchronise on the next sequence/GOP boundary. */
static void
gst_mp1videoparse_flush (Mp1VideoParse * mp1videoparse)
{
  GST_DEBUG ("mp1videoparse: flushing");

  if (mp1videoparse->partialbuf != NULL) {
    gst_buffer_unref (mp1videoparse->partialbuf);
    mp1videoparse->partialbuf = NULL;
  }
  mp1videoparse->picture_in_buffer = 0;
  mp1videoparse->need_resync = TRUE;
  mp1videoparse->in_flush = TRUE;
}
/* Sink pad chain function: validate the arguments and hand everything
 * over to the worker routine. */
static void
gst_mp1videoparse_chain (GstPad * pad, GstData * _data)
{
  Mp1VideoParse *parse;

  g_return_if_fail (pad != NULL);
  g_return_if_fail (GST_IS_PAD (pad));
  g_return_if_fail (_data != NULL);

  parse = GST_MP1VIDEOPARSE (GST_OBJECT_PARENT (pad));
  gst_mp1videoparse_real_chain (parse, GST_BUFFER (_data), parse->srcpad);
}
/* Core parsing routine. Accumulates incoming data in partialbuf, scans
 * for picture boundaries, and pushes one buffer per coded picture on
 * 'outpad'. Timestamps come from upstream buffer timestamps or, at a
 * GOP boundary, from the GOP time code. Events arriving in-band on the
 * chain function (0.8 style) are handled here too. */
static void
gst_mp1videoparse_real_chain (Mp1VideoParse * mp1videoparse, GstBuffer * buf,
    GstPad * outpad)
{
  guchar *data;
  gulong size, offset = 0;
  GstBuffer *outbuf;
  gint sync_state;
  gboolean have_sync;
  guchar sync_byte;
  guint32 head;
  gint sync_pos;
  guint64 time_stamp;
  GstBuffer *temp;

  time_stamp = GST_BUFFER_TIMESTAMP (buf);

  /* Events may be delivered through the chain function in 0.8 */
  if (GST_IS_EVENT (buf)) {
    GstEvent *event = GST_EVENT (buf);

    switch (GST_EVENT_TYPE (event)) {
      case GST_EVENT_DISCONTINUOUS:
        mp1videoparse->need_discont = TRUE;
        /* fall-through */
      case GST_EVENT_FLUSH:
        gst_mp1videoparse_flush (mp1videoparse);
        break;
      case GST_EVENT_EOS:
        gst_mp1videoparse_flush (mp1videoparse);
        gst_event_ref (event);
        gst_pad_push (outpad, GST_DATA (event));
        gst_element_set_eos (GST_ELEMENT (mp1videoparse));
        break;
      default:
        GST_DEBUG ("Unhandled event type %d", GST_EVENT_TYPE (event));
        break;
    }
    gst_event_unref (event);
    return;
  }

  /* Merge the new buffer with any leftover data from the previous call;
   * 'offset' restarts the scan where the previous one stopped. */
  if (mp1videoparse->partialbuf) {
    GstBuffer *merge;

    offset = GST_BUFFER_SIZE (mp1videoparse->partialbuf);
    merge = gst_buffer_merge (mp1videoparse->partialbuf, buf);
    gst_buffer_unref (mp1videoparse->partialbuf);
    gst_buffer_unref (buf);
    mp1videoparse->partialbuf = merge;
  } else {
    mp1videoparse->partialbuf = buf;
    offset = 0;
  }

  data = GST_BUFFER_DATA (mp1videoparse->partialbuf);
  size = GST_BUFFER_SIZE (mp1videoparse->partialbuf);

  GST_DEBUG ("mp1videoparse: received buffer of %ld bytes %" G_GINT64_FORMAT,
      size, GST_BUFFER_TIMESTAMP (buf));

  do {
    data = GST_BUFFER_DATA (mp1videoparse->partialbuf);
    size = GST_BUFFER_SIZE (mp1videoparse->partialbuf);

    /* NOTE(review): assumes at least 4 bytes are available here */
    head = GST_READ_UINT32_BE (data);

    GST_DEBUG ("mp1videoparse: head is %08x", (unsigned int) head);

    /* If we are not at a valid start code (or were asked to resync),
     * skip ahead to the next sequence/GOP start code */
    if (!mp1videoparse_valid_sync (mp1videoparse, head,
            mp1videoparse->partialbuf) || mp1videoparse->need_resync) {
      sync_pos =
          mp1videoparse_find_next_gop (mp1videoparse,
          mp1videoparse->partialbuf);
      if (sync_pos >= 0) {
        mp1videoparse->need_resync = FALSE;
        GST_DEBUG ("mp1videoparse: found new gop at %d", sync_pos);

        if (sync_pos != 0) {
          /* Drop everything before the sync point */
          temp =
              gst_buffer_create_sub (mp1videoparse->partialbuf, sync_pos,
              size - sync_pos);
          g_assert (temp != NULL);
          gst_buffer_unref (mp1videoparse->partialbuf);
          mp1videoparse->partialbuf = temp;
          data = GST_BUFFER_DATA (mp1videoparse->partialbuf);
          size = GST_BUFFER_SIZE (mp1videoparse->partialbuf);
          offset = 0;
        }

        head = GST_READ_UINT32_BE (data);
        /* re-call this function so that if we hadn't already, we can
         * now read the sequence header and parse video properties,
         * set caps, stream data, be happy, bla, bla, bla... */
        if (!mp1videoparse_valid_sync (mp1videoparse, head,
                mp1videoparse->partialbuf))
          g_error ("Found sync but no valid sync point at pos 0x0");
      } else {
        GST_DEBUG ("mp1videoparse: could not sync");
        gst_buffer_unref (mp1videoparse->partialbuf);
        mp1videoparse->partialbuf = NULL;
        return;
      }
    }

    /* A picture is already pending: the incoming timestamp belongs to it */
    if (mp1videoparse->picture_in_buffer == 1 &&
        time_stamp != GST_CLOCK_TIME_NONE) {
      mp1videoparse->last_pts = time_stamp;
    }

    sync_state = 0;
    have_sync = FALSE;
    GST_DEBUG ("mp1videoparse: searching sync");
    while (offset < size - 1) {
      sync_byte = *(data + offset);
      if (sync_byte == 0) {
        sync_state++;
      } else if ((sync_byte == 1) && (sync_state >= 2)) {
        GST_DEBUG ("mp1videoparse: code 0x000001%02x", data[offset + 1]);
        if (data[offset + 1] == (PICTURE_START_CODE & 0xff)) {
          mp1videoparse->picture_in_buffer++;
          if (mp1videoparse->picture_in_buffer == 1) {
            if (time_stamp != GST_CLOCK_TIME_NONE) {
              mp1videoparse->last_pts = time_stamp;
            }
            sync_state = 0;
          } else if (mp1videoparse->picture_in_buffer == 2) {
            /* Second picture found: the first one is complete */
            have_sync = TRUE;
            break;
          } else {
            GST_DEBUG ("mp1videoparse: %d in buffer",
                mp1videoparse->picture_in_buffer);
            g_assert_not_reached ();
          }
        }
        /* A new sequence (or GOP) is a valid sync too. Note that the
         * sequence header should be put in the next buffer, not here. */
        else if (data[offset + 1] == (SEQ_START_CODE & 0xFF) ||
            data[offset + 1] == (GOP_START_CODE & 0xFF)) {
          if (mp1videoparse->picture_in_buffer == 0 &&
              data[offset + 1] == (GOP_START_CODE & 0xFF)) {
            /* FIX: the 4 time-code bytes follow the start code at the
             * current scan position (data[offset] is the 0x01 byte,
             * data[offset + 1] the code value). The old code read from
             * the absolute buffer offset 2 (&data[2]), which is only
             * correct by coincidence when the GOP starts the buffer. */
            mp1videoparse->last_pts =
                gst_mp1videoparse_time_code (&data[offset + 2],
                mp1videoparse->fps);
          } else if (mp1videoparse->picture_in_buffer == 1) {
            have_sync = TRUE;
            break;
          } else {
            g_assert (mp1videoparse->picture_in_buffer == 0);
          }
        }
        /* end-of-sequence is a valid sync point and should be included
         * in the current picture, not the next. */
        else if (data[offset + 1] == (SEQ_END_CODE & 0xFF)) {
          if (mp1videoparse->picture_in_buffer == 1) {
            /* Include the full end code in the outgoing picture */
            offset += 4;
            have_sync = TRUE;
            break;
          } else {
            g_assert (mp1videoparse->picture_in_buffer == 0);
          }
        } else
          sync_state = 0;
      }
      /* something else... */
      else
        sync_state = 0;
      /* go down the buffer */
      offset++;
    }
    if (have_sync) {
      /* 'offset' points at the 0x01 of the next start code; back up to
       * its first zero byte so the next picture keeps the whole code */
      offset -= 2;
      GST_DEBUG ("mp1videoparse: synced");

      outbuf = gst_buffer_create_sub (mp1videoparse->partialbuf, 0, offset);
      g_assert (outbuf != NULL);
      GST_BUFFER_TIMESTAMP (outbuf) = mp1videoparse->last_pts;
      GST_BUFFER_DURATION (outbuf) = GST_SECOND / mp1videoparse->fps;
      mp1videoparse->last_pts += GST_BUFFER_DURATION (outbuf);

      if (mp1videoparse->in_flush) {
        /* FIXME, send a flush event here */
        mp1videoparse->in_flush = FALSE;
      }

      if (GST_PAD_CAPS (outpad) != NULL) {
        if (mp1videoparse->need_discont &&
            GST_BUFFER_TIMESTAMP_IS_VALID (outbuf)) {
          GstEvent *event = gst_event_new_discontinuous (FALSE,
              GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (outbuf),
              GST_FORMAT_UNDEFINED);

          GST_DEBUG ("prepending discont event");
          gst_pad_push (outpad, GST_DATA (event));
          mp1videoparse->need_discont = FALSE;
        }
        GST_DEBUG ("mp1videoparse: pushing %d bytes %" G_GUINT64_FORMAT,
            GST_BUFFER_SIZE (outbuf), GST_BUFFER_TIMESTAMP (outbuf));
        gst_pad_push (outpad, GST_DATA (outbuf));
        GST_DEBUG ("mp1videoparse: pushing done");
      } else {
        GST_DEBUG ("No capsnego yet, delaying buffer push");
        gst_buffer_unref (outbuf);
      }
      mp1videoparse->picture_in_buffer = 0;

      /* Keep any remaining bytes for the next picture */
      if (size != offset) {
        temp =
            gst_buffer_create_sub (mp1videoparse->partialbuf, offset,
            size - offset);
      } else {
        temp = NULL;
      }
      gst_buffer_unref (mp1videoparse->partialbuf);
      mp1videoparse->partialbuf = temp;
      offset = 0;
    } else {
      if (time_stamp != GST_CLOCK_TIME_NONE)
        mp1videoparse->last_pts = time_stamp;
      return;
    }
  } while (mp1videoparse->partialbuf != NULL);
}
/* State-change handler: reset all stream state when leaving PAUSED for
 * READY, then chain up to the parent class. */
static GstStateChangeReturn
gst_mp1videoparse_change_state (GstElement * element, GstStateChange transition)
{
  Mp1VideoParse *parse;

  g_return_val_if_fail (GST_IS_MP1VIDEOPARSE (element),
      GST_STATE_CHANGE_FAILURE);
  parse = GST_MP1VIDEOPARSE (element);

  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY) {
    /* Forget everything we learned about the current stream */
    gst_mp1videoparse_flush (parse);
    parse->need_discont = TRUE;
    parse->width = parse->height = -1;
    parse->fps = parse->asr = 0.;
  }

  if (GST_ELEMENT_CLASS (parent_class)->change_state)
    return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  return GST_STATE_CHANGE_SUCCESS;
}
/* Plugin entry point: register the mpeg1videoparse element with
 * GST_RANK_NONE. */
static gboolean
plugin_init (GstPlugin * plugin)
{
  return gst_element_register (plugin, "mpeg1videoparse",
      GST_RANK_NONE, GST_TYPE_MP1VIDEOPARSE);
}

/* Plugin descriptor consumed by the GStreamer registry */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    "mpeg1videoparse",
    "MPEG-1 video parser",
    plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)

View file

@ -1,76 +0,0 @@
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __MP1VIDEOPARSE_H__
#define __MP1VIDEOPARSE_H__

#include <gst/gst.h>

#ifdef __cplusplus
extern "C" {
#endif /* __cplusplus */

/* Standard GObject boilerplate macros for the Mp1VideoParse element */
#define GST_TYPE_MP1VIDEOPARSE \
  (mp1videoparse_get_type())
#define GST_MP1VIDEOPARSE(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MP1VIDEOPARSE,Mp1VideoParse))
#define GST_MP1VIDEOPARSE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MP1VIDEOPARSE,Mp1VideoParseClass))
#define GST_IS_MP1VIDEOPARSE(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MP1VIDEOPARSE))
#define GST_IS_MP1VIDEOPARSE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MP1VIDEOPARSE))

typedef struct _Mp1VideoParse Mp1VideoParse;
typedef struct _Mp1VideoParseClass Mp1VideoParseClass;

struct _Mp1VideoParse {
  GstElement element;

  GstPad *sinkpad, *srcpad;

  GstBuffer *partialbuf;        /* previous buffer (if carryover) */
  gulong next_buffer_offset;
  gboolean need_resync;         /* scan for next seq/GOP before output */
  gboolean in_flush;            /* a flush happened; FIXME event pending */
  gboolean need_discont;        /* prepend a discont event to next push */
  guint64 last_pts;             /* timestamp for the pending picture */
  gint picture_in_buffer;       /* pictures accumulated in partialbuf */
  gint width, height;           /* -1 until a sequence header is seen */
  gfloat fps, asr;              /* 0. until a sequence header is seen */
};

struct _Mp1VideoParseClass {
  GstElementClass parent_class;
};

/* FIX: GST_TYPE_MP1VIDEOPARSE expands to a call to
 * mp1videoparse_get_type() (the function actually defined in
 * gstmp1videoparse.c), which was never declared in this header, so
 * users of the macro relied on an implicit declaration. Declare it. */
GType mp1videoparse_get_type (void);

/* Kept for source compatibility; NOTE(review): no definition of this
 * symbol exists in gstmp1videoparse.c. */
GType gst_mp1videoparse_get_type (void);

#ifdef __cplusplus
}
#endif /* __cplusplus */

#endif /* __MP1VIDEOPARSE_H__ */

View file

@ -0,0 +1,9 @@
# Build rules for the ported 0.10 mpegvideoparse plugin.
plugin_LTLIBRARIES = libgstmpegvideoparse.la
libgstmpegvideoparse_la_SOURCES = mpegvideoparse.c mpegpacketiser.c
libgstmpegvideoparse_la_CFLAGS = $(GST_CFLAGS)
# Links against base + core libs; presumably base is needed for
# GstAdapter used by the packetiser -- confirm.
libgstmpegvideoparse_la_LIBADD = $(GST_BASE_LIBS) $(GST_LIBS)
libgstmpegvideoparse_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
# Internal headers, not installed
noinst_HEADERS = mpegvideoparse.h mpegpacketiser.h

View file

@ -0,0 +1,438 @@
/* GStreamer
* Copyright (C) <2007> Jan Schmidt <thaytan@mad.scientist.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <string.h>
/* The purpose of the packetiser is to parse the incoming buffers into 'blocks'
* that consist of the stream split at certain packet boundaries. It splits into
* a new block at the start of a GOP, Picture or Sequence packet.
* A GOP or sequence header always starts a new block. A Picture
* header starts a new block only if the previous packet was not a GOP -
* otherwise it is accumulated with the GOP */
#include "mpegpacketiser.h"
static void collect_packets (MPEGPacketiser * p, GstBuffer * buf);
/* Initialise a caller-allocated packetiser: create the byte adapter,
 * start with no block storage, and reset all parsing state. */
void
mpeg_packetiser_init (MPEGPacketiser * p)
{
  p->adapter = gst_adapter_new ();
  p->n_blocks = 0;
  p->blocks = NULL;
  /* flush also resets the sync word, offsets and pending timestamps */
  mpeg_packetiser_flush (p);
}
/* Release the resources owned by the packetiser (the adapter and the
 * block-info array); the structure itself belongs to the caller. */
void
mpeg_packetiser_free (MPEGPacketiser * p)
{
  g_free (p->blocks);
  gst_object_unref (p->adapter);
}
/* Feed one input buffer to the packetiser: push its bytes into the
 * adapter, remember its timestamp for the next picture packet whose
 * sync word starts inside it, scan it for start codes, and advance the
 * running byte offset. */
void
mpeg_packetiser_add_buf (MPEGPacketiser * p, GstBuffer * buf)
{
  /* Add the buffer to our pool */
  gst_adapter_push (p->adapter, buf);
  /* Store the timestamp to apply to the next picture that gets collected */
  if (p->cur_buf_ts != GST_CLOCK_TIME_NONE) {
    /* Keep the previous timestamp too, for sync words that straddled
     * the buffer boundary (see handle_packet) */
    p->prev_buf_ts = p->cur_buf_ts;
  }
  p->cur_buf_ts = GST_BUFFER_TIMESTAMP (buf);
  /* read what new packets we have in this buffer */
  collect_packets (p, buf);
  /* tracked_offset counts all bytes seen since the last flush */
  p->tracked_offset += GST_BUFFER_SIZE (buf);
}
/* Throw away all buffered data and parsing state. The allocated block
 * array itself is kept for reuse. */
void
mpeg_packetiser_flush (MPEGPacketiser * p)
{
  gst_adapter_clear (p->adapter);
  p->adapter_offset = 0;

  /* No partial sync word collected yet */
  p->sync_word = 0xffffffff;
  p->tracked_offset = 0;
  p->prev_sync_packet = MPEG_PACKET_NONE;

  /* No block in progress and no completed blocks queued */
  p->first_block_idx = -1;
  p->cur_block_idx = -1;

  /* Forget any pending buffer timestamps */
  p->cur_buf_ts = GST_CLOCK_TIME_NONE;
  p->prev_buf_ts = GST_CLOCK_TIME_NONE;
}
/* Scan [cur, end) for an MPEG start-code prefix (0x000001).
 * *sync_word accumulates the most recent bytes so a prefix split across
 * buffers is still detected across calls; callers initialise it to
 * 0xffffffff. Returns a pointer to the byte FOLLOWING the prefix (i.e.
 * the start-code value byte), or NULL if no prefix completes in the
 * range. */
guint8 *
mpeg_find_start_code (guint32 * sync_word, guint8 * cur, guint8 * end)
{
  guint32 code = *sync_word;
  if (G_UNLIKELY (cur == NULL))
    return NULL;
  while (cur < end) {
    /* After the shift, code == 0x00000100 iff the last three bytes
     * consumed were 00 00 01 */
    code <<= 8;
    if (code == 0x00000100) {
      /* Reset the sync word accumulator */
      *sync_word = 0xffffffff;
      return cur;
    }
    /* Add the next available byte to the collected sync word */
    code |= *cur++;
  }
  /* Ran out of data: save the partial sync word for the next call */
  *sync_word = code;
  return NULL;
}
/* When we need to reallocate the blocks array, grow it by this much */
#define BLOCKS_INCREMENT 5
/* Get the index of the next unfilled block in the buffer. May need to grow
 * the array first */
/* The blocks array is used as a ring: cur_block_idx is the slot being
 * filled (-1 if none), first_block_idx the oldest completed block
 * (-1 if none). */
gint
get_next_free_block (MPEGPacketiser * p)
{
  gint next;
  gboolean grow_array = FALSE;
  /* Get a free block from the blocks array. May need to grow
   * the array first */
  if (p->n_blocks == 0) {
    /* No storage allocated at all yet */
    grow_array = TRUE;
    next = 0;
  } else {
    if (G_UNLIKELY (p->cur_block_idx == -1)) {
      /* No block in progress: start from slot 0 */
      next = 0;
    } else {
      next = p->cur_block_idx;
      /* If advancing the ring would collide with the oldest completed
       * block, the ring is full and must be enlarged */
      if (((next + 1) % p->n_blocks) == p->first_block_idx)
        grow_array = TRUE;
    }
  }
  if (grow_array) {
    gint old_n_blocks = p->n_blocks;
    p->n_blocks += BLOCKS_INCREMENT;
    p->blocks = g_realloc (p->blocks, sizeof (MPEGBlockInfo) * p->n_blocks);
    /* Now we may need to move some data up to the end of the array, if the
     * cur_block_idx is before the first_block_idx in the array. */
    if (p->cur_block_idx < p->first_block_idx) {
#if 0
      g_print ("Moving %d blocks from idx %d to idx %d of %d\n",
          old_n_blocks - p->first_block_idx,
          p->first_block_idx, p->first_block_idx + BLOCKS_INCREMENT,
          p->n_blocks);
#endif
      /* Shift the wrapped tail to the new end of the array so the ring
       * ordering stays intact */
      memmove (p->blocks + p->first_block_idx + BLOCKS_INCREMENT,
          p->blocks + p->first_block_idx,
          sizeof (MPEGBlockInfo) * (old_n_blocks - p->first_block_idx));
      p->first_block_idx += BLOCKS_INCREMENT;
    }
  }
  return next;
}
/* Mark the current block as complete */
/* Close the in-progress block at byte 'offset' (exclusive), make it
 * visible to consumers via first_block_idx, and advance cur_block_idx
 * around the ring. */
static void
complete_current_block (MPEGPacketiser * p, guint64 offset)
{
  MPEGBlockInfo *block;
  if (G_UNLIKELY (p->cur_block_idx == -1))
    return;                     /* No block is in progress */
  /* If we're pointing at the first_block_idx, then we're about to re-complete
   * a previously completed buffer. Not allowed, because the array should have
   * been previously expanded to cope via a get_next_free_block call. */
  g_assert (p->cur_block_idx != p->first_block_idx);
  /* Get the appropriate entry from the blocks array */
  g_assert (p->blocks != NULL && p->cur_block_idx < p->n_blocks);
  block = p->blocks + p->cur_block_idx;
  /* Extend the block length to the current offset */
  g_assert (block->offset < offset);
  block->length = offset - block->offset;
#if 0
  g_print ("Completed block of type 0x%02x @ offset %" G_GUINT64_FORMAT
      " with size %u\n", block->first_pack_type, block->offset, block->length);
#endif
  /* If this is the first complete block, set first_block_idx to be this block */
  if (p->first_block_idx == -1)
    p->first_block_idx = p->cur_block_idx;
  /* Update the statistics regarding the packet we're handling */
  if (block->flags & MPEG_BLOCK_FLAG_PICTURE)
    p->n_pictures++;
  /* And advance the cur_block_idx ptr to the next slot */
  p->cur_block_idx = (p->cur_block_idx + 1) % p->n_blocks;
}
/* Accumulate the packet up to 'offset' into the current block
 * (discard if no block is in progress). Update the block info
 * to indicate what is in it. */
static void
append_to_current_block (MPEGPacketiser * p, guint64 offset, guint8 pack_type)
{
  MPEGBlockInfo *block;
  if (G_UNLIKELY (p->cur_block_idx == -1))
    return;                     /* No block in progress, drop this packet */
  /* Get the appropriate entry from the blocks array */
  g_assert (p->blocks != NULL && p->cur_block_idx < p->n_blocks);
  block = p->blocks + p->cur_block_idx;
  /* Extend the block length to the current offset */
  g_assert (block->offset < offset);
  block->length = offset - block->offset;
  /* Update flags */
  switch (pack_type) {
    case MPEG_PACKET_SEQUENCE:
      /* A sequence header can only be appended before any GOP or
       * picture has been added to this block */
      g_assert (!(block->flags &
              (MPEG_BLOCK_FLAG_GOP | MPEG_BLOCK_FLAG_PICTURE)));
      block->flags |= MPEG_BLOCK_FLAG_SEQUENCE;
      break;
    case MPEG_PACKET_GOP:
      block->flags |= MPEG_BLOCK_FLAG_GOP;
      break;
    case MPEG_PACKET_PICTURE:
      block->flags |= MPEG_BLOCK_FLAG_PICTURE;
      break;
    default:
      /* Other packet types leave the flags untouched */
      break;
  }
}
/* Complete the block in progress (if any) at 'offset' and begin a new
 * block there, with 'pack_type' as its first packet. Only sequence,
 * GOP and picture packets may open a block. */
static void
start_new_block (MPEGPacketiser * p, guint64 offset, guint8 pack_type)
{
  gint block_idx;
  MPEGBlockInfo *block;
  /* First, append data up to the start of this block to the current one, but
   * not including this packet info */
  complete_current_block (p, offset);
  block_idx = get_next_free_block (p);
  /* FIXME: Retrieve the appropriate entry from the blocks array */
  /* Get the appropriate entry from the blocks array */
  g_assert (p->blocks != NULL && block_idx < p->n_blocks);
  block = p->blocks + block_idx;
  /* Init the block */
  block->first_pack_type = pack_type;
  block->offset = offset;
  /* No timestamp until a picture packet supplies one (see handle_packet) */
  block->ts = GST_CLOCK_TIME_NONE;
  /* Initially, the length is 0. It grows as we encounter new sync headers */
  block->length = 0;
  switch (pack_type) {
    case MPEG_PACKET_SEQUENCE:
      block->flags = MPEG_BLOCK_FLAG_SEQUENCE;
      break;
    case MPEG_PACKET_GOP:
      block->flags = MPEG_BLOCK_FLAG_GOP;
      break;
    case MPEG_PACKET_PICTURE:
      block->flags = MPEG_BLOCK_FLAG_PICTURE;
      break;
    default:
      /* We don't start blocks with other packet types */
      g_assert_not_reached ();
  }
  /* Make this our current block */
  p->cur_block_idx = block_idx;
#if 0
  g_print ("Started new block in slot %d with first pack 0x%02x @ offset %"
      G_GUINT64_FORMAT "\n", block_idx, block->first_pack_type, block->offset);
#endif
}
/* Dispatch one start code found at byte 'offset' (since last flush):
 * sequence/GOP packets open a new block; a picture packet either opens
 * a block or joins a preceding GOP, and receives the pending buffer
 * timestamp; everything else is appended to the block in progress. */
static void
handle_packet (MPEGPacketiser * p, guint64 offset, guint8 pack_type)
{
  switch (pack_type) {
    case MPEG_PACKET_SEQUENCE:
    case MPEG_PACKET_GOP:
      /* Start a new block */
      start_new_block (p, offset, pack_type);
      p->prev_sync_packet = pack_type;
      break;
    case MPEG_PACKET_PICTURE:{
      MPEGBlockInfo *block;
      GstClockTime ts;
      /* Start a new block unless the previous sync packet was a GOP */
      if (p->prev_sync_packet != MPEG_PACKET_GOP) {
        start_new_block (p, offset, pack_type);
      } else {
        append_to_current_block (p, offset, pack_type);
      }
      p->prev_sync_packet = pack_type;
      /* We have a picture packet, apply any pending timestamp. The logic here
       * is that the timestamp on any incoming buffer needs to apply to the next
       * picture packet where the _first_byte_ of the sync word starts after the
       * packet boundary. We track the ts from the current buffer and a
       * previous buffer in order to handle this correctly. It would still be
       * possible to get it wrong if there was a PES packet smaller than 3 bytes
       * but anyone that does that can suck it. */
      if (offset >= p->tracked_offset) {
        /* sync word started within this buffer - take the cur ts */
        ts = p->cur_buf_ts;
        p->cur_buf_ts = GST_CLOCK_TIME_NONE;
        p->prev_buf_ts = GST_CLOCK_TIME_NONE;
      } else {
        /* sync word started in a previous buffer - take the old ts */
        ts = p->prev_buf_ts;
        p->prev_buf_ts = GST_CLOCK_TIME_NONE;
      }
      /* If we didn't drop the packet, set the timestamp on it */
      if (G_LIKELY (p->cur_block_idx != -1)) {
        block = p->blocks + p->cur_block_idx;
        block->ts = ts;
#if 0
        g_print ("Picture @ offset %" G_GINT64_FORMAT " has ts %"
            GST_TIME_FORMAT "\n", block->offset, GST_TIME_ARGS (block->ts));
#endif
      }
      break;
    }
    default:
      /* Non-sync packets just extend whatever block is open */
      append_to_current_block (p, offset, pack_type);
      break;
  }
}
/* Scan the given buffer for MPEG start codes and hand each one found to
 * handle_packet(). Partial sync words carry over between buffers via
 * p->sync_word, so codes split across buffer boundaries are still seen. */
static void
collect_packets (MPEGPacketiser * p, GstBuffer * buf)
{
  guint8 *data = GST_BUFFER_DATA (buf);
  guint8 *limit = data + GST_BUFFER_SIZE (buf);
  guint8 *pos;

  for (pos = mpeg_find_start_code (&(p->sync_word), data, limit);
      pos != NULL; pos = mpeg_find_start_code (&(p->sync_word), pos, limit)) {
    /* pos points at the final byte of the sync word, so subtract 3 to get
     * the offset of its first byte, measured from the last flush */
    guint64 pkt_offset = p->tracked_offset + (pos - data - 3);

    handle_packet (p, pkt_offset, *pos);
  }
}
/* EOS: no more data will arrive, so close out whatever has been
 * accumulated into the current block (if any data was seen at all). */
void
mpeg_packetiser_handle_eos (MPEGPacketiser * p)
{
  if (p->tracked_offset == 0)
    return;                     /* Nothing collected since the last flush */

  complete_current_block (p, p->tracked_offset);
}
/* Returns a pointer to the block info for the completed block at the
 * head of the queue, and extracts the bytes from the adapter if requested.
 * Caller should move to the next block by calling mpeg_packetiser_next_block
 * afterward.
 *
 * When 'buf' is non-NULL and the block's bytes are still in the adapter,
 * *buf receives a buffer holding exactly the block's data, stamped with
 * the block's timestamp. Returns NULL if no completed block is queued. */
MPEGBlockInfo *
mpeg_packetiser_get_block (MPEGPacketiser * p, GstBuffer ** buf)
{
  MPEGBlockInfo *block;

  if (buf)
    *buf = NULL;

  if (G_UNLIKELY (p->first_block_idx == -1)) {
    return NULL;                /* No complete blocks to discard */
  }

  /* p->first_block_idx can't get set != -1 unless some block storage got
   * allocated */
  g_assert (p->blocks != NULL && p->n_blocks != 0);
  block = p->blocks + p->first_block_idx;

  /* Can only get the buffer out once, so we'll return NULL on later attempts
   * (adapter_offset > block->offset means the data was already taken) */
  if (buf != NULL && block->length > 0 && p->adapter_offset <= block->offset) {
    /* Kick excess data out of the adapter */
    if (p->adapter_offset < block->offset) {
      guint64 to_flush = block->offset - p->adapter_offset;

      g_assert (gst_adapter_available (p->adapter) >= to_flush);
      gst_adapter_flush (p->adapter, to_flush);
      p->adapter_offset += to_flush;
    }
    g_assert (gst_adapter_available (p->adapter) >= block->length);
    *buf = gst_adapter_take_buffer (p->adapter, block->length);
    p->adapter_offset += block->length;
    GST_BUFFER_TIMESTAMP (*buf) = block->ts;
  }
  return block;
}
/* Advance the first_block pointer to discard the completed block at the
 * head of the queue. Keeps the picture count in sync. No-op when the
 * queue is empty. */
void
mpeg_packetiser_next_block (MPEGPacketiser * p)
{
  MPEGBlockInfo *head;
  gint new_first;

  head = mpeg_packetiser_get_block (p, NULL);
  if (G_UNLIKELY (head == NULL))
    return;                     /* No complete blocks to discard */

  /* Update the statistics regarding the block we're discarding */
  if (head->flags & MPEG_BLOCK_FLAG_PICTURE)
    p->n_pictures--;

  new_first = (p->first_block_idx + 1) % p->n_blocks;
  if (new_first == p->cur_block_idx) {
    /* That was the last queued block */
    p->first_block_idx = -1;
  } else {
    p->first_block_idx = new_first;
  }
}

View file

@ -0,0 +1,116 @@
/* GStreamer
* Copyright (C) <2007> Jan Schmidt <thaytan@mad.scientist.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __MPEGPACKETISER_H__
#define __MPEGPACKETISER_H__
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
typedef struct MPEGPacketiser MPEGPacketiser;
typedef struct MPEGBlockInfo MPEGBlockInfo;

/* Packet ID codes for different packet types we
 * care about (the byte following the 0x000001 start code) */
#define MPEG_PACKET_PICTURE      0x00
#define MPEG_PACKET_SLICE_MIN    0x01
#define MPEG_PACKET_SLICE_MAX    0xaf
#define MPEG_PACKET_SEQUENCE     0xb3
#define MPEG_PACKET_EXTENSION    0xb5
#define MPEG_PACKET_SEQUENCE_END 0xb7
#define MPEG_PACKET_GOP          0xb8
/* Sentinel: no sync packet seen yet (prev_sync_packet after a flush) */
#define MPEG_PACKET_NONE         0xff

/* Extension codes we care about */
#define MPEG_PACKET_EXT_SEQUENCE         0x01
#define MPEG_PACKET_EXT_SEQUENCE_DISPLAY 0x02
#define MPEG_PACKET_EXT_QUANT_MATRIX     0x03

/* Flags indicating what type of packets are in this block, some are mutually
 * exclusive though - ie, sequence packs are accumulated separately. GOP &
 * Picture may occur together or separately */
#define MPEG_BLOCK_FLAG_SEQUENCE 0x01
#define MPEG_BLOCK_FLAG_PICTURE  0x02
#define MPEG_BLOCK_FLAG_GOP      0x04

/* Describes one queued block of elementary stream data */
struct MPEGBlockInfo {
  guint8 first_pack_type;       /* packet code that opened this block */
  guint8 flags;                 /* MPEG_BLOCK_FLAG_* bits */
  guint64 offset;               /* byte offset since the last flush */
  guint32 length;               /* length of the block in bytes */
  GstClockTime ts;              /* timestamp for the block, or NONE */
};
struct MPEGPacketiser {
  GstAdapter *adapter;

  /* position in the adapter */
  guint64 adapter_offset;

  /* Sync word accumulator */
  guint32 sync_word;

  /* Offset since the last flush (unrelated to incoming buffer offsets) */
  guint64 tracked_offset;

  /* Number of picture packets currently collected */
  guint n_pictures;

  /* 2 sets of timestamps + offsets used to mark picture blocks
   * The first is used when a sync word overlaps packet boundaries
   * and comes from some buffer in the past. The next one comes from current
   * buffer. These are only ever valid when handling streams from a demuxer,
   * of course. */
  GstClockTime prev_buf_ts;
  GstClockTime cur_buf_ts;

  /* MPEG id of the previous SEQUENCE, PICTURE or GOP packet.
     MPEG_PACKET_NONE after a flush */
  guint8 prev_sync_packet;

  /* Indices into the blocks array. cur_block_idx is where we're writing and
     indicates the end of the populated block entries.
     first_block_idx is the read ptr. It may be -1 to indicate there are no
     complete blocks available */
  gint cur_block_idx;
  gint first_block_idx;

  /* An array of MPEGBlockInfo entries, used as a growable circular buffer
   * indexed by cur_block_idx and bounded by first_block_idx */
  gint n_blocks;
  MPEGBlockInfo *blocks;
};
/* Scan for the next 0x000001xx start code between cur and end; returns a
 * pointer to the last byte of the code found, or NULL. Accumulator state
 * is kept in *sync_word across calls. */
guint8 *mpeg_find_start_code (guint32 *sync_word, guint8 *cur, guint8 *end);

void mpeg_packetiser_init (MPEGPacketiser *p);
void mpeg_packetiser_free (MPEGPacketiser *p);

/* Feed a buffer to the packetiser; it takes ownership of the buffer */
void mpeg_packetiser_add_buf (MPEGPacketiser *p, GstBuffer *buf);
/* Complete the current block when no more data will arrive */
void mpeg_packetiser_handle_eos (MPEGPacketiser *p);
/* Discard all collected state and data */
void mpeg_packetiser_flush (MPEGPacketiser *p);

/* Get the blockinfo and buffer for the block at the head of the queue */
MPEGBlockInfo *mpeg_packetiser_get_block (MPEGPacketiser *p, GstBuffer **buf);
/* Advance to the next data block */
void mpeg_packetiser_next_block (MPEGPacketiser *p);
#endif

View file

@ -0,0 +1,629 @@
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
* Copyright (C) <2007> Jan Schmidt <thaytan@mad.scientist.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <string.h>
#include "mpegvideoparse.h"
/* FIXME: there are still some things to do in this element.
* + Handle Sequence Display Extension to output the display size
* rather than the encoded size.
* + Collect a list of regions and the sequence headers that apply
* to each region so that we properly handle SEQUENCE_END followed
* by a new sequence.
* + At least detect when the sequence changes and error out instead.
* + Do all the other stuff (documentation, tests) to get it into
* ugly or good.
* + low priority:
* - handle seeking in raw elementary streams
* - calculate timestamps for all un-timestamped frames, taking into
* account frame re-ordering. Doing this probably requires introducing
* an extra end-to-end delay, however so might not be really desirable.
*/
GST_DEBUG_CATEGORY_STATIC (mpv_parse_debug);
#define GST_CAT_DEFAULT mpv_parse_debug

/* elementfactory information */
static GstElementDetails mpegvideoparse_details =
GST_ELEMENT_DETAILS ("MPEG video elementary stream parser",
    "Codec/Parser/Video",
    "Parses and frames MPEG-1 and MPEG-2 elementary video streams",
    "Wim Taymans <wim.taymans@chello.be>\n"
    "Jan Schmidt <thaytan@mad.scientist.com>");

/* Output: parsed/framed MPEG-1/2 elementary video with width, height,
 * framerate and pixel-aspect-ratio taken from the sequence header */
static GstStaticPadTemplate src_template =
GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/mpeg, "
        "mpegversion = (int) [ 1, 2 ], "
        "parsed = (boolean) true, "
        "systemstream = (boolean) false, "
        "width = (int) [ 16, 4096 ], "
        "height = (int) [ 16, 4096 ], "
        "pixel-aspect-ratio = (fraction) [ 0/1, MAX ], "
        "framerate = (fraction) [ 0/1, MAX ]")
    );

/* Input: unparsed MPEG-1/2 elementary video */
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("video/mpeg, "
        "mpegversion = (int) [ 1, 2 ], "
        "parsed = (boolean) false, " "systemstream = (boolean) false")
    );

/* MpegVideoParse signals and args */
enum
{
  /* FILL ME */
  LAST_SIGNAL
};

enum
{
  ARG_0
      /* FILL ME */
};

/* Forward declarations for the GType/vfunc plumbing below */
static void gst_mpegvideoparse_class_init (MpegVideoParseClass * klass);
static void gst_mpegvideoparse_base_init (MpegVideoParseClass * klass);
static void gst_mpegvideoparse_init (MpegVideoParse * mpegvideoparse);
static void gst_mpegvideoparse_dispose (MpegVideoParse * mpegvideoparse);

static GstFlowReturn gst_mpegvideoparse_chain (GstPad * pad, GstBuffer * buf);
static gboolean mpv_parse_sink_event (GstPad * pad, GstEvent * event);
static void gst_mpegvideoparse_flush (MpegVideoParse * mpegvideoparse);
static GstStateChangeReturn
gst_mpegvideoparse_change_state (GstElement * element,
    GstStateChange transition);

static GstElementClass *parent_class = NULL;

/*static guint gst_mpegvideoparse_signals[LAST_SIGNAL] = { 0 }; */
/* Standard GObject boilerplate: register (on first call) and return the
 * MpegVideoParse GType. The unguarded static check matches the usual
 * GStreamer 0.10 pattern. */
GType
mpegvideoparse_get_type (void)
{
  static GType mpegvideoparse_type = 0;

  if (!mpegvideoparse_type) {
    static const GTypeInfo mpegvideoparse_info = {
      sizeof (MpegVideoParseClass),
      (GBaseInitFunc) gst_mpegvideoparse_base_init,
      NULL,
      (GClassInitFunc) gst_mpegvideoparse_class_init,
      NULL,
      NULL,
      sizeof (MpegVideoParse),
      0,
      (GInstanceInitFunc) gst_mpegvideoparse_init,
    };

    mpegvideoparse_type =
        g_type_register_static (GST_TYPE_ELEMENT, "MpegVideoParse",
        &mpegvideoparse_info, 0);
  }
  return mpegvideoparse_type;
}
/* Class base-init: install the element details and both static pad
 * templates on the element class. */
static void
gst_mpegvideoparse_base_init (MpegVideoParseClass * klass)
{
  GstElementClass *eclass = GST_ELEMENT_CLASS (klass);

  gst_element_class_set_details (eclass, &mpegvideoparse_details);

  gst_element_class_add_pad_template (eclass,
      gst_static_pad_template_get (&src_template));
  gst_element_class_add_pad_template (eclass,
      gst_static_pad_template_get (&sink_template));
}
/* Class init: remember the parent class and override the dispose and
 * change_state virtual functions. */
static void
gst_mpegvideoparse_class_init (MpegVideoParseClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstElementClass *gstelement_class = (GstElementClass *) klass;

  parent_class = g_type_class_peek_parent (klass);

  gobject_class->dispose = (GObjectFinalizeFunc) (gst_mpegvideoparse_dispose);
  gstelement_class->change_state = gst_mpegvideoparse_change_state;
}
/* Reset the stored sequence header info to "nothing seen yet"
 * (mpeg_version 0 means no sequence header has been parsed). */
static void
mpv_parse_reset (MpegVideoParse * mpegvideoparse)
{
  MPEGSeqHdr *hdr = &mpegvideoparse->seq_hdr;

  hdr->mpeg_version = 0;
  hdr->width = -1;
  hdr->height = -1;
  hdr->fps_n = 0;
  hdr->fps_d = 1;
  hdr->par_w = 0;
  hdr->par_h = 1;
}
/* Instance init: create and configure the sink and source pads, set up
 * the packetiser helper and reset the sequence header state. */
static void
gst_mpegvideoparse_init (MpegVideoParse * mpegvideoparse)
{
  mpegvideoparse->sinkpad =
      gst_pad_new_from_static_template (&sink_template, "sink");
  gst_pad_set_chain_function (mpegvideoparse->sinkpad,
      gst_mpegvideoparse_chain);
  gst_pad_set_event_function (mpegvideoparse->sinkpad, mpv_parse_sink_event);
  gst_element_add_pad (GST_ELEMENT (mpegvideoparse), mpegvideoparse->sinkpad);

  /* Source pad uses fixed caps, derived later from the sequence header */
  mpegvideoparse->srcpad =
      gst_pad_new_from_static_template (&src_template, "src");
  gst_pad_use_fixed_caps (mpegvideoparse->srcpad);
  gst_element_add_pad (GST_ELEMENT (mpegvideoparse), mpegvideoparse->srcpad);

  mpeg_packetiser_init (&mpegvideoparse->packer);

  mpv_parse_reset (mpegvideoparse);
}
/* GObject dispose: release the packetiser's resources, drop our ref to
 * the stored sequence header buffer, then chain up to the parent class.
 * Fixed: the previous version never chained up, skipping the parent's
 * (GstElement/GObject) dispose handling. */
void
gst_mpegvideoparse_dispose (MpegVideoParse * mpegvideoparse)
{
  mpeg_packetiser_free (&mpegvideoparse->packer);
  /* gst_buffer_replace with NULL unrefs and clears the stored buffer */
  gst_buffer_replace (&mpegvideoparse->seq_hdr_buf, NULL);

  G_OBJECT_CLASS (parent_class)->dispose (G_OBJECT (mpegvideoparse));
}
/* Set the Pixel Aspect Ratio in our hdr from a DAR code in the data.
 *
 * Pixel_width  = DAR_width  * display_vertical_size
 * Pixel_height = DAR_height * display_horizontal_size
 *
 * Code 0x01 and any unrecognised code mean square pixels. */
static void
set_par_from_dar (MPEGSeqHdr * hdr, guint8 asr_code)
{
  gint dar_w, dar_h;

  if (asr_code == 0x02) {       /* 3:4 DAR = 4:3 pixels */
    dar_w = 4;
    dar_h = 3;
  } else if (asr_code == 0x03) {        /* 9:16 DAR */
    dar_w = 16;
    dar_h = 9;
  } else if (asr_code == 0x04) {        /* 1:2.21 DAR */
    dar_w = 221;
    dar_h = 100;
  } else {                      /* Square pixels */
    hdr->par_w = hdr->par_h = 1;
    return;
  }

  hdr->par_w = dar_w * hdr->height;
  hdr->par_h = dar_h * hdr->width;
}
/* Translate an MPEG frame_rate_code into a framerate fraction in hdr.
 * Codes outside the table force a fallback of 30/1. */
static void
set_fps_from_code (MPEGSeqHdr * hdr, guint8 fps_code)
{
  static const gint fps_num[] =
      { 30, 24000, 24, 25, 30000, 30, 50, 60000, 60, 30 };
  static const gint fps_den[] = { 1, 1001, 1, 1, 1001, 1, 1, 1001, 1, 1 };

  if (fps_code >= 10) {
    /* Force a valid framerate */
    hdr->fps_n = 30;
    hdr->fps_d = 1;
    return;
  }

  hdr->fps_n = fps_num[fps_code];
  hdr->fps_d = fps_den[fps_code];
}
/* Parse a Sequence-header block: extract size, framerate, PAR and MPEG
 * version (adjusted by any MPEG-2 Sequence Extension) into a local
 * MPEGSeqHdr. If anything differs from the stored header, keep the raw
 * header bytes for codec_data and push new caps downstream.
 * Truncated headers are silently ignored. */
static void
mpegvideoparse_parse_seq (MpegVideoParse * mpegvideoparse, GstBuffer * buf)
{
  MPEGSeqHdr new_hdr;
  guint32 code;
  guint8 dar_idx, fps_idx;
  gint seq_data_length;
  guint32 sync_word = 0xffffffff;
  guint8 *cur, *end;
  gboolean constrained_flag;
  gboolean load_intra_flag;
  gboolean load_non_intra_flag;

  cur = GST_BUFFER_DATA (buf);
  end = GST_BUFFER_DATA (buf) + GST_BUFFER_SIZE (buf);

  if (GST_BUFFER_SIZE (buf) < 12)
    return;                     /* Too small to be a sequence header */
  seq_data_length = 12;         /* minimum length. */

  /* Skip the sync word */
  cur += 4;

  /* Parse the MPEG 1 bits */
  new_hdr.mpeg_version = 1;

  code = GST_READ_UINT32_BE (cur);
  /* 12 bits width, 12 bits height, 4 bits DAR code, 4 bits fps code */
  new_hdr.width = (code >> 20) & 0xfff;
  new_hdr.height = (code >> 8) & 0xfff;

  dar_idx = (code >> 4) & 0xf;
  set_par_from_dar (&new_hdr, dar_idx);
  fps_idx = code & 0xf;
  set_fps_from_code (&new_hdr, fps_idx);

  /* constrained_flag is read but currently unused.
   * NOTE(review): the load_* flag bits are not byte aligned in the
   * bitstream; this code assumes that skipping a 64-byte quantiser matrix
   * leaves the next flag at the same bit position of cur[7] - confirm
   * against ISO/IEC 11172-2 before touching this. */
  constrained_flag = (cur[7] >> 2) & 0x01;
  load_intra_flag = (cur[7] >> 1) & 0x01;
  if (load_intra_flag) {
    seq_data_length += 64;      /* 8 rows of 8 bytes of intra matrix */
    if (GST_BUFFER_SIZE (buf) < seq_data_length)
      return;
    cur += 64;
  }

  load_non_intra_flag = cur[7] & 0x01;
  if (load_non_intra_flag) {
    seq_data_length += 64;      /* 8 rows of 8 bytes of non-intra matrix */
    if (GST_BUFFER_SIZE (buf) < seq_data_length)
      return;
    cur += 64;
  }

  /* Skip the rest of the MPEG-1 header */
  cur += 8;

  /* Read MPEG-2 sequence extensions */
  cur = mpeg_find_start_code (&sync_word, cur, end);
  while (cur != NULL) {
    /* Cur points at the last byte of the start code */
    if (cur[0] == MPEG_PACKET_EXTENSION) {
      guint8 ext_code;

      if ((end - cur - 1) < 1)
        return;                 /* short extension packet extension */

      ext_code = cur[1] >> 4;
      if (ext_code == MPEG_PACKET_EXT_SEQUENCE) {
        /* Parse a Sequence Extension */
        guint8 horiz_size_ext, vert_size_ext;
        guint8 fps_n_ext, fps_d_ext;

        if ((end - cur - 1) < 7)
          /* need at least 10 bytes, minus 3 for the start code 000001 */
          return;

        horiz_size_ext = ((cur[2] << 1) & 0x02) | ((cur[3] >> 7) & 0x01);
        vert_size_ext = (cur[3] >> 5) & 0x03;
        fps_n_ext = (cur[6] >> 5) & 0x03;
        fps_d_ext = cur[6] & 0x1f;

        new_hdr.fps_n *= (fps_n_ext + 1);
        new_hdr.fps_d *= (fps_d_ext + 1);
        new_hdr.width += (horiz_size_ext << 12);
        new_hdr.height += (vert_size_ext << 12);
      }

      /* Any extension packet marks the stream as MPEG-2 */
      new_hdr.mpeg_version = 2;
    }

    cur = mpeg_find_start_code (&sync_word, cur, end);
  }

  if (new_hdr.par_w != mpegvideoparse->seq_hdr.par_w ||
      new_hdr.par_h != mpegvideoparse->seq_hdr.par_h ||
      new_hdr.fps_n != mpegvideoparse->seq_hdr.fps_n ||
      new_hdr.fps_d != mpegvideoparse->seq_hdr.fps_d ||
      new_hdr.width != mpegvideoparse->seq_hdr.width ||
      new_hdr.height != mpegvideoparse->seq_hdr.height ||
      new_hdr.mpeg_version != mpegvideoparse->seq_hdr.mpeg_version) {
    GstCaps *caps;
    GstBuffer *seq_buf;

    /* Store the entire sequence header + sequence header extension
       for output as codec_data */
    seq_buf = gst_buffer_copy (buf);
    gst_buffer_replace (&mpegvideoparse->seq_hdr_buf, seq_buf);
    /* seq_hdr_buf now holds a ref, so seq_buf stays valid for the caps
     * below after dropping the ref from gst_buffer_copy */
    gst_buffer_unref (seq_buf);

    /* And update the new_hdr into our stored version */
    memcpy (&mpegvideoparse->seq_hdr, &new_hdr, sizeof (MPEGSeqHdr));

    caps = gst_caps_new_simple ("video/mpeg",
        "systemstream", G_TYPE_BOOLEAN, FALSE,
        "parsed", G_TYPE_BOOLEAN, TRUE,
        "mpegversion", G_TYPE_INT, new_hdr.mpeg_version,
        "width", G_TYPE_INT, new_hdr.width,
        "height", G_TYPE_INT, new_hdr.height,
        "framerate", GST_TYPE_FRACTION, new_hdr.fps_n, new_hdr.fps_d,
        "pixel-aspect-ratio", GST_TYPE_FRACTION, new_hdr.par_w, new_hdr.par_h,
        "codec_data", GST_TYPE_BUFFER, seq_buf, NULL);

    GST_DEBUG ("New mpegvideoparse caps: %" GST_PTR_FORMAT, caps);
    /* NOTE(review): caps is not unreffed after gst_pad_set_caps() -
     * confirm against GStreamer 0.10 refcounting whether this leaks */
    gst_pad_set_caps (mpegvideoparse->srcpad, caps);
  }
}
#if 0
/* Compiled out / currently unused: convert a GOP time code into a
 * GstClockTime using the sequence header's framerate.
 * NOTE(review): the hours mask 0xfc000000 covers 6 bits and therefore
 * includes the drop_frame_flag bit as part of the hours value - confirm
 * against the GOP header layout before enabling this code. */
static guint64
gst_mpegvideoparse_time_code (guchar * gop, MPEGSeqHdr * seq_hdr)
{
  guint32 data = GST_READ_UINT32_BE (gop);
  guint64 seconds;
  guint8 frames;

  seconds = ((data & 0xfc000000) >> 26) * 3600; /* hours */
  seconds += ((data & 0x03f00000) >> 20) * 60;  /* minutes */
  seconds += (data & 0x0007e000) >> 13;         /* seconds */

  frames = (data & 0x00001f80) >> 7;

  return seconds * GST_SECOND + gst_util_uint64_scale_int (frames * GST_SECOND,
      seq_hdr->fps_d, seq_hdr->fps_n);
}
#endif
/* Reset the element's byte-offset tracking after a discontinuity.
 * NOTE(review): this only resets next_offset; the packetiser itself is
 * flushed separately where needed (see gst_mpegvideoparse_chain) -
 * confirm whether FLUSH_STOP should also call mpeg_packetiser_flush(). */
static void
gst_mpegvideoparse_flush (MpegVideoParse * mpegvideoparse)
{
  GST_DEBUG_OBJECT (mpegvideoparse, "mpegvideoparse: flushing");

  mpegvideoparse->next_offset = GST_BUFFER_OFFSET_NONE;
}
/* Pull completed blocks out of the packetiser and push them downstream.
 * Blocks arriving before the first sequence header are dropped, except
 * the sequence header itself which triggers caps negotiation via
 * mpegvideoparse_parse_seq(). Returns the first non-OK result from
 * gst_pad_push(), or GST_FLOW_OK. */
static GstFlowReturn
mpegvideoparse_drain_avail (MpegVideoParse * mpegvideoparse)
{
  MPEGBlockInfo *cur;
  GstBuffer *buf;
  GstFlowReturn res = GST_FLOW_OK;

  cur = mpeg_packetiser_get_block (&mpegvideoparse->packer, &buf);
  while (cur != NULL) {
    /* Handle the block */
    GST_LOG_OBJECT (mpegvideoparse,
        "Have block of size %u with pack_type 0x%02x and flags 0x%02x\n",
        cur->length, cur->first_pack_type, cur->flags);

    /* Don't start pushing out buffers until we've seen a sequence header */
    if (mpegvideoparse->seq_hdr.mpeg_version == 0) {
      if ((cur->flags & MPEG_BLOCK_FLAG_SEQUENCE) == 0) {
        if (buf) {
          GST_DEBUG_OBJECT (mpegvideoparse,
              "No sequence header yet. Dropping buffer of %u bytes",
              GST_BUFFER_SIZE (buf));
          gst_buffer_unref (buf);
          buf = NULL;
        }
      } else {
        /* Found a sequence header */
        mpegvideoparse_parse_seq (mpegvideoparse, buf);
      }
    }

    if (buf != NULL) {
      GST_DEBUG_OBJECT (mpegvideoparse,
          "mpegvideoparse: pushing buffer of %u bytes with ts %"
          GST_TIME_FORMAT, GST_BUFFER_SIZE (buf),
          GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));
      gst_buffer_set_caps (buf, GST_PAD_CAPS (mpegvideoparse->srcpad));
      /* First buffer after a discontinuity carries the DISCONT flag */
      if (mpegvideoparse->need_discont) {
        GST_DEBUG_OBJECT (mpegvideoparse,
            "setting discont flag on outgoing buffer");
        GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DISCONT);
        mpegvideoparse->need_discont = FALSE;
      }
      /* gst_pad_push takes ownership of buf */
      res = gst_pad_push (mpegvideoparse->srcpad, buf);
      if (res != GST_FLOW_OK)
        break;
    }

    /* Advance to the next data block */
    mpeg_packetiser_next_block (&mpegvideoparse->packer);
    cur = mpeg_packetiser_get_block (&mpegvideoparse->packer, &buf);
  };

  return res;
}
/* Sink pad chain function: detect discontinuities (DISCONT flag or a
 * byte-offset jump), drain pending data across them, then hand the new
 * buffer to the packetiser and push out completed blocks.
 *
 * Fixed: the early error-return path leaked the element reference taken
 * with gst_object_get_parent(). */
static GstFlowReturn
gst_mpegvideoparse_chain (GstPad * pad, GstBuffer * buf)
{
  MpegVideoParse *mpegvideoparse;
  GstFlowReturn res;
  gboolean have_discont;
  gint64 next_offset = GST_BUFFER_OFFSET_NONE;

  g_return_val_if_fail (pad != NULL, GST_FLOW_ERROR);
  g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);

  mpegvideoparse =
      GST_MPEGVIDEOPARSE (gst_object_get_parent (GST_OBJECT (pad)));

  GST_DEBUG_OBJECT (mpegvideoparse,
      "mpegvideoparse: received buffer of %u bytes with ts %"
      GST_TIME_FORMAT, GST_BUFFER_SIZE (buf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  /* If we have an offset, and the incoming offset doesn't match,
     or we have a discont, handle it first by flushing out data
     we have collected. */
  have_discont = GST_BUFFER_IS_DISCONT (buf);

  if (mpegvideoparse->next_offset != GST_BUFFER_OFFSET_NONE) {
    if (GST_BUFFER_OFFSET_IS_VALID (buf)) {
      if (mpegvideoparse->next_offset != GST_BUFFER_OFFSET (buf))
        have_discont = TRUE;
      next_offset = GST_BUFFER_OFFSET (buf) + GST_BUFFER_SIZE (buf);
    } else {
      next_offset = mpegvideoparse->next_offset + GST_BUFFER_SIZE (buf);
    }
  }
  /* NOTE(review): when next_offset is OFFSET_NONE it is never initialised
   * from an incoming valid buffer offset, so the offset-jump detection
   * above can never trigger - confirm whether tracking should start on
   * the first offset-carrying buffer. */

  if (have_discont) {
    GST_DEBUG_OBJECT (mpegvideoparse, "Have discont packet, draining data");
    mpegvideoparse->need_discont = TRUE;

    /* Treat the discont like EOS: complete and drain what we have, then
     * reset the packetiser before accepting the new data */
    mpeg_packetiser_handle_eos (&mpegvideoparse->packer);
    res = mpegvideoparse_drain_avail (mpegvideoparse);

    mpeg_packetiser_flush (&mpegvideoparse->packer);
    if (res != GST_FLOW_OK) {
      mpegvideoparse->next_offset = next_offset;
      gst_buffer_unref (buf);
      /* Drop the ref taken by gst_object_get_parent (previously leaked) */
      gst_object_unref (mpegvideoparse);
      return res;
    }
  }

  /* Takes ownership of the data */
  mpeg_packetiser_add_buf (&mpegvideoparse->packer, buf);

  /* And push out what we can */
  res = mpegvideoparse_drain_avail (mpegvideoparse);

  /* Update our offset */
  mpegvideoparse->next_offset = next_offset;

  gst_object_unref (mpegvideoparse);
  return res;
}
/* Sink pad event handler.
 * NEWSEGMENT: any non-TIME incoming segment is replaced with a default
 * open-ended TIME segment (BYTES->TIME conversion is a FIXME).
 * FLUSH_STOP: reset offset tracking. EOS: drain remaining blocks first.
 * All events are then forwarded with gst_pad_event_default(). */
static gboolean
mpv_parse_sink_event (GstPad * pad, GstEvent * event)
{
  gboolean res = TRUE;
  MpegVideoParse *mpegvideoparse =
      GST_MPEGVIDEOPARSE (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NEWSEGMENT:
    {
      gdouble rate, applied_rate;
      GstFormat format;
      gint64 start, stop, pos;
      gboolean update;

      gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
          &format, &start, &stop, &pos);

      if (format == GST_FORMAT_BYTES) {
        /* FIXME: Later, we might use a seek table to seek on elementary stream
           files, and that would allow byte-to-time conversions. It's not a high
           priority - most mpeg video is muxed and then the demuxer handles
           seeking. In the meantime, here's some commented out logic copied
           from mp3parse */
#if 0
        GstClockTime seg_start, seg_stop, seg_pos;

        /* stop time is allowed to be open-ended, but not start & pos */
        if (!mp3parse_bytepos_to_time (mp3parse, stop, &seg_stop))
          seg_stop = GST_CLOCK_TIME_NONE;
        if (mp3parse_bytepos_to_time (mp3parse, start, &seg_start) &&
            mp3parse_bytepos_to_time (mp3parse, pos, &seg_pos)) {
          gst_event_unref (event);
          event = gst_event_new_new_segment_full (update, rate, applied_rate,
              GST_FORMAT_TIME, seg_start, seg_stop, seg_pos);
          format = GST_FORMAT_TIME;
          GST_DEBUG_OBJECT (mp3parse, "Converted incoming segment to TIME. "
              "start = %" G_GINT64_FORMAT ", stop = %" G_GINT64_FORMAT
              "pos = %" G_GINT64_FORMAT, seg_start, seg_stop, seg_pos);
        }
#endif
      }

      if (format != GST_FORMAT_TIME) {
        /* Unknown incoming segment format. Output a default open-ended
         * TIME segment */
        gst_event_unref (event);
        event = gst_event_new_new_segment_full (update, rate, applied_rate,
            GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0);
      }

      /* Re-parse the (possibly replaced) event for the debug output below */
      gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
          &format, &start, &stop, &pos);

      /* NOTE(review): %lld is not a portable specifier for gint64 -
       * G_GINT64_FORMAT would be correct here */
      GST_DEBUG_OBJECT (mpegvideoparse,
          "Pushing newseg rate %g, applied rate %g, "
          "format %d, start %lld, stop %lld, pos %lld\n",
          rate, applied_rate, format, start, stop, pos);

      res = gst_pad_event_default (pad, event);
      break;
    }
    case GST_EVENT_FLUSH_STOP:
      gst_mpegvideoparse_flush (mpegvideoparse);
      res = gst_pad_event_default (pad, event);
      break;
    case GST_EVENT_EOS:
      /* Push any remaining buffers out, then flush. */
      mpeg_packetiser_handle_eos (&mpegvideoparse->packer);
      mpegvideoparse_drain_avail (mpegvideoparse);
      gst_mpegvideoparse_flush (mpegvideoparse);
      res = gst_pad_event_default (pad, event);
      break;
    default:
      res = gst_pad_event_default (pad, event);
      break;
  }

  gst_object_unref (mpegvideoparse);
  return res;
}
/* GstElement state change: let the parent class perform the transition
 * first, then reset our parsed-header state when going PAUSED->READY. */
static GstStateChangeReturn
gst_mpegvideoparse_change_state (GstElement * element,
    GstStateChange transition)
{
  MpegVideoParse *parse;
  GstStateChangeReturn ret;

  g_return_val_if_fail (GST_IS_MPEGVIDEOPARSE (element),
      GST_STATE_CHANGE_FAILURE);

  parse = GST_MPEGVIDEOPARSE (element);

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
    mpv_parse_reset (parse);

  return ret;
}
/* Register the element. Rank is SECONDARY-1 so that existing decoders
 * (which can also handle unparsed streams) are preferred over this
 * parser in autoplugging. */
static gboolean
plugin_init (GstPlugin * plugin)
{
  GST_DEBUG_CATEGORY_INIT (mpv_parse_debug, "mpegvideoparse", 0,
      "MPEG Video Parser");

  return gst_element_register (plugin, "mpegvideoparse",
      GST_RANK_SECONDARY - 1, GST_TYPE_MPEGVIDEOPARSE);
}

GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    "mpegvideoparse",
    "MPEG-1 and MPEG-2 video parser",
    plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)

View file

@ -0,0 +1,79 @@
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
* Copyright (C) <2007> Jan Schmidt <thaytan@mad.scientist.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __MPEGVIDEOPARSE_H__
#define __MPEGVIDEOPARSE_H__
#include <gst/gst.h>
#include "mpegpacketiser.h"
G_BEGIN_DECLS
/* Standard GObject cast/check macros for MpegVideoParse */
#define GST_TYPE_MPEGVIDEOPARSE \
  (mpegvideoparse_get_type())
#define GST_MPEGVIDEOPARSE(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MPEGVIDEOPARSE,MpegVideoParse))
#define GST_MPEGVIDEOPARSE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MPEGVIDEOPARSE,MpegVideoParseClass))
#define GST_IS_MPEGVIDEOPARSE(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MPEGVIDEOPARSE))
#define GST_IS_MPEGVIDEOPARSE_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MPEGVIDEOPARSE))

typedef struct _MpegVideoParse MpegVideoParse;
typedef struct _MpegVideoParseClass MpegVideoParseClass;
/* Parsed contents of an MPEG sequence header (plus the MPEG-2 sequence
 * extension adjustments, when present) */
typedef struct MPEGSeqHdr
{
  /* 0 for unknown, else 1 or 2 */
  guint8 mpeg_version;

  /* Pixel-Aspect Ratio from DAR code via set_par_from_dar */
  gint par_w, par_h;

  /* Width and Height of the video */
  gint width, height;

  /* Framerate */
  gint fps_n, fps_d;
} MPEGSeqHdr;
/* Instance structure for the mpegvideoparse element */
struct _MpegVideoParse {
  GstElement element;

  GstPad *sinkpad, *srcpad;

  /* Expected byte offset of the next incoming buffer, or
   * GST_BUFFER_OFFSET_NONE - used to detect discontinuities */
  gint64 next_offset;
  /* TRUE when the next pushed buffer must carry the DISCONT flag */
  gboolean need_discont;

  /* Info from the Sequence Header */
  MPEGSeqHdr seq_hdr;
  /* Raw sequence header bytes, output as codec_data in the caps */
  GstBuffer *seq_hdr_buf;

  /* Packetise helper */
  MPEGPacketiser packer;
};
struct _MpegVideoParseClass {
  GstElementClass parent_class;
};

/* Fixed: this prototype previously declared gst_mpegvideoparse_get_type(),
 * but the implementation in mpegvideoparse.c (and the
 * GST_TYPE_MPEGVIDEOPARSE macro above) use mpegvideoparse_get_type(),
 * leaving that function without a declaration. */
GType mpegvideoparse_get_type (void);
G_END_DECLS
#endif /* __MPEGVIDEOPARSE_H__ */

View file

@ -2,7 +2,7 @@
<VisualStudioProject
ProjectType="Visual C++"
Version="7.10"
Name="mp1videoparse"
Name="mpegvideoparse"
ProjectGUID="{979C216F-0ACF-4956-AE00-055A42D678E6}"
Keyword="Win32Proj">
<Platforms>
@ -20,7 +20,7 @@
Name="VCCLCompilerTool"
Optimization="0"
AdditionalIncludeDirectories="../../../gstreamer/win32;../../../gstreamer;../../../gstreamer/libs;../../../glib;../../../glib/glib;../../../glib/gmodule;&quot;../../gst-libs&quot;;../../../popt/include;../../../libxml2/include/libxml2"
PreprocessorDefinitions="WIN32;_DEBUG;_WINDOWS;_USRDLL;mp1videoparse_EXPORTS;HAVE_CONFIG_H;_USE_MATH_DEFINES"
PreprocessorDefinitions="WIN32;_DEBUG;_WINDOWS;_USRDLL;mpegvideoparse_EXPORTS;HAVE_CONFIG_H;_USE_MATH_DEFINES"
MinimalRebuild="TRUE"
BasicRuntimeChecks="3"
RuntimeLibrary="3"
@ -33,15 +33,15 @@
<Tool
Name="VCLinkerTool"
AdditionalDependencies="glib-2.0.lib gmodule-2.0.lib gthread-2.0.lib gobject-2.0.lib libgstreamer.lib gstbytestream.lib iconv.lib intl.lib"
OutputFile="$(OutDir)/gstmp1videoparse.dll"
OutputFile="$(OutDir)/mpegvideoparse.dll"
LinkIncremental="2"
AdditionalLibraryDirectories="../../../gstreamer/win32/Debug;../../../glib/glib;../../../glib/gmodule;../../../glib/gthread;../../../glib/gobject;../../../gettext/lib;../../../libiconv/lib"
ModuleDefinitionFile=""
GenerateDebugInformation="TRUE"
ProgramDatabaseFile="$(OutDir)/mp1videoparse.pdb"
ProgramDatabaseFile="$(OutDir)/mpegvideoparse.pdb"
SubSystem="2"
OptimizeReferences="2"
ImportLibrary="$(OutDir)/gstmp1videoparse.lib"
ImportLibrary="$(OutDir)/mpegvideoparse.lib"
TargetMachine="1"/>
<Tool
Name="VCMIDLTool"/>
@ -74,7 +74,7 @@
<Tool
Name="VCCLCompilerTool"
AdditionalIncludeDirectories="../../../gstreamer/win32;../../../gstreamer;../../../gstreamer/libs;../../../glib;../../../glib/glib;../../../glib/gmodule;&quot;../../gst-libs&quot;;../../../popt/include;../../../libxml2/include/libxml2"
PreprocessorDefinitions="WIN32;NDEBUG;GST_DISABLE_GST_DEBUG;_WINDOWS;_USRDLL;mp1videoparse_EXPORTS;HAVE_CONFIG_H;_USE_MATH_DEFINES"
PreprocessorDefinitions="WIN32;NDEBUG;GST_DISABLE_GST_DEBUG;_WINDOWS;_USRDLL;mpegvideoparse_EXPORTS;HAVE_CONFIG_H;_USE_MATH_DEFINES"
RuntimeLibrary="2"
UsePrecompiledHeader="0"
WarningLevel="3"
@ -85,7 +85,7 @@
<Tool
Name="VCLinkerTool"
AdditionalDependencies="glib-2.0.lib gmodule-2.0.lib gthread-2.0.lib gobject-2.0.lib libgstreamer.lib gstbytestream.lib iconv.lib intl.lib"
OutputFile="$(OutDir)/gstmp1videoparse.dll"
OutputFile="$(OutDir)/mpegvideoparse.dll"
LinkIncremental="1"
AdditionalLibraryDirectories="../../../gstreamer/win32/Release;../../../glib/glib;../../../glib/gmodule;../../../glib/gthread;../../../glib/gobject;../../../gettext/lib;../../../libiconv/lib"
ModuleDefinitionFile=""
@ -93,7 +93,7 @@
SubSystem="2"
OptimizeReferences="2"
EnableCOMDATFolding="2"
ImportLibrary="$(OutDir)/gstmp1videoparse.lib"
ImportLibrary="$(OutDir)/mpegvideoparse.lib"
TargetMachine="1"/>
<Tool
Name="VCMIDLTool"/>
@ -126,7 +126,7 @@
Filter="cpp;c;cxx;def;odl;idl;hpj;bat;asm;asmx"
UniqueIdentifier="{4FC737F1-C7A5-4376-A066-2A32D752A2FF}">
<File
RelativePath=".\gstmp1videoparse.c">
RelativePath=".\mpegvideoparse.c">
</File>
</Filter>
<Filter
@ -134,7 +134,7 @@
Filter="h;hpp;hxx;hm;inl;inc;xsd"
UniqueIdentifier="{93995380-89BD-4b04-88EB-625FBE52EBFB}">
<File
RelativePath=".\gstmp1videoparse.h">
RelativePath=".\mpegvideoparse.h">
</File>
</Filter>
<Filter