Merge remote-tracking branch 'origin/master' into 0.11-premerge

Conflicts:
	docs/libs/Makefile.am
	ext/kate/gstkatetiger.c
	ext/opus/gstopusdec.c
	ext/xvid/gstxvidenc.c
	gst-libs/gst/basecamerabinsrc/Makefile.am
	gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.c
	gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.h
	gst-libs/gst/video/gstbasevideocodec.c
	gst-libs/gst/video/gstbasevideocodec.h
	gst-libs/gst/video/gstbasevideodecoder.c
	gst-libs/gst/video/gstbasevideoencoder.c
	gst/asfmux/gstasfmux.c
	gst/audiovisualizers/gstwavescope.c
	gst/camerabin2/gstcamerabin2.c
	gst/debugutils/gstcompare.c
	gst/frei0r/gstfrei0rmixer.c
	gst/mpegpsmux/mpegpsmux.c
	gst/mpegtsmux/mpegtsmux.c
	gst/mxf/mxfmux.c
	gst/videomeasure/gstvideomeasure_ssim.c
	gst/videoparsers/gsth264parse.c
	gst/videoparsers/gstmpeg4videoparse.c
commit f70a623418
Edward Hervey, 2011-12-30 11:41:17 +01:00
111 changed files with 6838 additions and 2439 deletions

@ -52,6 +52,7 @@ CRUFT_FILES = \
$(top_builddir)/gst/audioparsers/.libs/*.{so,dll,DLL,dylib} \
$(top_builddir)/gst/flacparse/.libs/*.{so,dll,DLL,dylib} \
$(top_builddir)/gst/imagefreeze/.libs/*.{so,dll,DLL,dylib} \
$(top_builddir)/gst/mpeg4videoparse/.libs/*.{so,dll,DLL,dylib} \
$(top_builddir)/gst/qtmux/.libs/*.{so,dll,DLL,dylib} \
$(top_builddir)/gst/selector/.libs/*.{so,dll,DLL,dylib} \
$(top_builddir)/gst/shapewipe/.libs/*.{so,dll,DLL,dylib} \
@ -79,6 +80,7 @@ CRUFT_DIRS = \
$(top_srcdir)/gst/flacparse \
$(top_srcdir)/gst/imagefreeze \
$(top_srcdir)/gst/invtelecine \
$(top_srcdir)/gst/mpeg4videoparse \
$(top_srcdir)/gst/qtmux \
$(top_srcdir)/gst/selector \
$(top_srcdir)/gst/shapewipe \

@ -308,7 +308,7 @@ GST_PLUGINS_NONPORTED=" adpcmdec adpcmenc aiff asfmux \
decklink fbdev linsys shm vcd \
voaacenc apexsink bz2 cdaudio celt cog curl dc1394 dirac directfb dts resindvd \
gsettings gsm jp2k ladspa modplug mpeg2enc mplex mimic \
musepack musicbrainz nas neon ofa openal rsvg schro sdl smooth sndfile soundtouch spandsp timidity \
musepack musicbrainz nas neon ofa openal opencv rsvg schro sdl smooth sndfile soundtouch spandsp timidity \
wildmidi xvid apple_media "
AC_SUBST(GST_PLUGINS_NONPORTED)
@ -354,7 +354,6 @@ AG_GST_CHECK_PLUGIN(mpegdemux)
AG_GST_CHECK_PLUGIN(mpegtsdemux)
AG_GST_CHECK_PLUGIN(mpegtsmux)
AG_GST_CHECK_PLUGIN(mpegpsmux)
AG_GST_CHECK_PLUGIN(mpeg4videoparse)
AG_GST_CHECK_PLUGIN(mpegvideoparse)
AG_GST_CHECK_PLUGIN(mve)
AG_GST_CHECK_PLUGIN(mxf)
@ -1382,7 +1381,7 @@ AG_GST_CHECK_FEATURE(OPENCV, [opencv plugins], opencv, [
dnl a new version and the no-backward-compatibility define. (There doesn't
dnl seem to be a switch to suppress the warnings the cvcompat.h header
dnl causes.)
PKG_CHECK_MODULES(OPENCV, opencv >= 2.0.0 opencv <= 2.2.0 , [
PKG_CHECK_MODULES(OPENCV, opencv >= 2.0.0 opencv <= 2.3.1 , [
AC_PROG_CXX
AC_LANG_CPLUSPLUS
OLD_CPPFLAGS=$CPPFLAGS
@ -1612,6 +1611,9 @@ AG_GST_CHECK_FEATURE(XVID, [xvid plugins], xvid, [
#if XVID_API_MAJOR(XVID_API) != 4
#error "Incompatible XviD API version"
#endif
#if XVID_API_MAJOR(XVID_API) == 4 && XVID_API_MINOR(XVID_API) < 3
#error "Incompatible XviD API version"
#endif
],[ AC_MSG_RESULT(yes)
XVID_LIBS="-lxvidcore $LIBM"
AC_SUBST(XVID_LIBS)
@ -1965,7 +1967,6 @@ gst/mpegtsdemux/Makefile
gst/mpegtsmux/Makefile
gst/mpegtsmux/tsmux/Makefile
gst/mpegpsmux/Makefile
gst/mpeg4videoparse/Makefile
gst/mpegvideoparse/Makefile
gst/mve/Makefile
gst/mxf/Makefile

@ -49,22 +49,21 @@ extra_files =
# CFLAGS and LDFLAGS for compiling scan program. Only needed if your app/lib
# contains GtkObjects/GObjects and you want to document signals and properties.
GTKDOC_CFLAGS = -DGST_USE_UNSTABLE_API $(GST_PLUGINS_BAD_CFLAGS) $(GST_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS)
GTKDOC_CFLAGS = -DGST_USE_UNSTABLE_API $(GST_PLUGINS_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS)
GTKDOC_LIBS = \
$(top_builddir)/gst-libs/gst/codecparsers/libgstcodecparsers-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/basecamerabinsrc/libgstbasecamerabinsrc-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/interfaces/libgstphotography-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/signalprocessor/libgstsignalprocessor-@GST_MAJORMINOR@.la \
$(top_builddir)/gst-libs/gst/video/libgstbasevideo-@GST_MAJORMINOR@.la \
$(GST_BASE_LIBS) $(GST_BAD_LIBS)
$(GST_BASE_LIBS)
GTKDOC_CC=$(LIBTOOL) --tag=CC --mode=compile $(CC)
GTKDOC_LD=$(LIBTOOL) --tag=CC --mode=link $(CC)
# If you need to override some of the declarations, place them in this file
# and uncomment this line.
#DOC_OVERRIDES = $(DOC_MODULE)-overrides.txt
DOC_OVERRIDES =
DOC_OVERRIDES = $(DOC_MODULE)-overrides.txt
include $(top_srcdir)/common/gtk-doc.mak

@ -30,6 +30,7 @@
</para>
<xi:include href="xml/gsth264parser.xml" />
<xi:include href="xml/gstmpegvideoparser.xml" />
<xi:include href="xml/gstmpeg4parser.xml" />
<xi:include href="xml/gstvc1parser.xml" />
</chapter>

@ -117,6 +117,41 @@ gst_mpeg_video_parse_quant_matrix_extension
<SUBSECTION Private>
</SECTION>
<SECTION>
<FILE>gstmpeg4parser</FILE>
<TITLE>mpeg4parser</TITLE>
<INCLUDE>gst/codecparsers/gstmpeg4parser.h</INCLUDE>
GstMpeg4StartCode
GstMpeg4VisualObjectType
GstMpeg4AspectRatioInfo
GstMpeg4ParseResult
GstMpeg4VideoObjectCodingType
GstMpeg4ChromaFormat
GstMpeg4VideoObjectLayerShape
GstMpeg4SpriteEnable
GstMpeg4Profile
GstMpeg4Level
GstMpeg4VisualObjectSequence
GstMpeg4VisualObject
GstMpeg4VideoSignalType
GstMpeg4VideoPlaneShortHdr
GstMpeg4VideoObjectLayer
GstMpeg4SpriteTrajectory
GstMpeg4GroupOfVOP
GstMpeg4VideoObjectPlane
GstMpeg4Packet
GstMpeg4VideoPacketHdr
gst_mpeg4_parse
gst_mpeg4_parse_video_object_plane
gst_mpeg4_parse_group_of_vop
gst_mpeg4_parse_video_object_layer
gst_mpeg4_parse_visual_object
gst_mpeg4_parse_visual_object_sequence
gst_mpeg4_parse_video_packet_header
<SUBSECTION Standard>
<SUBSECTION Private>
</SECTION>
<SECTION>
<FILE>gstphotography</FILE>
GST_PHOTOGRAPHY_AUTOFOCUS_DONE

@ -349,9 +349,9 @@ SWFDEC_DIR=
endif
if USE_TELETEXTDEC
TELETEXT_DIR=teletextdec
TELETEXTDEC_DIR=teletextdec
else
TELETEXT_DIR=
TELETEXTDEC_DIR=
endif
if USE_VP8

@ -2,9 +2,10 @@ plugin_LTLIBRARIES = libgstdtsdec.la
libgstdtsdec_la_SOURCES = gstdtsdec.c
libgstdtsdec_la_CFLAGS = -DGST_USE_UNSTABLE_API \
$(GST_CFLAGS) $(ORC_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS)
libgstdtsdec_la_LIBADD = $(DTS_LIBS) $(ORC_LIBS) $(GST_PLUGINS_BASE_LIBS) \
-lgstaudio-@GST_MAJORMINOR@
$(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(ORC_CFLAGS)
libgstdtsdec_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_LIBS) \
-lgstaudio-@GST_MAJORMINOR@ \
$(DTS_LIBS) $(ORC_LIBS)
libgstdtsdec_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstdtsdec_la_LIBTOOLFLAGS = --tag=disable-static

@ -494,11 +494,15 @@ gst_dtsdec_handle_frame (GstAudioDecoder * bdec, GstBuffer * buffer)
guint8 *data;
gint size, chans;
gint length = 0, flags, sample_rate, bit_rate, frame_length;
GstFlowReturn result = GST_FLOW_UNEXPECTED;
GstFlowReturn result = GST_FLOW_OK;
GstBuffer *outbuf;
dts = GST_DTSDEC (bdec);
/* no fancy draining */
if (G_UNLIKELY (!buffer))
return GST_FLOW_OK;
/* parsed stuff already, so this should work out fine */
data = GST_BUFFER_DATA (buffer);
size = GST_BUFFER_SIZE (buffer);
@ -599,7 +603,8 @@ gst_dtsdec_handle_frame (GstAudioDecoder * bdec, GstBuffer * buffer)
/* negotiate if required */
if (need_renegotiation) {
GST_DEBUG ("dtsdec: sample_rate:%d stream_chans:0x%x using_chans:0x%x",
GST_DEBUG_OBJECT (dts,
"dtsdec: sample_rate:%d stream_chans:0x%x using_chans:0x%x",
dts->sample_rate, dts->stream_channels, dts->using_channels);
if (!gst_dtsdec_renegotiate (dts))
goto failed_negotiation;

@ -369,9 +369,8 @@ static gboolean
gst_faac_set_format (GstAudioEncoder * enc, GstAudioInfo * info)
{
GstFaac *faac = GST_FAAC (enc);
faacEncHandle *handle;
gint channels, samplerate, width;
gulong samples, bytes, fmt = 0, bps = 0;
gulong fmt = 0, bps = 0;
gboolean result = FALSE;
/* base class takes care */
@ -398,41 +397,24 @@ gst_faac_set_format (GstAudioEncoder * enc, GstAudioInfo * info)
bps = 4;
}
/* clean up in case of re-configure */
gst_faac_close_encoder (faac);
if (!(handle = faacEncOpen (samplerate, channels, &samples, &bytes)))
goto setup_failed;
/* mind channel count */
samples /= channels;
/* ok, record and set up */
faac->format = fmt;
faac->bps = bps;
faac->handle = handle;
faac->bytes = bytes;
faac->samples = samples;
faac->channels = channels;
faac->samplerate = samplerate;
/* finish up */
result = gst_faac_configure_source_pad (faac);
if (!result)
goto done;
/* report needs to base class */
gst_audio_encoder_set_frame_samples_min (enc, samples);
gst_audio_encoder_set_frame_samples_max (enc, samples);
gst_audio_encoder_set_frame_samples_min (enc, faac->samples);
gst_audio_encoder_set_frame_samples_max (enc, faac->samples);
gst_audio_encoder_set_frame_max (enc, 1);
done:
return result;
/* ERRORS */
setup_failed:
{
GST_ELEMENT_ERROR (faac, LIBRARY, SETTINGS, (NULL), (NULL));
goto done;
}
}
/* check downstream caps to configure format */
@ -494,15 +476,32 @@ gst_faac_negotiate (GstFaac * faac)
}
static gboolean
gst_faac_configure_source_pad (GstFaac * faac)
gst_faac_open_encoder (GstFaac * faac)
{
GstCaps *srccaps;
gboolean ret = FALSE;
faacEncHandle *handle;
faacEncConfiguration *conf;
guint maxbitrate;
gulong samples, bytes;
/* negotiate stream format */
gst_faac_negotiate (faac);
g_return_val_if_fail (faac->samplerate != 0 && faac->channels != 0, FALSE);
/* clean up in case of re-configure */
gst_faac_close_encoder (faac);
if (!(handle = faacEncOpen (faac->samplerate, faac->channels,
&samples, &bytes)))
goto setup_failed;
/* mind channel count */
samples /= faac->channels;
/* record */
faac->handle = handle;
faac->samples = samples;
faac->bytes = bytes;
GST_DEBUG_OBJECT (faac, "faac needs samples %d, output size %d",
faac->samples, faac->bytes);
/* we negotiated caps update current configuration */
conf = faacEncGetCurrentConfiguration (faac->handle);
@ -539,7 +538,7 @@ gst_faac_configure_source_pad (GstFaac * faac)
conf->bandWidth = 0;
if (!faacEncSetConfiguration (faac->handle, conf))
goto set_failed;
goto setup_failed;
/* let's see what really happened,
* note that this may not really match desired rate */
@ -548,6 +547,28 @@ gst_faac_configure_source_pad (GstFaac * faac)
GST_DEBUG_OBJECT (faac, "quantization quality: %ld", conf->quantqual);
GST_DEBUG_OBJECT (faac, "bandwidth: %d Hz", conf->bandWidth);
return TRUE;
/* ERRORS */
setup_failed:
{
GST_ELEMENT_ERROR (faac, LIBRARY, SETTINGS, (NULL), (NULL));
return FALSE;
}
}
static gboolean
gst_faac_configure_source_pad (GstFaac * faac)
{
GstCaps *srccaps;
gboolean ret;
/* negotiate stream format */
gst_faac_negotiate (faac);
if (!gst_faac_open_encoder (faac))
goto set_failed;
/* now create a caps for it all */
srccaps = gst_caps_new_simple ("audio/mpeg",
"mpegversion", G_TYPE_INT, faac->mpegversion,
@ -665,6 +686,13 @@ gst_faac_handle_frame (GstAudioEncoder * enc, GstBuffer * in_buf)
} else {
gst_buffer_unmap (out_buf, out_data, 0);
gst_buffer_unref (out_buf);
/* re-create encoder after final flush */
if (!in_buf) {
GST_DEBUG_OBJECT (faac, "flushed; recreating encoder");
gst_faac_close_encoder (faac);
if (!gst_faac_open_encoder (faac))
ret = GST_FLOW_ERROR;
}
}
return ret;

@ -51,9 +51,9 @@ struct _GstFaac {
bps;
/* input frame size */
gulong samples;
gint samples;
/* required output buffer size */
gulong bytes;
gint bytes;
/* negotiated */
gint mpegversion, outputformat;

@ -3,7 +3,6 @@
* Copyright 2005 Thomas Vander Stichele <thomas@apestaart.org>
* Copyright 2005 Ronald S. Bultje <rbultje@ronald.bitfreak.net>
* Copyright 2008 Vincent Penquerc'h <ogg.k.ogg.k@googlemail.com>
* Copyright (C) <2009> Young-Ho Cha <ganadist@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
@ -126,39 +125,6 @@ enum
ARG_SILENT
};
/* RGB -> YUV blitting routines taken from textoverlay,
original code from Young-Ho Cha <ganadist@gmail.com> */
#define COMP_Y(ret, r, g, b) \
{ \
ret = (int) (((19595 * r) >> 16) + ((38470 * g) >> 16) + ((7471 * b) >> 16)); \
ret = CLAMP (ret, 0, 255); \
}
#define COMP_U(ret, r, g, b) \
{ \
ret = (int) (-((11059 * r) >> 16) - ((21709 * g) >> 16) + ((32768 * b) >> 16) + 128); \
ret = CLAMP (ret, 0, 255); \
}
#define COMP_V(ret, r, g, b) \
{ \
ret = (int) (((32768 * r) >> 16) - ((27439 * g) >> 16) - ((5329 * b) >> 16) + 128); \
ret = CLAMP (ret, 0, 255); \
}
#define BLEND(ret, alpha, v0, v1) \
{ \
ret = (v0 * alpha + v1 * (255 - alpha)) / 255; \
}
#define OVER(ret, alphaA, Ca, alphaB, Cb, alphaNew) \
{ \
gint _tmp; \
_tmp = (Ca * alphaA + Cb * alphaB * (255 - alphaA) / 255) / alphaNew; \
ret = CLAMP (_tmp, 0, 255); \
}
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
# define TIGER_ARGB_A 3
# define TIGER_ARGB_R 2
@ -187,11 +153,16 @@ static GstStaticPadTemplate kate_sink_factory =
#if G_BYTE_ORDER == G_LITTLE_ENDIAN
#define TIGER_VIDEO_CAPS \
GST_VIDEO_CAPS_xRGB ";" GST_VIDEO_CAPS_BGRx ";" \
GST_VIDEO_CAPS_YUV ("{AYUV, I420, YV12, UYVY, NV12, NV21}")
GST_VIDEO_CAPS_YUV ("{I420, YV12, AYUV, YUY2, UYVY, v308, v210," \
" v216, Y41B, Y42B, Y444, Y800, Y16, NV12, NV21, UYVP, A420," \
" YUV9, IYU1}")
#else
#define TIGER_VIDEO_CAPS \
GST_VIDEO_CAPS_BGRx ";" GST_VIDEO_CAPS_xRGB ";" \
GST_VIDEO_CAPS_YUV ("{AYUV, I420, YV12, UYVY, NV12, NV21}")
GST_VIDEO_CAPS_YUV ("{I420, YV12, AYUV, YUY2, UYVY, v308, v210," \
" v216, Y41B, Y42B, Y444, Y800, Y16, NV12, NV21, UYVP, A420," \
" YUV9, IYU1}")
#endif
static GstStaticPadTemplate video_sink_factory =
@ -417,6 +388,8 @@ gst_kate_tiger_init (GstKateTiger * tiger, GstKateTigerClass * gclass)
tiger->video_width = 0;
tiger->video_height = 0;
tiger->composition = NULL;
tiger->seen_header = FALSE;
}
@ -432,8 +405,10 @@ gst_kate_tiger_dispose (GObject * object)
tiger->default_font_desc = NULL;
}
g_free (tiger->render_buffer);
tiger->render_buffer = NULL;
if (tiger->render_buffer) {
gst_buffer_unref (tiger->render_buffer);
tiger->render_buffer = NULL;
}
g_cond_free (tiger->cond);
tiger->cond = NULL;
@ -441,6 +416,11 @@ gst_kate_tiger_dispose (GObject * object)
g_mutex_free (tiger->mutex);
tiger->mutex = NULL;
if (tiger->composition) {
gst_video_overlay_composition_unref (tiger->composition);
tiger->composition = NULL;
}
GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
}
@ -789,404 +769,44 @@ gst_kate_tiger_get_time (GstKateTiger * tiger)
}
static inline void
gst_kate_tiger_blit_1 (GstKateTiger * tiger, guchar * dest, gint xpos,
gint ypos, const guint8 * image, gint image_width, gint image_height,
guint dest_stride)
gst_kate_tiger_set_composition (GstKateTiger * tiger)
{
gint i, j = 0;
gint x, y;
guchar r, g, b, a;
const guint8 *pimage;
guchar *py;
gint width = image_width;
gint height = image_height;
GstVideoOverlayRectangle *rectangle;
if (xpos < 0) {
xpos = 0;
if (tiger->render_buffer) {
rectangle = gst_video_overlay_rectangle_new_argb (tiger->render_buffer,
tiger->video_width, tiger->video_height, 4 * tiger->video_width,
0, 0, tiger->video_width, tiger->video_height,
GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
if (tiger->composition)
gst_video_overlay_composition_unref (tiger->composition);
tiger->composition = gst_video_overlay_composition_new (rectangle);
gst_video_overlay_rectangle_unref (rectangle);
} else if (tiger->composition) {
gst_video_overlay_composition_unref (tiger->composition);
tiger->composition = NULL;
}
}
if (xpos + width > tiger->video_width) {
width = tiger->video_width - xpos;
}
static inline void
gst_kate_tiger_unpremultiply (GstKateTiger * tiger)
{
guint i, j;
guint8 *pimage, *text_image = GST_BUFFER_DATA (tiger->render_buffer);
if (ypos + height > tiger->video_height) {
height = tiger->video_height - ypos;
}
dest += (ypos / 1) * dest_stride;
for (i = 0; i < height; i++) {
pimage = image + 4 * (i * image_width);
py = dest + i * dest_stride + xpos;
for (j = 0; j < width; j++) {
b = pimage[TIGER_ARGB_B];
g = pimage[TIGER_ARGB_G];
r = pimage[TIGER_ARGB_R];
a = pimage[TIGER_ARGB_A];
TIGER_UNPREMULTIPLY (a, r, g, b);
for (i = 0; i < tiger->video_height; i++) {
pimage = text_image + 4 * (i * tiger->video_width);
for (j = 0; j < tiger->video_width; j++) {
TIGER_UNPREMULTIPLY (pimage[TIGER_ARGB_A], pimage[TIGER_ARGB_R],
pimage[TIGER_ARGB_G], pimage[TIGER_ARGB_B]);
pimage += 4;
if (a == 0) {
py++;
continue;
}
COMP_Y (y, r, g, b);
x = *py;
BLEND (*py++, a, y, x);
}
}
}
static inline void
gst_kate_tiger_blit_sub2x2cbcr (GstKateTiger * tiger,
guchar * destcb, guchar * destcr, gint xpos, gint ypos,
const guint8 * image, gint image_width, gint image_height,
guint destcb_stride, guint destcr_stride, guint pix_stride)
{
gint i, j;
gint x, cb, cr;
gushort r, g, b, a;
gushort r1, g1, b1, a1;
const guint8 *pimage1, *pimage2;
guchar *pcb, *pcr;
gint width = image_width - 2;
gint height = image_height - 2;
xpos *= pix_stride;
if (xpos < 0) {
xpos = 0;
}
if (xpos + width > tiger->video_width) {
width = tiger->video_width - xpos;
}
if (ypos + height > tiger->video_height) {
height = tiger->video_height - ypos;
}
destcb += (ypos / 2) * destcb_stride;
destcr += (ypos / 2) * destcr_stride;
for (i = 0; i < height; i += 2) {
pimage1 = image + 4 * (i * image_width);
pimage2 = pimage1 + 4 * image_width;
pcb = destcb + (i / 2) * destcb_stride + xpos / 2;
pcr = destcr + (i / 2) * destcr_stride + xpos / 2;
for (j = 0; j < width; j += 2) {
b = pimage1[TIGER_ARGB_B];
g = pimage1[TIGER_ARGB_G];
r = pimage1[TIGER_ARGB_R];
a = pimage1[TIGER_ARGB_A];
TIGER_UNPREMULTIPLY (a, r, g, b);
pimage1 += 4;
b1 = pimage1[TIGER_ARGB_B];
g1 = pimage1[TIGER_ARGB_G];
r1 = pimage1[TIGER_ARGB_R];
a1 = pimage1[TIGER_ARGB_A];
TIGER_UNPREMULTIPLY (a1, r1, g1, b1);
b += b1;
g += g1;
r += r1;
a += a1;
pimage1 += 4;
b1 = pimage2[TIGER_ARGB_B];
g1 = pimage2[TIGER_ARGB_G];
r1 = pimage2[TIGER_ARGB_R];
a1 = pimage2[TIGER_ARGB_A];
TIGER_UNPREMULTIPLY (a1, r1, g1, b1);
b += b1;
g += g1;
r += r1;
a += a1;
pimage2 += 4;
/* + 2 for rounding */
b1 = pimage2[TIGER_ARGB_B];
g1 = pimage2[TIGER_ARGB_G];
r1 = pimage2[TIGER_ARGB_R];
a1 = pimage2[TIGER_ARGB_A];
TIGER_UNPREMULTIPLY (a1, r1, g1, b1);
b += b1 + 2;
g += g1 + 2;
r += r1 + 2;
a += a1 + 2;
pimage2 += 4;
b /= 4;
g /= 4;
r /= 4;
a /= 4;
if (a == 0) {
pcb += pix_stride;
pcr += pix_stride;
continue;
}
COMP_U (cb, r, g, b);
COMP_V (cr, r, g, b);
x = *pcb;
BLEND (*pcb, a, cb, x);
x = *pcr;
BLEND (*pcr, a, cr, x);
pcb += pix_stride;
pcr += pix_stride;
}
}
}
/* FIXME:
* - use proper strides and offset for I420
*/
static inline void
gst_kate_tiger_blit_NV12_NV21 (GstKateTiger * tiger,
guint8 * yuv_pixels, gint xpos, gint ypos, const guint8 * image,
gint image_width, gint image_height)
{
int y_stride, uv_stride;
int u_offset, v_offset;
int h, w;
/* because U/V is 2x2 subsampled, we need to round, either up or down,
* to a boundary of integer number of U/V pixels:
*/
xpos = GST_ROUND_UP_2 (xpos);
ypos = GST_ROUND_UP_2 (ypos);
w = tiger->video_width;
h = tiger->video_height;
y_stride = gst_video_format_get_row_stride (tiger->video_format, 0, w);
uv_stride = gst_video_format_get_row_stride (tiger->video_format, 1, w);
u_offset =
gst_video_format_get_component_offset (tiger->video_format, 1, w, h);
v_offset =
gst_video_format_get_component_offset (tiger->video_format, 2, w, h);
gst_kate_tiger_blit_1 (tiger, yuv_pixels, xpos, ypos, image, image_width,
image_height, y_stride);
gst_kate_tiger_blit_sub2x2cbcr (tiger, yuv_pixels + u_offset,
yuv_pixels + v_offset, xpos, ypos, image, image_width, image_height,
uv_stride, uv_stride, 2);
}
static inline void
gst_kate_tiger_blit_I420_YV12 (GstKateTiger * tiger,
guint8 * yuv_pixels, gint xpos, gint ypos, const guint8 * image,
gint image_width, gint image_height)
{
int y_stride, u_stride, v_stride;
int u_offset, v_offset;
int h, w;
/* because U/V is 2x2 subsampled, we need to round, either up or down,
* to a boundary of integer number of U/V pixels:
*/
xpos = GST_ROUND_UP_2 (xpos);
ypos = GST_ROUND_UP_2 (ypos);
w = tiger->video_width;
h = tiger->video_height;
y_stride = gst_video_format_get_row_stride (tiger->video_format, 0, w);
u_stride = gst_video_format_get_row_stride (tiger->video_format, 1, w);
v_stride = gst_video_format_get_row_stride (tiger->video_format, 2, w);
u_offset =
gst_video_format_get_component_offset (tiger->video_format, 1, w, h);
v_offset =
gst_video_format_get_component_offset (tiger->video_format, 2, w, h);
gst_kate_tiger_blit_1 (tiger, yuv_pixels, xpos, ypos, image, image_width,
image_height, y_stride);
gst_kate_tiger_blit_sub2x2cbcr (tiger, yuv_pixels + u_offset,
yuv_pixels + v_offset, xpos, ypos, image, image_width, image_height,
u_stride, v_stride, 1);
}
static inline void
gst_kate_tiger_blit_UYVY (GstKateTiger * tiger,
guint8 * yuv_pixels, gint xpos, gint ypos, const guint8 * image,
gint image_width, gint image_height)
{
int a0, r0, g0, b0;
int a1, r1, g1, b1;
int y0, y1, u, v;
int i, j;
int h, w;
const guint8 *pimage;
guchar *dest;
/* because U/V is 2x horizontally subsampled, we need to round to a
* boundary of integer number of U/V pixels in x dimension:
*/
xpos = GST_ROUND_UP_2 (xpos);
w = image_width - 2;
h = image_height - 2;
if (xpos < 0) {
xpos = 0;
}
if (xpos + w > tiger->video_width) {
w = tiger->video_width - xpos;
}
if (ypos + h > tiger->video_height) {
h = tiger->video_height - ypos;
}
for (i = 0; i < h; i++) {
pimage = image + i * image_width * 4;
dest = yuv_pixels + (i + ypos) * tiger->video_width * 2 + xpos * 2;
for (j = 0; j < w; j += 2) {
b0 = pimage[TIGER_ARGB_B];
g0 = pimage[TIGER_ARGB_G];
r0 = pimage[TIGER_ARGB_R];
a0 = pimage[TIGER_ARGB_A];
TIGER_UNPREMULTIPLY (a0, r0, g0, b0);
pimage += 4;
b1 = pimage[TIGER_ARGB_B];
g1 = pimage[TIGER_ARGB_G];
r1 = pimage[TIGER_ARGB_R];
a1 = pimage[TIGER_ARGB_A];
TIGER_UNPREMULTIPLY (a1, r1, g1, b1);
pimage += 4;
a0 += a1 + 2;
a0 /= 2;
if (a0 == 0) {
dest += 4;
continue;
}
COMP_Y (y0, r0, g0, b0);
COMP_Y (y1, r1, g1, b1);
b0 += b1 + 2;
g0 += g1 + 2;
r0 += r1 + 2;
b0 /= 2;
g0 /= 2;
r0 /= 2;
COMP_U (u, r0, g0, b0);
COMP_V (v, r0, g0, b0);
BLEND (*dest, a0, u, *dest);
dest++;
BLEND (*dest, a0, y0, *dest);
dest++;
BLEND (*dest, a0, v, *dest);
dest++;
BLEND (*dest, a0, y1, *dest);
dest++;
}
}
}
static inline void
gst_kate_tiger_blit_AYUV (GstKateTiger * tiger,
guint8 * rgb_pixels, gint xpos, gint ypos, const guint8 * image,
gint image_width, gint image_height)
{
int a, r, g, b, a1;
int y, u, v;
int i, j;
int h, w;
const guint8 *pimage;
guchar *dest;
w = image_width;
h = image_height;
if (xpos < 0) {
xpos = 0;
}
if (xpos + w > tiger->video_width) {
w = tiger->video_width - xpos;
}
if (ypos + h > tiger->video_height) {
h = tiger->video_height - ypos;
}
for (i = 0; i < h; i++) {
pimage = image + i * image_width * 4;
dest = rgb_pixels + (i + ypos) * 4 * tiger->video_width + xpos * 4;
for (j = 0; j < w; j++) {
a = pimage[TIGER_ARGB_A];
b = pimage[TIGER_ARGB_B];
g = pimage[TIGER_ARGB_G];
r = pimage[TIGER_ARGB_R];
TIGER_UNPREMULTIPLY (a, r, g, b);
// convert background to yuv
COMP_Y (y, r, g, b);
COMP_U (u, r, g, b);
COMP_V (v, r, g, b);
// preform text "OVER" background alpha compositing
a1 = a + (dest[0] * (255 - a)) / 255 + 1; // add 1 to prevent divide by 0
OVER (dest[1], a, y, dest[0], dest[1], a1);
OVER (dest[2], a, u, dest[0], dest[2], a1);
OVER (dest[3], a, v, dest[0], dest[3], a1);
dest[0] = a1 - 1; // remove the temporary 1 we added
pimage += 4;
dest += 4;
}
}
}
static void
gst_kate_tiger_blend_yuv (GstKateTiger * tiger, GstBuffer * video_frame,
const guint8 * image, gint image_width, gint image_height)
{
gint xpos = 0, ypos = 0;
gint width, height;
width = image_width;
height = image_height;
switch (tiger->video_format) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
gst_kate_tiger_blit_I420_YV12 (tiger,
GST_BUFFER_DATA (video_frame), xpos, ypos, image, image_width,
image_height);
break;
case GST_VIDEO_FORMAT_NV12:
case GST_VIDEO_FORMAT_NV21:
gst_kate_tiger_blit_NV12_NV21 (tiger,
GST_BUFFER_DATA (video_frame), xpos, ypos, image, image_width,
image_height);
break;
case GST_VIDEO_FORMAT_UYVY:
gst_kate_tiger_blit_UYVY (tiger,
GST_BUFFER_DATA (video_frame), xpos, ypos, image, image_width,
image_height);
break;
case GST_VIDEO_FORMAT_AYUV:
gst_kate_tiger_blit_AYUV (tiger,
GST_BUFFER_DATA (video_frame), xpos, ypos, image, image_width,
image_height);
break;
default:
g_assert_not_reached ();
}
}
static GstFlowReturn
gst_kate_tiger_video_chain (GstPad * pad, GstBuffer * buf)
{
@ -1249,14 +869,12 @@ gst_kate_tiger_video_chain (GstPad * pad, GstBuffer * buf)
/* and setup that buffer before rendering */
if (gst_video_format_is_yuv (tiger->video_format)) {
guint8 *tmp = g_realloc (tiger->render_buffer,
tiger->video_width * tiger->video_height * 4);
if (!tmp) {
GST_WARNING_OBJECT (tiger, "Failed to allocate render buffer");
goto pass;
if (!tiger->render_buffer) {
tiger->render_buffer =
gst_buffer_new_and_alloc (tiger->video_width * tiger->video_height *
4);
}
tiger->render_buffer = tmp;
ptr = tiger->render_buffer;
ptr = GST_BUFFER_DATA (tiger->render_buffer);
tiger_renderer_set_surface_clear_color (tiger->tr, 1, 0.0, 0.0, 0.0, 0.0);
} else {
ptr = GST_BUFFER_DATA (buf);
@ -1278,8 +896,12 @@ gst_kate_tiger_video_chain (GstPad * pad, GstBuffer * buf)
}
if (gst_video_format_is_yuv (tiger->video_format)) {
gst_kate_tiger_blend_yuv (tiger, buf, tiger->render_buffer,
tiger->video_width, tiger->video_height);
/* As GstVideoOverlayComposition supports only unpremultiplied ARGB,
* we need to unpremultiply it */
gst_kate_tiger_unpremultiply (tiger);
gst_kate_tiger_set_composition (tiger);
if (tiger->composition)
gst_video_overlay_composition_blend (tiger->composition, buf);
}
pass:

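The hunks above replace the hand-rolled YUV blitters with GstVideoOverlayComposition, which only accepts unpremultiplied ARGB. A minimal sketch of what an unpremultiply step such as TIGER_UNPREMULTIPLY has to do (the macro itself is not shown in this diff, so its exact shape here is an assumption):

/* Illustrative only, assumes <glib.h>: recover straight
 * (non-premultiplied) RGB from premultiplied values; a == 0 means
 * fully transparent, so the RGB values can be left as-is. */
static inline void
unpremultiply_pixel (guint8 a, guint8 * r, guint8 * g, guint8 * b)
{
  if (a == 0)
    return;
  *r = MIN (((guint) *r * 255) / a, 255);
  *g = MIN (((guint) *g * 255) / a, 255);
  *b = MIN (((guint) *b * 255) / a, 255);
}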
@ -50,6 +50,7 @@
#include <tiger/tiger.h>
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/video-overlay-composition.h>
#include "gstkateutil.h"
G_BEGIN_DECLS
@ -95,7 +96,8 @@ struct _GstKateTiger
gint video_width;
gint video_height;
gboolean swap_rgb;
guint8 *render_buffer;
GstBuffer *render_buffer;
GstVideoOverlayComposition *composition;
GMutex *mutex;
GCond *cond;

@ -194,8 +194,6 @@ gst_motion_cells_finalize (GObject * obj)
GFREE (filter->basename_datafile);
GFREE (filter->datafile_extension);
g_mutex_free (filter->propset_mutex);
G_OBJECT_CLASS (parent_class)->finalize (obj);
}
@ -328,7 +326,6 @@ gst_motion_cells_class_init (GstMotioncellsClass * klass)
static void
gst_motion_cells_init (GstMotioncells * filter, GstMotioncellsClass * gclass)
{
filter->propset_mutex = g_mutex_new ();
filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
gst_pad_set_setcaps_function (filter->sinkpad,
GST_DEBUG_FUNCPTR (gst_motion_cells_set_caps));
@ -398,9 +395,7 @@ gst_motion_cells_init (GstMotioncells * filter, GstMotioncellsClass * gclass)
filter->thickness = THICKNESS_DEF;
filter->datafileidx = 0;
g_mutex_lock (filter->propset_mutex);
filter->id = motion_cells_init ();
g_mutex_unlock (filter->propset_mutex);
}
@ -419,28 +414,19 @@ gst_motion_cells_set_property (GObject * object, guint prop_id,
int tmpuy = -1;
int tmplx = -1;
int tmply = -1;
GstStateChangeReturn ret;
g_mutex_lock (filter->propset_mutex);
GST_OBJECT_LOCK (filter);
switch (prop_id) {
case PROP_GRID_X:
ret = gst_element_get_state (GST_ELEMENT (filter),
&filter->state, NULL, 250 * GST_NSECOND);
filter->gridx = g_value_get_int (value);
if (filter->prevgridx != filter->gridx
&& ret == GST_STATE_CHANGE_SUCCESS
&& filter->state == GST_STATE_PLAYING) {
if (filter->prevgridx != filter->gridx && !filter->firstframe) {
filter->changed_gridx = true;
}
filter->prevgridx = filter->gridx;
break;
case PROP_GRID_Y:
ret = gst_element_get_state (GST_ELEMENT (filter),
&filter->state, NULL, 250 * GST_NSECOND);
filter->gridy = g_value_get_int (value);
if (filter->prevgridy != filter->gridy
&& ret == GST_STATE_CHANGE_SUCCESS
&& filter->state == GST_STATE_PLAYING) {
if (filter->prevgridy != filter->gridy && !filter->firstframe) {
filter->changed_gridy = true;
}
filter->prevgridy = filter->gridy;
@ -473,9 +459,7 @@ gst_motion_cells_set_property (GObject * object, guint prop_id,
filter->calculate_motion = g_value_get_boolean (value);
break;
case PROP_DATE:
ret = gst_element_get_state (GST_ELEMENT (filter),
&filter->state, NULL, 250 * GST_NSECOND);
if (ret == GST_STATE_CHANGE_SUCCESS && filter->state == GST_STATE_PLAYING) {
if (!filter->firstframe) {
filter->changed_startime = true;
}
filter->starttime = g_value_get_long (value);
@ -629,7 +613,7 @@ gst_motion_cells_set_property (GObject * object, guint prop_id,
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
g_mutex_unlock (filter->propset_mutex);
GST_OBJECT_UNLOCK (filter);
}
static void
@ -640,6 +624,7 @@ gst_motion_cells_get_property (GObject * object, guint prop_id,
GString *str;
int i;
GST_OBJECT_LOCK (filter);
switch (prop_id) {
case PROP_GRID_X:
g_value_set_int (value, filter->gridx);
@ -751,6 +736,7 @@ gst_motion_cells_get_property (GObject * object, guint prop_id,
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
GST_OBJECT_UNLOCK (filter);
}
static void
@ -861,17 +847,17 @@ gst_motion_cells_set_caps (GstPad * pad, GstCaps * caps)
static GstFlowReturn
gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
{
GstMotioncells *filter;
filter = gst_motion_cells (GST_OBJECT_PARENT (pad));
GST_OBJECT_LOCK (filter);
if (filter->calculate_motion) {
double sensitivity;
int framerate, gridx, gridy, motionmaskcells_count, motionmaskcoord_count,
motioncells_count, i;
int thickness, success, motioncellsidxcnt, numberOfCells,
motioncellsnumber, cellsOfInterestNumber;
int mincellsOfInterestNumber, motiondetect;
int mincellsOfInterestNumber, motiondetect, minimum_motion_frames,
postnomotion;
char *datafile;
bool display, changed_datafile, useAlpha;
gint64 starttime;
@ -879,14 +865,14 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
motioncellidx *motionmaskcellsidx;
cellscolor motioncellscolor;
motioncellidx *motioncellsidx;
g_mutex_lock (filter->propset_mutex);
buf = gst_buffer_make_writable (buf);
filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf);
if (filter->firstframe) {
setPrevFrame (filter->cvImage, filter->id);
filter->firstframe = FALSE;
}
minimum_motion_frames = filter->minimum_motion_frames;
postnomotion = filter->postnomotion;
sensitivity = filter->sensitivity;
framerate = filter->framerate;
gridx = filter->gridx;
@ -963,6 +949,7 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
motionmaskcoords, motionmaskcells_count, motionmaskcellsidx,
motioncellscolor, motioncells_count, motioncellsidx, starttime,
datafile, changed_datafile, thickness, filter->id);
if ((success == 1) && (filter->sent_init_error_msg == false)) {
char *initfailedreason;
int initerrorcode;
@ -996,7 +983,7 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
GFREE (motionmaskcoords);
GFREE (motionmaskcellsidx);
GFREE (motioncellsidx);
g_mutex_unlock (filter->propset_mutex);
GST_OBJECT_UNLOCK (filter);
return gst_pad_push (filter->srcpad, buf);
}
filter->changed_datafile = getChangedDataFile (filter->id);
@ -1007,6 +994,7 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
(filter->motioncells_count) : (numberOfCells);
mincellsOfInterestNumber =
floor ((double) cellsOfInterestNumber * filter->threshold);
GST_OBJECT_UNLOCK (filter);
motiondetect = (motioncellsnumber >= mincellsOfInterestNumber) ? 1 : 0;
if ((motioncellsidxcnt > 0) && (motiondetect == 1)) {
char *detectedmotioncells;
@ -1015,7 +1003,7 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
if (detectedmotioncells) {
filter->consecutive_motion++;
if ((filter->previous_motion == false)
&& (filter->consecutive_motion >= filter->minimum_motion_frames)) {
&& (filter->consecutive_motion >= minimum_motion_frames)) {
GstStructure *s;
GstMessage *m;
filter->previous_motion = true;
@ -1061,7 +1049,7 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
}
}
}
if (filter->postnomotion > 0) {
if (postnomotion > 0) {
guint64 last_buf_timestamp = GST_BUFFER_TIMESTAMP (buf) / 1000000000l;
if ((last_buf_timestamp -
(filter->last_motion_timestamp / 1000000000l)) >=
@ -1086,10 +1074,9 @@ gst_motion_cells_chain (GstPad * pad, GstBuffer * buf)
GFREE (motionmaskcoords);
GFREE (motionmaskcellsidx);
GFREE (motioncellsidx);
g_mutex_unlock (filter->propset_mutex);
} else {
GST_OBJECT_UNLOCK (filter);
}
return gst_pad_push (filter->srcpad, buf);
}

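The motioncells hunks above drop the per-filter propset_mutex in favour of GST_OBJECT_LOCK, and the chain function now copies properties such as minimum_motion_frames and postnomotion into locals while the lock is held. A condensed sketch of that pattern as used above (error handling and the actual processing omitted):

/* Snapshot properties under the object lock, then release it before
 * pushing downstream, so a concurrent g_object_set() can neither race
 * with the streaming thread nor stall it. */
static GstFlowReturn
chain_sketch (GstMotioncells * filter, GstBuffer * buf)
{
  gint min_frames;

  GST_OBJECT_LOCK (filter);
  min_frames = filter->minimum_motion_frames;   /* copy while locked */
  GST_OBJECT_UNLOCK (filter);

  /* ... work with the local copy only ... */
  return gst_pad_push (filter->srcpad, buf);
}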
@ -107,7 +107,6 @@ struct _GstMotioncells
gint width, height;
//time stuff
struct timeval tv;
GMutex *propset_mutex;
double framerate;
};

@ -1,6 +1,6 @@
plugin_LTLIBRARIES = libgstopus.la
libgstopus_la_SOURCES = gstopus.c gstopusdec.c gstopusenc.c gstopusparse.c gstopusheader.c gstopuscommon.c
libgstopus_la_SOURCES = gstopus.c gstopusdec.c gstopusenc.c gstopusparse.c gstopusheader.c gstopuscommon.c gstrtpopuspay.c gstrtpopusdepay.c
libgstopus_la_CFLAGS = \
-DGST_USE_UNSTABLE_API \
$(GST_PLUGINS_BASE_CFLAGS) \
@ -9,10 +9,11 @@ libgstopus_la_CFLAGS = \
libgstopus_la_LIBADD = \
-lgstaudio-$(GST_MAJORMINOR) \
$(GST_PLUGINS_BASE_LIBS) -lgsttag-$(GST_MAJORMINOR) \
-lgstrtp-@GST_MAJORMINOR@ \
$(GST_BASE_LIBS) \
$(GST_LIBS) \
$(OPUS_LIBS)
libgstopus_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) $(LIBM)
libgstopus_la_LIBTOOLFLAGS = --tag=disable-static
noinst_HEADERS = gstopusenc.h gstopusdec.h gstopusparse.h gstopusheader.h gstopuscommon.h
noinst_HEADERS = gstopusenc.h gstopusdec.h gstopusparse.h gstopusheader.h gstopuscommon.h gstrtpopuspay.h gstrtpopusdepay.h

@ -25,6 +25,9 @@
#include "gstopusenc.h"
#include "gstopusparse.h"
#include "gstrtpopuspay.h"
#include "gstrtpopusdepay.h"
#include <gst/tag/tag.h>
static gboolean
@ -43,6 +46,14 @@ plugin_init (GstPlugin * plugin)
GST_TYPE_OPUS_PARSE))
return FALSE;
if (!gst_element_register (plugin, "rtpopusdepay", GST_RANK_NONE,
GST_TYPE_RTP_OPUS_DEPAY))
return FALSE;
if (!gst_element_register (plugin, "rtpopuspay", GST_RANK_NONE,
GST_TYPE_RTP_OPUS_PAY))
return FALSE;
gst_tag_register_musicbrainz_tags ();
return TRUE;

@ -17,6 +17,8 @@
* Boston, MA 02111-1307, USA.
*/
#include <stdio.h>
#include <string.h>
#include "gstopuscommon.h"
/* http://www.xiph.org/vorbis/doc/Vorbis_I_spec.html#x1-800004.3.9 */
@ -86,3 +88,19 @@ const char *gst_opus_channel_names[] = {
"side right",
"none"
};
void
gst_opus_common_log_channel_mapping_table (GstElement * element,
GstDebugCategory * category, const char *msg, int n_channels,
const guint8 * table)
{
char s[8 + 256 * 4] = "[ "; /* enough for 256 times "255 " at most */
int n;
for (n = 0; n < n_channels; ++n) {
size_t len = strlen (s);
snprintf (s + len, sizeof (s) - len, "%d ", table[n]);
}
strcat (s, "]");
GST_CAT_LEVEL_LOG (category, GST_LEVEL_INFO, element, "%s: %s", msg, s);
}

@ -28,6 +28,9 @@ G_BEGIN_DECLS
extern const GstAudioChannelPosition gst_opus_channel_positions[][8];
extern const char *gst_opus_channel_names[];
extern void gst_opus_common_log_channel_mapping_table (GstElement *element,
GstDebugCategory * category, const char *msg,
int n_channels, const guint8 *table);
G_END_DECLS

@ -38,12 +38,11 @@
*/
#ifdef HAVE_CONFIG_H
# include "config.h"
#include "config.h"
#endif
#include <math.h>
#include <string.h>
#include <gst/tag/tag.h>
#include "gstopusheader.h"
#include "gstopuscommon.h"
#include "gstopusdec.h"
@ -57,7 +56,7 @@ GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("audio/x-raw, "
"format = (string) { " GST_AUDIO_NE (S16) " }, "
"rate = (int) { 8000, 12000, 16000, 24000, 48000 }, "
"rate = (int) { 48000, 24000, 16000, 12000, 8000 }, "
"channels = (int) [ 1, 8 ] ")
);
@ -207,12 +206,32 @@ gst_opus_dec_get_r128_volume (gint16 r128_gain)
return DB_TO_LINEAR (gst_opus_dec_get_r128_gain (r128_gain));
}
static GstCaps *
gst_opus_dec_negotiate (GstOpusDec * dec)
{
GstCaps *caps = gst_pad_get_allowed_caps (GST_AUDIO_DECODER_SRC_PAD (dec));
GstStructure *s;
caps = gst_caps_make_writable (caps);
gst_caps_truncate (caps);
s = gst_caps_get_structure (caps, 0);
gst_structure_fixate_field_nearest_int (s, "rate", 48000);
gst_structure_get_int (s, "rate", &dec->sample_rate);
gst_structure_fixate_field_nearest_int (s, "channels", dec->n_channels);
gst_structure_get_int (s, "channels", &dec->n_channels);
GST_INFO_OBJECT (dec, "Negotiated %d channels, %d Hz", dec->n_channels,
dec->sample_rate);
return caps;
}
static GstFlowReturn
gst_opus_dec_parse_header (GstOpusDec * dec, GstBuffer * buf)
{
const guint8 *data;
GstCaps *caps;
GstStructure *s;
const GstAudioChannelPosition *pos = NULL;
g_return_val_if_fail (gst_opus_header_is_id_header (buf), GST_FLOW_ERROR);
@ -277,16 +296,7 @@ gst_opus_dec_parse_header (GstOpusDec * dec, GstBuffer * buf)
}
}
/* negotiate width with downstream */
caps = gst_pad_get_allowed_caps (GST_AUDIO_DECODER_SRC_PAD (dec));
s = gst_caps_get_structure (caps, 0);
gst_structure_fixate_field_nearest_int (s, "rate", 48000);
gst_structure_get_int (s, "rate", &dec->sample_rate);
gst_structure_fixate_field_nearest_int (s, "channels", dec->n_channels);
gst_structure_get_int (s, "channels", &dec->n_channels);
GST_INFO_OBJECT (dec, "Negotiated %d channels, %d Hz", dec->n_channels,
dec->sample_rate);
caps = gst_opus_dec_negotiate (dec);
if (pos) {
GST_DEBUG_OBJECT (dec, "Setting channel positions on caps");
@ -327,11 +337,36 @@ opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buffer)
GstBuffer *buf;
if (dec->state == NULL) {
/* If we did not get any headers, default to 2 channels */
if (dec->n_channels == 0) {
GstCaps *caps;
GST_INFO_OBJECT (dec, "No header, assuming single stream");
dec->n_channels = 2;
dec->sample_rate = 48000;
caps = gst_opus_dec_negotiate (dec);
GST_INFO_OBJECT (dec, "Setting src caps to %" GST_PTR_FORMAT, caps);
gst_pad_set_caps (GST_AUDIO_DECODER_SRC_PAD (dec), caps);
gst_caps_unref (caps);
/* default stereo mapping */
dec->channel_mapping_family = 0;
dec->channel_mapping[0] = 0;
dec->channel_mapping[1] = 1;
dec->n_streams = 1;
dec->n_stereo_streams = 1;
}
GST_DEBUG_OBJECT (dec, "Creating decoder with %d channels, %d Hz",
dec->n_channels, dec->sample_rate);
dec->state = opus_multistream_decoder_create (dec->sample_rate,
dec->n_channels, dec->n_streams, dec->n_stereo_streams,
dec->channel_mapping, &err);
#ifndef GST_DISABLE_DEBUG
gst_opus_common_log_channel_mapping_table (GST_ELEMENT (dec), opusdec_debug,
"Mapping table", dec->n_channels, dec->channel_mapping);
#endif
GST_DEBUG_OBJECT (dec, "%d streams, %d stereo", dec->n_streams,
dec->n_stereo_streams);
dec->state =
opus_multistream_decoder_create (dec->sample_rate, dec->n_channels,
dec->n_streams, dec->n_stereo_streams, dec->channel_mapping, &err);
if (!dec->state || err != OPUS_OK)
goto creation_failed;
}
@ -411,11 +446,11 @@ opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buffer)
GST_INFO_OBJECT (dec,
"Skipping %u samples (%u at 48000 Hz, %u left to skip)", skip,
scaled_skip, dec->pre_skip);
}
if (gst_buffer_get_size (outbuf) == 0) {
gst_buffer_unref (outbuf);
outbuf = NULL;
}
if (gst_buffer_get_size (outbuf) == 0) {
gst_buffer_unref (outbuf);
outbuf = NULL;
}
/* Apply gain */

@ -161,6 +161,8 @@ static void gst_opus_enc_finalize (GObject * object);
static gboolean gst_opus_enc_sink_event (GstAudioEncoder * benc,
GstEvent * event);
static GstCaps *gst_opus_enc_sink_getcaps (GstAudioEncoder * benc,
GstCaps * filter);
static gboolean gst_opus_enc_setup (GstOpusEnc * enc);
static void gst_opus_enc_get_property (GObject * object, guint prop_id,
@ -211,6 +213,7 @@ gst_opus_enc_class_init (GstOpusEncClass * klass)
base_class->set_format = GST_DEBUG_FUNCPTR (gst_opus_enc_set_format);
base_class->handle_frame = GST_DEBUG_FUNCPTR (gst_opus_enc_handle_frame);
base_class->event = GST_DEBUG_FUNCPTR (gst_opus_enc_sink_event);
base_class->getcaps = GST_DEBUG_FUNCPTR (gst_opus_enc_sink_getcaps);
g_object_class_install_property (gobject_class, PROP_AUDIO,
g_param_spec_boolean ("audio", "Audio or voice",
@ -401,7 +404,50 @@ gst_opus_enc_get_frame_samples (GstOpusEnc * enc)
}
static void
gst_opus_enc_setup_channel_mapping (GstOpusEnc * enc, const GstAudioInfo * info)
gst_opus_enc_setup_trivial_mapping (GstOpusEnc * enc, guint8 mapping[256])
{
int n;
for (n = 0; n < 255; ++n)
mapping[n] = n;
}
static int
gst_opus_enc_find_channel_position (GstOpusEnc * enc, const GstAudioInfo * info,
GstAudioChannelPosition position)
{
int n;
for (n = 0; n < enc->n_channels; ++n) {
if (GST_AUDIO_INFO_POSITION (info, n) == position) {
return n;
}
}
return -1;
}
static int
gst_opus_enc_find_channel_position_in_vorbis_order (GstOpusEnc * enc,
GstAudioChannelPosition position)
{
int c;
for (c = 0; c < enc->n_channels; ++c) {
if (gst_opus_channel_positions[enc->n_channels - 1][c] == position) {
GST_INFO_OBJECT (enc,
"Channel position %s maps to index %d in Vorbis order",
gst_opus_channel_names[position], c);
return c;
}
}
GST_WARNING_OBJECT (enc,
"Channel position %s is not representable in Vorbis order",
gst_opus_channel_names[position]);
return -1;
}
static void
gst_opus_enc_setup_channel_mappings (GstOpusEnc * enc,
const GstAudioInfo * info)
{
#define MAPS(idx,pos) (GST_AUDIO_INFO_POSITION (info, (idx)) == GST_AUDIO_CHANNEL_POSITION_##pos)
@ -411,14 +457,15 @@ gst_opus_enc_setup_channel_mapping (GstOpusEnc * enc, const GstAudioInfo * info)
enc->n_channels);
/* Start by setting up a default trivial mapping */
for (n = 0; n < 255; ++n)
enc->channel_mapping[n] = n;
enc->n_stereo_streams = 0;
gst_opus_enc_setup_trivial_mapping (enc, enc->encoding_channel_mapping);
gst_opus_enc_setup_trivial_mapping (enc, enc->decoding_channel_mapping);
/* For one channel, use the basic RTP mapping */
if (enc->n_channels == 1) {
GST_INFO_OBJECT (enc, "Mono, trivial RTP mapping");
enc->channel_mapping_family = 0;
enc->channel_mapping[0] = 0;
/* implicit mapping for family 0 */
return;
}
@ -428,9 +475,11 @@ gst_opus_enc_setup_channel_mapping (GstOpusEnc * enc, const GstAudioInfo * info)
if (MAPS (0, FRONT_LEFT) && MAPS (1, FRONT_RIGHT)) {
GST_INFO_OBJECT (enc, "Stereo, canonical mapping");
enc->channel_mapping_family = 0;
enc->n_stereo_streams = 1;
/* The channel mapping is implicit for family 0, that's why we do not
attempt to create one for right/left - this will be mapped to the
Vorbis mapping below. */
return;
} else {
GST_DEBUG_OBJECT (enc, "Stereo, but not canonical mapping, continuing");
}
@ -438,42 +487,115 @@ gst_opus_enc_setup_channel_mapping (GstOpusEnc * enc, const GstAudioInfo * info)
/* For channels between 1 and 8, we use the Vorbis mapping if we can
find a permutation that matches it. Mono will have been taken care
of earlier, but this code also handles it. */
of earlier, but this code also handles it. Same for left/right stereo.
There are two mappings. One maps the input channels to an ordering
which has the natural pairs first so they can benefit from the Opus
stereo channel coupling, and the other maps this ordering to the
Vorbis ordering. */
if (enc->n_channels >= 1 && enc->n_channels <= 8) {
GST_DEBUG_OBJECT (enc,
"In range for the Vorbis mapping, checking channel positions");
for (n = 0; n < enc->n_channels; ++n) {
GstAudioChannelPosition pos = GST_AUDIO_INFO_POSITION (info, n);
int c;
int c0, c1, c0v, c1v;
int mapped;
gboolean positions_done[256];
static const GstAudioChannelPosition pairs[][2] = {
{GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT},
{GST_AUDIO_CHANNEL_POSITION_REAR_LEFT,
GST_AUDIO_CHANNEL_POSITION_REAR_RIGHT},
{GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER},
{GST_AUDIO_CHANNEL_POSITION_FRONT_LEFT_OF_CENTER,
GST_AUDIO_CHANNEL_POSITION_FRONT_RIGHT_OF_CENTER},
{GST_AUDIO_CHANNEL_POSITION_SIDE_LEFT,
GST_AUDIO_CHANNEL_POSITION_SIDE_RIGHT},
};
size_t pair;
GST_DEBUG_OBJECT (enc, "Channel %d has position %d (%s)", n, pos,
gst_opus_channel_names[pos]);
for (c = 0; c < enc->n_channels; ++c) {
if (gst_opus_channel_positions[enc->n_channels - 1][c] == pos) {
GST_DEBUG_OBJECT (enc, "Found in Vorbis mapping as channel %d", c);
break;
GST_DEBUG_OBJECT (enc,
"In range for the Vorbis mapping, building channel mapping tables");
enc->n_stereo_streams = 0;
mapped = 0;
for (n = 0; n < 256; ++n)
positions_done[n] = FALSE;
/* First, find any natural pairs, and move them to the front */
for (pair = 0; pair < G_N_ELEMENTS (pairs); ++pair) {
GstAudioChannelPosition p0 = pairs[pair][0];
GstAudioChannelPosition p1 = pairs[pair][1];
c0 = gst_opus_enc_find_channel_position (enc, info, p0);
c1 = gst_opus_enc_find_channel_position (enc, info, p1);
if (c0 >= 0 && c1 >= 0) {
/* We found a natural pair */
GST_DEBUG_OBJECT (enc, "Natural pair '%s/%s' found at %d %d",
gst_opus_channel_names[p0], gst_opus_channel_names[p1], c0, c1);
/* Find where they map in Vorbis order */
c0v = gst_opus_enc_find_channel_position_in_vorbis_order (enc, p0);
c1v = gst_opus_enc_find_channel_position_in_vorbis_order (enc, p1);
if (c0v < 0 || c1v < 0) {
GST_WARNING_OBJECT (enc,
"Cannot map channel positions to Vorbis order, using unknown mapping");
enc->channel_mapping_family = 255;
enc->n_stereo_streams = 0;
return;
}
enc->encoding_channel_mapping[mapped] = c0;
enc->encoding_channel_mapping[mapped + 1] = c1;
enc->decoding_channel_mapping[c0v] = mapped;
enc->decoding_channel_mapping[c1v] = mapped + 1;
enc->n_stereo_streams++;
mapped += 2;
positions_done[p0] = positions_done[p1] = TRUE;
}
if (c == enc->n_channels) {
/* We did not find that position, so use undefined */
GST_WARNING_OBJECT (enc,
"Position %d (%s) not found in Vorbis mapping, using unknown mapping",
pos, gst_opus_channel_positions[pos]);
enc->channel_mapping_family = 255;
return;
}
GST_DEBUG_OBJECT (enc, "Mapping output channel %d to %d (%s)", c, n,
gst_opus_channel_names[pos]);
enc->channel_mapping[c] = n;
}
GST_INFO_OBJECT (enc, "Permutation found, using Vorbis mapping");
/* Now add all other input channels as mono streams */
for (n = 0; n < enc->n_channels; ++n) {
GstAudioChannelPosition position = GST_AUDIO_INFO_POSITION (info, n);
/* if we already mapped it while searching for pairs, nothing else
needs to be done */
if (!positions_done[position]) {
int cv;
GST_DEBUG_OBJECT (enc, "Channel position %s is not mapped yet, adding",
gst_opus_channel_names[position]);
cv = gst_opus_enc_find_channel_position_in_vorbis_order (enc, position);
if (cv < 0) {
GST_WARNING_OBJECT (enc,
"Cannot map channel positions to Vorbis order, using unknown mapping");
enc->channel_mapping_family = 255;
enc->n_stereo_streams = 0;
return;
}
enc->encoding_channel_mapping[mapped] = n;
enc->decoding_channel_mapping[cv] = mapped;
mapped++;
}
}
#ifndef GST_DISABLE_DEBUG
GST_INFO_OBJECT (enc,
"Mapping tables built: %d channels, %d stereo streams", enc->n_channels,
enc->n_stereo_streams);
gst_opus_common_log_channel_mapping_table (GST_ELEMENT (enc), opusenc_debug,
"Encoding mapping table", enc->n_channels,
enc->encoding_channel_mapping);
gst_opus_common_log_channel_mapping_table (GST_ELEMENT (enc), opusenc_debug,
"Decoding mapping table", enc->n_channels,
enc->decoding_channel_mapping);
#endif
enc->channel_mapping_family = 1;
return;
}
/* For other cases, we use undefined, with the default trivial mapping */
/* More than 8 channels, if future mappings are added for those */
/* For other cases, we use undefined, with the default trivial mapping
and all mono streams */
GST_WARNING_OBJECT (enc, "Unknown mapping");
enc->channel_mapping_family = 255;
enc->n_stereo_streams = 0;
#undef MAPS
}
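To make the two mapping tables concrete, a worked example (not part of the patch): assume a hypothetical 3-channel input ordered FC, FL, FR, and take the Vorbis order for 3 channels as FL, FC, FR per the Vorbis I specification.

/* - the natural pair FL/FR sits at input indices 1,2 and is moved to
 *   the front so it can be coded as one coupled (stereo) stream:
 *     encoding_channel_mapping = { 1, 2, 0 }    (FL, FR, then FC)
 * - FC is appended as a mono stream, so n_stereo_streams = 1 and the
 *   encoder is created with 2 streams (1 coupled + 1 mono);
 * - decoding_channel_mapping maps each Vorbis position onto that
 *   coupled-first ordering:
 *     decoding_channel_mapping = { 0, 2, 1 }    (FL->0, FC->2, FR->1)
 */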
@ -489,7 +611,7 @@ gst_opus_enc_set_format (GstAudioEncoder * benc, GstAudioInfo * info)
enc->n_channels = GST_AUDIO_INFO_CHANNELS (info);
enc->sample_rate = GST_AUDIO_INFO_RATE (info);
gst_opus_enc_setup_channel_mapping (enc, info);
gst_opus_enc_setup_channel_mappings (enc, info);
GST_DEBUG_OBJECT (benc, "Setup with %d channels, %d Hz", enc->n_channels,
enc->sample_rate);
@ -514,17 +636,24 @@ gst_opus_enc_set_format (GstAudioEncoder * benc, GstAudioInfo * info)
static gboolean
gst_opus_enc_setup (GstOpusEnc * enc)
{
int error = OPUS_OK, n;
guint8 trivial_mapping[256];
int error = OPUS_OK;
GST_DEBUG_OBJECT (enc, "setup");
#ifndef GST_DISABLE_DEBUG
GST_DEBUG_OBJECT (enc,
"setup: %d Hz, %d channels, %d stereo streams, family %d",
enc->sample_rate, enc->n_channels, enc->n_stereo_streams,
enc->channel_mapping_family);
GST_INFO_OBJECT (enc, "Mapping tables built: %d channels, %d stereo streams",
enc->n_channels, enc->n_stereo_streams);
gst_opus_common_log_channel_mapping_table (GST_ELEMENT (enc), opusenc_debug,
"Encoding mapping table", enc->n_channels, enc->encoding_channel_mapping);
gst_opus_common_log_channel_mapping_table (GST_ELEMENT (enc), opusenc_debug,
"Decoding mapping table", enc->n_channels, enc->decoding_channel_mapping);
#endif
for (n = 0; n < 256; ++n)
trivial_mapping[n] = n;
enc->state =
opus_multistream_encoder_create (enc->sample_rate, enc->n_channels,
enc->n_channels, 0, trivial_mapping,
enc->state = opus_multistream_encoder_create (enc->sample_rate,
enc->n_channels, enc->n_channels - enc->n_stereo_streams,
enc->n_stereo_streams, enc->encoding_channel_mapping,
enc->audio_or_voip ? OPUS_APPLICATION_AUDIO : OPUS_APPLICATION_VOIP,
&error);
if (!enc->state || error != OPUS_OK)
@ -580,6 +709,75 @@ gst_opus_enc_sink_event (GstAudioEncoder * benc, GstEvent * event)
return FALSE;
}
static GstCaps *
gst_opus_enc_sink_getcaps (GstAudioEncoder * benc, GstCaps * filter)
{
GstOpusEnc *enc;
GstCaps *caps;
GstCaps *peercaps = NULL;
GstCaps *intersect = NULL;
guint i;
gboolean allow_multistream;
enc = GST_OPUS_ENC (benc);
GST_DEBUG_OBJECT (enc, "sink getcaps");
peercaps = gst_pad_peer_query_caps (GST_AUDIO_ENCODER_SRC_PAD (benc), filter);
if (!peercaps) {
GST_DEBUG_OBJECT (benc, "No peercaps, returning template sink caps");
return
gst_caps_copy (gst_pad_get_pad_template_caps
(GST_AUDIO_ENCODER_SINK_PAD (benc)));
}
intersect = gst_caps_intersect (peercaps,
gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SRC_PAD (benc)));
gst_caps_unref (peercaps);
if (gst_caps_is_empty (intersect))
return intersect;
allow_multistream = FALSE;
for (i = 0; i < gst_caps_get_size (intersect); i++) {
GstStructure *s = gst_caps_get_structure (intersect, i);
gboolean multistream;
if (gst_structure_get_boolean (s, "multistream", &multistream)) {
if (multistream) {
allow_multistream = TRUE;
}
} else {
allow_multistream = TRUE;
}
}
gst_caps_unref (intersect);
caps =
gst_caps_copy (gst_pad_get_pad_template_caps (GST_AUDIO_ENCODER_SINK_PAD
(benc)));
if (!allow_multistream) {
GValue range = { 0 };
g_value_init (&range, GST_TYPE_INT_RANGE);
gst_value_set_int_range (&range, 1, 2);
for (i = 0; i < gst_caps_get_size (caps); i++) {
GstStructure *s = gst_caps_get_structure (caps, i);
gst_structure_set_value (s, "channels", &range);
}
g_value_unset (&range);
}
if (filter) {
GstCaps *tmp = gst_caps_intersect_full (caps, filter,
GST_CAPS_INTERSECT_FIRST);
gst_caps_unref (caps);
caps = tmp;
}
GST_DEBUG_OBJECT (enc, "Returning caps: %" GST_PTR_FORMAT, caps);
return caps;
}
static GstFlowReturn
gst_opus_enc_encode (GstOpusEnc * enc, GstBuffer * buf)
{
@ -684,7 +882,8 @@ gst_opus_enc_handle_frame (GstAudioEncoder * benc, GstBuffer * buf)
enc->headers = NULL;
gst_opus_header_create_caps (&caps, &enc->headers, enc->n_channels,
enc->sample_rate, enc->channel_mapping_family, enc->channel_mapping,
enc->n_stereo_streams, enc->sample_rate, enc->channel_mapping_family,
enc->decoding_channel_mapping,
gst_tag_setter_get_tag_list (GST_TAG_SETTER (enc)));

@ -79,7 +79,9 @@ struct _GstOpusEnc {
GstTagList *tags;
guint8 channel_mapping_family;
guint8 channel_mapping[256];
guint8 encoding_channel_mapping[256];
guint8 decoding_channel_mapping[256];
guint8 n_stereo_streams;
};
struct _GstOpusEncClass {

@ -27,12 +27,17 @@
#include "gstopusheader.h"
static GstBuffer *
gst_opus_enc_create_id_buffer (gint nchannels, gint sample_rate,
guint8 channel_mapping_family, const guint8 * channel_mapping)
gst_opus_enc_create_id_buffer (gint nchannels, gint n_stereo_streams,
gint sample_rate, guint8 channel_mapping_family,
const guint8 * channel_mapping)
{
GstBuffer *buffer;
GstByteWriter bw;
g_return_val_if_fail (nchannels > 0 && nchannels < 256, NULL);
g_return_val_if_fail (n_stereo_streams >= 0, NULL);
g_return_val_if_fail (n_stereo_streams <= nchannels - n_stereo_streams, NULL);
gst_byte_writer_init (&bw);
/* See http://wiki.xiph.org/OggOpus */
@ -44,8 +49,8 @@ gst_opus_enc_create_id_buffer (gint nchannels, gint sample_rate,
gst_byte_writer_put_uint16_le (&bw, 0); /* output gain */
gst_byte_writer_put_uint8 (&bw, channel_mapping_family);
if (channel_mapping_family > 0) {
gst_byte_writer_put_uint8 (&bw, nchannels);
gst_byte_writer_put_uint8 (&bw, 0);
gst_byte_writer_put_uint8 (&bw, nchannels - n_stereo_streams);
gst_byte_writer_put_uint8 (&bw, n_stereo_streams);
gst_byte_writer_put_data (&bw, channel_mapping, nchannels);
}
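For reference, a sketch of the ID header the function above produces for a hypothetical stereo, mapping-family-1 stream (one coupled stream, mapping { 0, 1 }); the fields before the output gain are outside this hunk and taken from the OggOpus layout the code links to:

/* "OpusHead"          8 bytes   magic
 * version             1 byte    0x01
 * channel count       1 byte    0x02
 * pre-skip            2 bytes   little-endian
 * input sample rate   4 bytes   little-endian
 * output gain         2 bytes   little-endian (written as 0 above)
 * mapping family      1 byte    0x01
 * stream count        1 byte    nchannels - n_stereo_streams = 1
 * coupled count       1 byte    n_stereo_streams = 1
 * channel mapping     nchannels bytes: { 0x00, 0x01 }
 */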
@ -145,11 +150,38 @@ void
gst_opus_header_create_caps_from_headers (GstCaps ** caps, GSList ** headers,
GstBuffer * buf1, GstBuffer * buf2)
{
int n_streams, family;
gboolean multistream;
guint8 *data;
gsize size;
g_return_if_fail (caps);
g_return_if_fail (headers && !*headers);
g_return_if_fail (gst_buffer_get_size (buf1) >= 19);
data = gst_buffer_map (buf1, &size, NULL, GST_MAP_READ);
/* work out the number of streams */
family = data[18];
if (family == 0) {
n_streams = 1;
} else {
/* only included in the header for family > 0 */
if (size >= 20)
n_streams = data[19];
else {
g_warning ("family > 0 but header buffer size < 20");
gst_buffer_unmap (buf1, data, size);
return;
}
}
gst_buffer_unmap (buf1, data, size);
/* mark and put on caps */
*caps = gst_caps_from_string ("audio/x-opus");
multistream = n_streams > 1;
*caps = gst_caps_new_simple ("audio/x-opus",
"multistream", G_TYPE_BOOLEAN, multistream, NULL);
*caps = _gst_caps_set_buffer_array (*caps, "streamheader", buf1, buf2, NULL);
*headers = g_slist_prepend (*headers, buf2);
@ -158,7 +190,7 @@ gst_opus_header_create_caps_from_headers (GstCaps ** caps, GSList ** headers,
void
gst_opus_header_create_caps (GstCaps ** caps, GSList ** headers, gint nchannels,
gint sample_rate, guint8 channel_mapping_family,
gint n_stereo_streams, gint sample_rate, guint8 channel_mapping_family,
const guint8 * channel_mapping, const GstTagList * tags)
{
GstBuffer *buf1, *buf2;
@ -175,7 +207,7 @@ gst_opus_header_create_caps (GstCaps ** caps, GSList ** headers, gint nchannels,
/* create header buffers */
buf1 =
gst_opus_enc_create_id_buffer (nchannels, sample_rate,
gst_opus_enc_create_id_buffer (nchannels, n_stereo_streams, sample_rate,
channel_mapping_family, channel_mapping);
buf2 = gst_opus_enc_create_metadata_buffer (tags);

@ -28,7 +28,7 @@ G_BEGIN_DECLS
extern void gst_opus_header_create_caps_from_headers (GstCaps **caps, GSList **headers,
GstBuffer *id_header, GstBuffer *comment_header);
extern void gst_opus_header_create_caps (GstCaps **caps, GSList **headers,
gint nchannels, gint sample_rate,
gint nchannels, gint n_stereo_streams, gint sample_rate,
guint8 channel_mapping_family, const guint8 *channel_mapping,
const GstTagList *tags);
extern gboolean gst_opus_header_is_header (GstBuffer * buf,

@ -314,7 +314,7 @@ gst_opus_parse_parse_frame (GstBaseParse * base, GstBaseParseFrame * frame)
channel_mapping_family = 0;
channel_mapping[0] = 0;
channel_mapping[1] = 1;
gst_opus_header_create_caps (&caps, &parse->headers, channels, 0,
gst_opus_header_create_caps (&caps, &parse->headers, channels, 1, 0,
channel_mapping_family, channel_mapping, NULL);
}

ext/opus/gstrtpopusdepay.c (new file, 120 lines)

@ -0,0 +1,120 @@
/*
* Opus Depayloader Gst Element
*
* @author: Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <string.h>
#include <stdlib.h>
#include <gst/rtp/gstrtpbuffer.h>
#include "gstrtpopusdepay.h"
GST_DEBUG_CATEGORY_STATIC (rtpopusdepay_debug);
#define GST_CAT_DEFAULT (rtpopusdepay_debug)
static GstStaticPadTemplate gst_rtp_opus_depay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("application/x-rtp, "
"media = (string) \"audio\", "
"payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ","
"clock-rate = (int) 48000, "
"encoding-name = (string) \"X-GST-OPUS-DRAFT-SPITTKA-00\"")
);
static GstStaticPadTemplate gst_rtp_opus_depay_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("audio/x-opus")
);
static GstBuffer *gst_rtp_opus_depay_process (GstRTPBaseDepayload * depayload,
GstBuffer * buf);
static gboolean gst_rtp_opus_depay_setcaps (GstRTPBaseDepayload * depayload,
GstCaps * caps);
G_DEFINE_TYPE (GstRTPOpusDepay, gst_rtp_opus_depay,
GST_TYPE_RTP_BASE_DEPAYLOAD);
static void
gst_rtp_opus_depay_class_init (GstRTPOpusDepayClass * klass)
{
GstRTPBaseDepayloadClass *gstbasertpdepayload_class;
GstElementClass *element_class;
element_class = GST_ELEMENT_CLASS (klass);
gstbasertpdepayload_class = (GstRTPBaseDepayloadClass *) klass;
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_opus_depay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_opus_depay_sink_template));
gst_element_class_set_details_simple (element_class,
"RTP Opus packet depayloader", "Codec/Depayloader/Network/RTP",
"Extracts Opus audio from RTP packets",
"Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>");
gstbasertpdepayload_class->process = gst_rtp_opus_depay_process;
gstbasertpdepayload_class->set_caps = gst_rtp_opus_depay_setcaps;
GST_DEBUG_CATEGORY_INIT (rtpopusdepay_debug, "rtpopusdepay", 0,
"Opus RTP Depayloader");
}
static void
gst_rtp_opus_depay_init (GstRTPOpusDepay * rtpopusdepay)
{
}
static gboolean
gst_rtp_opus_depay_setcaps (GstRTPBaseDepayload * depayload, GstCaps * caps)
{
GstCaps *srccaps;
gboolean ret;
srccaps = gst_caps_new_empty_simple ("audio/x-opus");
ret = gst_pad_set_caps (GST_RTP_BASE_DEPAYLOAD_SRCPAD (depayload), srccaps);
GST_DEBUG_OBJECT (depayload,
"set caps on source: %" GST_PTR_FORMAT " (ret=%d)", srccaps, ret);
gst_caps_unref (srccaps);
depayload->clock_rate = 48000;
return ret;
}
static GstBuffer *
gst_rtp_opus_depay_process (GstRTPBaseDepayload * depayload, GstBuffer * buf)
{
GstBuffer *outbuf;
GstRTPBuffer rtpbuf = { NULL, };
gst_rtp_buffer_map (buf, GST_MAP_READ, &rtpbuf);
outbuf = gst_rtp_buffer_get_payload_buffer (&rtpbuf);
gst_rtp_buffer_unmap (&rtpbuf);
return outbuf;
}

@@ -0,0 +1,57 @@
/*
* Opus Depayloader Gst Element
*
* @author: Danilo Cesar Lemes de Paula <danilo.eu@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_RTP_OPUS_DEPAY_H__
#define __GST_RTP_OPUS_DEPAY_H__
#include <gst/gst.h>
#include <gst/rtp/gstrtpbasedepayload.h>
G_BEGIN_DECLS typedef struct _GstRTPOpusDepay GstRTPOpusDepay;
typedef struct _GstRTPOpusDepayClass GstRTPOpusDepayClass;
#define GST_TYPE_RTP_OPUS_DEPAY \
(gst_rtp_opus_depay_get_type())
#define GST_RTP_OPUS_DEPAY(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_OPUS_DEPAY,GstRTPOpusDepay))
#define GST_RTP_OPUS_DEPAY_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_OPUS_DEPAY,GstRTPOpusDepayClass))
#define GST_IS_RTP_OPUS_DEPAY(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_OPUS_DEPAY))
#define GST_IS_RTP_OPUS_DEPAY_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_OPUS_DEPAY))
struct _GstRTPOpusDepay
{
GstRTPBaseDepayload depayload;
};
struct _GstRTPOpusDepayClass
{
GstRTPBaseDepayloadClass parent_class;
};
GType gst_rtp_opus_depay_get_type (void);
G_END_DECLS
#endif /* __GST_RTP_OPUS_DEPAY_H__ */

ext/opus/gstrtpopuspay.c (new file, 137 lines)

@@ -0,0 +1,137 @@
/*
* Opus Payloader Gst Element
*
* @author: Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include <string.h>
#include <gst/rtp/gstrtpbuffer.h>
#include "gstrtpopuspay.h"
GST_DEBUG_CATEGORY_STATIC (rtpopuspay_debug);
#define GST_CAT_DEFAULT (rtpopuspay_debug)
static GstStaticPadTemplate gst_rtp_opus_pay_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("audio/x-opus, multistream = (boolean) FALSE")
);
static GstStaticPadTemplate gst_rtp_opus_pay_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("application/x-rtp, "
"media = (string) \"audio\", "
"payload = (int) " GST_RTP_PAYLOAD_DYNAMIC_STRING ", "
"clock-rate = (int) 48000, "
"encoding-name = (string) \"X-GST-OPUS-DRAFT-SPITTKA-00\"")
);
static gboolean gst_rtp_opus_pay_setcaps (GstRTPBasePayload * payload,
GstCaps * caps);
static GstFlowReturn gst_rtp_opus_pay_handle_buffer (GstRTPBasePayload *
payload, GstBuffer * buffer);
G_DEFINE_TYPE (GstRtpOPUSPay, gst_rtp_opus_pay, GST_TYPE_RTP_BASE_PAYLOAD);
static void
gst_rtp_opus_pay_class_init (GstRtpOPUSPayClass * klass)
{
GstRTPBasePayloadClass *gstbasertppayload_class;
GstElementClass *element_class;
gstbasertppayload_class = (GstRTPBasePayloadClass *) klass;
element_class = GST_ELEMENT_CLASS (klass);
gstbasertppayload_class->set_caps = gst_rtp_opus_pay_setcaps;
gstbasertppayload_class->handle_buffer = gst_rtp_opus_pay_handle_buffer;
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_opus_pay_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_rtp_opus_pay_sink_template));
gst_element_class_set_details_simple (element_class,
"RTP Opus payloader",
"Codec/Payloader/Network/RTP",
"Puts Opus audio in RTP packets",
"Danilo Cesar Lemes de Paula <danilo.cesar@collabora.co.uk>");
GST_DEBUG_CATEGORY_INIT (rtpopuspay_debug, "rtpopuspay", 0,
"Opus RTP Payloader");
}
static void
gst_rtp_opus_pay_init (GstRtpOPUSPay * rtpopuspay)
{
}
static gboolean
gst_rtp_opus_pay_setcaps (GstRTPBasePayload * payload, GstCaps * caps)
{
gboolean res;
gchar *capsstr;
capsstr = gst_caps_to_string (caps);
gst_rtp_base_payload_set_options (payload, "audio", FALSE,
"X-GST-OPUS-DRAFT-SPITTKA-00", 48000);
res =
gst_rtp_base_payload_set_outcaps (payload, "caps", G_TYPE_STRING, capsstr,
NULL);
g_free (capsstr);
return res;
}
static GstFlowReturn
gst_rtp_opus_pay_handle_buffer (GstRTPBasePayload * basepayload,
GstBuffer * buffer)
{
GstRTPBuffer rtpbuf = { NULL, };
GstBuffer *outbuf;
gsize size;
gpointer *data;
/* Copy data and timestamp to a new output buffer
* FIXME : Don't we have a convenience function for this ? */
data = gst_buffer_map (buffer, &size, NULL, GST_MAP_READ);
outbuf = gst_rtp_buffer_new_copy_data (data, size);
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buffer);
/* Unmap and free input buffer */
gst_buffer_unmap (buffer, data, size);
gst_buffer_unref (buffer);
/* Remove marker from RTP buffer */
gst_rtp_buffer_map (outbuf, GST_MAP_WRITE, &rtpbuf);
gst_rtp_buffer_set_marker (&rtpbuf, FALSE);
gst_rtp_buffer_unmap (&rtpbuf);
/* Push out */
return gst_rtp_base_payload_push (basepayload, outbuf);
}
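
For context, a hypothetical round-trip pipeline exercising the two new elements. The factory names "rtpopuspay" and "rtpopusdepay", and the availability of opusenc/opusdec, are assumptions about plugin registration; that code is not part of this diff.

#include <gst/gst.h>

int
main (int argc, char **argv)
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;
  GError *error = NULL;

  gst_init (&argc, &argv);

  pipeline = gst_parse_launch ("audiotestsrc num-buffers=100 ! audioconvert ! "
      "audioresample ! opusenc ! rtpopuspay ! rtpopusdepay ! opusdec ! "
      "fakesink", &error);
  if (pipeline == NULL) {
    g_printerr ("parse error: %s\n", error->message);
    g_clear_error (&error);
    return 1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_EOS | GST_MESSAGE_ERROR);
  if (msg)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}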

ext/opus/gstrtpopuspay.h (new file, 58 lines)

@@ -0,0 +1,58 @@
/*
* Opus Payloader Gst Element
*
* @author: Danilo Cesar Lemes de Paula <danilo.eu@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_RTP_OPUS_PAY_H__
#define __GST_RTP_OPUS_PAY_H__
#include <gst/gst.h>
#include <gst/rtp/gstrtpbasepayload.h>
G_BEGIN_DECLS
#define GST_TYPE_RTP_OPUS_PAY \
(gst_rtp_opus_pay_get_type())
#define GST_RTP_OPUS_PAY(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_OPUS_PAY,GstRtpOPUSPay))
#define GST_RTP_OPUS_PAY_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_OPUS_PAY,GstRtpOPUSPayClass))
#define GST_IS_RTP_OPUS_PAY(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_OPUS_PAY))
#define GST_IS_RTP_OPUS_PAY_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_OPUS_PAY))
typedef struct _GstRtpOPUSPay GstRtpOPUSPay;
typedef struct _GstRtpOPUSPayClass GstRtpOPUSPayClass;
struct _GstRtpOPUSPay
{
GstRTPBasePayload payload;
};
struct _GstRtpOPUSPayClass
{
GstRTPBasePayloadClass parent_class;
};
GType gst_rtp_opus_pay_get_type (void);
G_END_DECLS
#endif /* __GST_RTP_OPUS_PAY_H__ */

@@ -277,8 +277,7 @@ rsn_dec_change_state (GstElement * element, GstStateChange transition)
new_child = gst_element_factory_make ("autoconvert", NULL);
decoder_factories = klass->get_decoder_factories (klass);
g_object_set (G_OBJECT (new_child), "initial-identity", TRUE,
"factories", decoder_factories, NULL);
g_object_set (G_OBJECT (new_child), "factories", decoder_factories, NULL);
if (new_child == NULL || !rsn_dec_set_child (self, new_child))
ret = GST_STATE_CHANGE_FAILURE;
break;

@@ -454,9 +454,6 @@ gst_voaacenc_handle_frame (GstAudioEncoder * benc, GstBuffer * buf)
g_return_val_if_fail (voaacenc->handle, GST_FLOW_NOT_NEGOTIATED);
if (voaacenc->rate == 0 || voaacenc->channels == 0)
goto not_negotiated;
/* we don't deal with squeezing remnants, so simply discard those */
if (G_UNLIKELY (buf == NULL)) {
GST_DEBUG_OBJECT (benc, "no data");
@@ -508,13 +505,6 @@ exit:
return ret;
/* ERRORS */
not_negotiated:
{
GST_ELEMENT_ERROR (voaacenc, STREAM, TYPE_NOT_FOUND,
(NULL), ("unknown type"));
ret = GST_FLOW_NOT_NEGOTIATED;
goto exit;
}
encode_failed:
{
GST_ELEMENT_ERROR (voaacenc, STREAM, ENCODE, (NULL), ("encode failed"));

@@ -281,11 +281,6 @@ gst_voamrwbenc_handle_frame (GstAudioEncoder * benc, GstBuffer * buffer)
g_return_val_if_fail (amrwbenc->handle, GST_FLOW_NOT_NEGOTIATED);
if (amrwbenc->rate == 0 || amrwbenc->channels == 0) {
ret = GST_FLOW_NOT_NEGOTIATED;
goto done;
}
/* we don't deal with squeezing remnants, so simply discard those */
if (G_UNLIKELY (buffer == NULL)) {
GST_DEBUG_OBJECT (amrwbenc, "no data");

@@ -51,14 +51,37 @@ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-xvid, "
GST_STATIC_CAPS ("video/mpeg, "
"mpegversion = (int) 4, "
"systemstream = (boolean) FALSE, "
"width = (int) [ 0, MAX ], "
"height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ]; "
"height = (int) [ 0, MAX ], "
"framerate = (fraction) [ 0/1, MAX ], "
"profile = (string) simple, "
"level = (string) { 0, 1, 2, 3, 4a, 5, 6 };"
"video/mpeg, "
"mpegversion = (int) 4, "
"systemstream = (boolean) FALSE, "
"width = (int) [ 0, MAX ], "
"height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ]")
"height = (int) [ 0, MAX ], "
"framerate = (fraction) [ 0/1, MAX ], "
"profile = (string) advanced-real-time-simple, "
"level = (string) { 1, 2, 3, 4 };"
"video/mpeg, "
"mpegversion = (int) 4, "
"systemstream = (boolean) FALSE, "
"width = (int) [ 0, MAX ], "
"height = (int) [ 0, MAX ], "
"framerate = (fraction) [ 0/1, MAX ], "
"profile = (string) advanced-simple, "
"level = (string) { 0, 1, 2, 3, 4 };"
"video/mpeg, "
"mpegversion = (int) 4, "
"systemstream = (boolean) FALSE, "
"width = (int) [ 0, MAX ], " "height = (int) [ 0, MAX ]; "
"video/x-xvid, "
"width = (int) [ 0, MAX ], "
"height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ];")
);
@@ -106,6 +129,9 @@ gst_xvidenc_profile_get_type (void)
{XVID_PROFILE_S_L1, "S_L1", "Simple profile, L1"},
{XVID_PROFILE_S_L2, "S_L2", "Simple profile, L2"},
{XVID_PROFILE_S_L3, "S_L3", "Simple profile, L3"},
{XVID_PROFILE_S_L4a, "S_L4a", "Simple profile, L4a"},
{XVID_PROFILE_S_L5, "S_L5", "Simple profile, L5"},
{XVID_PROFILE_S_L6, "S_L6", "Simple profile, L6"},
{XVID_PROFILE_ARTS_L1, "ARTS_L1",
"Advanced real-time simple profile, L1"},
{XVID_PROFILE_ARTS_L2, "ARTS_L2",
@@ -578,11 +604,97 @@ gst_xvidenc_setup (GstXvidEnc * xvidenc)
xvid_enc_create_t xenc;
xvid_enc_plugin_t xplugin[2];
gint ret;
GstCaps *allowed_caps;
gint profile = -1;
/* Negotiate profile/level with downstream */
allowed_caps = gst_pad_get_allowed_caps (xvidenc->srcpad);
if (allowed_caps && !gst_caps_is_empty (allowed_caps)) {
const gchar *profile_str, *level_str;
allowed_caps = gst_caps_make_writable (allowed_caps);
gst_caps_truncate (allowed_caps);
profile_str =
gst_structure_get_string (gst_caps_get_structure (allowed_caps, 0),
"profile");
level_str =
gst_structure_get_string (gst_caps_get_structure (allowed_caps, 0),
"level");
if (profile_str) {
if (g_str_equal (profile_str, "simple")) {
if (!level_str) {
profile = XVID_PROFILE_S_L0;
} else if (g_str_equal (level_str, "0")) {
profile = XVID_PROFILE_S_L0;
} else if (g_str_equal (level_str, "1")) {
profile = XVID_PROFILE_S_L1;
} else if (g_str_equal (level_str, "2")) {
profile = XVID_PROFILE_S_L2;
} else if (g_str_equal (level_str, "3")) {
profile = XVID_PROFILE_S_L3;
} else if (g_str_equal (level_str, "4a")) {
profile = XVID_PROFILE_S_L4a;
} else if (g_str_equal (level_str, "5")) {
profile = XVID_PROFILE_S_L5;
} else if (g_str_equal (level_str, "6")) {
profile = XVID_PROFILE_S_L6;
} else {
GST_ERROR_OBJECT (xvidenc,
"Invalid profile/level combination (%s %s)", profile_str,
level_str);
}
} else if (g_str_equal (profile_str, "advanced-real-time-simple")) {
if (!level_str) {
profile = XVID_PROFILE_ARTS_L1;
} else if (g_str_equal (level_str, "1")) {
profile = XVID_PROFILE_ARTS_L1;
} else if (g_str_equal (level_str, "2")) {
profile = XVID_PROFILE_ARTS_L2;
} else if (g_str_equal (level_str, "3")) {
profile = XVID_PROFILE_ARTS_L3;
} else if (g_str_equal (level_str, "4")) {
profile = XVID_PROFILE_ARTS_L4;
} else {
GST_ERROR_OBJECT (xvidenc,
"Invalid profile/level combination (%s %s)", profile_str,
level_str);
}
} else if (g_str_equal (profile_str, "advanced-simple")) {
if (!level_str) {
profile = XVID_PROFILE_AS_L0;
} else if (g_str_equal (level_str, "0")) {
profile = XVID_PROFILE_AS_L0;
} else if (g_str_equal (level_str, "1")) {
profile = XVID_PROFILE_AS_L1;
} else if (g_str_equal (level_str, "2")) {
profile = XVID_PROFILE_AS_L2;
} else if (g_str_equal (level_str, "3")) {
profile = XVID_PROFILE_AS_L3;
} else if (g_str_equal (level_str, "4")) {
profile = XVID_PROFILE_AS_L4;
} else {
GST_ERROR_OBJECT (xvidenc,
"Invalid profile/level combination (%s %s)", profile_str,
level_str);
}
} else {
GST_ERROR_OBJECT (xvidenc, "Invalid profile (%s)", profile_str);
}
}
}
if (allowed_caps)
gst_caps_unref (allowed_caps);
if (profile != -1) {
xvidenc->profile = profile;
g_object_notify (G_OBJECT (xvidenc), "profile");
}
/* see xvid.h for the meaning of all this. */
gst_xvid_init_struct (xenc);
xenc.profile = xvidenc->profile;
xenc.profile = xvidenc->used_profile = xvidenc->profile;
xenc.width = xvidenc->width;
xenc.height = xvidenc->height;
xenc.max_bframes = xvidenc->max_bframes;
@@ -783,6 +895,78 @@ gst_xvidenc_setcaps (GstPad * pad, GstCaps * vscaps)
/* just to be sure */
gst_pad_fixate_caps (xvidenc->srcpad, new_caps);
if (xvidenc->used_profile != 0) {
switch (xvidenc->used_profile) {
case XVID_PROFILE_S_L0:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
"level", G_TYPE_STRING, "0", NULL);
break;
case XVID_PROFILE_S_L1:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
"level", G_TYPE_STRING, "1", NULL);
break;
case XVID_PROFILE_S_L2:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
"level", G_TYPE_STRING, "2", NULL);
break;
case XVID_PROFILE_S_L3:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
"level", G_TYPE_STRING, "3", NULL);
break;
case XVID_PROFILE_S_L4a:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
"level", G_TYPE_STRING, "4a", NULL);
break;
case XVID_PROFILE_S_L5:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
"level", G_TYPE_STRING, "5", NULL);
break;
case XVID_PROFILE_S_L6:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING, "simple",
"level", G_TYPE_STRING, "6", NULL);
break;
case XVID_PROFILE_ARTS_L1:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
"advanced-real-time-simple", "level", G_TYPE_STRING, "1", NULL);
break;
case XVID_PROFILE_ARTS_L2:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
"advanced-real-time-simple", "level", G_TYPE_STRING, "2", NULL);
break;
case XVID_PROFILE_ARTS_L3:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
"advanced-real-time-simple", "level", G_TYPE_STRING, "3", NULL);
break;
case XVID_PROFILE_ARTS_L4:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
"advanced-real-time-simple", "level", G_TYPE_STRING, "4", NULL);
break;
case XVID_PROFILE_AS_L0:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
"advanced-simple", "level", G_TYPE_STRING, "0", NULL);
break;
case XVID_PROFILE_AS_L1:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
"advanced-simple", "level", G_TYPE_STRING, "1", NULL);
break;
case XVID_PROFILE_AS_L2:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
"advanced-simple", "level", G_TYPE_STRING, "2", NULL);
break;
case XVID_PROFILE_AS_L3:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
"advanced-simple", "level", G_TYPE_STRING, "3", NULL);
break;
case XVID_PROFILE_AS_L4:
gst_caps_set_simple (new_caps, "profile", G_TYPE_STRING,
"advanced-simple", "level", G_TYPE_STRING, "4", NULL);
break;
default:
g_assert_not_reached ();
break;
}
}
/* src pad should accept anyway */
ret = gst_pad_set_caps (xvidenc->srcpad, new_caps);
gst_caps_unref (new_caps);
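
With the negotiation above in place, downstream caps can pin the encoder to a specific profile/level. A small sketch of building such a constraint; applying it through a capsfilter is an assumed, typical usage, not something this patch adds:

static void
constrain_xvidenc_output (GstElement * capsfilter)
{
  /* caps matching the simple-profile branch of the new src template */
  GstCaps *caps = gst_caps_new_simple ("video/mpeg",
      "mpegversion", G_TYPE_INT, 4,
      "systemstream", G_TYPE_BOOLEAN, FALSE,
      "profile", G_TYPE_STRING, "simple",
      "level", G_TYPE_STRING, "2", NULL);

  g_object_set (capsfilter, "caps", caps, NULL);
  gst_caps_unref (caps);
}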

@@ -64,6 +64,7 @@ struct _GstXvidEnc {
/* encoding profile */
gint profile;
gint used_profile;
/* quantizer type; h263, MPEG */
gint quant_type;

@@ -20,8 +20,6 @@ libgstbasecamerabinsrc_@GST_MAJORMINOR@_la_CFLAGS = \
-DGST_USE_UNSTABLE_API \
$(GST_CFLAGS)
libgstbasecamerabinsrc_@GST_MAJORMINOR@_la_LIBADD = \
$(top_builddir)/gst-libs/gst/interfaces/libgstphotography-@GST_MAJORMINOR@.la \
$(GST_PLUGINS_BASE_LIBS) -lgstvideo-$(GST_MAJORMINOR) -lgstinterfaces-$(GST_MAJORMINOR) \
-lgstapp-$(GST_MAJORMINOR) $(GST_BASE_LIBS) $(GST_LIBS)
libgstbasecamerabinsrc_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIB_LDFLAGS) $(GST_ALL_LDFLAGS) $(GST_LT_LDFLAGS)
@@ -35,7 +33,6 @@ Android.mk: Makefile.am
-:LDFLAGS $(libgstbasecamerabinsrc_@GST_MAJORMINOR@_la_LDFLAGS) \
$(libgstbasecamerabinsrc_@GST_MAJORMINOR@_la_LIBADD) \
-ldl \
-:LIBFILTER_STATIC gstphotography-@GST_MAJORMINOR@ \
-:HEADER_TARGET gstreamer-@GST_MAJORMINOR@/gst/basecamerabinsrc \
-:HEADERS $(libgstbasecamerabinsrcinclude_HEADERS) \
-:PASSTHROUGH LOCAL_ARM_MODE:=arm \

@@ -111,57 +111,6 @@ GST_STATIC_PAD_TEMPLATE (GST_BASE_CAMERA_SRC_VIDEO_PAD_NAME,
* be implementing the interface..
*/
/**
* gst_base_camera_src_get_photography:
* @self: the camerasrc bin
*
* Get object implementing photography interface, if there is one. Otherwise
* returns NULL.
*/
GstPhotography *
gst_base_camera_src_get_photography (GstBaseCameraSrc * self)
{
GstElement *elem;
if (GST_IS_PHOTOGRAPHY (self)) {
elem = GST_ELEMENT (self);
} else {
elem = gst_bin_get_by_interface (GST_BIN (self), GST_TYPE_PHOTOGRAPHY);
}
if (elem) {
return GST_PHOTOGRAPHY (elem);
}
return NULL;
}
/**
* gst_base_camera_src_get_colorbalance:
* @self: the camerasrc bin
*
* Get object implementing colorbalance interface, if there is one. Otherwise
* returns NULL.
*/
GstColorBalance *
gst_base_camera_src_get_color_balance (GstBaseCameraSrc * self)
{
GstElement *elem;
if (GST_IS_COLOR_BALANCE (self)) {
elem = GST_ELEMENT (self);
} else {
elem = gst_bin_get_by_interface (GST_BIN (self), GST_TYPE_COLOR_BALANCE);
}
if (elem) {
return GST_COLOR_BALANCE (self);
}
return NULL;
}
/**
* gst_base_camera_src_set_mode:
* @self: the camerasrc bin

@@ -1,6 +1,7 @@
/*
* GStreamer
* Copyright (C) 2010 Texas Instruments, Inc
* Copyright (C) 2011 Thiago Santos <thiago.sousa.santos@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@@ -29,8 +30,6 @@
#include <gst/gst.h>
#include <gst/gstbin.h>
#include <gst/interfaces/photography.h>
#include <gst/video/colorbalance.h>
#include "gstcamerabin-enum.h"
#include "gstcamerabinpreview.h"
@@ -131,9 +130,6 @@ struct _GstBaseCameraSrcClass
#define MAX_ZOOM 10.0f
#define ZOOM_1X MIN_ZOOM
GstPhotography * gst_base_camera_src_get_photography (GstBaseCameraSrc *self);
GstColorBalance * gst_base_camera_src_get_color_balance (GstBaseCameraSrc *self);
gboolean gst_base_camera_src_set_mode (GstBaseCameraSrc *self, GstCameraBinMode mode);
void gst_base_camera_src_setup_zoom (GstBaseCameraSrc * self);
void gst_base_camera_src_setup_preview (GstBaseCameraSrc * self, GstCaps * preview_caps);

@@ -1,13 +1,16 @@
lib_LTLIBRARIES = libgstcodecparsers-@GST_MAJORMINOR@.la
libgstcodecparsers_@GST_MAJORMINOR@_la_SOURCES = \
gstmpegvideoparser.c gsth264parser.c gstvc1parser.c
gstmpegvideoparser.c gsth264parser.c gstvc1parser.c gstmpeg4parser.c \
parserutils.c
libgstcodecparsers_@GST_MAJORMINOR@includedir = \
$(includedir)/gstreamer-@GST_MAJORMINOR@/gst/codecparsers
noinst_HEADERS = parserutils.h
libgstcodecparsers_@GST_MAJORMINOR@include_HEADERS = \
gstmpegvideoparser.h gsth264parser.h gstvc1parser.h
gstmpegvideoparser.h gsth264parser.h gstvc1parser.h gstmpeg4parser.h
libgstcodecparsers_@GST_MAJORMINOR@_la_CFLAGS = \
$(GST_PLUGINS_BAD_CFLAGS) \

@@ -94,31 +94,31 @@ GST_DEBUG_CATEGORY (h264_parser_debug);
#define GST_CAT_DEFAULT h264_parser_debug
/**** Default scaling_lists according to Table 7-2 *****/
const guint8 default_4x4_intra[16] = {
static const guint8 default_4x4_intra[16] = {
6, 13, 13, 20, 20, 20, 28, 28, 28, 28, 32, 32,
32, 37, 37, 42
};
const guint8 default_4x4_inter[16] = {
static const guint8 default_4x4_inter[16] = {
10, 14, 14, 20, 20, 20, 24, 24, 24, 24, 27, 27,
27, 30, 30, 34
};
const guint8 default_8x8_intra[64] = {
static const guint8 default_8x8_intra[64] = {
6, 10, 10, 13, 11, 13, 16, 16, 16, 16, 18, 18,
18, 18, 18, 23, 23, 23, 23, 23, 23, 25, 25, 25, 25, 25, 25, 25, 27, 27, 27,
27, 27, 27, 27, 27, 29, 29, 29, 29, 29, 29, 29, 31, 31, 31, 31, 31, 31, 33,
33, 33, 33, 33, 36, 36, 36, 36, 38, 38, 38, 40, 40, 42
};
const guint8 default_8x8_inter[64] = {
static const guint8 default_8x8_inter[64] = {
9, 13, 13, 15, 13, 15, 17, 17, 17, 17, 19, 19,
19, 19, 19, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 22, 24, 24, 24,
24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 25, 27, 27, 27, 27, 27, 27, 28,
28, 28, 28, 28, 30, 30, 30, 30, 32, 32, 32, 33, 33, 35
};
const guint8 zigzag_8x8[64] = {
static const guint8 zigzag_8x8[64] = {
0, 1, 8, 16, 9, 2, 3, 10,
17, 24, 32, 25, 18, 11, 4, 5,
12, 19, 26, 33, 40, 48, 41, 34,
@@ -129,7 +129,7 @@ const guint8 zigzag_8x8[64] = {
53, 60, 61, 54, 47, 55, 62, 63
};
const guint8 zigzag_4x4[16] = {
static const guint8 zigzag_4x4[16] = {
0, 1, 4, 8,
5, 2, 3, 6,
9, 12, 13, 10,
@@ -743,58 +743,59 @@ error:
}
static gboolean
slice_parse_ref_pic_list_reordering (GstH264SliceHdr * slice, NalReader * nr)
slice_parse_ref_pic_list_modification_1 (GstH264SliceHdr * slice,
NalReader * nr, guint list)
{
GST_DEBUG ("parsing \"Reference picture list reordering\"");
GstH264RefPicListModification *entries;
guint8 *ref_pic_list_modification_flag;
guint32 modification_of_pic_nums_idc;
guint i = 0;
if (!GST_H264_IS_I_SLICE (slice) && !GST_H264_IS_SI_SLICE (slice)) {
guint8 ref_pic_list_reordering_flag_l0;
guint32 reordering_of_pic_nums_idc;
READ_UINT8 (nr, ref_pic_list_reordering_flag_l0, 1);
if (ref_pic_list_reordering_flag_l0)
do {
READ_UE (nr, reordering_of_pic_nums_idc);
if (reordering_of_pic_nums_idc == 0 || reordering_of_pic_nums_idc == 1) {
guint32 abs_diff_pic_num_minus1 G_GNUC_UNUSED;
READ_UE_ALLOWED (nr, abs_diff_pic_num_minus1, 0,
slice->max_pic_num - 1);
} else if (reordering_of_pic_nums_idc == 2) {
guint32 long_term_pic_num;
READ_UE (nr, long_term_pic_num);
}
} while (reordering_of_pic_nums_idc != 3);
if (list == 0) {
entries = slice->ref_pic_list_modification_l0;
ref_pic_list_modification_flag = &slice->ref_pic_list_modification_flag_l0;
} else {
entries = slice->ref_pic_list_modification_l1;
ref_pic_list_modification_flag = &slice->ref_pic_list_modification_flag_l1;
}
if (GST_H264_IS_B_SLICE (slice)) {
guint8 ref_pic_list_reordering_flag_l1;
guint32 reordering_of_pic_nums_idc;
READ_UINT8 (nr, ref_pic_list_reordering_flag_l1, 1);
if (ref_pic_list_reordering_flag_l1)
do {
READ_UE (nr, reordering_of_pic_nums_idc);
if (reordering_of_pic_nums_idc == 0 || reordering_of_pic_nums_idc == 1) {
guint32 abs_diff_num_minus1;
READ_UE (nr, abs_diff_num_minus1);
} else if (reordering_of_pic_nums_idc == 2) {
guint32 long_term_pic_num;
READ_UE (nr, long_term_pic_num);
}
} while (reordering_of_pic_nums_idc != 3);
READ_UINT8 (nr, *ref_pic_list_modification_flag, 1);
if (*ref_pic_list_modification_flag) {
do {
READ_UE (nr, modification_of_pic_nums_idc);
if (modification_of_pic_nums_idc == 0 ||
modification_of_pic_nums_idc == 1) {
READ_UE_ALLOWED (nr, entries[i].value.abs_diff_pic_num_minus1, 0,
slice->max_pic_num - 1);
} else if (modification_of_pic_nums_idc == 2) {
READ_UE (nr, entries[i].value.long_term_pic_num);
}
} while (modification_of_pic_nums_idc != 3);
}
return TRUE;
error:
GST_WARNING ("error parsing \"Reference picture list reordering\"");
GST_WARNING ("error parsing \"Reference picture list %u modification\"",
list);
return FALSE;
}
static gboolean
slice_parse_ref_pic_list_modification (GstH264SliceHdr * slice, NalReader * nr)
{
if (!GST_H264_IS_I_SLICE (slice) && !GST_H264_IS_SI_SLICE (slice)) {
if (!slice_parse_ref_pic_list_modification_1 (slice, nr, 0))
return FALSE;
}
if (GST_H264_IS_B_SLICE (slice)) {
if (!slice_parse_ref_pic_list_modification_1 (slice, nr, 1))
return FALSE;
}
return TRUE;
}
static gboolean
gst_h264_slice_parse_dec_ref_pic_marking (GstH264SliceHdr * slice,
GstH264NalUnit * nalu, NalReader * nr)
@@ -1090,6 +1091,7 @@ gst_h264_parser_parse_pic_timing (GstH264NalParser * nalparser,
guint8 num_clock_num_ts;
guint i;
tim->pic_struct_present_flag = TRUE;
READ_UINT8 (nr, tim->pic_struct, 4);
CHECK_ALLOWED ((gint8) tim->pic_struct, 0, 8);
@@ -1149,22 +1151,28 @@ gst_h264_nal_parser_free (GstH264NalParser * nalparser)
}
/**
* gst_h264_parser_identify_nalu:
* gst_h264_parser_identify_nalu_unchecked:
* @nalparser: a #GstH264NalParser
* @data: The data to parse
* @offset: the offset from which to parse @data
* @size: the size of @data
* @nalu: The #GstH264NalUnit where to store parsed nal headers
*
* Parses @data and fills @nalu from the next nalu data from @data
* Parses @data and fills @nalu with the next NAL unit found in @data.
*
* This differs from @gst_h264_parser_identify_nalu in that it doesn't
* check whether the packet is complete or not.
*
* Note: Only use this function if you already know the provided @data
* is a complete NALU, else use @gst_h264_parser_identify_nalu.
*
* Returns: a #GstH264ParserResult
*/
GstH264ParserResult
gst_h264_parser_identify_nalu (GstH264NalParser * nalparser,
gst_h264_parser_identify_nalu_unchecked (GstH264NalParser * nalparser,
const guint8 * data, guint offset, gsize size, GstH264NalUnit * nalu)
{
gint off1, off2;
gint off1;
if (size < offset + 4) {
GST_DEBUG ("Can't parse, buffer has too small size %" G_GSIZE_FORMAT
@@ -1187,12 +1195,14 @@ gst_h264_parser_identify_nalu (GstH264NalParser * nalparser,
nalu->valid = TRUE;
nalu->sc_offset = offset + off1;
/* sc might have 2 or 3 0-bytes */
if (nalu->sc_offset > 0 && data[nalu->sc_offset - 1] == 00)
nalu->sc_offset--;
nalu->offset = offset + off1 + 3;
nalu->data = (guint8 *) data;
set_nalu_datas (nalu);
if (nalu->type == GST_H264_NAL_SEQ_END ||
@@ -1202,6 +1212,37 @@ gst_h264_parser_identify_nalu (GstH264NalParser * nalparser,
return GST_H264_PARSER_OK;
}
nalu->size = size - nalu->offset;
return GST_H264_PARSER_OK;
}
/**
* gst_h264_parser_identify_nalu:
* @nalparser: a #GstH264NalParser
* @data: The data to parse
* @offset: the offset from which to parse @data
* @size: the size of @data
* @nalu: The #GstH264NalUnit where to store parsed nal headers
*
* Parses @data and fills @nalu with the next NAL unit found in @data.
*
* Returns: a #GstH264ParserResult
*/
GstH264ParserResult
gst_h264_parser_identify_nalu (GstH264NalParser * nalparser,
const guint8 * data, guint offset, gsize size, GstH264NalUnit * nalu)
{
GstH264ParserResult res;
gint off2;
res =
gst_h264_parser_identify_nalu_unchecked (nalparser, data, offset, size,
nalu);
if (res != GST_H264_PARSER_OK || nalu->size == 0)
goto beach;
off2 = scan_for_start_codes (data + nalu->offset, size - nalu->offset);
if (off2 < 0) {
GST_DEBUG ("Nal start %d, No end found", nalu->offset);
@@ -1217,9 +1258,12 @@ gst_h264_parser_identify_nalu (GstH264NalParser * nalparser,
return GST_H264_PARSER_BROKEN_DATA;
GST_DEBUG ("Complete nal found. Off: %d, Size: %d", nalu->offset, nalu->size);
return GST_H264_PARSER_OK;
beach:
return res;
}
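
For context, a sketch of driving the checked variant over an Annex-B byte stream; the offset-advance step reflects typical usage and is an assumption, not code from this patch:

static void
dump_nal_units (GstH264NalParser * parser, const guint8 * data, gsize size)
{
  GstH264NalUnit nalu;
  guint offset = 0;

  /* walk the buffer one complete NAL unit at a time */
  while (gst_h264_parser_identify_nalu (parser, data, offset, size,
          &nalu) == GST_H264_PARSER_OK) {
    g_print ("NAL type %d at offset %u, size %u\n",
        nalu.type, nalu.offset, (guint) nalu.size);
    offset = nalu.offset + nalu.size;
  }
}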
/**
* gst_h264_parser_identify_nalu_avc:
* @nalparser: a #GstH264NalParser
@@ -1781,7 +1825,7 @@ gst_h264_parser_parse_slice_hdr (GstH264NalParser * nalparser,
}
}
if (!slice_parse_ref_pic_list_reordering (slice, &nr))
if (!slice_parse_ref_pic_list_modification (slice, &nr))
goto error;
if ((pps->weighted_pred_flag && (GST_H264_IS_P_SLICE (slice)
@@ -1864,6 +1908,9 @@ gst_h264_parser_parse_sei (GstH264NalParser * nalparser, GstH264NalUnit * nalu,
nal_reader_init (&nr, nalu->data + nalu->offset + 1, nalu->size - 1);
/* init */
memset (sei, 0, sizeof (*sei));
sei->payloadType = 0;
do {
READ_UINT8 (&nr, payload_type_byte, 8);

@@ -168,24 +168,25 @@ typedef enum
GST_H264_S_SI_SLICE = 9
} GstH264SliceType;
typedef struct _GstH264NalParser GstH264NalParser;
typedef struct _GstH264NalParser GstH264NalParser;
typedef struct _GstH264NalUnit GstH264NalUnit;
typedef struct _GstH264NalUnit GstH264NalUnit;
typedef struct _GstH264SPS GstH264SPS;
typedef struct _GstH264PPS GstH264PPS;
typedef struct _GstH264HRDParams GstH264HRDParams;
typedef struct _GstH264VUIParams GstH264VUIParams;
typedef struct _GstH264SPS GstH264SPS;
typedef struct _GstH264PPS GstH264PPS;
typedef struct _GstH264HRDParams GstH264HRDParams;
typedef struct _GstH264VUIParams GstH264VUIParams;
typedef struct _GstH264DecRefPicMarking GstH264DecRefPicMarking;
typedef struct _GstH264RefPicMarking GstH264RefPicMarking;
typedef struct _GstH264PredWeightTable GstH264PredWeightTable;
typedef struct _GstH264SliceHdr GstH264SliceHdr;
typedef struct _GstH264RefPicListModification GstH264RefPicListModification;
typedef struct _GstH264DecRefPicMarking GstH264DecRefPicMarking;
typedef struct _GstH264RefPicMarking GstH264RefPicMarking;
typedef struct _GstH264PredWeightTable GstH264PredWeightTable;
typedef struct _GstH264SliceHdr GstH264SliceHdr;
typedef struct _GstH264ClockTimestamp GstH264ClockTimestamp;
typedef struct _GstH264PicTiming GstH264PicTiming;
typedef struct _GstH264BufferingPeriod GstH264BufferingPeriod;
typedef struct _GstH264SEIMessage GstH264SEIMessage;
typedef struct _GstH264ClockTimestamp GstH264ClockTimestamp;
typedef struct _GstH264PicTiming GstH264PicTiming;
typedef struct _GstH264BufferingPeriod GstH264BufferingPeriod;
typedef struct _GstH264SEIMessage GstH264SEIMessage;
/**
* GstH264NalUnit:
@@ -485,6 +486,18 @@ struct _GstH264PPS
gboolean valid;
};
struct _GstH264RefPicListModification
{
guint8 modification_of_pic_nums_idc;
union
{
/* if modification_of_pic_nums_idc == 0 || 1 */
guint32 abs_diff_pic_num_minus1;
/* if modification_of_pic_nums_idc == 2 */
guint32 long_term_pic_num;
} value;
};
struct _GstH264PredWeightTable
{
guint8 luma_log2_weight_denom;
@@ -559,6 +572,11 @@ struct _GstH264SliceHdr
guint8 num_ref_idx_l0_active_minus1;
guint8 num_ref_idx_l1_active_minus1;
guint8 ref_pic_list_modification_flag_l0;
GstH264RefPicListModification ref_pic_list_modification_l0[32];
guint8 ref_pic_list_modification_flag_l1;
GstH264RefPicListModification ref_pic_list_modification_l1[32];
GstH264PredWeightTable pred_weight_table;
/* if nal_unit.ref_idc != 0 */
GstH264DecRefPicMarking dec_ref_pic_marking;
@@ -660,6 +678,10 @@ GstH264ParserResult gst_h264_parser_identify_nalu (GstH264NalParser *nalpars
const guint8 *data, guint offset,
gsize size, GstH264NalUnit *nalu);
GstH264ParserResult gst_h264_parser_identify_nalu_unchecked (GstH264NalParser *nalparser,
const guint8 *data, guint offset,
gsize size, GstH264NalUnit *nalu);
GstH264ParserResult gst_h264_parser_identify_nalu_avc (GstH264NalParser *nalparser, const guint8 *data,
guint offset, gsize size, guint8 nal_length_size,
GstH264NalUnit *nalu);

(file diff suppressed because it is too large)

@@ -0,0 +1,578 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_MPEG4UTIL_H__
#define __GST_MPEG4UTIL_H__
#include <gst/gst.h>
#include <gst/base/gstbitreader.h>
typedef struct _GstMpeg4VisualObjectSequence GstMpeg4VisualObjectSequence;
typedef struct _GstMpeg4VisualObject GstMpeg4VisualObject;
typedef struct _GstMpeg4VideoObjectLayer GstMpeg4VideoObjectLayer;
typedef struct _GstMpeg4GroupOfVOP GstMpeg4GroupOfVOP;
typedef struct _GstMpeg4VideoObjectPlane GstMpeg4VideoObjectPlane;
typedef struct _GstMpeg4VideoSignalType GstMpeg4VideoSignalType;
typedef struct _GstMpeg4VideoPlaneShortHdr GstMpeg4VideoPlaneShortHdr;
typedef struct _GstMpeg4VideoPacketHdr GstMpeg4VideoPacketHdr;
typedef struct _GstMpeg4SpriteTrajectory GstMpeg4SpriteTrajectory;
typedef struct _GstMpeg4Packet GstMpeg4Packet;
/**
* GstMpeg4StartCode:
*
* Defines the different startcodes present in the bitstream as
* defined in: Table 6-3 Start code values
*/
typedef enum
{
GST_MPEG4_VIDEO_OBJ_FIRST = 0x00,
GST_MPEG4_VIDEO_OBJ_LAST = 0x1f,
GST_MPEG4_VIDEO_LAYER_FIRST = 0x20,
GST_MPEG4_VIDEO_LAYER_LAST = 0x2f,
GST_MPEG4_VISUAL_OBJ_SEQ_START = 0xb0,
GST_MPEG4_VISUAL_OBJ_SEQ_END = 0xb1,
GST_MPEG4_USER_DATA = 0xb2,
GST_MPEG4_GROUP_OF_VOP = 0xb3,
GST_MPEG4_VIDEO_SESSION_ERR = 0xb4,
GST_MPEG4_VISUAL_OBJ = 0xb5,
GST_MPEG4_VIDEO_OBJ_PLANE = 0xb6,
GST_MPEG4_FBA = 0xba,
GST_MPEG4_FBA_PLAN = 0xbb,
GST_MPEG4_MESH = 0xbc,
GST_MPEG4_MESH_PLAN = 0xbd,
GST_MPEG4_STILL_TEXTURE_OBJ = 0xbe,
GST_MPEG4_TEXTURE_SPATIAL = 0xbf,
GST_MPEG4_TEXTURE_SNR_LAYER = 0xc0,
GST_MPEG4_TEXTURE_TILE = 0xc1,
GST_MPEG4_SHAPE_LAYER = 0xc2,
GST_MPEG4_STUFFING = 0xc3,
GST_MPEG4_SYSTEM_FIRST = 0xc6,
GST_MPEG4_SYSTEM_LAST = 0xff,
GST_MPEG4_RESYNC = 0xfff
} GstMpeg4StartCode;
/**
* GstMpeg4VisualObjectType:
*
* Defines the different visual object types as
* defined in: Table 6-5 -- Meaning of visual object type
*/
typedef enum {
GST_MPEG4_VIDEO_ID = 0x01,
GST_MPEG4_STILL_TEXTURE_ID = 0x02,
GST_MPEG4_STILL_MESH_ID = 0x03,
GST_MPEG4_STILL_FBA_ID = 0x04,
GST_MPEG4_STILL_3D_MESH_ID = 0x05,
/*... reserved */
} GstMpeg4VisualObjectType;
/**
* GstMpeg4AspectRatioInfo:
* @GST_MPEG4_SQUARE: 1:1 square
* @GST_MPEG4_625_TYPE_4_3: 12:11 (625-type for 4:3 picture)
* @GST_MPEG4_525_TYPE_4_3: 10:11 (525-type for 4:3 picture)
* @GST_MPEG4_625_TYPE_16_9: 16:11 (625-type stretched for 16:9 picture)
* @GST_MPEG4_525_TYPE_16_9: 40:33 (525-type stretched for 16:9 picture)
* @GST_MPEG4_EXTENDED_PAR: Extended par
*
* Defines the different pixel aspect ratios as
* defined in: Table 6-12 -- Meaning of pixel aspect ratio
*/
typedef enum {
GST_MPEG4_SQUARE = 0x01,
GST_MPEG4_625_TYPE_4_3 = 0x02,
GST_MPEG4_525_TYPE_4_3 = 0x03,
GST_MPEG4_625_TYPE_16_9 = 0x04,
GST_MPEG4_525_TYPE_16_9 = 0x05,
GST_MPEG4_EXTENDED_PAR = 0x0f,
} GstMpeg4AspectRatioInfo;
/**
* GstMpeg4ParseResult:
* @GST_MPEG4_PARSER_OK: The parsing went well
* @GST_MPEG4_PARSER_BROKEN_DATA: The bitstream was broken
* @GST_MPEG4_PARSER_NO_PACKET: There was no packet in the buffer
* @GST_MPEG4_PARSER_NO_PACKET_END: There was no packet end in the buffer
* @GST_MPEG4_PARSER_ERROR: An error occurred during the parsing
*
* Result type of any parsing function.
*/
typedef enum {
GST_MPEG4_PARSER_OK,
GST_MPEG4_PARSER_BROKEN_DATA,
GST_MPEG4_PARSER_NO_PACKET,
GST_MPEG4_PARSER_NO_PACKET_END,
GST_MPEG4_PARSER_ERROR,
} GstMpeg4ParseResult;
/**
* GstMpeg4VideoObjectCodingType:
* @GST_MPEG4_I_VOP: intra-coded (I)
* @GST_MPEG4_P_VOP: predictive-coded (P)
* @GST_MPEG4_B_VOP: bidirectionally-predictive-coded (B)
* @GST_MPEG4_S_VOP: sprite (S)
*
* The vop coding types as defined in:
* Table 6-20 -- Meaning of vop_coding_type
*/
typedef enum {
GST_MPEG4_I_VOP = 0x0,
GST_MPEG4_P_VOP = 0x1,
GST_MPEG4_B_VOP = 0x2,
GST_MPEG4_S_VOP = 0x3
} GstMpeg4VideoObjectCodingType;
/**
* GstMpeg4ChromaFormat
*
* The chroma format in use as
* defined in: Table 6-13 -- Meaning of chroma_format
*/
typedef enum {
/* Other value are reserved */
GST_MPEG4_CHROMA_4_2_0 = 0x01
} GstMpeg4ChromaFormat;
/**
* GstMpeg4VideoObjectLayerShape:
*
* The different video object layer shapes as defined in:
* Table 6-16 Video Object Layer shape type
*/
typedef enum {
GST_MPEG4_RECTANGULAR,
GST_MPEG4_BINARY,
GST_MPEG4_BINARY_ONLY,
GST_MPEG4_GRAYSCALE
} GstMpeg4VideoObjectLayerShape;
/**
* GstMpeg4SpriteEnable:
*
* Indicates the usage of static sprite coding
* or global motion compensation (GMC) as defined in:
* Table V2 - 2 -- Meaning of sprite_enable codewords
*/
typedef enum {
GST_MPEG4_SPRITE_UNUSED,
GST_MPEG4_SPRITE_STATIC,
GST_MPEG4_SPRITE_GMG
} GstMpeg4SpriteEnable;
/**
* GstMpeg4Profile:
*
* Different defined profiles as defined in:
* 9- Profiles and levels
*
* It is computed using:
* Table G.1 FLC table for profile_and_level_indication
*/
typedef enum {
GST_MPEG4_PROFILE_CORE,
GST_MPEG4_PROFILE_MAIN,
GST_MPEG4_PROFILE_N_BIT,
GST_MPEG4_PROFILE_SIMPLE,
GST_MPEG4_PROFILE_HYBRID,
GST_MPEG4_PROFILE_RESERVED,
GST_MPEG4_PROFILE_SIMPLE_FBA,
GST_MPEG4_PROFILE_CORE_STUDIO,
GST_MPEG4_PROFILE_SIMPLE_STUDIO,
GST_MPEG4_PROFILE_CORE_SCALABLE,
GST_MPEG4_PROFILE_ADVANCED_CORE,
GST_MPEG4_PROFILE_ADVANCED_SIMPLE,
GST_MPEG4_PROFILE_SIMPLE_SCALABLE,
GST_MPEG4_PROFILE_SCALABLE_TEXTURE,
GST_MPEG4_PROFILE_SIMPLE_FACE_ANIMATION,
GST_MPEG4_PROFILE_BASIC_ANIMATED_TEXTURE,
GST_MPEG4_PROFILE_ADVANCED_REALTIME_SIMPLE,
GST_MPEG4_PROFILE_ADVANCED_SCALABLE_TEXTURE,
GST_MPEG4_PROFILE_FINE_GRANULARITY_SCALABLE,
GST_MPEG4_PROFILE_ADVANCED_CODING_EFFICIENCY
} GstMpeg4Profile;
/**
* GstMpeg4Level:
*
* Different levels as defined in:
* 9- Profiles and levels
*
* It is computed using:
* Table G.1 FLC table for profile_and_level_indication
*/
typedef enum {
GST_MPEG4_LEVEL0,
GST_MPEG4_LEVEL1,
GST_MPEG4_LEVEL2,
GST_MPEG4_LEVEL3,
GST_MPEG4_LEVEL3b,
GST_MPEG4_LEVEL4,
GST_MPEG4_LEVEL5,
GST_MPEG4_LEVEL_RESERVED
} GstMpeg4Level;
/**
* GstMpeg4VisualObjectSequence:
*
* The visual object sequence structure as defined in:
* 6.2.2 Visual Object Sequence and Visual Object
*/
struct _GstMpeg4VisualObjectSequence {
guint8 profile_and_level_indication;
/* Computed according to:
* Table G.1 FLC table for profile_and_level_indication */
GstMpeg4Level level;
GstMpeg4Profile profile;
};
/**
* GstMpeg4VisualObject:
*
* The visual object structure as defined in:
* 6.2.2 Visual Object Sequence and Visual Object
*/
struct _GstMpeg4VisualObject {
guint8 is_identifier;
/* If is_identifier */
guint8 verid;
guint8 priority;
GstMpeg4VisualObjectType type;
};
/**
* GstMpeg4VideoSignalType:
*
* The video signal type structure as defined in:
* 6.2.2 Visual Object Sequence and Visual Object.
*/
struct _GstMpeg4VideoSignalType {
guint8 type;
guint8 format;
guint8 range;
guint8 color_description;
guint8 color_primaries;
guint8 transfer_characteristics;
guint8 matrix_coefficients;
};
/**
* GstMpeg4VideoPlaneShortHdr:
*
* The video plane short header structure as defined in:
* 6.2.5.2 Video Plane with Short Header
*/
struct _GstMpeg4VideoPlaneShortHdr {
guint8 temporal_reference;
guint8 split_screen_indicator;
guint8 document_camera_indicator;
guint8 full_picture_freeze_release;
guint8 source_format;
guint8 picture_coding_type;
guint8 vop_quant;
guint8 pei;
guint8 psupp;
/* Gob layer specific fields */
guint8 gob_header_empty;
guint8 gob_number;
guint8 gob_frame_id;
guint8 quant_scale;
/* Computed
* If all the values are set to 0, then it is reserved
* Table 6-25 -- Parameters Defined by source_format Field
*/
guint16 vop_width;
guint16 vop_height;
guint16 num_macroblocks_in_gob;
guint8 num_gobs_in_vop;
/* The size in bits */
guint size;
};
/**
* GstMpeg4VideoObjectLayer:
*
* The video object layer structure as defined in:
* 6.2.3 Video Object Layer
*/
struct _GstMpeg4VideoObjectLayer {
guint8 random_accessible_vol;
guint8 video_object_type_indication;
guint8 is_object_layer_identifier;
/* if is_object_layer_identifier */
guint8 verid;
guint8 priority;
GstMpeg4AspectRatioInfo aspect_ratio_info;
guint8 par_width;
guint8 par_height;
guint8 control_parameters;
/* if control_parameters */
GstMpeg4ChromaFormat chroma_format;
guint8 low_delay;
guint8 vbv_parameters;
/* if vbv_parameters */
guint16 first_half_bitrate;
guint16 latter_half_bitrate;
guint16 first_half_vbv_buffer_size;
guint16 latter_half_vbv_buffer_size;
guint16 first_half_vbv_occupancy;
guint16 latter_half_vbv_occupancy;
/* Computed values */
guint32 bit_rate;
guint32 vbv_buffer_size;
GstMpeg4VideoObjectLayerShape shape;
/* if shape == GST_MPEG4_GRAYSCALE && verid != 1 */
guint8 shape_extension;
guint16 vop_time_increment_resolution;
guint8 vop_time_increment_bits;
guint8 fixed_vop_rate;
/* if fixed_vop_rate */
guint16 fixed_vop_time_increment;
guint16 width;
guint16 height;
guint8 interlaced;
guint8 obmc_disable;
GstMpeg4SpriteEnable sprite_enable;
/* if vol->sprite_enable == SPRITE_GMG or SPRITE_STATIC*/
/* if vol->sprite_enable != GST_MPEG4_SPRITE_GMG */
guint16 sprite_width;
guint16 sprite_height;
guint16 sprite_left_coordinate;
guint16 sprite_top_coordinate;
guint8 no_of_sprite_warping_points;
guint8 sprite_warping_accuracy;
guint8 sprite_brightness_change;
/* if vol->sprite_enable != GST_MPEG4_SPRITE_GMG */
guint8 low_latency_sprite_enable;
/* if shape != GST_MPEG4_RECTANGULAR */
guint8 sadct_disable;
guint8 not_8_bit;
/* if not_8_bit */
guint8 quant_precision;
guint8 bits_per_pixel;
/* if shape == GRAYSCALE */
guint8 no_gray_quant_update;
guint8 composition_method;
guint8 linear_composition;
guint8 quant_type;
/* if quant_type */
guint8 load_intra_quant_mat;
guint8 intra_quant_mat[64];
guint8 load_non_intra_quant_mat;
guint8 non_intra_quant_mat[64];
guint8 quarter_sample;
guint8 complexity_estimation_disable;
guint8 resync_marker_disable;
guint8 data_partitioned;
guint8 reversible_vlc;
guint8 newpred_enable;
guint8 reduced_resolution_vop_enable;
guint8 scalability;
guint8 enhancement_type;
GstMpeg4VideoPlaneShortHdr short_hdr;
};
/**
* GstMpeg4SpriteTrajectory:
*
* The sprite trajectory structure as defined in:
* 7.8.4 Sprite reference point decoding and
* 6.2.5.4 Sprite coding
*/
struct _GstMpeg4SpriteTrajectory {
guint16 vop_ref_points[63]; /* Defined as "du" in 6.2.5.4 */
guint16 sprite_ref_points[63]; /* Defined as "dv" in 6.2.5.4 */
};
/**
* GstMpeg4GroupOfVOP:
*
* The group of video object plane structure as defined in:
* 6.2.4 Group of Video Object Plane
*/
struct _GstMpeg4GroupOfVOP {
guint8 hours;
guint8 minutes;
guint8 seconds;
guint8 closed;
guint8 broken_link;
};
/**
* GstMpeg4VideoObjectPlane:
*
* The Video object plane structure as defined in:
* 6.2.5 Video Object Plane and Video Plane with Short Header
*/
struct _GstMpeg4VideoObjectPlane {
GstMpeg4VideoObjectCodingType coding_type;
guint8 modulo_time_base;
guint16 time_increment;
guint8 coded;
/* if newpred_enable */
guint16 id;
guint8 id_for_prediction_indication;
guint16 id_for_prediction;
guint16 width;
guint16 height;
guint16 horizontal_mc_spatial_ref;
guint16 vertical_mc_spatial_ref;
guint8 rounding_type;
/*if vol->shape != GST_MPEG4_RECTANGULAR */
guint8 background_composition;
guint8 change_conv_ratio_disable;
guint8 constant_alpha;
guint8 constant_alpha_value;
guint8 reduced_resolution;
guint8 intra_dc_vlc_thr;
guint8 top_field_first;
guint8 alternate_vertical_scan_flag;
guint16 quant;
guint8 fcode_forward;
guint8 fcode_backward;
guint8 shape_coding_type;
guint8 load_backward_shape;
guint8 ref_select_code;
/* Computed macroblock information */
guint16 mb_height;
guint16 mb_width;
guint mb_num;
/* The size of the header */
guint size;
};
/**
* GstMpeg4VideoPacketHdr:
* @size: Size of the header in bits.
*
* The video packet header structure as defined in:
* 6.2.5.2 Video Plane with Short Header
*/
struct _GstMpeg4VideoPacketHdr {
guint8 header_extension_code;
guint16 macroblock_number;
guint16 quant_scale;
guint size;
};
/**
* GstMpeg4Packet:
* @type: the type of the packet that starts at @offset
* @data: the data of the packet, starting at @offset
* @offset: offset of the start of the packet (without the 3 bytes startcode), but
* including the #GstMpeg4StartCode byte.
* @size: The size in bytes of the packet or %G_MAXUINT if the end wasn't found.
* @marker_size: The size in bits of the resync marker.
*
* A structure that contains the type of a packet, its offset and its size
*/
struct _GstMpeg4Packet
{
const guint8 *data;
guint offset;
gsize size;
guint marker_size;
GstMpeg4StartCode type;
};
GstMpeg4ParseResult gst_h263_parse (GstMpeg4Packet * packet,
const guint8 * data, guint offset,
gsize size);
GstMpeg4ParseResult gst_mpeg4_parse (GstMpeg4Packet * packet,
gboolean skip_user_data,
GstMpeg4VideoObjectPlane *vop,
const guint8 * data, guint offset,
gsize size);
GstMpeg4ParseResult
gst_mpeg4_parse_video_object_plane (GstMpeg4VideoObjectPlane *vop,
GstMpeg4SpriteTrajectory *sprite_trajectory,
GstMpeg4VideoObjectLayer *vol,
const guint8 * data,
gsize size);
GstMpeg4ParseResult
gst_mpeg4_parse_group_of_vop (GstMpeg4GroupOfVOP *gov,
const guint8 * data, gsize size);
GstMpeg4ParseResult
gst_mpeg4_parse_video_object_layer (GstMpeg4VideoObjectLayer *vol,
GstMpeg4VisualObject *vo,
const guint8 * data, gsize size);
GstMpeg4ParseResult
gst_mpeg4_parse_visual_object (GstMpeg4VisualObject *vo,
GstMpeg4VideoSignalType *signal_type,
const guint8 * data, gsize size);
GstMpeg4ParseResult
gst_mpeg4_parse_visual_object_sequence (GstMpeg4VisualObjectSequence *vos,
const guint8 * data, gsize size);
GstMpeg4ParseResult
gst_mpeg4_parse_video_plane_short_header (GstMpeg4VideoPlaneShortHdr * shorthdr,
const guint8 * data, gsize size);
GstMpeg4ParseResult
gst_mpeg4_parse_video_packet_header (GstMpeg4VideoPacketHdr * videopackethdr,
GstMpeg4VideoObjectLayer * vol,
GstMpeg4VideoObjectPlane * vop,
GstMpeg4SpriteTrajectory * sprite_trajectory,
const guint8 * data, gsize size);
#endif /* __GST_MPEG4UTIL_H__ */
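
To illustrate the new entry point, a sketch that scans a buffer for start-coded packets. Passing NULL for the optional VOP context and re-scanning one byte past each start code are assumptions about usage, not code from this series:

static void
dump_mpeg4_packets (const guint8 * data, gsize size)
{
  GstMpeg4Packet packet;
  guint offset = 0;

  /* skip_user_data = TRUE; no VOP context is needed for plain scanning */
  while (gst_mpeg4_parse (&packet, TRUE, NULL, data, offset, size) ==
      GST_MPEG4_PARSER_OK) {
    g_print ("packet type 0x%02x at offset %u\n",
        (guint) packet.type, packet.offset);
    offset = packet.offset + 1;
  }
}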

@@ -40,6 +40,7 @@
#endif
#include "gstmpegvideoparser.h"
#include "parserutils.h"
#include <string.h>
#include <gst/base/gstbitreader.h>
@@ -47,35 +48,8 @@
#define MARKER_BIT 0x1
#define GET_BITS(b, num, bits) G_STMT_START { \
if (!gst_bit_reader_get_bits_uint32(b, bits, num)) \
goto failed; \
GST_TRACE ("parsed %d bits: %d", num, *(bits)); \
} G_STMT_END
#define READ_UINT8(br, val, nbits) G_STMT_START { \
if (!gst_bit_reader_get_bits_uint8 (br, &val, nbits)) { \
GST_WARNING ("failed to read uint8, nbits: %d", nbits); \
goto failed; \
} \
} G_STMT_END
#define READ_UINT16(br, val, nbits) G_STMT_START { \
if (!gst_bit_reader_get_bits_uint16 (br, &val, nbits)) { \
GST_WARNING ("failed to read uint16, nbits: %d", nbits); \
goto failed; \
} \
} G_STMT_END
#define READ_UINT32(br, val, nbits) G_STMT_START { \
if (!gst_bit_reader_get_bits_uint32 (br, &val, nbits)) { \
GST_WARNING ("failed to read uint32, nbits: %d", nbits); \
goto failed; \
} \
} G_STMT_END
/* default intra quant matrix, in zig-zag order */
const guint8 default_intra_quantizer_matrix[64] = {
static const guint8 default_intra_quantizer_matrix[64] = {
8,
16, 16,
19, 16, 19,
@@ -93,7 +67,7 @@ const guint8 default_intra_quantizer_matrix[64] = {
83
};
const guint8 mpeg_zigzag_8x8[64] = {
static const guint8 mpeg_zigzag_8x8[64] = {
0, 1, 8, 16, 9, 2, 3, 10,
17, 24, 32, 25, 18, 11, 4, 5,
12, 19, 26, 33, 40, 48, 41, 34,

@@ -33,6 +33,7 @@
#endif
#include "gstvc1parser.h"
#include "parserutils.h"
#include <gst/base/gstbytereader.h>
#include <gst/base/gstbitreader.h>
#include <string.h>
@@ -64,50 +65,7 @@ ensure_debug_category (void)
#endif /* GST_DISABLE_GST_DEBUG */
/* ------------------------------------------------------------------------- */
#define GET_BITS(b, num, bits) G_STMT_START { \
if (!gst_bit_reader_get_bits_uint32(b, bits, num)) \
goto failed; \
GST_TRACE ("parsed %d bits: %d", num, *(bits)); \
} G_STMT_END
#define READ_UINT8(br, val, nbits) G_STMT_START { \
if (!gst_bit_reader_get_bits_uint8 (br, &val, nbits)) { \
GST_WARNING ("failed to read uint8, nbits: %d", nbits); \
goto failed; \
} \
} G_STMT_END
#define READ_UINT16(br, val, nbits) G_STMT_START { \
if (!gst_bit_reader_get_bits_uint16 (br, &val, nbits)) { \
GST_WARNING ("failed to read uint16, nbits: %d", nbits); \
goto failed; \
} \
} G_STMT_END
#define READ_UINT32(br, val, nbits) G_STMT_START { \
if (!gst_bit_reader_get_bits_uint32 (br, &val, nbits)) { \
GST_WARNING ("failed to read uint32, nbits: %d", nbits); \
goto failed; \
} \
} G_STMT_END
#define SKIP(br, nbits) G_STMT_START { \
if (!gst_bit_reader_skip (br, nbits)) { \
GST_WARNING ("Failed to skip nbits: %d", nbits); \
goto failed; \
} \
} G_STMT_END
typedef struct _VLCTable
{
guint value;
guint cword;
guint cbits;
} VLCTable;
const guint8 vc1_pquant_table[3][32] = {
static const guint8 vc1_pquant_table[3][32] = {
{ /* Implicit quantizer */
0, 1, 2, 3, 4, 5, 6, 7, 8, 6, 7, 8, 9, 10, 11, 12,
13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27, 29, 31},
@@ -468,41 +426,6 @@ failed:
}
}
static gboolean
decode_vlc (GstBitReader * br, guint * res, const VLCTable * table,
guint length)
{
guint8 i;
guint cbits = 0;
guint32 value = 0;
for (i = 0; i < length; i++) {
if (cbits != table[i].cbits) {
cbits = table[i].cbits;
if (!gst_bit_reader_peek_bits_uint32 (br, &value, cbits)) {
goto failed;
}
}
if (value == table[i].cword) {
SKIP (br, cbits);
if (res)
*res = table[i].value;
return TRUE;
}
}
GST_DEBUG ("Did not find code");
failed:
{
GST_WARNING ("Could not decode VLC returning");
return FALSE;
}
}
/*** bitplanes decoding ***/
static gboolean
bitplane_decoding (GstBitReader * br, guint8 * data,
@@ -1734,8 +1657,6 @@ gst_vc1_identify_next_bdu (const guint8 * data, gsize size, GstVC1BDU * bdu)
g_return_val_if_fail (bdu != NULL, GST_VC1_PARSER_ERROR);
ensure_debug_category ();
if (size < 4) {
GST_DEBUG ("Can't parse, buffer has too small size %" G_GSSIZE_FORMAT,
size);
@@ -1796,8 +1717,6 @@ gst_vc1_parse_sequence_layer (const guint8 * data, gsize size,
g_return_val_if_fail (seqlayer != NULL, GST_VC1_PARSER_ERROR);
ensure_debug_category ();
READ_UINT32 (&br, tmp, 8);
if (tmp != 0xC5)
goto failed;
@@ -1807,7 +1726,7 @@
READ_UINT32 (&br, tmp, 32);
if (tmp != 0x04)
goto failed;
if (parse_sequence_header_struct_c (&br, &seqlayer->struct_c) ==
GST_VC1_PARSER_ERROR)
goto failed;
@@ -1850,9 +1769,6 @@ gst_vc1_parse_sequence_header_struct_a (const guint8 * data,
g_return_val_if_fail (structa != NULL, GST_VC1_PARSER_ERROR);
ensure_debug_category ();
return parse_sequence_header_struct_a (&br, structa);
}
@@ -1874,8 +1790,6 @@ gst_vc1_parse_sequence_header_struct_b (const guint8 * data,
g_return_val_if_fail (structb != NULL, GST_VC1_PARSER_ERROR);
ensure_debug_category ();
return parse_sequence_header_struct_b (&br, structb);
}
@@ -1897,8 +1811,6 @@ gst_vc1_parse_sequence_header_struct_c (const guint8 * data, gsize size,
g_return_val_if_fail (structc != NULL, GST_VC1_PARSER_ERROR);
ensure_debug_category ();
return parse_sequence_header_struct_c (&br, structc);
}
@@ -1920,8 +1832,6 @@ gst_vc1_parse_sequence_header (const guint8 * data, gsize size,
g_return_val_if_fail (seqhdr != NULL, GST_VC1_PARSER_ERROR);
ensure_debug_category ();
if (parse_sequence_header_struct_c (&br, &seqhdr->struct_c) ==
GST_VC1_PARSER_ERROR)
goto failed;
@@ -1965,8 +1875,6 @@ gst_vc1_parse_entry_point_header (const guint8 * data, gsize size,
g_return_val_if_fail (entrypoint != NULL, GST_VC1_PARSER_ERROR);
ensure_debug_category ();
gst_bit_reader_init (&br, data, size);
if (gst_bit_reader_get_remaining (&br) < 13)
@@ -2089,8 +1997,6 @@ gst_vc1_parse_frame_header (const guint8 * data, gsize size,
GstBitReader br;
GstVC1ParserResult result;
ensure_debug_category ();
gst_bit_reader_init (&br, data, size);
if (seqhdr->profile == GST_VC1_PROFILE_ADVANCED)
@@ -2123,8 +2029,6 @@ gst_vc1_parse_field_header (const guint8 * data, gsize size,
GstBitReader br;
GstVC1ParserResult result;
ensure_debug_category ();
gst_bit_reader_init (&br, data, size);
result = parse_frame_header_advanced (&br, fieldhdr, seqhdr, bitplanes, TRUE);

@@ -0,0 +1,57 @@
/* Gstreamer
* Copyright (C) <2011> Intel Corporation
* Copyright (C) <2011> Collabora Ltd.
* Copyright (C) <2011> Thibault Saunier <thibault.saunier@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include "parserutils.h"
gboolean
decode_vlc (GstBitReader * br, guint * res, const VLCTable * table,
guint length)
{
guint8 i;
guint cbits = 0;
guint32 value = 0;
for (i = 0; i < length; i++) {
if (cbits != table[i].cbits) {
cbits = table[i].cbits;
if (!gst_bit_reader_peek_bits_uint32 (br, &value, cbits)) {
goto failed;
}
}
if (value == table[i].cword) {
SKIP (br, cbits);
if (res)
*res = table[i].value;
return TRUE;
}
}
GST_DEBUG ("Did not find code");
failed:
{
GST_WARNING ("Could not decode VLC returning");
return FALSE;
}
}
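
A toy illustration of the table format decode_vlc expects: entries with right-aligned codeword bits in cword and the bit count in cbits. The two-symbol prefix code below is invented for the example:

/* "1" -> value 0, "01" -> value 1 */
static const VLCTable toy_table[] = {
  {0, 0x1, 1},
  {1, 0x1, 2}
};

static gboolean
read_toy_symbol (GstBitReader * br, guint * symbol)
{
  return decode_vlc (br, symbol, toy_table, G_N_ELEMENTS (toy_table));
}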

@@ -0,0 +1,108 @@
/* Gstreamer
* Copyright (C) <2011> Intel
* Copyright (C) <2011> Collabora Ltd.
* Copyright (C) <2011> Thibault Saunier <thibault.saunier@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __PARSER_UTILS__
#define __PARSER_UTILS__
#include <gst/gst.h>
#include <gst/base/gstbitreader.h>
/* Parsing utils */
#define GET_BITS(b, num, bits) G_STMT_START { \
if (!gst_bit_reader_get_bits_uint32(b, bits, num)) \
goto failed; \
GST_TRACE ("parsed %d bits: %d", num, *(bits)); \
} G_STMT_END
#define CHECK_ALLOWED(val, min, max) G_STMT_START { \
if (val < min || val > max) { \
GST_WARNING ("value not in allowed range. value: %d, range %d-%d", \
val, min, max); \
goto failed; \
} \
} G_STMT_END
#define READ_UINT8(reader, val, nbits) G_STMT_START { \
if (!gst_bit_reader_get_bits_uint8 (reader, &val, nbits)) { \
GST_WARNING ("failed to read uint8, nbits: %d", nbits); \
goto failed; \
} \
} G_STMT_END
#define READ_UINT16(reader, val, nbits) G_STMT_START { \
if (!gst_bit_reader_get_bits_uint16 (reader, &val, nbits)) { \
GST_WARNING ("failed to read uint16, nbits: %d", nbits); \
goto failed; \
} \
} G_STMT_END
#define READ_UINT32(reader, val, nbits) G_STMT_START { \
if (!gst_bit_reader_get_bits_uint32 (reader, &val, nbits)) { \
GST_WARNING ("failed to read uint32, nbits: %d", nbits); \
goto failed; \
} \
} G_STMT_END
#define READ_UINT64(reader, val, nbits) G_STMT_START { \
if (!gst_bit_reader_get_bits_uint64 (reader, &val, nbits)) { \
GST_WARNING ("failed to read uint64, nbits: %d", nbits); \
goto failed; \
} \
} G_STMT_END
#define U_READ_UINT8(reader, val, nbits) G_STMT_START { \
val = gst_bit_reader_get_bits_uint8_unchecked (reader, nbits); \
} G_STMT_END
#define U_READ_UINT16(reader, val, nbits) G_STMT_START { \
val = gst_bit_reader_get_bits_uint16_unchecked (reader, nbits); \
} G_STMT_END
#define U_READ_UINT32(reader, val, nbits) G_STMT_START { \
val = gst_bit_reader_get_bits_uint32_unchecked (reader, nbits); \
} G_STMT_END
#define U_READ_UINT64(reader, val, nbits) G_STMT_START { \
val = gst_bit_reader_get_bits_uint64_unchecked (reader, nbits); \
} G_STMT_END
#define SKIP(reader, nbits) G_STMT_START { \
if (!gst_bit_reader_skip (reader, nbits)) { \
GST_WARNING ("failed to skip nbits: %d", nbits); \
goto failed; \
} \
} G_STMT_END
typedef struct _VLCTable VLCTable;
struct _VLCTable
{
guint value;
guint cword;
guint cbits;
};
gboolean
decode_vlc (GstBitReader * br, guint * res, const VLCTable * table,
guint length);
#endif /* __PARSER_UTILS__ */
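
A sketch of the goto-based error handling these macros assume (illustrative only; parse_two_bit_field is a hypothetical caller): every READ_* and CHECK_ALLOWED failure jumps to a local "failed" label in the calling function.

#include "parserutils.h"

/* Hypothetical parser: read a 2-bit field and validate its range,
 * bailing out through the "failed" label the macros expect. */
static gboolean
parse_two_bit_field (GstBitReader * br, guint8 * out)
{
  guint8 field;

  READ_UINT8 (br, field, 2);
  CHECK_ALLOWED (field, 0, 2);

  *out = field;
  return TRUE;

failed:
  return FALSE;
}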

View file

@ -49,14 +49,14 @@ static GstElementClass *parent_class = NULL;
G_DEFINE_BOXED_TYPE (GstVideoFrameState, gst_video_frame_state,
(GBoxedCopyFunc) gst_video_frame_state_ref,
(GBoxedFreeFunc) gst_video_frame_state_unref)
(GBoxedFreeFunc) gst_video_frame_state_unref);
/* NOTE (Edward): Do not use G_DEFINE_* because we need to have
* a GClassInitFunc called with the target class (which the macros
* don't handle). */
static void gst_base_video_codec_class_init (GstBaseVideoCodecClass *
klass);
static void gst_base_video_codec_init (GstBaseVideoCodec * dec,
* don't handle).
*/
static void gst_base_video_codec_class_init (GstBaseVideoCodecClass * klass);
static void gst_base_video_codec_init (GstBaseVideoCodec * dec,
GstBaseVideoCodecClass * klass);
GType

View file

@ -138,6 +138,7 @@ struct _GstVideoFrameState
GstClockTime deadline;
gboolean force_keyframe;
gboolean force_keyframe_headers;
/* Events that should be pushed downstream *before*
* the next src_buffer */

View file

@ -113,6 +113,34 @@
GST_DEBUG_CATEGORY (basevideoencoder_debug);
#define GST_CAT_DEFAULT basevideoencoder_debug
typedef struct _ForcedKeyUnitEvent ForcedKeyUnitEvent;
struct _ForcedKeyUnitEvent
{
GstClockTime running_time;
gboolean pending; /* TRUE if this was requested already */
gboolean all_headers;
guint count;
};
static void
forced_key_unit_event_free (ForcedKeyUnitEvent * evt)
{
g_slice_free (ForcedKeyUnitEvent, evt);
}
static ForcedKeyUnitEvent *
forced_key_unit_event_new (GstClockTime running_time, gboolean all_headers,
guint count)
{
ForcedKeyUnitEvent *evt = g_slice_new0 (ForcedKeyUnitEvent);
evt->running_time = running_time;
evt->all_headers = all_headers;
evt->count = count;
return evt;
}
static void gst_base_video_encoder_finalize (GObject * object);
static GstCaps *gst_base_video_encoder_sink_getcaps (GstPad * pad,
@ -132,8 +160,7 @@ static gboolean gst_base_video_encoder_src_query (GstPad * pad,
#define gst_base_video_encoder_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstBaseVideoEncoder, gst_base_video_encoder,
GST_TYPE_BASE_VIDEO_CODEC, G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL);
);
GST_TYPE_BASE_VIDEO_CODEC, G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL););
static void
gst_base_video_encoder_class_init (GstBaseVideoEncoderClass * klass)
@ -160,16 +187,17 @@ gst_base_video_encoder_reset (GstBaseVideoEncoder * base_video_encoder)
base_video_encoder->presentation_frame_number = 0;
base_video_encoder->distance_from_sync = 0;
base_video_encoder->force_keyframe = FALSE;
g_list_foreach (base_video_encoder->force_key_unit,
(GFunc) forced_key_unit_event_free, NULL);
g_list_free (base_video_encoder->force_key_unit);
base_video_encoder->force_key_unit = NULL;
base_video_encoder->drained = TRUE;
base_video_encoder->min_latency = 0;
base_video_encoder->max_latency = 0;
if (base_video_encoder->force_keyunit_event) {
gst_event_unref (base_video_encoder->force_keyunit_event);
base_video_encoder->force_keyunit_event = NULL;
}
gst_buffer_replace (&base_video_encoder->headers, NULL);
g_list_foreach (base_video_encoder->current_frame_events,
(GFunc) gst_event_unref, NULL);
@ -202,12 +230,21 @@ gst_base_video_encoder_init (GstBaseVideoEncoder * base_video_encoder)
gst_pad_set_event_function (pad,
GST_DEBUG_FUNCPTR (gst_base_video_encoder_src_event));
base_video_encoder->a.at_eos = FALSE;
base_video_encoder->at_eos = FALSE;
base_video_encoder->headers = NULL;
/* encoder is expected to do so */
base_video_encoder->sink_clipping = TRUE;
}
void
gst_base_video_encoder_set_headers (GstBaseVideoEncoder * base_video_encoder,
GstBuffer * headers)
{
GST_DEBUG_OBJECT (base_video_encoder, "new headers %p", headers);
gst_buffer_replace (&base_video_encoder->headers, headers);
}
static gboolean
gst_base_video_encoder_drain (GstBaseVideoEncoder * enc)
{
@ -447,8 +484,11 @@ gst_base_video_encoder_sink_query (GstPad * pad, GstObject * parent,
static void
gst_base_video_encoder_finalize (GObject * object)
{
GstBaseVideoEncoder *base_video_encoder = (GstBaseVideoEncoder *) object;
GST_DEBUG_OBJECT (object, "finalize");
gst_buffer_replace (&base_video_encoder->headers, NULL);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
@ -477,7 +517,7 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder,
GstFlowReturn flow_ret;
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);
base_video_encoder->a.at_eos = TRUE;
base_video_encoder->at_eos = TRUE;
if (base_video_encoder_class->finish) {
flow_ret = base_video_encoder_class->finish (base_video_encoder);
@ -508,7 +548,7 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder,
break;
}
base_video_encoder->a.at_eos = FALSE;
base_video_encoder->at_eos = FALSE;
gst_segment_copy_into (segment, &GST_BASE_VIDEO_CODEC
(base_video_encoder)->segment);
@ -517,17 +557,26 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder,
}
case GST_EVENT_CUSTOM_DOWNSTREAM:
{
const GstStructure *s;
if (gst_video_event_is_force_key_unit (event)) {
GstClockTime running_time;
gboolean all_headers;
guint count;
s = gst_event_get_structure (event);
if (gst_video_event_parse_downstream_force_key_unit (event,
NULL, NULL, &running_time, &all_headers, &count)) {
ForcedKeyUnitEvent *fevt;
if (gst_structure_has_name (s, "GstForceKeyUnit")) {
GST_OBJECT_LOCK (base_video_encoder);
base_video_encoder->force_keyframe = TRUE;
if (base_video_encoder->force_keyunit_event)
gst_event_unref (base_video_encoder->force_keyunit_event);
base_video_encoder->force_keyunit_event = gst_event_copy (event);
GST_OBJECT_UNLOCK (base_video_encoder);
GST_OBJECT_LOCK (base_video_encoder);
fevt = forced_key_unit_event_new (running_time, all_headers, count);
base_video_encoder->force_key_unit =
g_list_append (base_video_encoder->force_key_unit, fevt);
GST_OBJECT_UNLOCK (base_video_encoder);
GST_DEBUG_OBJECT (base_video_encoder,
"force-key-unit event: running-time %" GST_TIME_FORMAT
", all_headers %d, count %u",
GST_TIME_ARGS (running_time), all_headers, count);
}
gst_event_unref (event);
ret = TRUE;
}
@ -605,15 +654,26 @@ gst_base_video_encoder_src_event (GstPad * pad, GstObject * parent,
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CUSTOM_UPSTREAM:
{
const GstStructure *s;
if (gst_video_event_is_force_key_unit (event)) {
GstClockTime running_time;
gboolean all_headers;
guint count;
s = gst_event_get_structure (event);
if (gst_video_event_parse_upstream_force_key_unit (event,
&running_time, &all_headers, &count)) {
ForcedKeyUnitEvent *fevt;
if (gst_structure_has_name (s, "GstForceKeyUnit")) {
GST_OBJECT_LOCK (base_video_encoder);
base_video_encoder->force_keyframe = TRUE;
GST_OBJECT_UNLOCK (base_video_encoder);
GST_OBJECT_LOCK (base_video_encoder);
fevt = forced_key_unit_event_new (running_time, all_headers, count);
base_video_encoder->force_key_unit =
g_list_append (base_video_encoder->force_key_unit, fevt);
GST_OBJECT_UNLOCK (base_video_encoder);
GST_DEBUG_OBJECT (base_video_encoder,
"force-key-unit event: running-time %" GST_TIME_FORMAT
", all_headers %d, count %u",
GST_TIME_ARGS (running_time), all_headers, count);
}
gst_event_unref (event);
ret = TRUE;
} else {
@ -716,7 +776,7 @@ gst_base_video_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
if (base_video_encoder->a.at_eos) {
if (base_video_encoder->at_eos) {
ret = GST_FLOW_UNEXPECTED;
goto done;
}
@ -751,8 +811,47 @@ gst_base_video_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
frame->presentation_frame_number =
base_video_encoder->presentation_frame_number;
base_video_encoder->presentation_frame_number++;
frame->force_keyframe = base_video_encoder->force_keyframe;
base_video_encoder->force_keyframe = FALSE;
GST_OBJECT_LOCK (base_video_encoder);
if (base_video_encoder->force_key_unit) {
ForcedKeyUnitEvent *fevt = NULL;
GstClockTime running_time;
GList *l;
running_time = gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC
(base_video_encoder)->segment, GST_FORMAT_TIME,
GST_BUFFER_TIMESTAMP (buf));
for (l = base_video_encoder->force_key_unit; l; l = l->next) {
ForcedKeyUnitEvent *tmp = l->data;
/* Skip pending keyunits */
if (tmp->pending)
continue;
/* Simple case, keyunit ASAP */
if (tmp->running_time == GST_CLOCK_TIME_NONE) {
fevt = tmp;
break;
}
/* Event for before this frame */
if (tmp->running_time <= running_time) {
fevt = tmp;
break;
}
}
if (fevt) {
GST_DEBUG_OBJECT (base_video_encoder,
"Forcing a key unit at running time %" GST_TIME_FORMAT,
GST_TIME_ARGS (running_time));
frame->force_keyframe = TRUE;
frame->force_keyframe_headers = fevt->all_headers;
fevt->pending = TRUE;
}
}
GST_OBJECT_UNLOCK (base_video_encoder);
GST_BASE_VIDEO_CODEC (base_video_encoder)->frames =
g_list_append (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame);
@ -839,6 +938,7 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
GstFlowReturn ret = GST_FLOW_OK;
GstBaseVideoEncoderClass *base_video_encoder_class;
GList *l;
GstBuffer *headers = NULL;
base_video_encoder_class =
GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);
@ -866,45 +966,78 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
break;
}
if (frame->force_keyframe) {
GstClockTime stream_time;
GstClockTime running_time;
GstEvent *ev;
/* no buffer data means this frame is skipped/dropped */
if (!frame->src_buffer) {
GST_DEBUG_OBJECT (base_video_encoder, "skipping frame %" GST_TIME_FORMAT,
GST_TIME_ARGS (frame->presentation_timestamp));
goto done;
}
running_time =
gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC
(base_video_encoder)->segment, GST_FORMAT_TIME,
frame->presentation_timestamp);
stream_time =
gst_segment_to_stream_time (&GST_BASE_VIDEO_CODEC
if (frame->is_sync_point && base_video_encoder->force_key_unit) {
GstClockTime stream_time, running_time;
GstEvent *ev;
ForcedKeyUnitEvent *fevt = NULL;
GList *l;
running_time = gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC
(base_video_encoder)->segment, GST_FORMAT_TIME,
frame->presentation_timestamp);
/* re-use upstream event if any so it also conveys any additional
* info upstream arranged in there */
GST_OBJECT_LOCK (base_video_encoder);
if (base_video_encoder->force_keyunit_event) {
ev = base_video_encoder->force_keyunit_event;
base_video_encoder->force_keyunit_event = NULL;
} else {
ev = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
gst_structure_new_empty ("GstForceKeyUnit"));
for (l = base_video_encoder->force_key_unit; l; l = l->next) {
ForcedKeyUnitEvent *tmp = l->data;
/* Skip non-pending keyunits */
if (!tmp->pending)
continue;
/* Simple case, keyunit ASAP */
if (tmp->running_time == GST_CLOCK_TIME_NONE) {
fevt = tmp;
break;
}
/* Event for before this frame */
if (tmp->running_time <= running_time) {
fevt = tmp;
break;
}
}
if (fevt) {
base_video_encoder->force_key_unit =
g_list_remove (base_video_encoder->force_key_unit, fevt);
}
GST_OBJECT_UNLOCK (base_video_encoder);
gst_structure_set (gst_event_writable_structure (ev),
"timestamp", G_TYPE_UINT64, frame->presentation_timestamp,
"stream-time", G_TYPE_UINT64, stream_time,
"running-time", G_TYPE_UINT64, running_time, NULL);
if (fevt) {
stream_time =
gst_segment_to_stream_time (&GST_BASE_VIDEO_CODEC
(base_video_encoder)->segment, GST_FORMAT_TIME,
frame->presentation_timestamp);
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), ev);
}
ev = gst_video_event_new_downstream_force_key_unit
(frame->presentation_timestamp, stream_time, running_time,
fevt->all_headers, fevt->count);
/* no buffer data means this frame is skipped/dropped */
if (!frame->src_buffer) {
GST_DEBUG_OBJECT (base_video_encoder, "skipping frame %" GST_TIME_FORMAT,
GST_TIME_ARGS (frame->presentation_timestamp));
goto done;
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
ev);
if (fevt->all_headers) {
if (base_video_encoder->headers) {
headers = gst_buffer_ref (base_video_encoder->headers);
headers = gst_buffer_make_writable (headers);
}
}
GST_DEBUG_OBJECT (base_video_encoder,
"Forced key unit: running-time %" GST_TIME_FORMAT
", all_headers %d, count %u",
GST_TIME_ARGS (running_time), fevt->all_headers, fevt->count);
forced_key_unit_event_free (fevt);
}
}
if (frame->is_sync_point) {
@ -931,6 +1064,12 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
GST_BUFFER_OFFSET (frame->src_buffer) = frame->decode_timestamp;
if (G_UNLIKELY (headers)) {
GST_BUFFER_TIMESTAMP (headers) = frame->presentation_timestamp;
GST_BUFFER_DURATION (headers) = 0;
GST_BUFFER_OFFSET (headers) = frame->decode_timestamp;
}
/* update rate estimate */
GST_BASE_VIDEO_CODEC (base_video_encoder)->bytes +=
gst_buffer_get_size (frame->src_buffer);
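
For context, a sketch of how an application would exercise this new path (not part of the patch; the event constructor comes from gst-plugins-base's video library, which this code already uses for parsing):

#include <gst/video/video.h>

/* Ask an encoder for a key unit as soon as possible. Passing
 * GST_CLOCK_TIME_NONE matches the "keyunit ASAP" branch above;
 * all_headers requests that the stream headers be re-emitted too. */
static void
request_key_unit (GstElement * encoder, gboolean all_headers)
{
  GstEvent *event;

  event = gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
      all_headers, 0);
  gst_element_send_event (encoder, event);
}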

View file

@ -84,26 +84,22 @@ struct _GstBaseVideoEncoder
guint64 presentation_frame_number;
int distance_from_sync;
gboolean force_keyframe;
/*< private >*/
/* FIXME move to real private part ?
* (and introduce a context ?) */
gboolean drained;
gboolean at_eos;
gint64 min_latency;
gint64 max_latency;
GstEvent *force_keyunit_event;
GList *current_frame_events;
union {
void *padding;
gboolean at_eos;
} a;
GstBuffer *headers;
/* FIXME before moving to base */
void *padding[GST_PADDING_LARGE-1];
GList *force_key_unit; /* List of pending forced keyunits */
void *padding[GST_PADDING_LARGE];
};
/**
@ -178,7 +174,8 @@ void gst_base_video_encoder_set_latency (GstBaseVideoEncoder *
GstClockTime min_latency, GstClockTime max_latency);
void gst_base_video_encoder_set_latency_fields (GstBaseVideoEncoder *base_video_encoder,
int n_fields);
void gst_base_video_encoder_set_headers (GstBaseVideoEncoder *base_video_encoder,
GstBuffer *headers);
G_END_DECLS
#endif

View file

@ -335,9 +335,9 @@ gst_asf_mux_init (GstAsfMux * asfmux)
gst_pad_use_fixed_caps (asfmux->srcpad);
gst_element_add_pad (GST_ELEMENT (asfmux), asfmux->srcpad);
asfmux->collect = gst_collect_pads_new ();
gst_collect_pads_set_function (asfmux->collect,
(GstCollectPadsFunction) GST_DEBUG_FUNCPTR (gst_asf_mux_collected),
asfmux->collect = gst_collect_pads2_new ();
gst_collect_pads2_set_function (asfmux->collect,
(GstCollectPads2Function) GST_DEBUG_FUNCPTR (gst_asf_mux_collected),
asfmux);
asfmux->payloads = NULL;
@ -1851,7 +1851,7 @@ gst_asf_mux_process_buffer (GstAsfMux * asfmux, GstAsfPad * pad,
AsfPayload *payload;
payload = g_malloc0 (sizeof (AsfPayload));
payload->pad = (GstCollectData *) pad;
payload->pad = (GstCollectData2 *) pad;
payload->data = buf;
GST_LOG_OBJECT (asfmux,
@ -1911,7 +1911,7 @@ gst_asf_mux_process_buffer (GstAsfMux * asfmux, GstAsfPad * pad,
}
static GstFlowReturn
gst_asf_mux_collected (GstCollectPads * collect, gpointer data)
gst_asf_mux_collected (GstCollectPads2 * collect, gpointer data)
{
GstAsfMux *asfmux = GST_ASF_MUX_CAST (data);
GstFlowReturn ret = GST_FLOW_OK;
@ -1937,15 +1937,15 @@ gst_asf_mux_collected (GstCollectPads * collect, gpointer data)
walk = asfmux->collect->data;
while (walk) {
GstAsfPad *pad;
GstCollectData *data;
GstCollectData2 *data;
GstClockTime time;
data = (GstCollectData *) walk->data;
data = (GstCollectData2 *) walk->data;
pad = (GstAsfPad *) data;
walk = g_slist_next (walk);
buf = gst_collect_pads_peek (collect, data);
buf = gst_collect_pads2_peek (collect, data);
if (buf == NULL) {
GST_LOG_OBJECT (asfmux, "Pad %s has no buffers",
GST_PAD_NAME (pad->collect.pad));
@ -1980,7 +1980,7 @@ gst_asf_mux_collected (GstCollectPads * collect, gpointer data)
/* we have data */
GST_LOG_OBJECT (asfmux, "selected pad %s with time %" GST_TIME_FORMAT,
GST_PAD_NAME (best_pad->collect.pad), GST_TIME_ARGS (best_time));
buf = gst_collect_pads_pop (collect, &best_pad->collect);
buf = gst_collect_pads2_pop (collect, &best_pad->collect);
ret = gst_asf_mux_process_buffer (asfmux, best_pad, buf);
} else {
/* no data, let's finish it up */
@ -2251,16 +2251,16 @@ gst_asf_mux_request_new_pad (GstElement * element,
return NULL;
}
if (templ == gst_element_class_get_pad_template (klass, "audio_%u")) {
name = g_strdup_printf ("audio_%u", asfmux->stream_number + 1);
if (templ == gst_element_class_get_pad_template (klass, "audio_%d")) {
name = g_strdup_printf ("audio_%02d", asfmux->stream_number + 1);
GST_DEBUG_OBJECT (asfmux, "Adding new pad %s", name);
newpad = gst_pad_new_from_template (templ, name);
g_free (name);
is_audio = TRUE;
gst_pad_set_setcaps_function (newpad,
GST_DEBUG_FUNCPTR (gst_asf_mux_audio_set_caps));
} else if (templ == gst_element_class_get_pad_template (klass, "video_%u")) {
name = g_strdup_printf ("video_%u", asfmux->stream_number + 1);
} else if (templ == gst_element_class_get_pad_template (klass, "video_%d")) {
name = g_strdup_printf ("video_%02d", asfmux->stream_number + 1);
GST_DEBUG_OBJECT (asfmux, "Adding new pad %s", name);
newpad = gst_pad_new_from_template (templ, name);
g_free (name);
@ -2279,8 +2279,8 @@ gst_asf_mux_request_new_pad (GstElement * element,
collect_size = sizeof (GstAsfVideoPad);
}
collect_pad = (GstAsfPad *)
gst_collect_pads_add_pad (asfmux->collect, newpad, collect_size,
(GstCollectDataDestroyNotify) (gst_asf_mux_pad_reset));
gst_collect_pads2_add_pad_full (asfmux->collect, newpad, collect_size,
(GstCollectData2DestroyNotify) (gst_asf_mux_pad_reset), TRUE);
/* set up pad */
collect_pad->is_audio = is_audio;
@ -2294,7 +2294,7 @@ gst_asf_mux_request_new_pad (GstElement * element,
collect_pad->stream_number = asfmux->stream_number;
/* FIXME: hacked way to override/extend the event function of
* GstCollectPads; because it sets its own event function giving
* GstCollectPads2; because it sets its own event function giving
* the element no access to events.
*/
asfmux->collect_event = (GstPadEventFunction) GST_PAD_EVENTFUNC (newpad);
@ -2391,12 +2391,12 @@ gst_asf_mux_change_state (GstElement * element, GstStateChange transition)
asfmux->packet_size = asfmux->prop_packet_size;
asfmux->preroll = asfmux->prop_preroll;
asfmux->merge_stream_tags = asfmux->prop_merge_stream_tags;
gst_collect_pads_start (asfmux->collect);
gst_collect_pads2_start (asfmux->collect);
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
gst_collect_pads_stop (asfmux->collect);
gst_collect_pads2_stop (asfmux->collect);
asfmux->state = GST_ASF_MUX_STATE_NONE;
break;
default:

View file

@ -23,7 +23,7 @@
#include <gst/gst.h>
#include <gst/base/gstcollectpads.h>
#include <gst/base/gstcollectpads2.h>
#include <gst/riff/riff-media.h>
#include "gstasfobjects.h"
@ -57,7 +57,7 @@ enum _GstAsfMuxState
struct _GstAsfPad
{
GstCollectData collect;
GstCollectData2 collect;
gboolean is_audio;
guint8 stream_number;
@ -143,7 +143,7 @@ struct _GstAsfMux
/* pads */
GstPad *srcpad;
GstCollectPads *collect;
GstCollectPads2 *collect;
GstPadEventFunction collect_event;
};

View file

@ -22,7 +22,7 @@
#include <glib.h>
#include <gst/gst.h>
#include <gst/base/gstbytereader.h>
#include <gst/base/gstcollectpads.h>
#include <gst/base/gstcollectpads2.h>
#define ASF_PAYLOAD_IS_KEYFRAME(pay) ((pay->stream_number & 0x80) != 0)
#define ASF_MILI_TO_100NANO(v) (v * 10000)
@ -75,7 +75,7 @@ typedef struct _AsfPayload
guint32 presentation_time;
GstBuffer *data;
GstCollectData *pad;
GstCollectData2 *pad;
/* simple index info */
gboolean has_packet_info;

View file

@ -3,9 +3,8 @@ video-rate. It receives audio-data at the sampling-rate. It needs to render
video-frames at frame-rate. The rendering needs n audio samples (depends on
subclass). The baseclass takes care of that.
Some effects could be enhanced by running geometrictransform elements
afterwards. A blur and/or videozoom element would be great (vertigotv looks
great but has some negotiation issues).
Some effects could be enhanced by running geometrictransform/effectv elements
afterwards.
= Feedback =
* put 'Audio' to klass as well ?
@ -36,17 +35,35 @@ spectrascope - done
spacescope - stereo wavescope
- left->x, right->y - done
- polar mapping
multiscope :
- like wave/space scope, but run the signal through two filters to split it into
bass, mid and high (200 Hz, 2000 Hz)
- draw 3 wave-scopes into red/green/blue
- when drawing only draw that component to mix colors
- eventually use the spacescope-position to rotate/shift the wave
wavescope
- we could have a bouncing line as a base, like a quix:
- two dots moving on a linear path and getting a new random dx,dy when hitting
a border
- the abs(dx/dy) - speed of movement - could be scaled by the sound level
- we would need to rotate, stretch and clip the waveform drawing to fit the
line
- we could scratch the rotate part and just stretch/squeeze x and shift/clip y
xxxscope
- have a matrix of source and drawing-functions
- sources: audio, spectrum, audio-low, audio-mid, audio-hi
- drawing: waves (style, color), space (style,color)
- have the usual shade and move operations
- have a way to draw each operator in one or more color-channels
- we could calculate the sound-level (like in level element) and modulate
colors/movements
- for filtered low/mid/hi audio we could use different peak-falloffs
= TODO =
- element maker template
- test for baseclass
- we probably want a VisBin like the gnome video effects
- this way we can specify pipeline fragments
- VisBin can use a videomixer to switch effects based on time or song
- VisBin can e.g. control a text-overlay to render the title into the
visualisation for a while
= Test it =
GST_DEBUG="*:2,*scope*:4"
@ -55,6 +72,7 @@ GST_PLUGIN_PATH=$GST_PLUGIN_PATH:$PWD gst-inspect scopes
GST_PLUGIN_PATH=$GST_PLUGIN_PATH:$PWD gst-launch audiotestsrc ! audioconvert ! wavescope ! colorspace ! ximagesink
GST_PLUGIN_PATH=$GST_PLUGIN_PATH:$PWD gst-launch filesrc location=$HOME/Music/1.mp3 ! decodebin2 ! audioconvert ! wavescope ! colorspace ! ximagesink
GST_PLUGIN_PATH=$GST_PLUGIN_PATH:$PWD gst-launch filesrc location=$HOME/Music/1.mp3 ! decodebin2 ! audioconvert ! wavescope style=lines shade-amount=0x00080402 ! edgetv ! vertigotv ! ximagesink
GST_PLUGIN_PATH=$GST_PLUGIN_PATH:$PWD gst-launch filesrc location=$HOME/Music/1.mp3 ! decodebin2 ! audioconvert ! spacescope style=lines shade-amount=0x00080402 ! ximagesink
GST_PLUGIN_PATH=$GST_PLUGIN_PATH:$PWD gst-launch filesrc location=$HOME/Music/1.mp3 ! decodebin2 ! audioconvert ! spacescope style=lines shade-amount=0x00080402 ! vertigotv ! ximagesink
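
The multiscope idea above is what the new color styles implement; as a sketch (illustrative, the real code lives in the filter() macros added to gstspacescope.c and gstwavescope.c below), two chained state-variable low-passes per channel split each sample into low, mid and high parts that the renderers draw in red, green and blue:

#include <glib.h>

#define CUTOFF_1  0.15
#define CUTOFF_2  0.45
#define RESONANCE (1.0 / 0.5)

/* flt[] holds the six filter states for one channel, as in the patch. */
static void
split_bands (gdouble in, gdouble flt[6],
    gdouble * low, gdouble * mid, gdouble * high)
{
  /* first stage: low-pass at CUTOFF_1; flt[0] tracks the bass */
  flt[2] = in - (flt[1] * RESONANCE) - flt[0];
  flt[1] += (flt[2] * CUTOFF_1);
  flt[0] += (flt[1] * CUTOFF_1);

  /* second stage: low-pass the first stage's mid+high residue */
  flt[5] = (flt[1] + flt[2]) - (flt[4] * RESONANCE) - flt[3];
  flt[4] += (flt[5] * CUTOFF_2);
  flt[3] += (flt[4] * CUTOFF_2);

  *low = flt[0];                /* drawn in red   */
  *mid = flt[3];                /* drawn in green */
  *high = flt[4] + flt[5];      /* drawn in blue  */
}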

View file

@ -24,6 +24,23 @@
_vd[(_y * _st) + _x] = _c; \
} G_STMT_END
#define draw_dot_c(_vd, _x, _y, _st, _c) G_STMT_START { \
_vd[(_y * _st) + _x] |= _c; \
} G_STMT_END
#define draw_dot_aa(_vd, _x, _y, _st, _c, _f) G_STMT_START { \
guint32 _oc, _c1, _c2, _c3; \
\
_oc = _vd[(_y * _st) + _x]; \
_c3 = (_oc & 0xff) + ((_c & 0xff) * _f); \
_c3 = MIN(_c3, 255); \
_c2 = ((_oc & 0xff00) >> 8) + (((_c & 0xff00) >> 8) * _f); \
_c2 = MIN(_c2, 255); \
_c1 = ((_oc & 0xff0000) >> 16) + (((_c & 0xff0000) >> 16) * _f); \
_c1 = MIN(_c1, 255); \
_vd[(_y * _st) + _x] = (_c1 << 16) | (_c2 << 8) | _c3; \
} G_STMT_END
#define draw_line(_vd, _x1, _x2, _y1, _y2, _st, _c) G_STMT_START { \
guint _i, _j, _x, _y; \
gint _dx = _x2 - _x1, _dy = _y2 - _y1; \
@ -42,7 +59,6 @@
guint _i, _j, _x, _y; \
gint _dx = _x2 - _x1, _dy = _y2 - _y1; \
gfloat _f, _rx, _ry, _fx, _fy; \
guint32 _oc, _nc, _c1, _c2, _c3; \
\
_j = abs (_dx) > abs (_dy) ? abs (_dx) : abs (_dy); \
for (_i = 0; _i < _j; _i++) { \
@ -55,48 +71,16 @@
_fy = _ry - (gfloat)_y; \
\
_f = ((1.0 - _fx) + (1.0 - _fy)) / 2.0; \
_oc = _vd[(_y * _st) + _x]; \
_c3 = (_oc & 0xff) + ((_c & 0xff) * _f); \
_c3 = MIN(_c3, 255); \
_c2 = ((_oc & 0xff00) >> 8) + (((_c & 0xff00) >> 8) * _f); \
_c2 = MIN(_c2, 255); \
_c1 = ((_oc & 0xff0000) >> 16) + (((_c & 0xff0000) >> 16) * _f); \
_c1 = MIN(_c1, 255); \
_nc = 0x00 | (_c1 << 16) | (_c2 << 8) | _c3; \
_vd[(_y * _st) + _x] = _nc; \
draw_dot_aa (_vd, _x, _y, _st, _c, _f); \
\
_f = (_fx + (1.0 - _fy)) / 2.0; \
_oc = _vd[(_y * _st) + _x + 1]; \
_c3 = (_oc & 0xff) + ((_c & 0xff) * _f); \
_c3 = MIN(_c3, 255); \
_c2 = ((_oc & 0xff00) >> 8) + (((_c & 0xff00) >> 8) * _f); \
_c2 = MIN(_c2, 255); \
_c1 = ((_oc & 0xff0000) >> 16) + (((_c & 0xff0000) >> 16) * _f); \
_c1 = MIN(_c1, 255); \
_nc = 0x00 | (_c1 << 16) | (_c2 << 8) | _c3; \
_vd[(_y * _st) + _x + 1] = _nc; \
draw_dot_aa (_vd, (_x + 1), _y, _st, _c, _f); \
\
_f = ((1.0 - _fx) + _fy) / 2.0; \
_oc = _vd[((_y + 1) * _st) + _x]; \
_c3 = (_oc & 0xff) + ((_c & 0xff) * _f); \
_c3 = MIN(_c3, 255); \
_c2 = ((_oc & 0xff00) >> 8) + (((_c & 0xff00) >> 8) * _f); \
_c2 = MIN(_c2, 255); \
_c1 = ((_oc & 0xff0000) >> 16) + (((_c & 0xff0000) >> 16) * _f); \
_c1 = MIN(_c1, 255); \
_nc = 0x00 | (_c1 << 16) | (_c2 << 8) | _c3; \
_vd[((_y + 1) * _st) + _x] = _nc; \
draw_dot_aa (_vd, _x, (_y + 1), _st, _c, _f); \
\
_f = (_fx + _fy) / 2.0; \
_oc = _vd[((_y + 1) * _st) + _x + 1]; \
_c3 = (_oc & 0xff) + ((_c & 0xff) * _f); \
_c3 = MIN(_c3, 255); \
_c2 = ((_oc & 0xff00) >> 8) + (((_c & 0xff00) >> 8) * _f); \
_c2 = MIN(_c2, 255); \
_c1 = ((_oc & 0xff0000) >> 16) + (((_c & 0xff0000) >> 16) * _f); \
_c1 = MIN(_c1, 255); \
_nc = 0x00 | (_c1 << 16) | (_c2 << 8) | _c3; \
_vd[((_y + 1) * _st) + _x + 1] = _nc; \
draw_dot_aa (_vd, (_x + 1), (_y + 1), _st, _c, _f); \
} \
} G_STMT_END
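
The hunks above fold four copies of the blending arithmetic into the new draw_dot_aa macro; as a plain function it amounts to a coverage-weighted, saturating add per 8-bit channel (illustrative only):

#include <glib.h>

/* Blend color into old_pixel with coverage f (0.0 .. 1.0), clamping
 * each channel at 255, essentially what draw_dot_aa does in-place. */
static guint32
blend_dot (guint32 old_pixel, guint32 color, gfloat f)
{
  guint32 b = (old_pixel & 0xff) + (guint32) ((color & 0xff) * f);
  guint32 g = ((old_pixel >> 8) & 0xff) + (guint32) (((color >> 8) & 0xff) * f);
  guint32 r = ((old_pixel >> 16) & 0xff) + (guint32) (((color >> 16) & 0xff) * f);

  return (MIN (r, 255) << 16) | (MIN (g, 255) << 8) | MIN (b, 255);
}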

View file

@ -72,6 +72,8 @@ enum
{
STYLE_DOTS = 0,
STYLE_LINES,
STYLE_COLOR_DOTS,
STYLE_COLOR_LINES,
NUM_STYLES
};
@ -85,6 +87,8 @@ gst_space_scope_style_get_type (void)
static const GEnumValue values[] = {
{STYLE_DOTS, "draw dots (default)", "dots"},
{STYLE_LINES, "draw lines", "lines"},
{STYLE_COLOR_DOTS, "draw color dots", "color-dots"},
{STYLE_COLOR_LINES, "draw color lines", "color-lines"},
{0, NULL, NULL}
};
@ -98,9 +102,13 @@ static void gst_space_scope_set_property (GObject * object, guint prop_id,
static void gst_space_scope_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void render_dots (GstBaseAudioVisualizer * scope, guint32 * vdata,
static void render_dots (GstBaseAudioVisualizer * base, guint32 * vdata,
gint16 * adata, guint num_samples);
static void render_lines (GstBaseAudioVisualizer * scope, guint32 * vdata,
static void render_lines (GstBaseAudioVisualizer * base, guint32 * vdata,
gint16 * adata, guint num_samples);
static void render_color_dots (GstBaseAudioVisualizer * base, guint32 * vdata,
gint16 * adata, guint num_samples);
static void render_color_lines (GstBaseAudioVisualizer * base, guint32 * vdata,
gint16 * adata, guint num_samples);
static gboolean gst_space_scope_render (GstBaseAudioVisualizer * scope,
@ -160,6 +168,12 @@ gst_space_scope_set_property (GObject * object, guint prop_id,
case STYLE_LINES:
scope->process = render_lines;
break;
case STYLE_COLOR_DOTS:
scope->process = render_color_dots;
break;
case STYLE_COLOR_LINES:
scope->process = render_color_lines;
break;
}
break;
default:
@ -187,18 +201,19 @@ gst_space_scope_get_property (GObject * object, guint prop_id,
#include "gstdrawhelpers.h"
static void
render_dots (GstBaseAudioVisualizer * scope, guint32 * vdata, gint16 * adata,
render_dots (GstBaseAudioVisualizer * base, guint32 * vdata, gint16 * adata,
guint num_samples)
{
guint i, s, x, y, ox, oy;
gfloat dx, dy;
guint w = scope->width;
guint w = base->width;
guint h = base->height;
/* draw dots 1st channel x, 2nd channel y */
dx = scope->width / 65536.0;
ox = scope->width / 2;
dy = scope->height / 65536.0;
oy = scope->height / 2;
dx = w / 65536.0;
ox = w / 2;
dy = h / 65536.0;
oy = h / 2;
s = 0;
for (i = 0; i < num_samples; i++) {
x = (guint) (ox + (gfloat) adata[s++] * dx);
@ -208,13 +223,13 @@ render_dots (GstBaseAudioVisualizer * scope, guint32 * vdata, gint16 * adata,
}
static void
render_lines (GstBaseAudioVisualizer * scope, guint32 * vdata, gint16 * adata,
render_lines (GstBaseAudioVisualizer * base, guint32 * vdata, gint16 * adata,
guint num_samples)
{
guint i, s, x, y, ox, oy;
gfloat dx, dy;
guint w = scope->width;
guint h = scope->height;
guint w = base->width;
guint h = base->height;
gint x2, y2;
/* draw lines 1st channel x, 2nd channel y */
@ -234,6 +249,179 @@ render_lines (GstBaseAudioVisualizer * scope, guint32 * vdata, gint16 * adata,
}
}
#define CUTOFF_1 0.15
#define CUTOFF_2 0.45
#define RESONANCE (1.0/0.5)
#define filter(il, ir) G_STMT_START { \
f1l_h = il - (f1l_m * RESONANCE) - f1l_l; \
f1l_m += (f1l_h * CUTOFF_1); \
f1l_l += (f1l_m * CUTOFF_1); \
\
f2l_h = (f1l_m + f1l_h) - (f2l_m * RESONANCE) - f2l_l; \
f2l_m += (f2l_h * CUTOFF_2); \
f2l_l += (f2l_m * CUTOFF_2); \
\
f1r_h = ir - (f1r_m * RESONANCE) - f1r_l; \
f1r_m += (f1r_h * CUTOFF_1); \
f1r_l += (f1r_m * CUTOFF_1); \
\
f2r_h = (f1r_m + f1r_h) - (f2r_m * RESONANCE) - f2r_l; \
f2r_m += (f2r_h * CUTOFF_2); \
f2r_l += (f2r_m * CUTOFF_2); \
} G_STMT_END
static void
render_color_dots (GstBaseAudioVisualizer * base, guint32 * vdata,
gint16 * adata, guint num_samples)
{
GstSpaceScope *scope = (GstSpaceScope *) base;
guint i, s;
gint x, y, ox, oy;
gfloat dx, dy;
gint w = base->width, w1 = w - 2;
gint h = base->height, h1 = h - 2;
gdouble il, ir;
gdouble f1l_l = scope->f1l_l, f1l_m = scope->f1l_m, f1l_h = scope->f1l_h;
gdouble f1r_l = scope->f1r_l, f1r_m = scope->f1r_m, f1r_h = scope->f1r_h;
gdouble f2l_l = scope->f2l_l, f2l_m = scope->f2l_m, f2l_h = scope->f2l_h;
gdouble f2r_l = scope->f2r_l, f2r_m = scope->f2r_m, f2r_h = scope->f2r_h;
/* draw dots 1st channel x, 2nd channel y */
ox = w / 2;
oy = h / 2;
dx = w / 65536.0;
dy = h / 65536.0;
s = 0;
for (i = 0; i < num_samples; i++) {
il = (gdouble) adata[s++];
ir = (gdouble) adata[s++];
filter (il, ir);
x = (gint) (ox + f1l_l * dx);
y = (gint) (oy + f1r_l * dy);
x = CLAMP (x, 0, w1);
y = CLAMP (y, 0, h1);
draw_dot_c (vdata, x, y, w, 0x00FF0000);
x = (gint) (ox + f2l_l * dx);
y = (gint) (oy + f2r_l * dy);
x = CLAMP (x, 0, w1);
y = CLAMP (y, 0, h1);
draw_dot_c (vdata, x, y, w, 0x0000FF00);
x = (gint) (ox + (f2l_m + f2l_h) * dx);
y = (gint) (oy + (f2r_m + f2r_h) * dy);
x = CLAMP (x, 0, w1);
y = CLAMP (y, 0, h1);
draw_dot_c (vdata, x, y, w, 0x000000FF);
}
scope->f1l_l = f1l_l;
scope->f1l_m = f1l_m;
scope->f1l_h = f1l_h;
scope->f1r_l = f1r_l;
scope->f1r_m = f1r_m;
scope->f1r_h = f1r_h;
scope->f2l_l = f2l_l;
scope->f2l_m = f2l_m;
scope->f2l_h = f2l_h;
scope->f2r_l = f2r_l;
scope->f2r_m = f2r_m;
scope->f2r_h = f2r_h;
}
static void
render_color_lines (GstBaseAudioVisualizer * base, guint32 * vdata,
gint16 * adata, guint num_samples)
{
GstSpaceScope *scope = (GstSpaceScope *) base;
guint i, s;
gint x, y, ox, oy;
gfloat dx, dy;
gint w = base->width, w1 = w - 2;
gint h = base->height, h1 = h - 2;
gdouble il, ir;
gdouble f1l_l = scope->f1l_l, f1l_m = scope->f1l_m, f1l_h = scope->f1l_h;
gdouble f1r_l = scope->f1r_l, f1r_m = scope->f1r_m, f1r_h = scope->f1r_h;
gdouble f2l_l = scope->f2l_l, f2l_m = scope->f2l_m, f2l_h = scope->f2l_h;
gdouble f2r_l = scope->f2r_l, f2r_m = scope->f2r_m, f2r_h = scope->f2r_h;
gint x2, y2, x3, y3, x4, y4;
/* draw lines 1st channel x, 2nd channel y */
ox = w / 2;
oy = h / 2;
dx = w / 65536.0;
dy = h / 65536.0;
s = 0;
/* do first pixels */
il = (gdouble) adata[s++];
ir = (gdouble) adata[s++];
filter (il, ir);
x = (gint) (ox + f1l_l * dx);
y = (gint) (oy + f1r_l * dy);
x2 = CLAMP (x, 0, w1);
y2 = CLAMP (y, 0, h1);
x = (gint) (ox + f2l_l * dx);
y = (gint) (oy + f2r_l * dy);
x3 = CLAMP (x, 0, w1);
y3 = CLAMP (y, 0, h1);
x = (gint) (ox + (f2l_m + f2l_h) * dx);
y = (gint) (oy + (f2r_m + f2r_h) * dy);
x4 = CLAMP (x, 0, w1);
y4 = CLAMP (y, 0, h1);
for (i = 1; i < num_samples; i++) {
il = (gdouble) adata[s++];
ir = (gdouble) adata[s++];
filter (il, ir);
x = (gint) (ox + f1l_l * dx);
y = (gint) (oy + f1r_l * dy);
x = CLAMP (x, 0, w1);
y = CLAMP (y, 0, h1);
draw_line_aa (vdata, x2, x, y2, y, w, 0x00FF0000);
x2 = x;
y2 = y;
x = (gint) (ox + f2l_l * dx);
y = (gint) (oy + f2r_l * dy);
x = CLAMP (x, 0, w1);
y = CLAMP (y, 0, h1);
draw_line_aa (vdata, x3, x, y3, y, w, 0x0000FF00);
x3 = x;
y3 = y;
x = (gint) (ox + (f2l_m + f2l_h) * dx);
y = (gint) (oy + (f2r_m + f2r_h) * dy);
x = CLAMP (x, 0, w1);
y = CLAMP (y, 0, h1);
draw_line_aa (vdata, x4, x, y4, y, w, 0x000000FF);
x4 = x;
y4 = y;
}
scope->f1l_l = f1l_l;
scope->f1l_m = f1l_m;
scope->f1l_h = f1l_h;
scope->f1r_l = f1r_l;
scope->f1r_m = f1r_m;
scope->f1r_h = f1r_h;
scope->f2l_l = f2l_l;
scope->f2l_m = f2l_m;
scope->f2l_h = f2l_h;
scope->f2r_l = f2r_l;
scope->f2r_m = f2r_m;
scope->f2r_h = f2r_h;
}
static gboolean
gst_space_scope_render (GstBaseAudioVisualizer * base, GstBuffer * audio,
GstBuffer * video)

View file

@ -42,6 +42,12 @@ struct _GstSpaceScope
/* < private > */
GstSpaceScopeProcessFunc process;
gint style;
/* filter specific data */
gdouble f1l_l, f1l_m, f1l_h;
gdouble f1r_l, f1r_m, f1r_h;
gdouble f2l_l, f2l_m, f2l_h;
gdouble f2r_l, f2r_m, f2r_h;
};
struct _GstSpaceScopeClass

View file

@ -72,6 +72,8 @@ enum
{
STYLE_DOTS = 0,
STYLE_LINES,
STYLE_COLOR_DOTS,
STYLE_COLOR_LINES,
NUM_STYLES
};
@ -85,6 +87,8 @@ gst_wave_scope_style_get_type (void)
static const GEnumValue values[] = {
{STYLE_DOTS, "draw dots (default)", "dots"},
{STYLE_LINES, "draw lines", "lines"},
{STYLE_COLOR_DOTS, "draw color dots", "color-dots"},
{STYLE_COLOR_LINES, "draw color lines", "color-lines"},
{0, NULL, NULL}
};
@ -97,15 +101,22 @@ static void gst_wave_scope_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_wave_scope_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_wave_scope_finalize (GObject * object);
static void render_dots (GstBaseAudioVisualizer * scope, guint32 * vdata,
gint16 * adata, guint num_samples);
static void render_lines (GstBaseAudioVisualizer * scope, guint32 * vdata,
gint16 * adata, guint num_samples);
static void render_color_dots (GstBaseAudioVisualizer * base, guint32 * vdata,
gint16 * adata, guint num_samples);
static void render_color_lines (GstBaseAudioVisualizer * base, guint32 * vdata,
gint16 * adata, guint num_samples);
static gboolean gst_wave_scope_setup (GstBaseAudioVisualizer * scope);
static gboolean gst_wave_scope_render (GstBaseAudioVisualizer * base,
GstBuffer * audio, GstBuffer * video);
#define gst_wave_scope_parent_class parent_class
G_DEFINE_TYPE (GstWaveScope, gst_wave_scope, GST_TYPE_BASE_AUDIO_VISUALIZER);
static void
@ -118,6 +129,10 @@ gst_wave_scope_class_init (GstWaveScopeClass * g_class)
gobject_class->set_property = gst_wave_scope_set_property;
gobject_class->get_property = gst_wave_scope_get_property;
gobject_class->finalize = gst_wave_scope_finalize;
scope_class->setup = GST_DEBUG_FUNCPTR (gst_wave_scope_setup);
scope_class->render = GST_DEBUG_FUNCPTR (gst_wave_scope_render);
g_object_class_install_property (gobject_class, PROP_STYLE,
g_param_spec_enum ("style", "drawing style",
@ -143,6 +158,32 @@ gst_wave_scope_init (GstWaveScope * scope)
/* do nothing */
}
static void
gst_wave_scope_finalize (GObject * object)
{
GstWaveScope *scope = GST_WAVE_SCOPE (object);
if (scope->flt) {
g_free (scope->flt);
scope->flt = NULL;
}
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static gboolean
gst_wave_scope_setup (GstBaseAudioVisualizer * bscope)
{
GstWaveScope *scope = GST_WAVE_SCOPE (bscope);
if (scope->flt)
g_free (scope->flt);
scope->flt = g_new0 (gdouble, 6 * bscope->channels);
return TRUE;
}
static void
gst_wave_scope_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
@ -159,6 +200,12 @@ gst_wave_scope_set_property (GObject * object, guint prop_id,
case STYLE_LINES:
scope->process = render_lines;
break;
case STYLE_COLOR_DOTS:
scope->process = render_color_dots;
break;
case STYLE_COLOR_LINES:
scope->process = render_color_lines;
break;
}
break;
default:
@ -186,18 +233,19 @@ gst_wave_scope_get_property (GObject * object, guint prop_id,
#include "gstdrawhelpers.h"
static void
render_dots (GstBaseAudioVisualizer * scope, guint32 * vdata, gint16 * adata,
render_dots (GstBaseAudioVisualizer * base, guint32 * vdata, gint16 * adata,
guint num_samples)
{
gint channels = scope->channels;
gint channels = base->channels;
guint i, c, s, x, y, oy;
gfloat dx, dy;
guint w = scope->width;
guint w = base->width;
guint h = base->height;
/* draw dots */
dx = (gfloat) w / (gfloat) num_samples;
dy = scope->height / 65536.0;
oy = scope->height / 2;
dy = h / 65536.0;
oy = h / 2;
for (c = 0; c < channels; c++) {
s = c;
for (i = 0; i < num_samples; i++) {
@ -210,14 +258,14 @@ render_dots (GstBaseAudioVisualizer * scope, guint32 * vdata, gint16 * adata,
}
static void
render_lines (GstBaseAudioVisualizer * scope, guint32 * vdata, gint16 * adata,
render_lines (GstBaseAudioVisualizer * base, guint32 * vdata, gint16 * adata,
guint num_samples)
{
gint channels = scope->channels;
gint channels = base->channels;
guint i, c, s, x, y, oy;
gfloat dx, dy;
guint w = scope->width;
guint h = scope->height;
guint w = base->width;
guint h = base->height;
gint x2, y2;
/* draw lines */
@ -239,6 +287,119 @@ render_lines (GstBaseAudioVisualizer * scope, guint32 * vdata, gint16 * adata,
}
}
#define CUTOFF_1 0.15
#define CUTOFF_2 0.45
#define RESONANCE (1.0/0.5)
#define filter(in) G_STMT_START { \
flt[2] = in - (flt[1] * RESONANCE) - flt[0]; \
flt[1] += (flt[2] * CUTOFF_1); \
flt[0] += (flt[1] * CUTOFF_1); \
\
flt[5] = (flt[1] + flt[2]) - (flt[4] * RESONANCE) - flt[3]; \
flt[4] += (flt[5] * CUTOFF_2); \
flt[3] += (flt[4] * CUTOFF_2); \
} G_STMT_END
static void
render_color_dots (GstBaseAudioVisualizer * base, guint32 * vdata,
gint16 * adata, guint num_samples)
{
GstWaveScope *scope = (GstWaveScope *) base;
gint channels = base->channels;
guint i, c, s, x, y, oy;
gfloat dx, dy;
guint w = base->width;
guint h = base->height, h1 = h - 2;
gdouble *flt = scope->flt;
/* draw dots */
dx = (gfloat) w / (gfloat) num_samples;
dy = h / 65536.0;
oy = h / 2;
for (c = 0; c < channels; c++) {
s = c;
for (i = 0; i < num_samples; i++) {
x = (guint) ((gfloat) i * dx);
filter ((gfloat) adata[s]);
y = (guint) (oy + flt[0] * dy);
y = CLAMP (y, 0, h1);
draw_dot_c (vdata, x, y, w, 0x00FF0000);
y = (guint) (oy + flt[3] * dy);
y = CLAMP (y, 0, h1);
draw_dot_c (vdata, x, y, w, 0x0000FF00);
y = (guint) (oy + (flt[4] + flt[5]) * dy);
y = CLAMP (y, 0, h1);
draw_dot_c (vdata, x, y, w, 0x000000FF);
s += channels;
}
flt += 6;
}
}
static void
render_color_lines (GstBaseAudioVisualizer * base, guint32 * vdata,
gint16 * adata, guint num_samples)
{
GstWaveScope *scope = (GstWaveScope *) base;
gint channels = base->channels;
guint i, c, s, x, y, oy;
gfloat dx, dy;
guint w = base->width;
guint h = base->height, h1 = h - 2;
gdouble *flt = scope->flt;
gint x2, y2, y3, y4;
/* draw lines */
dx = (gfloat) (w - 1) / (gfloat) num_samples;
dy = (h - 1) / 65536.0;
oy = (h - 1) / 2;
for (c = 0; c < channels; c++) {
s = c;
/* do first pixels */
x2 = 0;
filter ((gfloat) adata[s]);
y = (guint) (oy + flt[0] * dy);
y2 = CLAMP (y, 0, h1);
y = (guint) (oy + flt[3] * dy);
y3 = CLAMP (y, 0, h1);
y = (guint) (oy + (flt[4] + flt[5]) * dy);
y4 = CLAMP (y, 0, h1);
for (i = 1; i < num_samples; i++) {
x = (guint) ((gfloat) i * dx);
filter ((gfloat) adata[s]);
y = (guint) (oy + flt[0] * dy);
y = CLAMP (y, 0, h1);
draw_line_aa (vdata, x2, x, y2, y, w, 0x00FF0000);
y2 = y;
y = (guint) (oy + flt[3] * dy);
y = CLAMP (y, 0, h1);
draw_line_aa (vdata, x2, x, y3, y, w, 0x0000FF00);
y3 = y;
y = (guint) (oy + (flt[4] + flt[5]) * dy);
y = CLAMP (y, 0, h1);
draw_line_aa (vdata, x2, x, y4, y, w, 0x000000FF);
y4 = y;
x2 = x;
s += channels;
}
flt += 6;
}
}
static gboolean
gst_wave_scope_render (GstBaseAudioVisualizer * base, GstBuffer * audio,
GstBuffer * video)

View file

@ -42,6 +42,9 @@ struct _GstWaveScope
/* < private > */
GstWaveScopeProcessFunc process;
gint style;
/* filter specific data */
gdouble *flt;
};
struct _GstWaveScopeClass

View file

@ -45,8 +45,6 @@
GST_DEBUG_CATEGORY (autoconvert_debug);
#define GST_CAT_DEFAULT (autoconvert_debug)
#define DEFAULT_INITIAL_IDENTITY FALSE
#define GST_AUTOCONVERT_LOCK(ac) GST_OBJECT_LOCK (ac)
#define GST_AUTOCONVERT_UNLOCK(ac) GST_OBJECT_UNLOCK (ac)
@ -83,8 +81,7 @@ enum
enum
{
PROP_0,
PROP_FACTORIES,
PROP_INITIAL_IDENTITY
PROP_FACTORIES
};
static void gst_auto_convert_set_property (GObject * object,
@ -103,6 +100,8 @@ static GstPad *gst_auto_convert_get_internal_sinkpad (GstAutoConvert *
static GstPad *gst_auto_convert_get_internal_srcpad (GstAutoConvert *
autoconvert);
static GstIterator *gst_auto_convert_iterate_internal_links (GstPad * pad);
static gboolean gst_auto_convert_sink_setcaps (GstPad * pad, GstCaps * caps);
static GstCaps *gst_auto_convert_sink_getcaps (GstPad * pad);
static GstFlowReturn gst_auto_convert_sink_chain (GstPad * pad,
@ -199,14 +198,6 @@ gst_auto_convert_class_init (GstAutoConvertClass * klass)
" elements), can only be set once",
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_INITIAL_IDENTITY,
g_param_spec_boolean ("initial-identity",
"Install identity initially",
"If true, then the identity element will be installed initially "
"and used for event passing until the first data buffer arrives ",
DEFAULT_INITIAL_IDENTITY,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_auto_convert_change_state);
}
@ -233,6 +224,8 @@ gst_auto_convert_init (GstAutoConvert * autoconvert,
GST_DEBUG_FUNCPTR (gst_auto_convert_sink_query_type));
gst_pad_set_bufferalloc_function (autoconvert->sinkpad,
GST_DEBUG_FUNCPTR (gst_auto_convert_sink_buffer_alloc));
gst_pad_set_iterate_internal_links_function (autoconvert->sinkpad,
GST_DEBUG_FUNCPTR (gst_auto_convert_iterate_internal_links));
gst_pad_set_event_function (autoconvert->srcpad,
GST_DEBUG_FUNCPTR (gst_auto_convert_src_event));
@ -240,13 +233,13 @@ gst_auto_convert_init (GstAutoConvert * autoconvert,
GST_DEBUG_FUNCPTR (gst_auto_convert_src_query));
gst_pad_set_query_type_function (autoconvert->srcpad,
GST_DEBUG_FUNCPTR (gst_auto_convert_src_query_type));
gst_pad_set_iterate_internal_links_function (autoconvert->sinkpad,
GST_DEBUG_FUNCPTR (gst_auto_convert_iterate_internal_links));
gst_element_add_pad (GST_ELEMENT (autoconvert), autoconvert->sinkpad);
gst_element_add_pad (GST_ELEMENT (autoconvert), autoconvert->srcpad);
gst_segment_init (&autoconvert->sink_segment, GST_FORMAT_UNDEFINED);
autoconvert->initial_identity = DEFAULT_INITIAL_IDENTITY;
}
static void
@ -299,11 +292,6 @@ gst_auto_convert_set_property (GObject * object,
" have been set or auto-discovered");
GST_AUTOCONVERT_UNLOCK (autoconvert);
break;
case PROP_INITIAL_IDENTITY:
GST_AUTOCONVERT_LOCK (autoconvert);
autoconvert->initial_identity = g_value_get_boolean (value);
GST_AUTOCONVERT_UNLOCK (autoconvert);
break;
}
}
@ -322,11 +310,6 @@ gst_auto_convert_get_property (GObject * object,
g_value_set_pointer (value, &autoconvert->factories);
GST_AUTOCONVERT_UNLOCK (autoconvert);
break;
case PROP_INITIAL_IDENTITY:
GST_AUTOCONVERT_LOCK (autoconvert);
g_value_set_boolean (value, autoconvert->initial_identity);
GST_AUTOCONVERT_UNLOCK (autoconvert);
break;
}
}
@ -474,37 +457,12 @@ gst_auto_convert_get_subelement (GstAutoConvert * autoconvert,
gboolean query_only)
{
GstElement *element = NULL;
gboolean initial_identity;
GST_AUTOCONVERT_LOCK (autoconvert);
if (autoconvert->current_subelement)
element = gst_object_ref (autoconvert->current_subelement);
initial_identity = autoconvert->initial_identity;
GST_AUTOCONVERT_UNLOCK (autoconvert);
if (G_UNLIKELY (!query_only && element == NULL && initial_identity)) {
/* No current sub-element - create an identity and install it */
GstElementFactory *identity_feature;
GstElement *identity;
GST_INFO_OBJECT (autoconvert,
"No existing child element - instantiating identity");
/* if the identity feature doesn't exist - something is very wrong */
identity_feature =
GST_ELEMENT_FACTORY_CAST (gst_default_registry_find_feature ("identity",
GST_TYPE_ELEMENT_FACTORY));
identity =
gst_auto_convert_get_or_make_element_from_factory (autoconvert,
identity_feature);
if (identity
&& gst_auto_convert_activate_element (autoconvert, identity, NULL)) {
GST_AUTOCONVERT_LOCK (autoconvert);
if (autoconvert->current_subelement)
element = gst_object_ref (autoconvert->current_subelement);
GST_AUTOCONVERT_UNLOCK (autoconvert);
}
}
return element;
}
@ -796,6 +754,32 @@ gst_auto_convert_activate_element (GstAutoConvert * autoconvert,
return TRUE;
}
static GstIterator *
gst_auto_convert_iterate_internal_links (GstPad * pad)
{
GstAutoConvert *autoconvert = GST_AUTO_CONVERT (gst_pad_get_parent (pad));
GstIterator *it = NULL;
GstPad *internal;
if (!autoconvert)
return NULL;
if (pad == autoconvert->sinkpad)
internal = gst_auto_convert_get_internal_srcpad (autoconvert);
else
internal = gst_auto_convert_get_internal_sinkpad (autoconvert);
if (internal) {
it = gst_iterator_new_single (GST_TYPE_PAD, internal,
(GstCopyFunction) gst_object_ref, (GFreeFunc) gst_object_unref);
gst_object_unref (internal);
}
gst_object_unref (autoconvert);
return it;
}
/*
* If there is already an internal element, it will try to call set_caps on it
*
@ -1094,15 +1078,6 @@ gst_auto_convert_sink_event (GstPad * pad, GstEvent * event)
}
internal_srcpad = gst_auto_convert_get_internal_srcpad (autoconvert);
if (internal_srcpad == NULL) {
/* Query the subelement - triggers creation of an identity if necessary */
GstElement *subelement =
gst_auto_convert_get_subelement (autoconvert, FALSE);
if (subelement)
gst_object_unref (subelement);
internal_srcpad = gst_auto_convert_get_internal_srcpad (autoconvert);
}
if (internal_srcpad) {
ret = gst_pad_push_event (internal_srcpad, event);
gst_object_unref (internal_srcpad);

View file

@ -56,8 +56,6 @@ struct _GstAutoConvert
GList *cached_events;
GstSegment sink_segment;
gboolean drop_newseg;
gboolean initial_identity;
};
struct _GstAutoConvertClass

View file

@ -125,7 +125,8 @@ gst_camerabin_try_add_element (GstBin * bin, const gchar * srcpad,
GST_DEBUG_PAD_NAME (bin_pad));
bin_elem = gst_pad_get_parent_element (bin_pad);
gst_object_unref (bin_pad);
if (!gst_element_link_pads (bin_elem, srcpad, new_elem, dstpad)) {
if (!gst_element_link_pads_full (bin_elem, srcpad, new_elem, dstpad,
GST_PAD_LINK_CHECK_CAPS)) {
gst_object_ref (new_elem);
gst_bin_remove (bin, new_elem);
ret = FALSE;
@ -257,29 +258,3 @@ gst_camerabin_remove_elements_from_bin (GstBin * bin)
}
gst_iterator_free (iter);
}
/**
* gst_camerabin_drop_eos_probe:
* @pad: pad receiving the event
* @event: received event
* @u_data: not used
*
* Event probe that drop all eos events.
*
* Returns: FALSE to drop the event, TRUE otherwise
*/
gboolean
gst_camerabin_drop_eos_probe (GstPad * pad, GstEvent * event, gpointer u_data)
{
gboolean ret = TRUE;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
GST_DEBUG ("dropping eos in %s:%s", GST_DEBUG_PAD_NAME (pad));
ret = FALSE;
break;
default:
break;
}
return ret;
}

View file

@ -34,6 +34,4 @@ GstElement * gst_camerabin_setup_default_element (GstBin * bin, GstElement *user
void gst_camerabin_remove_elements_from_bin (GstBin * bin);
gboolean gst_camerabin_drop_eos_probe (GstPad * pad, GstEvent * event, gpointer u_data);
#endif /* #ifndef __CAMERABIN_GENERAL_H_ */

View file

@ -430,6 +430,7 @@ gst_camera_bin_start_capture (GstCameraBin2 * camerabin)
g_signal_emit_by_name (camerabin->src, "start-capture", NULL);
if (camerabin->mode == MODE_VIDEO) {
camerabin->audio_send_newseg = TRUE;
if (camerabin->audio_src)
gst_element_set_state (camerabin->audio_src, GST_STATE_PLAYING);
@ -1043,7 +1044,6 @@ gst_camera_bin_handle_message (GstBin * bin, GstMessage * message)
}
dec_counter = TRUE;
} else if (gst_structure_has_name (structure, "preview-image")) {
GValue *value;
gchar *location = NULL;
g_mutex_lock (camerabin->preview_list_mutex);
@ -1063,11 +1063,11 @@ gst_camera_bin_handle_message (GstBin * bin, GstMessage * message)
g_mutex_unlock (camerabin->preview_list_mutex);
if (location) {
value = g_new0 (GValue, 1);
g_value_init (value, G_TYPE_STRING);
g_value_take_string (value, location);
GValue value = { 0 };
g_value_init (&value, G_TYPE_STRING);
g_value_take_string (&value, location);
gst_structure_take_value ((GstStructure *) structure, "location",
value);
&value);
}
GST_LOG_OBJECT (bin, "received preview-image message");
@ -1089,6 +1089,8 @@ gst_camera_bin_handle_message (GstBin * bin, GstMessage * message)
}
dec_counter = TRUE;
}
g_error_free (err);
g_free (debug);
}
break;
case GST_MESSAGE_EOS:{
@ -1440,19 +1442,48 @@ gst_camera_bin_image_sink_event_probe (GstPad * pad, GstPadProbeInfo * info,
}
static GstPadProbeReturn
gst_camera_bin_audio_src_event_probe (GstPad * pad, GstPadProbeInfo * info,
gst_camera_bin_audio_src_data_probe (GstPad * pad, GstPadProbeInfo * info,
gpointer data)
{
GstCameraBin2 *camera = data;
gboolean ret = GST_PAD_PROBE_OK;
GstEvent *event = GST_EVENT (info->data);
if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
/* we only let an EOS pass when the user is stopping a capture */
if (camera->audio_drop_eos) {
if (GST_IS_BUFFER (info->data)) {
if (G_UNLIKELY (camera->audio_send_newseg)) {
GstBuffer *buf = GST_BUFFER_CAST (info->data);
GstClockTime ts = GST_BUFFER_TIMESTAMP (buf);
GstPad *peer;
GstSegment segment;
if (!GST_CLOCK_TIME_IS_VALID (ts)) {
ts = 0;
}
peer = gst_pad_get_peer (pad);
g_return_val_if_fail (peer != NULL, TRUE);
gst_segment_init (&segment, GST_FORMAT_TIME);
segment.start = ts;
gst_pad_send_event (peer, gst_event_new_segment (&segment));
gst_object_unref (peer);
camera->audio_send_newseg = FALSE;
}
} else {
GstEvent *event = GST_EVENT_CAST (info->data);
if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
/* we only let an EOS pass when the user is stopping a capture */
if (camera->audio_drop_eos) {
ret = GST_PAD_PROBE_DROP;
} else {
camera->audio_drop_eos = TRUE;
/* should already be false, but reinforce in case no buffers get
* pushed */
camera->audio_send_newseg = FALSE;
}
} else if (GST_EVENT_TYPE (event) == GST_EVENT_SEGMENT) {
ret = GST_PAD_PROBE_DROP;
} else {
camera->audio_drop_eos = TRUE;
}
}
@ -1517,12 +1548,12 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
GstEncodingContainerProfile *prof;
GstCaps *caps;
caps = gst_caps_new_simple ("application/ogg", NULL, NULL);
caps = gst_caps_new_empty_simple ("application/ogg");
prof = gst_encoding_container_profile_new ("ogg", "theora+vorbis+ogg",
caps, NULL);
gst_caps_unref (caps);
caps = gst_caps_new_simple ("video/x-theora", NULL, NULL);
caps = gst_caps_new_empty_simple ("video/x-theora");
if (!gst_encoding_container_profile_add_profile (prof,
(GstEncodingProfile *) gst_encoding_video_profile_new (caps,
NULL, NULL, 1))) {
@ -1530,7 +1561,7 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
}
gst_caps_unref (caps);
caps = gst_caps_new_simple ("audio/x-vorbis", NULL, NULL);
caps = gst_caps_new_empty_simple ("audio/x-vorbis");
if (!gst_encoding_container_profile_add_profile (prof,
(GstEncodingProfile *) gst_encoding_audio_profile_new (caps,
NULL, NULL, 1))) {
@ -1569,7 +1600,7 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
GstEncodingVideoProfile *vprof;
GstCaps *caps;
caps = gst_caps_new_simple ("image/jpeg", NULL, NULL);
caps = gst_caps_new_empty_simple ("image/jpeg");
vprof = gst_encoding_video_profile_new (caps, NULL, NULL, 1);
gst_encoding_video_profile_set_variableframerate (vprof, TRUE);
@ -1803,15 +1834,17 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
gst_bin_add (GST_BIN_CAST (camera),
gst_object_ref (camera->audio_capsfilter));
gst_element_link_many (camera->audio_src, camera->audio_volume,
camera->audio_capsfilter, NULL);
gst_element_link_pads_full (camera->audio_src, "src",
camera->audio_volume, "sink", GST_PAD_LINK_CHECK_CAPS);
gst_element_link_pads_full (camera->audio_volume, "src",
camera->audio_capsfilter, "sink", GST_PAD_LINK_CHECK_CAPS);
srcpad = gst_element_get_static_pad (camera->audio_src, "src");
/* drop EOS for audiosrc elements that push them on state_changes
* (basesrc does this) */
gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
gst_camera_bin_audio_src_event_probe, gst_object_ref (camera),
gst_pad_add_probe (srcpad, GST_PAD_PROBE_TYPE_DATA_DOWNSTREAM,
gst_camera_bin_audio_src_data_probe, gst_object_ref (camera),
gst_object_unref);
gst_object_unref (srcpad);
@ -1870,6 +1903,7 @@ gst_camera_bin_change_state (GstElement * element, GstStateChange trans)
case GST_STATE_CHANGE_READY_TO_PAUSED:
GST_CAMERA_BIN2_RESET_PROCESSING_COUNTER (camera);
camera->audio_drop_eos = TRUE;
camera->audio_send_newseg = FALSE;
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
if (GST_STATE (camera->videosink) >= GST_STATE_PAUSED)
@ -1939,6 +1973,9 @@ gst_camera_bin_send_event (GstElement * element, GstEvent * event)
GstCameraBin2 *camera = GST_CAMERA_BIN2_CAST (element);
gboolean res;
/* avoid losing our ref to send_event */
gst_event_ref (event);
res = GST_ELEMENT_CLASS (parent_class)->send_event (element, event);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
@ -1964,6 +2001,7 @@ gst_camera_bin_send_event (GstElement * element, GstEvent * event)
break;
}
gst_event_unref (event);
return res;
}

View file

@ -126,6 +126,7 @@ struct _GstCameraBin2
gboolean image_profile_switch;
gboolean audio_drop_eos;
gboolean audio_send_newseg;
GMutex *video_capture_mutex;
GCond *video_state_cond;

View file

@ -185,11 +185,11 @@ gst_viewfinder_bin_create_elements (GstViewfinderBin * vfbin)
/* remove the elements, user doesn't want them */
gst_ghost_pad_set_target (GST_GHOST_PAD (vfbin->ghostpad), NULL);
csp = gst_bin_get_by_name (GST_BIN (vfbin), "vfbin-csp");
videoscale = gst_bin_get_by_name (GST_BIN (vfbin), "vfbin-videoscale");
csp = gst_bin_get_by_name (GST_BIN_CAST (vfbin), "vfbin-csp");
videoscale = gst_bin_get_by_name (GST_BIN_CAST (vfbin), "vfbin-videoscale");
gst_bin_remove (GST_BIN (vfbin), csp);
gst_bin_remove (GST_BIN (vfbin), videoscale);
gst_bin_remove (GST_BIN_CAST (vfbin), csp);
gst_bin_remove (GST_BIN_CAST (vfbin), videoscale);
gst_object_unref (csp);
gst_object_unref (videoscale);
@ -199,21 +199,22 @@ gst_viewfinder_bin_create_elements (GstViewfinderBin * vfbin)
gst_ghost_pad_set_target (GST_GHOST_PAD (vfbin->ghostpad), NULL);
/* add the elements, user wants them */
csp =
gst_camerabin_create_and_add_element (GST_BIN (vfbin),
"ffmpegcolorspace", "vfbin-csp");
csp = gst_element_factory_make ("ffmpegcolorspace", "vfbin-csp");
if (!csp) {
missing_element_name = "ffmpegcolorspace";
goto missing_element;
}
gst_bin_add (GST_BIN_CAST (vfbin), csp);
videoscale =
gst_camerabin_create_and_add_element (GST_BIN (vfbin), "videoscale",
"vfbin-videoscale");
videoscale = gst_element_factory_make ("videoscale", "vfbin-videoscale");
if (!videoscale) {
missing_element_name = "videoscale";
goto missing_element;
}
gst_bin_add (GST_BIN_CAST (vfbin), videoscale);
gst_element_link_pads_full (csp, "src", videoscale, "sink",
GST_PAD_LINK_CHECK_NOTHING);
vfbin->elements_created = TRUE;
GST_DEBUG_OBJECT (vfbin, "Elements successfully created and linked");
@ -231,7 +232,8 @@ gst_viewfinder_bin_create_elements (GstViewfinderBin * vfbin)
unref = TRUE;
}
if (!gst_element_link_pads (videoscale, "src", vfbin->video_sink, "sink")) {
if (!gst_element_link_pads_full (videoscale, "src", vfbin->video_sink,
"sink", GST_PAD_LINK_CHECK_CAPS)) {
GST_ELEMENT_ERROR (vfbin, CORE, NEGOTIATION, (NULL),
("linking videoscale and viewfindersink failed"));
}

View file

@ -31,6 +31,8 @@
# include <config.h>
#endif
#include <gst/interfaces/photography.h>
#include "gstwrappercamerabinsrc.h"
#include "camerabingeneral.h"
@ -257,9 +259,19 @@ gst_wrapper_camera_bin_src_vidsrc_probe (GstPad * pad, GstPadProbeInfo * info,
if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_DONE) {
/* NOP */
} else if (self->video_rec_status == GST_VIDEO_RECORDING_STATUS_STARTING) {
GstClockTime ts;
GstSegment segment;
GST_DEBUG_OBJECT (self, "Starting video recording");
self->video_rec_status = GST_VIDEO_RECORDING_STATUS_RUNNING;
ts = GST_BUFFER_TIMESTAMP (buffer);
if (!GST_CLOCK_TIME_IS_VALID (ts))
ts = 0;
gst_segment_init (&segment, GST_FORMAT_TIME);
segment.start = ts;
gst_pad_push_event (self->vidsrc, gst_event_new_segment (&segment));
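/* with segment.start at the first buffer's timestamp, the recording's
 * running time starts at zero for downstream */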
/* post preview */
GST_DEBUG_OBJECT (self, "Posting preview for video");
gst_base_camera_src_post_preview (camerasrc, buffer);
@ -706,7 +718,9 @@ static gboolean
start_image_capture (GstWrapperCameraBinSrc * self)
{
GstBaseCameraSrc *bcamsrc = GST_BASE_CAMERA_SRC (self);
GstPhotography *photography = gst_base_camera_src_get_photography (bcamsrc);
GstPhotography *photography =
(GstPhotography *) gst_bin_get_by_interface (GST_BIN_CAST (bcamsrc),
GST_TYPE_PHOTOGRAPHY);
gboolean ret = FALSE;
GstCaps *caps;
@ -747,7 +761,9 @@ static gboolean
gst_wrapper_camera_bin_src_set_mode (GstBaseCameraSrc * bcamsrc,
GstCameraBinMode mode)
{
GstPhotography *photography = gst_base_camera_src_get_photography (bcamsrc);
GstPhotography *photography =
(GstPhotography *) gst_bin_get_by_interface (GST_BIN_CAST (bcamsrc),
GST_TYPE_PHOTOGRAPHY);
GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (bcamsrc);
if (self->output_selector) {

View file

@ -26,7 +26,7 @@
#include <string.h>
#include <gst/gst.h>
#include <gst/base/gstcollectpads.h>
#include <gst/base/gstcollectpads2.h>
#include <gst/video/video.h>
#include "gstcompare.h"
@ -109,7 +109,7 @@ static void gst_compare_get_property (GObject * object,
static void gst_compare_reset (GstCompare * overlay);
static GstCaps *gst_compare_getcaps (GstPad * pad);
static GstFlowReturn gst_compare_collect_pads (GstCollectPads * cpads,
static GstFlowReturn gst_compare_collect_pads (GstCollectPads2 * cpads,
GstCompare * comp);
static GstStateChangeReturn gst_compare_change_state (GstElement * element,
@ -189,9 +189,9 @@ gst_compare_class_init (GstCompareClass * klass)
static void
gst_compare_init (GstCompare * comp, GstCompareClass * klass)
{
comp->cpads = gst_collect_pads_new ();
gst_collect_pads_set_function (comp->cpads,
(GstCollectPadsFunction) GST_DEBUG_FUNCPTR (gst_compare_collect_pads),
comp->cpads = gst_collect_pads2_new ();
gst_collect_pads2_set_function (comp->cpads,
(GstCollectPads2Function) GST_DEBUG_FUNCPTR (gst_compare_collect_pads),
comp);
comp->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
@ -203,10 +203,10 @@ gst_compare_init (GstCompare * comp, GstCompareClass * klass)
gst_pad_set_getcaps_function (comp->checkpad, gst_compare_getcaps);
gst_element_add_pad (GST_ELEMENT (comp), comp->checkpad);
gst_collect_pads_add_pad (comp->cpads, comp->sinkpad,
sizeof (GstCollectData), NULL);
gst_collect_pads_add_pad (comp->cpads, comp->checkpad,
sizeof (GstCollectData), NULL);
gst_collect_pads2_add_pad_full (comp->cpads, comp->sinkpad,
sizeof (GstCollectData2), NULL, TRUE);
gst_collect_pads2_add_pad_full (comp->cpads, comp->checkpad,
sizeof (GstCollectData2), NULL, TRUE);
comp->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
gst_pad_set_getcaps_function (comp->srcpad, gst_compare_getcaps);
@ -539,14 +539,14 @@ gst_compare_buffers (GstCompare * comp, GstBuffer * buf1, GstBuffer * buf2)
}
static GstFlowReturn
gst_compare_collect_pads (GstCollectPads * cpads, GstCompare * comp)
gst_compare_collect_pads (GstCollectPads2 * cpads, GstCompare * comp)
{
GstBuffer *buf1, *buf2;
buf1 = gst_collect_pads_pop (comp->cpads,
buf1 = gst_collect_pads2_pop (comp->cpads,
gst_pad_get_element_private (comp->sinkpad));
buf2 = gst_collect_pads_pop (comp->cpads,
buf2 = gst_collect_pads2_pop (comp->cpads,
gst_pad_get_element_private (comp->checkpad));
if (!buf1 && !buf2) {
@ -638,10 +638,10 @@ gst_compare_change_state (GstElement * element, GstStateChange transition)
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
case GST_STATE_CHANGE_READY_TO_PAUSED:
gst_collect_pads_start (comp->cpads);
gst_collect_pads2_start (comp->cpads);
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
gst_collect_pads_stop (comp->cpads);
gst_collect_pads2_stop (comp->cpads);
break;
default:
break;

View file

@ -52,7 +52,7 @@ struct _GstCompare {
GstPad *sinkpad;
GstPad *checkpad;
GstCollectPads *cpads;
GstCollectPads2 *cpads;
gint count;

View file

@ -120,7 +120,7 @@ gst_frei0r_mixer_change_state (GstElement * element, GstStateChange transition)
case GST_STATE_CHANGE_NULL_TO_READY:
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
gst_collect_pads_start (self->collect);
gst_collect_pads2_start (self->collect);
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
@ -129,11 +129,11 @@ gst_frei0r_mixer_change_state (GstElement * element, GstStateChange transition)
}
/* Stop before calling the parent's state change function as
* GstCollectPads might take locks and we would deadlock in that
* GstCollectPads2 might take locks and we would deadlock in that
* case
*/
if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
gst_collect_pads_stop (self->collect);
gst_collect_pads2_stop (self->collect);
ret =
GST_ELEMENT_CLASS (g_type_class_peek_parent (klass))->change_state
@ -488,7 +488,7 @@ gst_frei0r_mixer_src_event (GstPad * pad, GstEvent * event)
/* check if we are flushing */
if (flags & GST_SEEK_FLAG_FLUSH) {
/* make sure we accept nothing anymore and return WRONG_STATE */
gst_collect_pads_set_flushing (self->collect, TRUE);
gst_collect_pads2_set_flushing (self->collect, TRUE);
/* flushing seek, start flush downstream, the flush will be done
* when all pads received a FLUSH_STOP. */
@ -532,7 +532,7 @@ gst_frei0r_mixer_sink0_event (GstPad * pad, GstEvent * event)
break;
}
/* now GstCollectPads can take care of the rest, e.g. EOS */
/* now GstCollectPads2 can take care of the rest, e.g. EOS */
ret = self->collect_event (pad, event);
gst_object_unref (self);
@ -541,7 +541,7 @@ gst_frei0r_mixer_sink0_event (GstPad * pad, GstEvent * event)
}
static GstFlowReturn
gst_frei0r_mixer_collected (GstCollectPads * pads, GstFrei0rMixer * self)
gst_frei0r_mixer_collected (GstCollectPads2 * pads, GstFrei0rMixer * self)
{
GstBuffer *inbuf0 = NULL, *inbuf1 = NULL, *inbuf2 = NULL;
GstBuffer *outbuf = NULL;
@ -575,15 +575,15 @@ gst_frei0r_mixer_collected (GstCollectPads * pads, GstFrei0rMixer * self)
return ret;
for (l = pads->data; l; l = l->next) {
GstCollectData *cdata = l->data;
GstCollectData2 *cdata = l->data;
if (cdata->pad == self->sink0) {
inbuf0 = gst_collect_pads_pop (pads, cdata);
inbuf0 = gst_collect_pads2_pop (pads, cdata);
segment = &cdata->segment;
} else if (cdata->pad == self->sink1) {
inbuf1 = gst_collect_pads_pop (pads, cdata);
inbuf1 = gst_collect_pads2_pop (pads, cdata);
} else if (cdata->pad == self->sink2) {
inbuf2 = gst_collect_pads_pop (pads, cdata);
inbuf2 = gst_collect_pads2_pop (pads, cdata);
}
}
@ -675,22 +675,26 @@ gst_frei0r_mixer_class_init (GstFrei0rMixerClass * klass,
gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
gst_caps_ref (caps));
gst_element_class_add_pad_template (gstelement_class, templ);
gst_object_unref (templ);
templ =
gst_pad_template_new ("sink_0", GST_PAD_SINK, GST_PAD_ALWAYS,
gst_caps_ref (caps));
gst_element_class_add_pad_template (gstelement_class, templ);
gst_object_unref (templ);
templ =
gst_pad_template_new ("sink_1", GST_PAD_SINK, GST_PAD_ALWAYS,
gst_caps_ref (caps));
gst_element_class_add_pad_template (gstelement_class, templ);
gst_object_unref (templ);
if (klass->info->plugin_type == F0R_PLUGIN_TYPE_MIXER3) {
templ =
gst_pad_template_new ("sink_2", GST_PAD_SINK, GST_PAD_ALWAYS,
gst_caps_ref (caps));
gst_element_class_add_pad_template (gstelement_class, templ);
gst_object_unref (templ);
}
gst_caps_unref (caps);
@ -704,9 +708,9 @@ gst_frei0r_mixer_init (GstFrei0rMixer * self, GstFrei0rMixerClass * klass)
self->property_cache =
gst_frei0r_property_cache_init (klass->properties, klass->n_properties);
self->collect = gst_collect_pads_new ();
gst_collect_pads_set_function (self->collect,
(GstCollectPadsFunction) gst_frei0r_mixer_collected, self);
self->collect = gst_collect_pads2_new ();
gst_collect_pads2_set_function (self->collect,
(GstCollectPads2Function) gst_frei0r_mixer_collected, self);
self->src =
gst_pad_new_from_template (gst_element_class_get_pad_template
@ -730,8 +734,8 @@ gst_frei0r_mixer_init (GstFrei0rMixer * self, GstFrei0rMixerClass * klass)
GST_DEBUG_FUNCPTR (gst_frei0r_mixer_set_caps));
gst_pad_set_query_function (self->sink0,
GST_DEBUG_FUNCPTR (gst_frei0r_mixer_sink_query));
gst_collect_pads_add_pad (self->collect, self->sink0,
sizeof (GstCollectData), NULL);
gst_collect_pads2_add_pad (self->collect, self->sink0,
sizeof (GstCollectData2));
self->collect_event = (GstPadEventFunction) GST_PAD_EVENTFUNC (self->sink0);
gst_pad_set_event_function (self->sink0,
GST_DEBUG_FUNCPTR (gst_frei0r_mixer_sink0_event));
@ -746,8 +750,8 @@ gst_frei0r_mixer_init (GstFrei0rMixer * self, GstFrei0rMixerClass * klass)
GST_DEBUG_FUNCPTR (gst_frei0r_mixer_set_caps));
gst_pad_set_query_function (self->sink0,
GST_DEBUG_FUNCPTR (gst_frei0r_mixer_sink_query));
gst_collect_pads_add_pad (self->collect, self->sink1,
sizeof (GstCollectData), NULL);
gst_collect_pads2_add_pad (self->collect, self->sink1,
sizeof (GstCollectData2));
gst_element_add_pad (GST_ELEMENT_CAST (self), self->sink1);
if (klass->info->plugin_type == F0R_PLUGIN_TYPE_MIXER3) {
@ -760,8 +764,8 @@ gst_frei0r_mixer_init (GstFrei0rMixer * self, GstFrei0rMixerClass * klass)
GST_DEBUG_FUNCPTR (gst_frei0r_mixer_set_caps));
gst_pad_set_query_function (self->sink0,
GST_DEBUG_FUNCPTR (gst_frei0r_mixer_sink_query));
gst_collect_pads_add_pad (self->collect, self->sink2,
sizeof (GstCollectData), NULL);
gst_collect_pads2_add_pad (self->collect, self->sink2,
sizeof (GstCollectData2));
gst_element_add_pad (GST_ELEMENT_CAST (self), self->sink2);
}

View file

@ -22,7 +22,7 @@
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/base/gstcollectpads.h>
#include <gst/base/gstcollectpads2.h>
#include "frei0r.h"
#include "gstfrei0r.h"
@ -42,7 +42,7 @@ typedef struct _GstFrei0rMixerClass GstFrei0rMixerClass;
struct _GstFrei0rMixer {
GstElement parent;
GstCollectPads *collect;
GstCollectPads2 *collect;
GstPad *src;
GstPad *sink0, *sink1, *sink2;

View file

@ -1091,10 +1091,11 @@ gst_live_live_adder_chain (GstPad * pad, GstBuffer * buffer)
if (skip) {
GstClockTime subbuffer_duration = GST_BUFFER_DURATION (buffer) - skip;
GstClockTime subbuffer_ts = GST_BUFFER_TIMESTAMP (buffer) + skip;
buffer = gst_buffer_create_sub (buffer,
GstBuffer *new_buffer = gst_buffer_create_sub (buffer,
gst_live_adder_length_from_duration (adder, skip),
gst_live_adder_length_from_duration (adder, subbuffer_duration));
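/* gst_buffer_create_sub() keeps its own reference to the parent buffer;
 * the old in-place assignment leaked ours, so drop it explicitly */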
gst_buffer_unref (buffer);
buffer = new_buffer;
GST_BUFFER_TIMESTAMP (buffer) = subbuffer_ts;
GST_BUFFER_DURATION (buffer) = subbuffer_duration;
}

View file

@ -1,24 +0,0 @@
plugin_LTLIBRARIES = libgstmpeg4videoparse.la
libgstmpeg4videoparse_la_SOURCES = mpeg4videoparse.c mpeg4parse.c
libgstmpeg4videoparse_la_CFLAGS = $(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstmpeg4videoparse_la_LIBADD = $(GST_BASE_LIBS) $(GST_LIBS)
libgstmpeg4videoparse_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstmpeg4videoparse_la_LIBTOOLFLAGS = --tag=disable-static
noinst_HEADERS = mpeg4videoparse.h mpeg4parse.h
Android.mk: Makefile.am $(BUILT_SOURCES)
androgenizer \
-:PROJECT libgstmpeg4videoparse -:SHARED libgstmpeg4videoparse \
-:TAGS eng debug \
-:REL_TOP $(top_srcdir) -:ABS_TOP $(abs_top_srcdir) \
-:SOURCES $(libgstmpeg4videoparse_la_SOURCES) \
-:CFLAGS $(DEFS) $(DEFAULT_INCLUDES) $(libgstmpeg4videoparse_la_CFLAGS) \
-:LDFLAGS $(libgstmpeg4videoparse_la_LDFLAGS) \
$(libgstmpeg4videoparse_la_LIBADD) \
-ldl \
-:PASSTHROUGH LOCAL_ARM_MODE:=arm \
LOCAL_MODULE_PATH:='$$(TARGET_OUT)/lib/gstreamer-0.10' \
> $@

View file

@ -1,294 +0,0 @@
/* GStreamer MPEG4-2 video Parser
* Copyright (C) <2008> Mindfruit B.V.
* @author Sjoerd Simons <sjoerd@luon.net>
* Copyright (C) <2007> Julien Moutte <julien@fluendo.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include "mpeg4parse.h"
#include <gst/base/gstbitreader.h>
GST_DEBUG_CATEGORY_EXTERN (mpeg4v_parse_debug);
#define GST_CAT_DEFAULT mpeg4v_parse_debug
#define GET_BITS(b, num, bits) G_STMT_START { \
if (!gst_bit_reader_get_bits_uint32(b, bits, num)) \
goto failed; \
GST_TRACE ("parsed %d bits: %d", num, *(bits)); \
} G_STMT_END
#define MARKER_BIT(b) G_STMT_START { \
guint32 i; \
GET_BITS(b, 1, &i); \
if (i != 0x1) \
goto failed; \
} G_STMT_END
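/* both helpers jump to a local `failed' label on a short read; MARKER_BIT
 * additionally requires the bit read to be 1, matching the marker_bit
 * syntax element of ISO/IEC 14496-2 */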
static inline gboolean
next_start_code (GstBitReader * b)
{
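/* stuffing before a start code: a single '0' bit followed by '1' bits up
 * to the next byte boundary */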
guint32 bits = 0;
GET_BITS (b, 1, &bits);
if (bits != 0)
goto failed;
while (b->bit != 0) {
GET_BITS (b, 1, &bits);
if (bits != 0x1)
goto failed;
}
return TRUE;
failed:
return FALSE;
}
static inline gboolean
skip_user_data (GstBitReader * bs, guint32 * bits)
{
while (*bits == MPEG4_USER_DATA_STARTCODE_MARKER) {
guint32 b = 0;
do {
GET_BITS (bs, 8, &b);
*bits = (*bits << 8) | b;
} while ((*bits >> 8) != MPEG4_START_MARKER);
}
return TRUE;
failed:
return FALSE;
}
static gint aspect_ratio_table[6][2] = {
{-1, -1}, {1, 1}, {12, 11}, {10, 11}, {16, 11}, {40, 33}
};
static gboolean
gst_mpeg4_params_parse_vo (MPEG4Params * params, GstBitReader * br)
{
guint32 bits;
guint16 time_increment_resolution = 0;
guint16 fixed_time_increment = 0;
gint aspect_ratio_width = -1, aspect_ratio_height = -1;
gint height = -1, width = -1;
/* expecting a video object startcode */
GET_BITS (br, 32, &bits);
if (bits > 0x11F)
goto failed;
/* expecting a video object layer startcode */
GET_BITS (br, 32, &bits);
if (bits < 0x120 || bits > 0x12F)
goto failed;
/* ignore random accessible vol and video object type indication */
GET_BITS (br, 9, &bits);
GET_BITS (br, 1, &bits);
if (bits) {
/* skip video object layer verid and priority */
GET_BITS (br, 7, &bits);
}
/* aspect ratio info */
GET_BITS (br, 4, &bits);
if (bits == 0)
goto failed;
/* check if aspect ratio info is extended par */
if (bits == 0xf) {
GET_BITS (br, 8, &bits);
aspect_ratio_width = bits;
GET_BITS (br, 8, &bits);
aspect_ratio_height = bits;
} else if (bits < 0x6) {
aspect_ratio_width = aspect_ratio_table[bits][0];
aspect_ratio_height = aspect_ratio_table[bits][1];
}
GST_DEBUG ("aspect ratio %d/%d", aspect_ratio_width, aspect_ratio_height);
GET_BITS (br, 1, &bits);
if (bits) {
/* vol control parameters, skip chroma and low delay */
GET_BITS (br, 3, &bits);
GET_BITS (br, 1, &bits);
if (bits) {
/* skip vbv_parameters */
if (!gst_bit_reader_skip (br, 79))
goto failed;
}
}
/* layer shape */
GET_BITS (br, 2, &bits);
/* only support rectangular */
if (bits != 0)
goto failed;
MARKER_BIT (br);
GET_BITS (br, 16, &bits);
time_increment_resolution = bits;
MARKER_BIT (br);
GST_DEBUG ("time increment resolution %d", time_increment_resolution);
GET_BITS (br, 1, &bits);
if (bits) {
/* fixed time increment */
int n;
/* Length of the time increment is the minimal number of bits needed to
* represent time_increment_resolution-1 */
for (n = 0; ((time_increment_resolution - 1) >> n) != 0; n++);
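/* e.g. time_increment_resolution = 30000 yields n = 15, since
 * 29999 >> 14 != 0 but 29999 >> 15 == 0 */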
GET_BITS (br, n, &bits);
fixed_time_increment = bits;
} else {
/* When fixed_vop_rate is not set we can't guess any framerate */
fixed_time_increment = 0;
}
GST_DEBUG ("fixed time increment %d", fixed_time_increment);
/* assuming rectangular shape */
MARKER_BIT (br);
GET_BITS (br, 13, &bits);
width = bits;
MARKER_BIT (br);
GET_BITS (br, 13, &bits);
height = bits;
MARKER_BIT (br);
GST_DEBUG ("width x height: %d x %d", width, height);
/* so we got it all, report back */
params->width = width;
params->height = height;
params->time_increment_resolution = time_increment_resolution;
params->fixed_time_increment = fixed_time_increment;
params->aspect_ratio_width = aspect_ratio_width;
params->aspect_ratio_height = aspect_ratio_height;
return TRUE;
/* ERRORS */
failed:
{
GST_WARNING ("Failed to parse config data");
return FALSE;
}
}
static gboolean
gst_mpeg4_params_parse_vos (MPEG4Params * params, GstBitReader * br)
{
guint32 bits = 0;
GET_BITS (br, 32, &bits);
if (bits != MPEG4_VOS_STARTCODE_MARKER)
goto failed;
GET_BITS (br, 8, &bits);
params->profile = bits;
/* invalid profile, warn but carry on */
if (params->profile == 0) {
GST_WARNING ("Invalid profile in VOS");
}
/* Expect Visual Object startcode */
GET_BITS (br, 32, &bits);
/* but skip optional user data */
if (!skip_user_data (br, &bits))
goto failed;
if (bits != MPEG4_VISUAL_OBJECT_STARTCODE_MARKER)
goto failed;
GET_BITS (br, 1, &bits);
if (bits == 0x1) {
/* Skip visual_object_verid and priority */
GET_BITS (br, 7, &bits);
}
GET_BITS (br, 4, &bits);
/* Only support video ID */
if (bits != 0x1)
goto failed;
/* video signal type */
GET_BITS (br, 1, &bits);
if (bits == 0x1) {
/* video signal type, ignore format and range */
GET_BITS (br, 4, &bits);
GET_BITS (br, 1, &bits);
if (bits == 0x1) {
/* ignore color description */
GET_BITS (br, 24, &bits);
}
}
if (!next_start_code (br))
goto failed;
/* skip optional user data */
GET_BITS (br, 32, &bits);
if (!skip_user_data (br, &bits))
goto failed;
/* rewind to start code */
gst_bit_reader_set_pos (br, gst_bit_reader_get_pos (br) - 32);
return gst_mpeg4_params_parse_vo (params, br);
/* ERRORS */
failed:
{
GST_WARNING ("Failed to parse config data");
return FALSE;
}
}
gboolean
gst_mpeg4_params_parse_config (MPEG4Params * params, const guint8 * data,
guint size)
{
GstBitReader br;
if (size < 4)
return FALSE;
gst_bit_reader_init (&br, data, size);
if (data[3] == MPEG4_VOS_STARTCODE)
return gst_mpeg4_params_parse_vos (params, &br);
else
return gst_mpeg4_params_parse_vo (params, &br);
}

View file

@ -1,63 +0,0 @@
/* GStreamer MPEG4-2 video Parser
* Copyright (C) <2008> Mindfruit B.V.
* @author Sjoerd Simons <sjoerd@luon.net>
* Copyright (C) <2007> Julien Moutte <julien@fluendo.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_MPEG4_PARAMS_H__
#define __GST_MPEG4_PARAMS_H__
#include <gst/gst.h>
G_BEGIN_DECLS
#define MPEG4_VIDEO_OBJECT_STARTCODE_MIN 0x00
#define MPEG4_VIDEO_OBJECT_STARTCODE_MAX 0x1F
#define MPEG4_VOS_STARTCODE 0xB0
#define MPEG4_VOS_ENDCODE 0xB1
#define MPEG4_USER_DATA_STARTCODE 0xB2
#define MPEG4_GOP_STARTCODE 0xB3
#define MPEG4_VISUAL_OBJECT_STARTCODE 0xB5
#define MPEG4_VOP_STARTCODE 0xB6
#define MPEG4_START_MARKER 0x000001
#define MPEG4_VISUAL_OBJECT_STARTCODE_MARKER \
((MPEG4_START_MARKER << 8) + MPEG4_VISUAL_OBJECT_STARTCODE)
#define MPEG4_VOS_STARTCODE_MARKER \
((MPEG4_START_MARKER << 8) + MPEG4_VOS_STARTCODE)
#define MPEG4_USER_DATA_STARTCODE_MARKER \
((MPEG4_START_MARKER << 8) + MPEG4_USER_DATA_STARTCODE)
typedef struct _MPEG4Params MPEG4Params;
struct _MPEG4Params
{
gint profile;
gint width, height;
gint aspect_ratio_width, aspect_ratio_height;
gint time_increment_resolution;
gint fixed_time_increment;
};
gboolean gst_mpeg4_params_parse_config (MPEG4Params * params,
const guint8 * data, guint size);
G_END_DECLS
#endif

View file

@ -1607,7 +1607,7 @@ gst_flups_demux_parse_pack_start (GstFluPSDemux * demux)
/* adjustment of the SCR */
if (G_LIKELY (demux->current_scr != G_MAXUINT64)) {
gint64 diff;
guint64 diff;
guint64 old_scr, old_mux_rate, bss, adjust = 0;
/* keep SCR of the previous packet */

View file

@ -191,6 +191,7 @@ static gboolean gst_mpegts_demux_src_event (GstPad * pad, GstEvent * event);
static GstFlowReturn gst_mpegts_demux_chain (GstPad * pad, GstBuffer * buffer);
static gboolean gst_mpegts_demux_sink_setcaps (GstPad * pad, GstCaps * caps);
static gboolean gst_mpegts_demux_is_live (GstMpegTSDemux * demux);
static GstClock *gst_mpegts_demux_provide_clock (GstElement * element);
static gboolean gst_mpegts_demux_src_pad_query (GstPad * pad, GstQuery * query);
static const GstQueryType *gst_mpegts_demux_src_pad_query_type (GstPad * pad);
@ -1127,7 +1128,7 @@ gst_mpegts_demux_add_all_streams (GstMpegTSDemux * demux, GstClockTime pts)
}
if (!gst_mpegts_demux_fill_stream (stream, stream->filter.id,
stream->stream_type)) {
GST_ERROR ("Unknown type for PID 0x%04x", stream->PID);
GST_WARNING_OBJECT (demux, "Unknown type for PID 0x%04x", stream->PID);
/* ignore */
continue;
}
@ -1279,12 +1280,14 @@ gst_mpegts_demux_data_cb (GstPESFilter * filter, gboolean first,
if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (demux->in_gap))) {
if (GST_CLOCK_TIME_IS_VALID (demux->first_buf_ts)
&& GST_CLOCK_TIME_IS_VALID (filter->pts)) {
&& GST_CLOCK_TIME_IS_VALID (filter->pts)
&& gst_mpegts_demux_is_live (demux)) {
int i;
GstClockTime pts = GST_CLOCK_TIME_NONE;
for (i = 0; i < MPEGTS_MAX_PID + 1; i++) {
GstMpegTSStream *stream = demux->streams[i];
if (stream && (pts == GST_CLOCK_TIME_NONE || stream->last_time < pts)) {
if (stream && stream->last_time > 0 && (pts == GST_CLOCK_TIME_NONE
|| stream->last_time < pts)) {
pts = stream->last_time;
}
}
@ -2952,14 +2955,12 @@ gst_mpegts_demux_sink_event (GstPad * pad, GstEvent * event)
}
static gboolean
gst_mpegts_demux_provides_clock (GstElement * element)
gst_mpegts_demux_is_live (GstMpegTSDemux * demux)
{
GstMpegTSDemux *demux;
GstQuery *query;
gboolean is_live = FALSE;
GstPad *peer;
demux = GST_MPEGTS_DEMUX (element);
query = gst_query_new_latency ();
peer = gst_pad_get_peer (demux->sinkpad);
@ -2973,6 +2974,12 @@ gst_mpegts_demux_provides_clock (GstElement * element)
return is_live;
}
static gboolean
gst_mpegts_demux_provides_clock (GstElement * element)
{
return gst_mpegts_demux_is_live (GST_MPEGTS_DEMUX (element));
}
static GstClock *
gst_mpegts_demux_provide_clock (GstElement * element)
{

View file

@ -896,6 +896,14 @@ mpegts_parse_is_psi (MpegTSParse * parse, MpegTSPacketizerPacket * packet)
data = packet->data;
pointer = *data++;
data += pointer;
/* the 'pointer' value may be invalid in a malformed packet,
 * so guard against reading beyond the packet end
 */
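/* e.g. a corrupted pointer_field of 0xff would step past the end of a
 * 188-byte transport packet */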
if (!(data < packet->data_end)) {
GST_WARNING_OBJECT (parse,
"Wrong offset when retrieving table id: 0x%x", pointer);
return FALSE;
}
table_id = *data;
i = 0;
while (si_tables[i] != TABLE_ID_UNSET) {

View file

@ -54,9 +54,11 @@ GST_DEBUG_CATEGORY (mpegpsmux_debug);
enum
{
ARG_0
PROP_AGGREGATE_GOPS = 1
};
#define DEFAULT_AGGREGATE_GOPS FALSE
static GstStaticPadTemplate mpegpsmux_sink_factory =
GST_STATIC_PAD_TEMPLATE ("sink_%u",
GST_PAD_SINK,
@ -94,7 +96,7 @@ static gboolean new_packet_cb (guint8 * data, guint len, void *user_data);
static void release_buffer_cb (guint8 * data, void *user_data);
static gboolean mpegpsdemux_prepare_srcpad (MpegPsMux * mux);
static GstFlowReturn mpegpsmux_collected (GstCollectPads * pads,
static GstFlowReturn mpegpsmux_collected (GstCollectPads2 * pads,
MpegPsMux * mux);
static GstPad *mpegpsmux_request_new_pad (GstElement * element,
GstPadTemplate * templ, const gchar * name);
@ -135,6 +137,10 @@ mpegpsmux_class_init (MpegPsMuxClass * klass)
gstelement_class->release_pad = mpegpsmux_release_pad;
gstelement_class->change_state = mpegpsmux_change_state;
g_object_class_install_property (gobject_class, PROP_AGGREGATE_GOPS,
g_param_spec_boolean ("aggregate-gops", "Aggregate GOPs",
"Whether to aggregate GOPs and push them out as buffer lists",
DEFAULT_AGGREGATE_GOPS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
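(A hedged usage sketch for the new property: any application holding an mpegpsmux instance can flip it with plain GObject API; element and property names are taken from the code above, the variable name is illustrative:)
GstElement *mux = gst_element_factory_make ("mpegpsmux", NULL);
/* push complete GOPs downstream as single buffer lists */
g_object_set (mux, "aggregate-gops", TRUE, NULL);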
static void
@ -145,9 +151,9 @@ mpegpsmux_init (MpegPsMux * mux, MpegPsMuxClass * g_class)
gst_pad_use_fixed_caps (mux->srcpad);
gst_element_add_pad (GST_ELEMENT (mux), mux->srcpad);
mux->collect = gst_collect_pads_new ();
gst_collect_pads_set_function (mux->collect,
(GstCollectPadsFunction) GST_DEBUG_FUNCPTR (mpegpsmux_collected), mux);
mux->collect = gst_collect_pads2_new ();
gst_collect_pads2_set_function (mux->collect,
(GstCollectPads2Function) GST_DEBUG_FUNCPTR (mpegpsmux_collected), mux);
mux->psmux = psmux_new ();
psmux_set_write_func (mux->psmux, new_packet_cb, mux);
@ -171,6 +177,11 @@ mpegpsmux_dispose (GObject * object)
mux->psmux = NULL;
}
if (mux->gop_list != NULL) {
gst_buffer_list_unref (mux->gop_list);
mux->gop_list = NULL;
}
GST_CALL_PARENT (G_OBJECT_CLASS, dispose, (object));
}
@ -178,9 +189,12 @@ static void
gst_mpegpsmux_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
/* MpegPsMux *mux = GST_MPEG_PSMUX (object); */
MpegPsMux *mux = GST_MPEG_PSMUX (object);
switch (prop_id) {
case PROP_AGGREGATE_GOPS:
mux->aggregate_gops = g_value_get_boolean (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@ -191,9 +205,12 @@ static void
gst_mpegpsmux_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
/* MpegPsMux *mux = GST_MPEG_PSMUX (object); */
MpegPsMux *mux = GST_MPEG_PSMUX (object);
switch (prop_id) {
case PROP_AGGREGATE_GOPS:
g_value_set_boolean (value, mux->aggregate_gops);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@ -217,6 +234,7 @@ mpegpsmux_create_stream (MpegPsMux * mux, MpegPsPadData * ps_data, GstPad * pad)
GstFlowReturn ret = GST_FLOW_ERROR;
GstCaps *caps = gst_pad_get_negotiated_caps (pad);
GstStructure *s;
gboolean is_video = FALSE;
if (caps == NULL) {
GST_DEBUG_OBJECT (pad, "Sink pad caps were not set before pushing");
@ -229,6 +247,7 @@ mpegpsmux_create_stream (MpegPsMux * mux, MpegPsPadData * ps_data, GstPad * pad)
if (gst_structure_has_name (s, "video/x-dirac")) {
GST_DEBUG_OBJECT (pad, "Creating Dirac stream");
ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_VIDEO_DIRAC);
is_video = TRUE;
} else if (gst_structure_has_name (s, "audio/x-ac3")) {
GST_DEBUG_OBJECT (pad, "Creating AC3 stream");
ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_PS_AUDIO_AC3);
@ -252,6 +271,7 @@ mpegpsmux_create_stream (MpegPsMux * mux, MpegPsPadData * ps_data, GstPad * pad)
ps_data->codec_data = NULL;
}
ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_VIDEO_H264);
is_video = TRUE;
} else if (gst_structure_has_name (s, "audio/mpeg")) {
gint mpegversion;
if (!gst_structure_get_int (s, "mpegversion", &mpegversion)) {
@ -312,6 +332,7 @@ mpegpsmux_create_stream (MpegPsMux * mux, MpegPsPadData * ps_data, GstPad * pad)
GST_DEBUG_OBJECT (pad, "Creating MPEG Video, version 4 stream");
ps_data->stream = psmux_create_stream (mux->psmux, PSMUX_ST_VIDEO_MPEG4);
}
is_video = TRUE;
}
if (ps_data->stream != NULL) {
@ -327,6 +348,11 @@ mpegpsmux_create_stream (MpegPsMux * mux, MpegPsPadData * ps_data, GstPad * pad)
psmux_stream_set_buffer_release_func (ps_data->stream, release_buffer_cb);
ret = GST_FLOW_OK;
if (is_video && mux->video_stream_id == 0) {
mux->video_stream_id = ps_data->stream_id;
GST_INFO_OBJECT (mux, "video pad stream_id 0x%02x", mux->video_stream_id);
}
}
beach:
@ -343,7 +369,7 @@ mpegpsmux_create_streams (MpegPsMux * mux)
/* Create the streams */
while (walk) {
GstCollectData *c_data = (GstCollectData *) walk->data;
GstCollectData2 *c_data = (GstCollectData2 *) walk->data;
MpegPsPadData *ps_data = (MpegPsPadData *) walk->data;
walk = g_slist_next (walk);
@ -368,11 +394,11 @@ mpegpsmux_choose_best_stream (MpegPsMux * mux)
/* Choose from which stream to mux with */
MpegPsPadData *best = NULL;
GstCollectData *c_best = NULL;
GstCollectData2 *c_best = NULL;
GSList *walk;
for (walk = mux->collect->data; walk != NULL; walk = g_slist_next (walk)) {
GstCollectData *c_data = (GstCollectData *) walk->data;
GstCollectData2 *c_data = (GstCollectData2 *) walk->data;
MpegPsPadData *ps_data = (MpegPsPadData *) walk->data;
if (ps_data->eos == FALSE) {
@ -380,7 +406,7 @@ mpegpsmux_choose_best_stream (MpegPsMux * mux)
GstBuffer *buf;
ps_data->queued_buf = buf =
gst_collect_pads_peek (mux->collect, c_data);
gst_collect_pads2_peek (mux->collect, c_data);
if (buf != NULL) {
if (ps_data->prepare_func) {
@ -441,19 +467,34 @@ mpegpsmux_choose_best_stream (MpegPsMux * mux)
}
}
if (c_best) {
gst_buffer_unref (gst_collect_pads_pop (mux->collect, c_best));
gst_buffer_unref (gst_collect_pads2_pop (mux->collect, c_best));
}
return best;
}
static GstFlowReturn
mpegpsmux_collected (GstCollectPads * pads, MpegPsMux * mux)
mpegpsmux_push_gop_list (MpegPsMux * mux)
{
GstFlowReturn flow;
g_assert (mux->gop_list != NULL);
GST_DEBUG_OBJECT (mux, "Sending pending GOP of %u buffers",
gst_buffer_list_n_groups (mux->gop_list));
flow = gst_pad_push_list (mux->srcpad, mux->gop_list);
mux->gop_list = NULL;
return flow;
}
static GstFlowReturn
mpegpsmux_collected (GstCollectPads2 * pads, MpegPsMux * mux)
{
/* main muxing function */
GstFlowReturn ret = GST_FLOW_OK;
MpegPsPadData *best = NULL;
gboolean keyunit;
GST_DEBUG_OBJECT (mux, "Pads collected");
@ -496,9 +537,20 @@ mpegpsmux_collected (GstCollectPads * pads, MpegPsMux * mux)
G_GINT64_FORMAT, GST_TIME_ARGS (best->cur_ts), pts);
}
/* start of new GOP? */
keyunit = !GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
if (keyunit && best->stream_id == mux->video_stream_id
&& mux->gop_list != NULL) {
ret = mpegpsmux_push_gop_list (mux);
if (ret != GST_FLOW_OK)
goto done;
}
/* give the buffer to libpsmux for processing */
psmux_stream_add_data (best->stream, GST_BUFFER_DATA (buf),
GST_BUFFER_SIZE (buf), buf, pts, -1);
GST_BUFFER_SIZE (buf), buf, pts, -1, keyunit);
best->queued_buf = NULL;
/* write the data from libpsmux to stream */
@ -513,12 +565,17 @@ mpegpsmux_collected (GstCollectPads * pads, MpegPsMux * mux)
} else {
/* FIXME: Drain all remaining streams */
/* At EOS */
if (mux->gop_list != NULL)
mpegpsmux_push_gop_list (mux);
if (psmux_write_end_code (mux->psmux)) {
GST_WARNING_OBJECT (mux, "Writing MPEG PS Program end code failed.");
}
gst_pad_push_event (mux->srcpad, gst_event_new_eos ());
}
done:
return ret;
new_seg_fail:
return GST_FLOW_ERROR;
@ -538,8 +595,8 @@ mpegpsmux_request_new_pad (GstElement * element,
pad = gst_pad_new_from_template (templ, name);
pad_data = (MpegPsPadData *) gst_collect_pads_add_pad (mux->collect, pad,
sizeof (MpegPsPadData), NULL);
pad_data = (MpegPsPadData *) gst_collect_pads2_add_pad (mux->collect, pad,
sizeof (MpegPsPadData));
if (pad_data == NULL)
goto pad_failure;
@ -555,7 +612,7 @@ mpegpsmux_request_new_pad (GstElement * element,
could_not_add:
GST_ELEMENT_ERROR (element, STREAM, FAILED,
("Internal data stream error."), ("Could not add pad to element"));
gst_collect_pads_remove_pad (mux->collect, pad);
gst_collect_pads2_remove_pad (mux->collect, pad);
gst_object_unref (pad);
return NULL;
pad_failure:
@ -586,9 +643,31 @@ mpegpsmux_release_pad (GstElement * element, GstPad * pad)
pad_data->codec_data = NULL;
}
}
if (pad_data->stream_id == mux->video_stream_id)
mux->video_stream_id = 0;
GST_OBJECT_UNLOCK (pad);
gst_collect_pads_remove_pad (mux->collect, pad);
gst_collect_pads2_remove_pad (mux->collect, pad);
}
static void
add_buffer_to_goplist (MpegPsMux * mux, GstBuffer * buf)
{
GstBufferListIterator *it;
if (mux->gop_list == NULL)
mux->gop_list = gst_buffer_list_new ();
it = gst_buffer_list_iterate (mux->gop_list);
/* move iterator to end */
while (gst_buffer_list_iterator_next_group (it)) {
/* .. */
}
gst_buffer_list_iterator_add_group (it);
gst_buffer_list_iterator_add (it, buf);
gst_buffer_list_iterator_free (it);
}
static gboolean
@ -611,7 +690,14 @@ new_packet_cb (guint8 * data, guint len, void *user_data)
memcpy (GST_BUFFER_DATA (buf), data, len);
GST_BUFFER_TIMESTAMP (buf) = mux->last_ts;
if (mux->aggregate_gops) {
add_buffer_to_goplist (mux, buf);
return TRUE;
}
ret = gst_pad_push (mux->srcpad, buf);
if (G_UNLIKELY (ret != GST_FLOW_OK)) {
mux->last_flow_ret = ret;
return FALSE;
@ -623,6 +709,9 @@ new_packet_cb (guint8 * data, guint len, void *user_data)
static gboolean
mpegpsdemux_prepare_srcpad (MpegPsMux * mux)
{
GValue val = { 0, };
GList *headers, *l;
/* prepare the source pad for output */
GstEvent *new_seg =
@ -634,6 +723,21 @@ mpegpsdemux_prepare_srcpad (MpegPsMux * mux)
/* gst_static_pad_template_get_caps (&mpegpsmux_src_factory); */
headers = psmux_get_stream_headers (mux->psmux);
g_value_init (&val, GST_TYPE_ARRAY);
for (l = headers; l != NULL; l = l->next) {
GValue buf_val = { 0, };
g_value_init (&buf_val, GST_TYPE_BUFFER);
gst_value_take_buffer (&buf_val, GST_BUFFER (l->data));
l->data = NULL;
gst_value_array_append_value (&val, &buf_val);
g_value_unset (&buf_val);
}
gst_caps_set_value (caps, "streamheader", &val);
g_value_unset (&val);
g_list_free (headers);
/* Set caps on src pad from our template and push new segment */
gst_pad_set_caps (mux->srcpad, caps);
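(With the headers attached, gst_caps_to_string() on the negotiated src caps would print something along these lines; the buffer contents are abbreviated and hypothetical, but 0x000001bb and 0x000001bc are the MPEG-2 system header and program stream map start codes written by psmux:)
video/mpeg, mpegversion=(int)2, systemstream=(boolean)true,
    streamheader=(buffer)< 000001bb..., 000001bc... >
Downstream elements that honour streamheader (multifdsink, for instance) can replay both headers to clients that join mid-stream.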
@ -657,12 +761,12 @@ mpegpsmux_change_state (GstElement * element, GstStateChange transition)
case GST_STATE_CHANGE_NULL_TO_READY:
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
gst_collect_pads_start (mux->collect);
gst_collect_pads2_start (mux->collect);
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
gst_collect_pads_stop (mux->collect);
gst_collect_pads2_stop (mux->collect);
break;
case GST_STATE_CHANGE_READY_TO_NULL:
break;

View file

@ -46,7 +46,7 @@
#define __MPEGPSMUX_H__
#include <gst/gst.h>
#include <gst/base/gstcollectpads.h>
#include <gst/base/gstcollectpads2.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
@ -68,7 +68,9 @@ struct MpegPsMux {
GstPad *srcpad;
GstCollectPads *collect; // pads collector
guint video_stream_id; /* stream id of primary video stream */
GstCollectPads2 *collect; /* pads collector */
PsMux *psmux;
@ -76,6 +78,9 @@ struct MpegPsMux {
GstFlowReturn last_flow_ret;
GstClockTime last_ts;
GstBufferList *gop_list;
gboolean aggregate_gops;
};
struct MpegPsMuxClass {
@ -83,7 +88,7 @@ struct MpegPsMuxClass {
};
struct MpegPsPadData {
GstCollectData collect; /* Parent */
GstCollectData2 collect; /* Parent */
guint8 stream_id;
guint8 stream_id_ext;

View file

@ -141,6 +141,12 @@ psmux_free (PsMux * mux)
}
g_list_free (mux->streams);
if (mux->sys_header != NULL)
gst_buffer_unref (mux->sys_header);
if (mux->psm != NULL)
gst_buffer_unref (mux->psm);
g_slice_free (PsMux, mux);
}
@ -332,17 +338,23 @@ psmux_write_pack_header (PsMux * mux)
return psmux_packet_out (mux);
}
static gboolean
psmux_write_system_header (PsMux * mux)
static void
psmux_ensure_system_header (PsMux * mux)
{
GstBuffer *buf;
bits_buffer_t bw;
guint len = 12 + (mux->nb_streams +
(mux->nb_private_streams > 1 ? mux->nb_private_streams - 1 : 0)) * 3;
GList *cur;
gboolean private_hit = FALSE;
if (mux->sys_header != NULL)
return;
buf = gst_buffer_new_and_alloc (len);
/* system_header_start_code */
bits_initwrite (&bw, len, mux->packet_buf);
bits_initwrite (&bw, len, GST_BUFFER_DATA (buf));
/* system_header start code */
bits_write (&bw, 24, PSMUX_START_CODE_PREFIX);
@ -378,19 +390,36 @@ psmux_write_system_header (PsMux * mux)
private_hit = TRUE;
}
mux->packet_bytes_written = len;
return psmux_packet_out (mux);
GST_MEMDUMP ("System Header", GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
mux->sys_header = buf;
}
static gboolean
psmux_write_program_stream_map (PsMux * mux)
psmux_write_system_header (PsMux * mux)
{
psmux_ensure_system_header (mux);
memcpy (mux->packet_buf, GST_BUFFER_DATA (mux->sys_header),
GST_BUFFER_SIZE (mux->sys_header));
mux->packet_bytes_written = GST_BUFFER_SIZE (mux->sys_header);
return psmux_packet_out (mux);
}
static void
psmux_ensure_program_stream_map (PsMux * mux)
{
GstBuffer *buf;
gint psm_size = 16, es_map_size = 0;
bits_buffer_t bw;
GList *cur;
guint16 len;
guint8 *pos;
if (mux->psm != NULL)
return;
/* pre-write the descriptor loop */
pos = mux->es_info_buf;
for (cur = g_list_first (mux->streams); cur != NULL; cur = g_list_next (cur)) {
@ -412,7 +441,10 @@ psmux_write_program_stream_map (PsMux * mux)
}
psm_size += es_map_size;
bits_initwrite (&bw, psm_size, mux->packet_buf);
buf = gst_buffer_new_and_alloc (psm_size);
bits_initwrite (&bw, psm_size, GST_BUFFER_DATA (buf));
/* psm start code */
bits_write (&bw, 24, PSMUX_START_CODE_PREFIX);
@ -429,15 +461,44 @@ psmux_write_program_stream_map (PsMux * mux)
/* program_stream_info empty */
bits_write (&bw, 16, es_map_size); /* elementary_stream_map_length */
memcpy (bw.p_data + bw.i_data, mux->es_info_buf, es_map_size);
/* CRC32 */
{
guint32 crc = calc_crc32 (mux->packet_buf, psm_size - 4);
guint8 *pos = mux->packet_buf + psm_size - 4;
guint32 crc = calc_crc32 (bw.p_data, psm_size - 4);
guint8 *pos = bw.p_data + psm_size - 4;
psmux_put32 (&pos, crc);
}
mux->packet_bytes_written = psm_size;
GST_MEMDUMP ("Program Stream Map", GST_BUFFER_DATA (buf),
GST_BUFFER_SIZE (buf));
mux->psm = buf;
}
static gboolean
psmux_write_program_stream_map (PsMux * mux)
{
psmux_ensure_program_stream_map (mux);
memcpy (mux->packet_buf, GST_BUFFER_DATA (mux->psm),
GST_BUFFER_SIZE (mux->psm));
mux->packet_bytes_written = GST_BUFFER_SIZE (mux->psm);
return psmux_packet_out (mux);
}
GList *
psmux_get_stream_headers (PsMux * mux)
{
GList *list;
psmux_ensure_system_header (mux);
psmux_ensure_program_stream_map (mux);
list = g_list_append (NULL, gst_buffer_ref (mux->sys_header));
list = g_list_append (list, gst_buffer_ref (mux->psm));
return list;
}

View file

@ -93,6 +93,10 @@ struct PsMux {
guint8 audio_bound;
guint8 video_bound;
guint32 rate_bound;
/* stream headers */
GstBuffer *sys_header;
GstBuffer *psm;
};
/* create/free new muxer session */
@ -109,6 +113,8 @@ PsMuxStream * psmux_create_stream (PsMux *mux, PsMuxStreamType stream_type);
gboolean psmux_write_stream_packet (PsMux *mux, PsMuxStream *stream);
gboolean psmux_write_end_code (PsMux *mux);
GList * psmux_get_stream_headers (PsMux *mux);
G_END_DECLS
#endif

View file

@ -513,7 +513,7 @@ psmux_stream_write_pes_header (PsMuxStream * stream, guint8 * data)
*/
void
psmux_stream_add_data (PsMuxStream * stream, guint8 * data, guint len,
void *user_data, gint64 pts, gint64 dts)
void *user_data, gint64 pts, gint64 dts, gboolean keyunit)
{
PsMuxStreamBuffer *packet;
@ -524,6 +524,7 @@ psmux_stream_add_data (PsMuxStream * stream, guint8 * data, guint len,
packet->size = len;
packet->user_data = user_data;
packet->keyunit = keyunit;
packet->pts = pts;
packet->dts = dts;

View file

@ -86,6 +86,8 @@ struct PsMuxStreamBuffer
guint8 *data;
guint32 size;
gboolean keyunit;
/* PTS & DTS associated with the contents of this buffer */
GstClockTime pts;
GstClockTime dts;
@ -146,7 +148,8 @@ void psmux_stream_set_buffer_release_func (PsMuxStream *stream,
/* Add a new buffer to the pool of available bytes. If pts or dts are not -1, they
* indicate the PTS or DTS of the first access unit within this packet */
void psmux_stream_add_data (PsMuxStream *stream, guint8 *data, guint len,
void *user_data, gint64 pts, gint64 dts);
void *user_data, gint64 pts, gint64 dts,
gboolean keyunit);
/* total bytes in buffer */
gint psmux_stream_bytes_in_buffer (PsMuxStream *stream);

View file

@ -7,8 +7,9 @@ libgstmpegtsmux_la_SOURCES = \
mpegtsmux_h264.c \
mpegtsmux_aac.c
libgstmpegtsmux_la_CFLAGS = $(GST_CFLAGS)
libgstmpegtsmux_la_LIBADD = $(top_builddir)/gst/mpegtsmux/tsmux/libtsmux.la $(GST_LIBS) $(GST_BASE_LIBS)
libgstmpegtsmux_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstmpegtsmux_la_LIBADD = $(top_builddir)/gst/mpegtsmux/tsmux/libtsmux.la \
$(GST_PLUGINS_BASE_LIBS) -lgstvideo-@GST_MAJORMINOR@ $(GST_BASE_LIBS) $(GST_LIBS)
libgstmpegtsmux_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstmpegtsmux_la_LIBTOOLFLAGS = --tag=disable-static

View file

@ -88,6 +88,8 @@
#include <stdio.h>
#include <string.h>
#include <gst/video/video.h>
#include "mpegtsmux.h"
#include "mpegtsmux_h264.h"
@ -143,7 +145,7 @@ static gboolean new_packet_cb (guint8 * data, guint len, void *user_data,
static void release_buffer_cb (guint8 * data, void *user_data);
static void mpegtsdemux_prepare_srcpad (MpegTsMux * mux);
static GstFlowReturn mpegtsmux_collected (GstCollectPads * pads,
static GstFlowReturn mpegtsmux_collected (GstCollectPads2 * pads,
MpegTsMux * mux);
static GstPad *mpegtsmux_request_new_pad (GstElement * element,
GstPadTemplate * templ, const gchar * name);
@ -151,6 +153,8 @@ static void mpegtsmux_release_pad (GstElement * element, GstPad * pad);
static GstStateChangeReturn mpegtsmux_change_state (GstElement * element,
GstStateChange transition);
static void mpegtsdemux_set_header_on_caps (MpegTsMux * mux);
static gboolean mpegtsmux_sink_event (GstPad * pad, GstEvent * event);
static gboolean mpegtsmux_src_event (GstPad * pad, GstEvent * event);
GST_BOILERPLATE (MpegTsMux, mpegtsmux, GstElement, GST_TYPE_ELEMENT);
@ -215,11 +219,12 @@ mpegtsmux_init (MpegTsMux * mux, MpegTsMuxClass * g_class)
mux->srcpad =
gst_pad_new_from_static_template (&mpegtsmux_src_factory, "src");
gst_pad_use_fixed_caps (mux->srcpad);
gst_pad_set_event_function (mux->srcpad, mpegtsmux_src_event);
gst_element_add_pad (GST_ELEMENT (mux), mux->srcpad);
mux->collect = gst_collect_pads_new ();
gst_collect_pads_set_function (mux->collect,
(GstCollectPadsFunction) GST_DEBUG_FUNCPTR (mpegtsmux_collected), mux);
mux->collect = gst_collect_pads2_new ();
gst_collect_pads2_set_function (mux->collect,
(GstCollectPads2Function) GST_DEBUG_FUNCPTR (mpegtsmux_collected), mux);
mux->tsmux = tsmux_new ();
tsmux_set_write_func (mux->tsmux, new_packet_cb, mux);
@ -238,6 +243,8 @@ mpegtsmux_init (MpegTsMux * mux, MpegTsMuxClass * g_class)
mux->prog_map = NULL;
mux->streamheader = NULL;
mux->streamheader_sent = FALSE;
mux->force_key_unit_event = NULL;
mux->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
}
static void
@ -509,7 +516,7 @@ mpegtsmux_create_streams (MpegTsMux * mux)
/* Create the streams */
while (walk) {
GstCollectData *c_data = (GstCollectData *) walk->data;
GstCollectData2 *c_data = (GstCollectData2 *) walk->data;
MpegTsPadData *ts_data = (MpegTsPadData *) walk->data;
gchar *name = NULL;
@ -569,11 +576,11 @@ static MpegTsPadData *
mpegtsmux_choose_best_stream (MpegTsMux * mux)
{
MpegTsPadData *best = NULL;
GstCollectData *c_best = NULL;
GstCollectData2 *c_best = NULL;
GSList *walk;
for (walk = mux->collect->data; walk != NULL; walk = g_slist_next (walk)) {
GstCollectData *c_data = (GstCollectData *) walk->data;
GstCollectData2 *c_data = (GstCollectData2 *) walk->data;
MpegTsPadData *ts_data = (MpegTsPadData *) walk->data;
if (ts_data->eos == FALSE) {
@ -581,7 +588,7 @@ mpegtsmux_choose_best_stream (MpegTsMux * mux)
GstBuffer *buf;
ts_data->queued_buf = buf =
gst_collect_pads_peek (mux->collect, c_data);
gst_collect_pads2_peek (mux->collect, c_data);
if (buf != NULL) {
if (ts_data->prepare_func) {
@ -643,17 +650,212 @@ mpegtsmux_choose_best_stream (MpegTsMux * mux)
}
if (c_best) {
GstBuffer *buffer;
if ((buffer = gst_collect_pads_pop (mux->collect, c_best)))
if ((buffer = gst_collect_pads2_pop (mux->collect, c_best)))
gst_buffer_unref (buffer);
}
return best;
}
#define COLLECT_DATA_PAD(collect_data) (((GstCollectData *)(collect_data))->pad)
#define COLLECT_DATA_PAD(collect_data) (((GstCollectData2 *)(collect_data))->pad)
static MpegTsPadData *
find_pad_data (MpegTsMux * mux, GstPad * pad)
{
GSList *walk;
MpegTsPadData *ts_data = NULL;
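/* pad_list is owned by the GstCollectPads2; walk it only while holding
 * its stream lock */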
GST_COLLECT_PADS2_STREAM_LOCK (mux->collect);
walk = mux->collect->pad_list;
while (walk) {
if (((GstCollectData2 *) walk->data)->pad == pad) {
ts_data = (MpegTsPadData *) walk->data;
break;
}
walk = g_slist_next (walk);
}
GST_COLLECT_PADS2_STREAM_UNLOCK (mux->collect);
return ts_data;
}
static gboolean
mpegtsmux_sink_event (GstPad * pad, GstEvent * event)
{
MpegTsMux *mux = GST_MPEG_TSMUX (gst_pad_get_parent (pad));
MpegTsPadData *ts_data;
gboolean res = TRUE;
gboolean forward = TRUE;
ts_data = find_pad_data (mux, pad);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CUSTOM_DOWNSTREAM:
{
GstClockTime timestamp, stream_time, running_time;
gboolean all_headers;
guint count;
if (!gst_video_event_is_force_key_unit (event))
goto out;
forward = FALSE;
gst_video_event_parse_downstream_force_key_unit (event,
&timestamp, &stream_time, &running_time, &all_headers, &count);
GST_INFO_OBJECT (mux, "have downstream force-key-unit event on pad %s, "
"seqnum %d, running-time %" GST_TIME_FORMAT " count %d",
gst_pad_get_name (pad), gst_event_get_seqnum (event),
GST_TIME_ARGS (running_time), count);
if (mux->force_key_unit_event != NULL) {
GST_INFO_OBJECT (mux, "skipping downstream force key unit event "
"as an upstream force key unit is already queued");
goto out;
}
if (!all_headers)
goto out;
mux->pending_key_unit_ts = running_time;
gst_event_replace (&mux->force_key_unit_event, event);
break;
}
default:
break;
}
out:
if (forward)
res = ts_data->eventfunc (pad, event);
gst_object_unref (mux);
return res;
}
static gboolean
mpegtsmux_src_event (GstPad * pad, GstEvent * event)
{
MpegTsMux *mux = GST_MPEG_TSMUX (gst_pad_get_parent (pad));
gboolean res = TRUE;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CUSTOM_UPSTREAM:
{
GstIterator *iter;
GstIteratorResult iter_ret;
GstPad *sinkpad;
GstClockTime running_time;
gboolean all_headers, done;
guint count;
if (!gst_video_event_is_force_key_unit (event))
break;
gst_video_event_parse_upstream_force_key_unit (event,
&running_time, &all_headers, &count);
GST_INFO_OBJECT (mux, "received upstream force-key-unit event, "
"seqnum %d running_time %" GST_TIME_FORMAT " all_headers %d count %d",
gst_event_get_seqnum (event), GST_TIME_ARGS (running_time),
all_headers, count);
if (!all_headers)
break;
mux->pending_key_unit_ts = running_time;
gst_event_replace (&mux->force_key_unit_event, event);
iter = gst_element_iterate_sink_pads (GST_ELEMENT_CAST (mux));
done = FALSE;
while (!done) {
gboolean res = FALSE, tmp;
iter_ret = gst_iterator_next (iter, (gpointer *) & sinkpad);
switch (iter_ret) {
case GST_ITERATOR_DONE:
done = TRUE;
break;
case GST_ITERATOR_OK:
GST_INFO_OBJECT (mux, "forwarding to %s",
gst_pad_get_name (sinkpad));
tmp = gst_pad_push_event (sinkpad, gst_event_ref (event));
GST_INFO_OBJECT (mux, "result %d", tmp);
/* succeed if at least one pad succeeds */
res |= tmp;
gst_object_unref (sinkpad);
break;
case GST_ITERATOR_ERROR:
done = TRUE;
break;
case GST_ITERATOR_RESYNC:
break;
}
}
gst_event_unref (event);
break;
}
default:
res = gst_pad_event_default (pad, event);
break;
}
gst_object_unref (mux);
return res;
}
static GstEvent *
check_pending_key_unit_event (GstEvent * pending_event, GstSegment * segment,
GstClockTime timestamp, guint flags, GstClockTime pending_key_unit_ts)
{
GstClockTime running_time, stream_time;
gboolean all_headers;
guint count;
GstEvent *event = NULL;
g_return_val_if_fail (pending_event != NULL, NULL);
g_return_val_if_fail (segment != NULL, NULL);
if (pending_event == NULL)
goto out;
if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
timestamp == GST_CLOCK_TIME_NONE)
goto out;
running_time = gst_segment_to_running_time (segment,
GST_FORMAT_TIME, timestamp);
GST_INFO ("now %" GST_TIME_FORMAT " wanted %" GST_TIME_FORMAT,
GST_TIME_ARGS (running_time), GST_TIME_ARGS (pending_key_unit_ts));
if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
running_time < pending_key_unit_ts)
goto out;
if (flags & GST_BUFFER_FLAG_DELTA_UNIT) {
GST_INFO ("pending force key unit, waiting for keyframe");
goto out;
}
stream_time = gst_segment_to_stream_time (segment,
GST_FORMAT_TIME, timestamp);
gst_video_event_parse_upstream_force_key_unit (pending_event,
NULL, &all_headers, &count);
event =
gst_video_event_new_downstream_force_key_unit (timestamp, stream_time,
running_time, all_headers, count);
gst_event_set_seqnum (event, gst_event_get_seqnum (pending_event));
out:
return event;
}
static GstFlowReturn
mpegtsmux_collected (GstCollectPads * pads, MpegTsMux * mux)
mpegtsmux_collected (GstCollectPads2 * pads, MpegTsMux * mux)
{
GstFlowReturn ret = GST_FLOW_OK;
MpegTsPadData *best = NULL;
@ -686,6 +888,42 @@ mpegtsmux_collected (GstCollectPads * pads, MpegTsMux * mux)
return GST_FLOW_ERROR;
}
if (mux->force_key_unit_event != NULL && best->stream->is_video_stream) {
GstEvent *event;
event = check_pending_key_unit_event (mux->force_key_unit_event,
&best->collect.segment, GST_BUFFER_TIMESTAMP (buf),
GST_BUFFER_FLAGS (buf), mux->pending_key_unit_ts);
if (event) {
GstClockTime running_time;
guint count;
GList *cur;
mux->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
gst_event_replace (&mux->force_key_unit_event, NULL);
gst_video_event_parse_downstream_force_key_unit (event,
NULL, NULL, &running_time, NULL, &count);
GST_INFO_OBJECT (mux, "pushing downstream force-key-unit event %d "
"%" GST_TIME_FORMAT " count %d", gst_event_get_seqnum (event),
GST_TIME_ARGS (running_time), count);
gst_pad_push_event (mux->srcpad, event);
/* output PAT */
mux->tsmux->last_pat_ts = -1;
/* output PMT for each program */
for (cur = g_list_first (mux->tsmux->programs); cur != NULL;
cur = g_list_next (cur)) {
TsMuxProgram *program = (TsMuxProgram *) cur->data;
program->last_pmt_ts = -1;
}
tsmux_program_set_pcr_stream (prog, NULL);
}
}
if (G_UNLIKELY (prog->pcr_stream == NULL)) {
/* Take the first data stream for the PCR */
GST_DEBUG_OBJECT (COLLECT_DATA_PAD (best),
@ -749,22 +987,25 @@ mpegtsmux_request_new_pad (GstElement * element,
GstPad *pad = NULL;
MpegTsPadData *pad_data = NULL;
if (name != NULL && sscanf (name, "sink_%u", &pid) == 1) {
if (name != NULL && sscanf (name, "sink_%d", &pid) == 1) {
if (tsmux_find_stream (mux->tsmux, pid))
goto stream_exists;
} else {
pid = tsmux_get_new_pid (mux->tsmux);
}
pad_name = g_strdup_printf ("sink_%u", pid);
pad_name = g_strdup_printf ("sink_%d", pid);
pad = gst_pad_new_from_template (templ, pad_name);
g_free (pad_name);
pad_data = (MpegTsPadData *) gst_collect_pads_add_pad (mux->collect, pad,
sizeof (MpegTsPadData), NULL);
pad_data = (MpegTsPadData *) gst_collect_pads2_add_pad (mux->collect, pad,
sizeof (MpegTsPadData));
if (pad_data == NULL)
goto pad_failure;
pad_data->eventfunc = pad->eventfunc;
gst_pad_set_event_function (pad, mpegtsmux_sink_event);
pad_data->pid = pid;
pad_data->last_ts = GST_CLOCK_TIME_NONE;
pad_data->codec_data = NULL;
@ -786,7 +1027,7 @@ stream_exists:
could_not_add:
GST_ELEMENT_ERROR (element, STREAM, FAILED,
("Internal data stream error."), ("Could not add pad to element"));
gst_collect_pads_remove_pad (mux->collect, pad);
gst_collect_pads2_remove_pad (mux->collect, pad);
gst_object_unref (pad);
return NULL;
pad_failure:
@ -804,7 +1045,7 @@ mpegtsmux_release_pad (GstElement * element, GstPad * pad)
GST_DEBUG_OBJECT (mux, "Pad %" GST_PTR_FORMAT " being released", pad);
if (mux->collect) {
gst_collect_pads_remove_pad (mux->collect, pad);
gst_collect_pads2_remove_pad (mux->collect, pad);
}
/* chain up */
@ -1065,12 +1306,12 @@ mpegtsmux_change_state (GstElement * element, GstStateChange transition)
case GST_STATE_CHANGE_NULL_TO_READY:
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
gst_collect_pads_start (mux->collect);
gst_collect_pads2_start (mux->collect);
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
gst_collect_pads_stop (mux->collect);
gst_collect_pads2_stop (mux->collect);
break;
case GST_STATE_CHANGE_READY_TO_NULL:
if (mux->adapter)

View file

@ -84,7 +84,7 @@
#define __MPEGTSMUX_H__
#include <gst/gst.h>
#include <gst/base/gstcollectpads.h>
#include <gst/base/gstcollectpads2.h>
#include <gst/base/gstadapter.h>
G_BEGIN_DECLS
@ -108,7 +108,7 @@ struct MpegTsMux {
GstPad *srcpad;
GstCollectPads *collect;
GstCollectPads2 *collect;
TsMux *tsmux;
TsMuxProgram **programs;
@ -128,6 +128,8 @@ struct MpegTsMux {
GList *streamheader;
gboolean streamheader_sent;
GstClockTime pending_key_unit_ts;
GstEvent *force_key_unit_event;
};
struct MpegTsMuxClass {
@ -137,7 +139,7 @@ struct MpegTsMuxClass {
#define MPEG_TS_PAD_DATA(data) ((MpegTsPadData *)(data))
struct MpegTsPadData {
GstCollectData collect; /* Parent */
GstCollectData2 collect; /* Parent */
gint pid;
TsMuxStream *stream;
@ -157,7 +159,8 @@ struct MpegTsPadData {
gboolean eos;
gint prog_id; /* The program id to which it is attached to (not program pid) */
TsMuxProgram *prog; /* The program to which this stream belongs to */
TsMuxProgram *prog; /* The program to which this stream belongs to */
GstPadEventFunction eventfunc;
};
GType mpegtsmux_get_type (void);

View file

@ -146,9 +146,9 @@ gst_mxf_mux_init (GstMXFMux * mux, GstMXFMuxClass * g_class)
gst_caps_unref (caps);
gst_element_add_pad (GST_ELEMENT (mux), mux->srcpad);
mux->collect = gst_collect_pads_new ();
gst_collect_pads_set_function (mux->collect,
(GstCollectPadsFunction) GST_DEBUG_FUNCPTR (gst_mxf_mux_collected), mux);
mux->collect = gst_collect_pads2_new ();
gst_collect_pads2_set_function (mux->collect,
(GstCollectPads2Function) GST_DEBUG_FUNCPTR (gst_mxf_mux_collected), mux);
gst_mxf_mux_reset (mux);
}
@ -209,7 +209,7 @@ gst_mxf_mux_reset (GstMXFMux * mux)
g_object_unref (cpad->adapter);
g_free (cpad->mapping_data);
gst_collect_pads_remove_pad (mux->collect, cpad->collect.pad);
gst_collect_pads2_remove_pad (mux->collect, cpad->collect.pad);
}
mux->state = GST_MXF_MUX_STATE_HEADER;
@ -268,7 +268,7 @@ gst_mxf_mux_handle_sink_event (GstPad * pad, GstEvent * event)
break;
}
/* now GstCollectPads can take care of the rest, e.g. EOS */
/* now GstCollectPads2 can take care of the rest, e.g. EOS */
if (ret)
ret = mux->collect_event (pad, event);
gst_object_unref (mux);
@ -338,13 +338,13 @@ gst_mxf_mux_setcaps (GstPad * pad, GstCaps * caps)
for (i = 0; i < mux->preface->content_storage->n_packages; i++) {
MXFMetadataSourcePackage *package;
if (!MXF_IS_METADATA_SOURCE_PACKAGE (mux->preface->content_storage->
packages[i]))
if (!MXF_IS_METADATA_SOURCE_PACKAGE (mux->preface->
content_storage->packages[i]))
continue;
package =
MXF_METADATA_SOURCE_PACKAGE (mux->preface->content_storage->
packages[i]);
MXF_METADATA_SOURCE_PACKAGE (mux->preface->
content_storage->packages[i]);
if (!package->descriptor)
continue;
@ -420,13 +420,13 @@ gst_mxf_mux_request_new_pad (GstElement * element,
pad = gst_pad_new_from_template (templ, name);
g_free (name);
cpad = (GstMXFMuxPad *)
gst_collect_pads_add_pad (mux->collect, pad, sizeof (GstMXFMuxPad), NULL);
gst_collect_pads2_add_pad (mux->collect, pad, sizeof (GstMXFMuxPad));
cpad->last_timestamp = 0;
cpad->adapter = gst_adapter_new ();
cpad->writer = writer;
/* FIXME: hacked way to override/extend the event function of
* GstCollectPads; because it sets its own event function giving the
* GstCollectPads2; because it sets its own event function giving the
* element no access to events.
*/
mux->collect_event = (GstPadEventFunction) GST_PAD_EVENTFUNC (pad);
@ -450,7 +450,7 @@ gst_mxf_mux_release_pad (GstElement * element, GstPad * pad)
g_object_unref (cpad->adapter);
g_free (cpad->mapping_data);
gst_collect_pads_remove_pad (mux->collect, pad);
gst_collect_pads2_remove_pad (mux->collect, pad);
gst_element_remove_pad (element, pad); */
}
@ -706,8 +706,8 @@ gst_mxf_mux_create_metadata (GstMXFMux * mux)
if (p->parent.n_tracks == 1) {
p->descriptor = (MXFMetadataGenericDescriptor *) cpad->descriptor;
} else {
MXF_METADATA_MULTIPLE_DESCRIPTOR (p->descriptor)->
sub_descriptors[n] =
MXF_METADATA_MULTIPLE_DESCRIPTOR (p->
descriptor)->sub_descriptors[n] =
(MXFMetadataGenericDescriptor *) cpad->descriptor;
}
@ -1099,9 +1099,8 @@ gst_mxf_mux_handle_buffer (GstMXFMux * mux, GstMXFMuxPad * cpad)
GstBuffer *packet;
GstFlowReturn ret = GST_FLOW_OK;
guint8 slen, ber[9];
gboolean flush =
(cpad->collect.abidata.ABI.eos && !cpad->have_complete_edit_unit
&& cpad->collect.buffer == NULL);
gboolean flush = ((cpad->collect.state & GST_COLLECT_PADS2_STATE_EOS)
&& !cpad->have_complete_edit_unit && cpad->collect.buffer == NULL);
if (cpad->have_complete_edit_unit) {
GST_DEBUG_OBJECT (cpad->collect.pad,
@ -1109,7 +1108,7 @@ gst_mxf_mux_handle_buffer (GstMXFMux * mux, GstMXFMuxPad * cpad)
cpad->source_track->parent.track_id, cpad->pos);
buf = NULL;
} else if (!flush) {
buf = gst_collect_pads_pop (mux->collect, &cpad->collect);
buf = gst_collect_pads2_pop (mux->collect, &cpad->collect);
}
if (buf) {
@ -1247,22 +1246,23 @@ gst_mxf_mux_handle_eos (GstMXFMux * mux)
/* Update durations */
cpad->source_track->parent.sequence->duration = cpad->pos;
MXF_METADATA_SOURCE_CLIP (cpad->source_track->parent.sequence->
structural_components[0])->parent.duration = cpad->pos;
MXF_METADATA_SOURCE_CLIP (cpad->source_track->parent.
sequence->structural_components[0])->parent.duration = cpad->pos;
for (i = 0; i < mux->preface->content_storage->packages[0]->n_tracks; i++) {
MXFMetadataTimelineTrack *track;
if (!MXF_IS_METADATA_TIMELINE_TRACK (mux->preface->content_storage->
packages[0]->tracks[i])
|| !MXF_IS_METADATA_SOURCE_CLIP (mux->preface->content_storage->
packages[0]->tracks[i]->sequence->structural_components[0]))
if (!MXF_IS_METADATA_TIMELINE_TRACK (mux->preface->
content_storage->packages[0]->tracks[i])
|| !MXF_IS_METADATA_SOURCE_CLIP (mux->preface->
content_storage->packages[0]->tracks[i]->sequence->
structural_components[0]))
continue;
track =
MXF_METADATA_TIMELINE_TRACK (mux->preface->content_storage->
packages[0]->tracks[i]);
if (MXF_METADATA_SOURCE_CLIP (track->parent.sequence->
structural_components[0])->source_track_id ==
MXF_METADATA_TIMELINE_TRACK (mux->preface->
content_storage->packages[0]->tracks[i]);
if (MXF_METADATA_SOURCE_CLIP (track->parent.
sequence->structural_components[0])->source_track_id ==
cpad->source_track->parent.track_id) {
track->parent.sequence->structural_components[0]->duration = cpad->pos;
track->parent.sequence->duration = cpad->pos;
@ -1273,8 +1273,8 @@ gst_mxf_mux_handle_eos (GstMXFMux * mux)
/* Update timecode track duration */
{
MXFMetadataTimelineTrack *track =
MXF_METADATA_TIMELINE_TRACK (mux->preface->content_storage->
packages[0]->tracks[0]);
MXF_METADATA_TIMELINE_TRACK (mux->preface->
content_storage->packages[0]->tracks[0]);
MXFMetadataSequence *sequence = track->parent.sequence;
MXFMetadataTimecodeComponent *component =
MXF_METADATA_TIMECODE_COMPONENT (sequence->structural_components[0]);
@ -1369,7 +1369,7 @@ _sort_mux_pads (gconstpointer a, gconstpointer b)
}
static GstFlowReturn
gst_mxf_mux_collected (GstCollectPads * pads, gpointer user_data)
gst_mxf_mux_collected (GstCollectPads2 * pads, gpointer user_data)
{
GstMXFMux *mux = GST_MXF_MUX (user_data);
GstMXFMuxPad *best = NULL;
@ -1424,14 +1424,17 @@ gst_mxf_mux_collected (GstCollectPads * pads, gpointer user_data)
do {
for (sl = mux->collect->data; sl; sl = sl->next) {
gboolean pad_eos;
GstMXFMuxPad *cpad = sl->data;
GstClockTime next_gc_timestamp =
gst_util_uint64_scale ((mux->last_gc_position + 1) * GST_SECOND,
mux->min_edit_rate.d, mux->min_edit_rate.n);
eos &= cpad->collect.abidata.ABI.eos;
pad_eos = cpad->collect.state & GST_COLLECT_PADS2_STATE_EOS;
if (!pad_eos)
eos = FALSE;
if ((!cpad->collect.abidata.ABI.eos || cpad->have_complete_edit_unit ||
if ((!pad_eos || cpad->have_complete_edit_unit ||
gst_adapter_available (cpad->adapter) > 0 || cpad->collect.buffer)
&& cpad->last_timestamp < next_gc_timestamp) {
best = cpad;
@ -1479,12 +1482,12 @@ gst_mxf_mux_change_state (GstElement * element, GstStateChange transition)
case GST_STATE_CHANGE_NULL_TO_READY:
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
gst_collect_pads_start (mux->collect);
gst_collect_pads2_start (mux->collect);
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
gst_collect_pads_stop (mux->collect);
gst_collect_pads2_stop (mux->collect);
break;
default:
break;
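
The mxfmux hunks above show the whole GstCollectPads2 port in miniature: every call gains a "2" suffix, per-pad EOS moves from abidata.ABI.eos to the GST_COLLECT_PADS2_STATE_EOS state flag, and gst_collect_pads2_add_pad() loses the trailing destroy-notify argument. A minimal sketch of the ported setup, using only calls that appear in these hunks (the my_mux_* names are illustrative, not part of the commit):

#include <gst/base/gstcollectpads2.h>

static GstFlowReturn my_mux_collected (GstCollectPads2 * pads,
    gpointer user_data);

static void
my_mux_setup (GstElement * mux, GstPad * sinkpad)
{
  GstCollectPads2 *collect;
  GstCollectData2 *data;

  /* create the collector and register the per-buffer callback */
  collect = gst_collect_pads2_new ();
  gst_collect_pads2_set_function (collect,
      (GstCollectPads2Function) my_mux_collected, mux);

  /* note: no destroy-notify argument, unlike gst_collect_pads_add_pad() */
  data = gst_collect_pads2_add_pad (collect, sinkpad,
      sizeof (GstCollectData2));

  /* per-pad EOS is now a state flag instead of abidata.ABI.eos */
  if (data->state & GST_COLLECT_PADS2_STATE_EOS)
    GST_DEBUG_OBJECT (mux, "pad is already EOS");

  /* the collector still has to be started/stopped around READY<->PAUSED */
  gst_collect_pads2_start (collect);
}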

View file

@ -22,7 +22,7 @@
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include <gst/base/gstcollectpads.h>
#include <gst/base/gstcollectpads2.h>
#include "mxfessence.h"
@ -41,7 +41,7 @@ G_BEGIN_DECLS
typedef struct
{
GstCollectData collect;
GstCollectData2 collect;
guint64 pos;
GstClockTime last_timestamp;
@ -71,7 +71,7 @@ typedef struct _GstMXFMux {
GstElement element;
GstPad *srcpad;
GstCollectPads *collect;
GstCollectPads2 *collect;
/* <private> */
GstPadEventFunction collect_event;

View file

@ -114,7 +114,7 @@ static void gst_ssim_release_pad (GstElement * element, GstPad * pad);
static GstStateChangeReturn gst_ssim_change_state (GstElement * element,
GstStateChange transition);
static GstFlowReturn gst_ssim_collected (GstCollectPads * pads,
static GstFlowReturn gst_ssim_collected (GstCollectPads2 * pads,
gpointer user_data);
static GstElementClass *parent_class = NULL;
@ -901,7 +901,7 @@ gst_ssim_src_event (GstPad * pad, GstEvent * event)
/* check if we are flushing */
if (flags & GST_SEEK_FLAG_FLUSH) {
/* make sure we accept nothing anymore and return WRONG_STATE */
gst_collect_pads_set_flushing (ssim->collect, TRUE);
gst_collect_pads2_set_flushing (ssim->collect, TRUE);
/* flushing seek, start flush downstream, the flush will be done
* when all pads received a FLUSH_STOP. */
@ -991,7 +991,7 @@ gst_ssim_sink_event (GstPad * pad, GstEvent * event)
break;
}
/* now GstCollectPads can take care of the rest, e.g. EOS */
/* now GstCollectPads2 can take care of the rest, e.g. EOS */
GST_DEBUG ("Dispatching %s event on pad %s:%s", GST_EVENT_TYPE_NAME (event),
GST_DEBUG_PAD_NAME (pad));
ret = ssim->collect_event (pad, event);
@ -1158,11 +1158,10 @@ gst_ssim_request_new_pad (GstElement * element, GstPadTemplate * templ,
gst_pad_set_getcaps_function (newpad,
GST_DEBUG_FUNCPTR (gst_ssim_sink_getcaps));
gst_pad_set_setcaps_function (newpad, GST_DEBUG_FUNCPTR (gst_ssim_setcaps));
gst_collect_pads_add_pad (ssim->collect, newpad, sizeof (GstCollectData),
NULL);
gst_collect_pads2_add_pad (ssim->collect, newpad, sizeof (GstCollectData2));
/* FIXME: hacked way to override/extend the event function of
* GstCollectPads; because it sets its own event function giving the
* GstCollectPads2; because it sets its own event function giving the
* element no access to events
*/
GST_DEBUG_OBJECT (ssim, "Current collect_event is %p, changing to %p",
@ -1234,7 +1233,7 @@ could_not_add_src:
could_not_add_sink:
{
GST_DEBUG_OBJECT (ssim, "could not add sink pad");
gst_collect_pads_remove_pad (ssim->collect, newpad);
gst_collect_pads2_remove_pad (ssim->collect, newpad);
gst_object_unref (newpad);
return NULL;
}
@ -1249,7 +1248,7 @@ gst_ssim_release_pad (GstElement * element, GstPad * pad)
GST_DEBUG_OBJECT (ssim, "release pad %s:%s", GST_DEBUG_PAD_NAME (pad));
gst_collect_pads_remove_pad (ssim->collect, pad);
gst_collect_pads2_remove_pad (ssim->collect, pad);
gst_element_remove_pad (element, pad);
}
@ -1268,8 +1267,8 @@ gst_ssim_init (GstSSim * ssim)
ssim->sinkcaps = NULL;
/* keep track of the sinkpads requested */
ssim->collect = gst_collect_pads_new ();
gst_collect_pads_set_function (ssim->collect,
ssim->collect = gst_collect_pads2_new ();
gst_collect_pads2_set_function (ssim->collect,
GST_DEBUG_FUNCPTR (gst_ssim_collected), ssim);
}
@ -1408,7 +1407,7 @@ gst_ssim_regenerate_windows (GstSSim * ssim)
}
static GstFlowReturn
gst_ssim_collected (GstCollectPads * pads, gpointer user_data)
gst_ssim_collected (GstCollectPads2 * pads, gpointer user_data)
{
GstSSim *ssim;
GSList *collected;
@ -1441,12 +1440,12 @@ gst_ssim_collected (GstCollectPads * pads, gpointer user_data)
}
for (collected = pads->data; collected; collected = g_slist_next (collected)) {
GstCollectData *collect_data;
GstCollectData2 *collect_data;
GstBuffer *inbuf;
collect_data = (GstCollectData *) collected->data;
collect_data = (GstCollectData2 *) collected->data;
inbuf = gst_collect_pads_peek (pads, collect_data);
inbuf = gst_collect_pads2_peek (pads, collect_data);
if (inbuf == NULL) {
GST_LOG_OBJECT (ssim, "channel %p: no bytes available", collect_data);
@ -1470,12 +1469,12 @@ gst_ssim_collected (GstCollectPads * pads, gpointer user_data)
for (collected = pads->data; collected;
collected = g_slist_next (collected)) {
GstCollectData *collect_data;
GstCollectData2 *collect_data;
collect_data = (GstCollectData *) collected->data;
collect_data = (GstCollectData2 *) collected->data;
if (collect_data->pad == ssim->orig) {
orgbuf = gst_collect_pads_pop (pads, collect_data);
orgbuf = gst_collect_pads2_pop (pads, collect_data);
GST_DEBUG_OBJECT (ssim, "Original stream - flags(0x%x), timestamp(%"
GST_TIME_FORMAT "), duration(%" GST_TIME_FORMAT ")",
@ -1492,14 +1491,14 @@ gst_ssim_collected (GstCollectPads * pads, gpointer user_data)
GST_LOG_OBJECT (ssim, "starting to cycle through streams");
for (collected = pads->data; collected; collected = g_slist_next (collected)) {
GstCollectData *collect_data;
GstCollectData2 *collect_data;
GstBuffer *inbuf;
guint8 *indata;
collect_data = (GstCollectData *) collected->data;
collect_data = (GstCollectData2 *) collected->data;
if (collect_data->pad != ssim->orig) {
inbuf = gst_collect_pads_pop (pads, collect_data);
inbuf = gst_collect_pads2_pop (pads, collect_data);
indata = GST_BUFFER_DATA (inbuf);
@ -1660,7 +1659,7 @@ gst_ssim_change_state (GstElement * element, GstStateChange transition)
ssim->segment_position = 0;
ssim->segment_rate = 1.0;
gst_segment_init (&ssim->segment, GST_FORMAT_UNDEFINED);
gst_collect_pads_start (ssim->collect);
gst_collect_pads2_start (ssim->collect);
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;
@ -1668,7 +1667,7 @@ gst_ssim_change_state (GstElement * element, GstStateChange transition)
/* need to unblock the collectpads before calling the
* parent change_state so that streaming can finish
*/
gst_collect_pads_stop (ssim->collect);
gst_collect_pads2_stop (ssim->collect);
break;
default:
break;
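
The ssim port exercises the other half of the API: walking pads->data now yields GstCollectData2 entries, and queued buffers are fetched with the "2"-suffixed peek/pop. A trimmed sketch of the collected-callback shape used above (actual processing elided):

static GstFlowReturn
my_collected (GstCollectPads2 * pads, gpointer user_data)
{
  GSList *walk;

  for (walk = pads->data; walk; walk = g_slist_next (walk)) {
    GstCollectData2 *data = (GstCollectData2 *) walk->data;
    GstBuffer *inbuf;

    /* peek leaves the buffer queued on the pad */
    inbuf = gst_collect_pads2_peek (pads, data);
    if (inbuf == NULL)
      continue;                 /* nothing queued, e.g. the pad is EOS */
    gst_buffer_unref (inbuf);

    /* pop transfers ownership to the caller */
    inbuf = gst_collect_pads2_pop (pads, data);
    if (inbuf != NULL)
      gst_buffer_unref (inbuf); /* a real element would consume it here */
  }
  return GST_FLOW_OK;
}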

View file

@ -21,7 +21,7 @@
#define __GST_SSIM_H__
#include <gst/gst.h>
#include <gst/base/gstcollectpads.h>
#include <gst/base/gstcollectpads2.h>
#include <gst/video/video.h>
G_BEGIN_DECLS
@ -85,7 +85,7 @@ struct _GstSSim {
gint padcount;
GstCollectPads *collect;
GstCollectPads2 *collect;
GstPad *orig;
gint frame_rate;

View file

@ -3,21 +3,24 @@ plugin_LTLIBRARIES = libgstvideoparsersbad.la
libgstvideoparsersbad_la_SOURCES = plugin.c \
h263parse.c gsth263parse.c \
gstdiracparse.c dirac_parse.c \
gsth264parse.c gstmpegvideoparse.c
gsth264parse.c gstmpegvideoparse.c \
gstmpeg4videoparse.c
libgstvideoparsersbad_la_CFLAGS = \
$(GST_PLUGINS_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) \
-DGST_USE_UNSTABLE_API \
$(GST_BASE_CFLAGS) $(GST_CFLAGS)
libgstvideoparsersbad_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \
libgstvideoparsersbad_la_LIBADD = \
$(top_builddir)/gst-libs/gst/codecparsers/libgstcodecparsers-$(GST_MAJORMINOR).la \
$(GST_PLUGINS_BASE_LIBS) -lgstpbutils-$(GST_MAJORMINOR) -lgstvideo-$(GST_MAJORMINOR) \
$(GST_BASE_LIBS) $(GST_LIBS)
libgstvideoparsersbad_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstvideoparsersbad_la_LIBTOOLFLAGS = --tag=disable-static
noinst_HEADERS = gsth263parse.h h263parse.h \
gstdiracparse.h dirac_parse.h \
gsth264parse.h gstmpegvideoparse.h
gsth264parse.h gstmpegvideoparse.h \
gstmpeg4videoparse.h
Android.mk: Makefile.am $(BUILT_SOURCES)
androgenizer \

View file

@ -29,6 +29,7 @@
#include <gst/base/gstbytereader.h>
#include <gst/base/gstbytewriter.h>
#include <gst/base/gstadapter.h>
#include <gst/video/video.h>
#include "gsth264parse.h"
#include <string.h>
@ -95,6 +96,9 @@ static GstCaps *gst_h264_parse_get_caps (GstBaseParse * parse,
GstCaps * filter);
static GstFlowReturn gst_h264_parse_chain (GstPad * pad, GstObject * parent,
GstBuffer * buffer);
static gboolean gst_h264_parse_event (GstBaseParse * parse, GstEvent * event);
static gboolean gst_h264_parse_src_event (GstBaseParse * parse,
GstEvent * event);
static void
gst_h264_parse_class_init (GstH264ParseClass * klass)
@ -127,6 +131,8 @@ gst_h264_parse_class_init (GstH264ParseClass * klass)
GST_DEBUG_FUNCPTR (gst_h264_parse_pre_push_frame);
parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_h264_parse_set_caps);
parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_h264_parse_get_caps);
parse_class->event = GST_DEBUG_FUNCPTR (gst_h264_parse_event);
parse_class->src_event = GST_DEBUG_FUNCPTR (gst_h264_parse_src_event);
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&srctemplate));
@ -190,6 +196,11 @@ gst_h264_parse_reset (GstH264Parse * h264parse)
h264parse->height = 0;
h264parse->fps_num = 0;
h264parse->fps_den = 0;
h264parse->aspect_ratio_idc = 0;
h264parse->sar_width = 0;
h264parse->sar_height = 0;
h264parse->upstream_par_n = -1;
h264parse->upstream_par_d = -1;
gst_buffer_replace (&h264parse->codec_data, NULL);
h264parse->nal_length_size = 4;
h264parse->packetized = FALSE;
@ -199,6 +210,11 @@ gst_h264_parse_reset (GstH264Parse * h264parse)
h264parse->last_report = GST_CLOCK_TIME_NONE;
h264parse->push_codec = FALSE;
h264parse->have_pps = FALSE;
h264parse->have_sps = FALSE;
h264parse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
h264parse->force_key_unit_event = NULL;
gst_h264_parse_reset_frame (h264parse);
}
@ -399,15 +415,11 @@ static void
gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
{
guint nal_type;
GstH264SliceHdr slice;
GstH264PPS pps;
GstH264SPS sps;
GstH264SEIMessage sei;
gboolean slcparsed = FALSE;
GstH264NalParser *nalparser = h264parse->nalparser;
/* nothing to do for broken input */
if (G_UNLIKELY (nalu->size < 2)) {
GST_DEBUG_OBJECT (h264parse, "not processing nal size %u", nalu->size);
@ -427,8 +439,15 @@ gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
GST_DEBUG_OBJECT (h264parse, "triggering src caps check");
h264parse->update_caps = TRUE;
/* found in stream, no need to forcibly push at start */
h264parse->push_codec = FALSE;
h264parse->have_sps = TRUE;
if (h264parse->push_codec && h264parse->have_pps) {
/* SPS and PPS found in stream before the first pre_push_frame, no need
* to forcibly push at start */
GST_INFO_OBJECT (h264parse, "have SPS/PPS in stream");
h264parse->push_codec = FALSE;
h264parse->have_sps = FALSE;
h264parse->have_pps = FALSE;
}
gst_h264_parser_store_nal (h264parse, sps.id, nal_type, nalu);
break;
@ -437,8 +456,15 @@ gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
/* parameters might have changed, force caps check */
GST_DEBUG_OBJECT (h264parse, "triggering src caps check");
h264parse->update_caps = TRUE;
/* found in stream, no need to forcibly push at start */
h264parse->push_codec = FALSE;
h264parse->have_pps = TRUE;
if (h264parse->push_codec && h264parse->have_sps) {
/* SPS and PPS found in stream before the first pre_push_frame, no need
* to forcibly push at start */
GST_INFO_OBJECT (h264parse, "have SPS/PPS in stream");
h264parse->push_codec = FALSE;
h264parse->have_sps = FALSE;
h264parse->have_pps = FALSE;
}
gst_h264_parser_store_nal (h264parse, pps.id, nal_type, nalu);
break;
@ -470,32 +496,33 @@ gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
case GST_H264_NAL_SLICE_DPA:
case GST_H264_NAL_SLICE_DPB:
case GST_H264_NAL_SLICE_DPC:
slcparsed = TRUE;
if (gst_h264_parser_parse_slice_hdr (nalparser, nalu,
&slice, FALSE, FALSE) == GST_H264_PARSER_ERROR)
return;
case GST_H264_NAL_SLICE_IDR:
/* don't need to parse the whole slice (header) here */
if (*(nalu->data + nalu->offset + 1) & 0x80) {
/* means first_mb_in_slice == 0 */
/* real frame data */
GST_DEBUG_OBJECT (h264parse, "first_mb_in_slice = 0");
h264parse->frame_start = TRUE;
}
GST_DEBUG_OBJECT (h264parse, "frame start: %i", h264parse->frame_start);
#ifndef GST_DISABLE_GST_DEBUG
{
GstH264SliceHdr slice;
GstH264ParserResult pres;
/* real frame data */
h264parse->frame_start |= (slice.first_mb_in_slice == 0);
pres = gst_h264_parser_parse_slice_hdr (nalparser, nalu, &slice,
FALSE, FALSE);
GST_DEBUG_OBJECT (h264parse,
"parse result %d, first MB: %u, slice type: %u",
pres, slice.first_mb_in_slice, slice.type);
}
#endif
if (G_LIKELY (nal_type != GST_H264_NAL_SLICE_IDR &&
!h264parse->push_codec))
break;
/* if we need to sneak codec NALs into the stream,
* this is a good place, so fake it as IDR
* (which should be at start anyway) */
GST_DEBUG_OBJECT (h264parse, "frame start: %i first_mb_in_slice %i",
h264parse->frame_start, slice.first_mb_in_slice);
if (G_LIKELY (!h264parse->push_codec))
break;
/* fall-through */
case GST_H264_NAL_SLICE_IDR:
if (!slcparsed) {
if (gst_h264_parser_parse_slice_hdr (nalparser, nalu,
&slice, FALSE, FALSE) == GST_H264_PARSER_ERROR)
return;
GST_DEBUG_OBJECT (h264parse, "frame start: %i first_mb_in_slice %i",
h264parse->frame_start, slice.first_mb_in_slice);
}
/* real frame data */
h264parse->frame_start |= (slice.first_mb_in_slice == 0);
/* mark where config needs to go if interval expired */
/* mind replacement buffer if applicable */
if (h264parse->idr_pos == -1) {
@ -506,9 +533,6 @@ gst_h264_parse_process_nal (GstH264Parse * h264parse, GstH264NalUnit * nalu)
GST_DEBUG_OBJECT (h264parse, "marking IDR in frame at offset %d",
h264parse->idr_pos);
}
GST_DEBUG_OBJECT (h264parse, "first MB: %u, slice type: %u",
slice.first_mb_in_slice, slice.type);
break;
default:
gst_h264_parser_parse_nal (nalparser, nalu);
@ -622,8 +646,18 @@ gst_h264_parse_check_valid_frame (GstBaseParse * parse,
GST_DEBUG_OBJECT (h264parse, "last parse position %u", current_off);
while (TRUE) {
switch (gst_h264_parser_identify_nalu (nalparser, data, current_off,
size, &nalu)) {
GstH264ParserResult pres;
if (h264parse->packetized)
pres =
gst_h264_parser_identify_nalu_unchecked (nalparser, data, current_off,
size, &nalu);
else
pres =
gst_h264_parser_identify_nalu (nalparser, data, current_off, size,
&nalu);
switch (pres) {
case GST_H264_PARSER_OK:
GST_DEBUG_OBJECT (h264parse, "complete nal found. "
"current offset: %u, Nal offset: %u, Nal Size: %u",
@ -631,10 +665,12 @@ gst_h264_parse_check_valid_frame (GstBaseParse * parse,
GST_DEBUG_OBJECT (h264parse, "current off. %u",
nalu.offset + nalu.size);
if (!h264parse->nalu.size && !h264parse->nalu.valid)
h264parse->nalu = nalu;
/* need 2 bytes of next nal */
if (nalu.offset + nalu.size + 4 + 2 > size) {
if (!h264parse->packetized && (nalu.offset + nalu.size + 4 + 2 > size)) {
if (GST_BASE_PARSE_DRAINING (parse)) {
drain = TRUE;
} else {
@ -704,6 +740,12 @@ gst_h264_parse_check_valid_frame (GstBaseParse * parse,
/* if no next nal, we know it's complete here */
if (drain || gst_h264_parse_collect_nal (h264parse, data, size, &nalu))
break;
/* In packetized mode we know there's only one NALU in each input packet */
if (h264parse->packetized)
break;
GST_DEBUG_OBJECT (h264parse, "Looking for more");
}
end:
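
The rationale for the _unchecked variant above: packetized (AVC) input carries exactly one length-prefixed NALU per buffer, so the parser can take the framing on trust, whereas bytestream input only proves a NALU complete once the next start code shows up. Condensed to the two calls used in the hunk:

  /* inside the scan loop of gst_h264_parse_check_valid_frame */
  if (h264parse->packetized)
    /* trust the upstream packet framing; no next start code needed */
    pres = gst_h264_parser_identify_nalu_unchecked (nalparser,
        data, current_off, size, &nalu);
  else
    /* bytestream: a NALU only ends where the next start code begins */
    pres = gst_h264_parser_identify_nalu (nalparser,
        data, current_off, size, &nalu);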
@ -716,8 +758,8 @@ end:
parsing_error:
GST_DEBUG_OBJECT (h264parse, "error parsing Nal Unit");
more:
more:
/* ask for best next available */
*framesize = G_MAXUINT;
if (!h264parse->nalu.size) {
@ -823,6 +865,98 @@ gst_h264_parse_make_codec_data (GstH264Parse * h264parse)
return buf;
}
static void
gst_h264_parse_get_par (GstH264Parse * h264parse, gint * num, gint * den)
{
gint par_n, par_d;
if (h264parse->upstream_par_n != -1 && h264parse->upstream_par_d != -1) {
*num = h264parse->upstream_par_n;
*den = h264parse->upstream_par_d;
return;
}
par_n = par_d = 0;
switch (h264parse->aspect_ratio_idc) {
case 0:
par_n = par_d = 0;
break;
case 1:
par_n = 1;
par_d = 1;
break;
case 2:
par_n = 12;
par_d = 11;
break;
case 3:
par_n = 10;
par_d = 11;
break;
case 4:
par_n = 16;
par_d = 11;
break;
case 5:
par_n = 40;
par_d = 33;
break;
case 6:
par_n = 24;
par_d = 11;
break;
case 7:
par_n = 20;
par_d = 11;
break;
case 8:
par_n = 32;
par_d = 11;
break;
case 9:
par_n = 80;
par_d = 33;
break;
case 10:
par_n = 18;
par_d = 11;
break;
case 11:
par_n = 15;
par_d = 11;
break;
case 12:
par_n = 64;
par_d = 33;
break;
case 13:
par_n = 160;
par_d = 99;
break;
case 14:
par_n = 4;
par_d = 3;
break;
case 15:
par_n = 3;
par_d = 2;
break;
case 16:
par_n = 2;
par_d = 1;
break;
case 255:
par_n = h264parse->sar_width;
par_d = h264parse->sar_height;
break;
default:
par_n = par_d = 0;
}
*num = par_n;
*den = par_d;
}
static void
gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps)
{
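
The new gst_h264_parse_get_par() above transcribes Table E-1 of the H.264 spec (aspect_ratio_idc to sample aspect ratio), with idc 255 (Extended_SAR) meaning the SPS carries explicit sar_width/sar_height. The same mapping could be written table-driven; a hypothetical equivalent of the switch, not part of the commit:

  /* Table E-1 sample aspect ratios, indexed by aspect_ratio_idc 0..16 */
  static const gint par_table[17][2] = {
    {0, 0},                     /* 0: unspecified */
    {1, 1}, {12, 11}, {10, 11}, {16, 11}, {40, 33}, {24, 11}, {20, 11},
    {32, 11}, {80, 33}, {18, 11}, {15, 11}, {64, 33}, {160, 99},
    {4, 3}, {3, 2}, {2, 1}
  };

  if (h264parse->aspect_ratio_idc <= 16) {
    *num = par_table[h264parse->aspect_ratio_idc][0];
    *den = par_table[h264parse->aspect_ratio_idc][1];
  } else if (h264parse->aspect_ratio_idc == 255) {
    /* Extended_SAR: taken verbatim from the SPS */
    *num = h264parse->sar_width;
    *den = h264parse->sar_height;
  } else {
    *num = *den = 0;            /* reserved values: unknown */
  }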
@ -875,33 +1009,80 @@ gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps)
caps = NULL;
if (G_UNLIKELY (!sps)) {
caps = gst_caps_copy (sink_caps);
} else if (G_UNLIKELY (h264parse->width != sps->width ||
h264parse->height != sps->height || h264parse->fps_num != sps->fps_num
|| h264parse->fps_den != sps->fps_den || modified)) {
caps = gst_caps_copy (sink_caps);
/* sps should give this */
gst_caps_set_simple (caps, "width", G_TYPE_INT, sps->width,
"height", G_TYPE_INT, sps->height, NULL);
h264parse->height = sps->height;
h264parse->width = sps->width;
/* but not necessarily or reliably this */
if ((!h264parse->fps_num || !h264parse->fps_den) &&
sps->fps_num > 0 && sps->fps_den > 0) {
gst_caps_set_simple (caps, "framerate",
GST_TYPE_FRACTION, sps->fps_num, sps->fps_den, NULL);
h264parse->fps_num = sps->fps_num;
h264parse->fps_den = sps->fps_den;
gst_base_parse_set_frame_rate (GST_BASE_PARSE (h264parse),
h264parse->fps_num, h264parse->fps_den, 0, 0);
} else {
if (G_UNLIKELY (h264parse->width != sps->width ||
h264parse->height != sps->height)) {
GST_INFO_OBJECT (h264parse, "resolution changed %dx%d",
sps->width, sps->height);
h264parse->width = sps->width;
h264parse->height = sps->height;
modified = TRUE;
}
/* 0/1 is set as the default in the codec parser */
if (sps->vui_parameters.timing_info_present_flag &&
!(sps->fps_num == 0 && sps->fps_den == 1)) {
if (G_UNLIKELY (h264parse->fps_num != sps->fps_num
|| h264parse->fps_den != sps->fps_den)) {
GST_INFO_OBJECT (h264parse, "framerate changed %d/%d",
sps->fps_num, sps->fps_den);
h264parse->fps_num = sps->fps_num;
h264parse->fps_den = sps->fps_den;
gst_base_parse_set_frame_rate (GST_BASE_PARSE (h264parse),
h264parse->fps_num, h264parse->fps_den, 0, 0);
modified = TRUE;
}
}
if (sps->vui_parameters.aspect_ratio_info_present_flag) {
if (G_UNLIKELY (h264parse->aspect_ratio_idc !=
sps->vui_parameters.aspect_ratio_idc)) {
h264parse->aspect_ratio_idc = sps->vui_parameters.aspect_ratio_idc;
GST_INFO_OBJECT (h264parse, "aspect ratio idc changed %d",
h264parse->aspect_ratio_idc);
modified = TRUE;
}
/* 255 means sar_width and sar_height present */
if (G_UNLIKELY (sps->vui_parameters.aspect_ratio_idc == 255 &&
(h264parse->sar_width != sps->vui_parameters.sar_width ||
h264parse->sar_height != sps->vui_parameters.sar_height))) {
h264parse->sar_width = sps->vui_parameters.sar_width;
h264parse->sar_height = sps->vui_parameters.sar_height;
GST_INFO_OBJECT (h264parse, "aspect ratio SAR changed %d/%d",
h264parse->sar_width, h264parse->sar_height);
modified = TRUE;
}
}
if (G_UNLIKELY (modified)) {
caps = gst_caps_copy (sink_caps);
/* sps should give this */
gst_caps_set_simple (caps, "width", G_TYPE_INT, sps->width,
"height", G_TYPE_INT, sps->height, NULL);
/* but not necessarily or reliably this */
if (h264parse->fps_num > 0 && h264parse->fps_den > 0)
gst_caps_set_simple (caps, "framerate",
GST_TYPE_FRACTION, sps->fps_num, sps->fps_den, NULL);
}
}
if (caps) {
gint par_n, par_d;
gst_caps_set_simple (caps, "parsed", G_TYPE_BOOLEAN, TRUE,
"stream-format", G_TYPE_STRING,
gst_h264_parse_get_string (h264parse, TRUE, h264parse->format),
"alignment", G_TYPE_STRING,
gst_h264_parse_get_string (h264parse, FALSE, h264parse->align), NULL);
gst_h264_parse_get_par (h264parse, &par_n, &par_d);
if (par_n != 0 && par_d != 0) {
GST_INFO_OBJECT (h264parse, "PAR %d/%d", par_n, par_d);
gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
par_n, par_d, NULL);
}
if (buf) {
gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_replace (&h264parse->codec_data, buf);
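
With geometry, framerate, PAR and codec_data all folded in, the source caps produced here might read, for example (illustrative values; byte-stream, AU-aligned output assumed):

  video/x-h264, parsed=(boolean)true, stream-format=(string)byte-stream,
      alignment=(string)au, width=(int)1280, height=(int)720,
      framerate=(fraction)25/1, pixel-aspect-ratio=(fraction)1/1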
@ -1104,15 +1285,109 @@ gst_h264_parse_push_codec_buffer (GstH264Parse * h264parse, GstBuffer * nal,
return gst_pad_push (GST_BASE_PARSE_SRC_PAD (h264parse), nal);
}
static GstEvent *
check_pending_key_unit_event (GstEvent * pending_event, GstSegment * segment,
GstClockTime timestamp, guint flags, GstClockTime pending_key_unit_ts)
{
GstClockTime running_time, stream_time;
gboolean all_headers;
guint count;
GstEvent *event = NULL;
g_return_val_if_fail (segment != NULL, NULL);
if (pending_event == NULL)
goto out;
if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
timestamp == GST_CLOCK_TIME_NONE)
goto out;
running_time = gst_segment_to_running_time (segment,
GST_FORMAT_TIME, timestamp);
GST_INFO ("now %" GST_TIME_FORMAT " wanted %" GST_TIME_FORMAT,
GST_TIME_ARGS (running_time), GST_TIME_ARGS (pending_key_unit_ts));
if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
running_time < pending_key_unit_ts)
goto out;
if (flags & GST_BUFFER_FLAG_DELTA_UNIT) {
GST_DEBUG ("pending force key unit, waiting for keyframe");
goto out;
}
stream_time = gst_segment_to_stream_time (segment,
GST_FORMAT_TIME, timestamp);
gst_video_event_parse_upstream_force_key_unit (pending_event,
NULL, &all_headers, &count);
event =
gst_video_event_new_downstream_force_key_unit (timestamp, stream_time,
running_time, all_headers, count);
gst_event_set_seqnum (event, gst_event_get_seqnum (pending_event));
out:
return event;
}
static void
gst_h264_parse_prepare_key_unit (GstH264Parse * parse, GstEvent * event)
{
GstClockTime running_time;
guint count;
gboolean have_sps, have_pps;
gint i;
parse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
gst_event_replace (&parse->force_key_unit_event, NULL);
gst_video_event_parse_downstream_force_key_unit (event,
NULL, NULL, &running_time, NULL, &count);
GST_INFO_OBJECT (parse, "pushing downstream force-key-unit event %d "
"%" GST_TIME_FORMAT " count %d", gst_event_get_seqnum (event),
GST_TIME_ARGS (running_time), count);
gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (parse), event);
have_sps = have_pps = FALSE;
for (i = 0; i < GST_H264_MAX_SPS_COUNT; i++) {
if (parse->sps_nals[i] != NULL) {
have_sps = TRUE;
break;
}
}
for (i = 0; i < GST_H264_MAX_PPS_COUNT; i++) {
if (parse->pps_nals[i] != NULL) {
have_pps = TRUE;
break;
}
}
GST_INFO_OBJECT (parse, "preparing key unit, have sps %d have pps %d",
have_sps, have_pps);
/* set push_codec to TRUE so that pre_push_frame sends SPS/PPS again */
parse->push_codec = TRUE;
}
static GstFlowReturn
gst_h264_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
GstH264Parse *h264parse;
GstBuffer *buffer;
GstEvent *event;
h264parse = GST_H264_PARSE (parse);
buffer = frame->buffer;
if ((event = check_pending_key_unit_event (h264parse->force_key_unit_event,
&parse->segment, GST_BUFFER_TIMESTAMP (buffer),
GST_BUFFER_FLAGS (buffer), h264parse->pending_key_unit_ts))) {
gst_h264_parse_prepare_key_unit (h264parse, event);
}
/* periodic SPS/PPS sending */
if (h264parse->interval > 0 || h264parse->push_codec) {
GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);
@ -1207,6 +1482,8 @@ gst_h264_parse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
}
/* we pushed whatever we had */
h264parse->push_codec = FALSE;
h264parse->have_sps = FALSE;
h264parse->have_pps = FALSE;
}
}
@ -1239,6 +1516,8 @@ gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps)
gst_structure_get_int (str, "height", &h264parse->height);
gst_structure_get_fraction (str, "framerate", &h264parse->fps_num,
&h264parse->fps_den);
gst_structure_get_fraction (str, "pixel-aspect-ratio",
&h264parse->upstream_par_n, &h264parse->upstream_par_d);
/* get upstream format and align from caps */
gst_h264_parse_format_from_caps (caps, &format, &align);
@ -1348,6 +1627,8 @@ gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps)
/* arrange to insert codec-data in-stream if needed.
* src caps are only arranged for later on */
h264parse->push_codec = TRUE;
h264parse->have_sps = FALSE;
h264parse->have_pps = FALSE;
h264parse->split_packetized = TRUE;
h264parse->packetized = TRUE;
}
@ -1416,6 +1697,87 @@ gst_h264_parse_get_caps (GstBaseParse * parse, GstCaps * filter)
return res;
}
static gboolean
gst_h264_parse_event (GstBaseParse * parse, GstEvent * event)
{
gboolean handled = FALSE;
GstH264Parse *h264parse = GST_H264_PARSE (parse);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CUSTOM_DOWNSTREAM:
{
GstClockTime timestamp, stream_time, running_time;
gboolean all_headers;
guint count;
if (!gst_video_event_is_force_key_unit (event))
break;
gst_video_event_parse_downstream_force_key_unit (event,
&timestamp, &stream_time, &running_time, &all_headers, &count);
GST_INFO_OBJECT (h264parse, "received downstream force key unit event, "
"seqnum %d running_time %" GST_TIME_FORMAT " all_headers %d count %d",
gst_event_get_seqnum (event), GST_TIME_ARGS (running_time),
all_headers, count);
handled = TRUE;
if (h264parse->force_key_unit_event) {
GST_INFO_OBJECT (h264parse, "ignoring force key unit event "
"as one is already queued");
break;
}
h264parse->pending_key_unit_ts = running_time;
gst_event_replace (&h264parse->force_key_unit_event, event);
break;
}
default:
break;
}
return handled;
}
static gboolean
gst_h264_parse_src_event (GstBaseParse * parse, GstEvent * event)
{
gboolean handled = FALSE;
GstH264Parse *h264parse = GST_H264_PARSE (parse);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CUSTOM_UPSTREAM:
{
GstClockTime running_time;
gboolean all_headers;
guint count;
if (!gst_video_event_is_force_key_unit (event))
break;
gst_video_event_parse_upstream_force_key_unit (event,
&running_time, &all_headers, &count);
GST_INFO_OBJECT (h264parse, "received upstream force-key-unit event, "
"seqnum %d running_time %" GST_TIME_FORMAT " all_headers %d count %d",
gst_event_get_seqnum (event), GST_TIME_ARGS (running_time),
all_headers, count);
if (!all_headers)
break;
h264parse->pending_key_unit_ts = running_time;
gst_event_replace (&h264parse->force_key_unit_event, event);
/* leave handled = FALSE so that the event gets propagated upstream */
break;
}
default:
break;
}
return handled;
}
static GstFlowReturn
gst_h264_parse_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
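
These two handlers implement the force-key-unit protocol from <gst/video/video.h>: the upstream custom event records the requested running time, and pre_push_frame later converts it into the downstream form while re-sending SPS/PPS at the next keyframe (mpeg4videoparse below gains the same pair). From application code the request is one helper call; a sketch, assuming a playing pipeline handle:

#include <gst/gst.h>
#include <gst/video/video.h>

static void
request_key_unit (GstElement * pipeline)
{
  GstEvent *ev;

  /* GST_CLOCK_TIME_NONE = as soon as possible; all_headers = TRUE asks
   * parsers/encoders to re-emit their stream headers with the key unit */
  ev = gst_video_event_new_upstream_force_key_unit (GST_CLOCK_TIME_NONE,
      TRUE, 0);
  gst_element_send_event (pipeline, ev);
}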

View file

@ -58,6 +58,9 @@ struct _GstH264Parse
/* stream */
gint width, height;
gint fps_num, fps_den;
gint aspect_ratio_idc;
gint sar_width, sar_height;
gint upstream_par_n, upstream_par_d;
GstBuffer *codec_data;
guint nal_length_size;
gboolean packetized;
@ -71,6 +74,8 @@ struct _GstH264Parse
GstClockTime last_report;
gboolean push_codec;
gboolean have_sps;
gboolean have_pps;
/* collected SPS and PPS NALUs */
GstBuffer *sps_nals[GST_H264_MAX_SPS_COUNT];
@ -102,6 +107,9 @@ struct _GstH264Parse
/* props */
gboolean split_packetized;
guint interval;
GstClockTime pending_key_unit_ts;
GstEvent *force_key_unit_event;
};
struct _GstH264ParseClass

View file

@ -3,8 +3,10 @@
* @author Sjoerd Simons <sjoerd@luon.net>
* Copyright (C) <2007> Julien Moutte <julien@fluendo.com>
* Copyright (C) <2011> Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
* Copyright (C) <2011> Collabora Multimedia
* Copyright (C) <2011> Nokia Corporation
* Copyright (C) <2011> Intel
* Copyright (C) <2011> Collabora Ltd.
* Copyright (C) <2011> Thibault Saunier <thibault.saunier@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
@ -28,7 +30,10 @@
#include <string.h>
#include <gst/base/gstbytereader.h>
#include "mpeg4videoparse.h"
#include <gst/pbutils/codec-utils.h>
#include <gst/video/video.h>
#include "gstmpeg4videoparse.h"
GST_DEBUG_CATEGORY (mpeg4v_parse_debug);
#define GST_CAT_DEFAULT mpeg4v_parse_debug
@ -49,7 +54,7 @@ GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK,
);
/* Properties */
#define DEFAULT_PROP_DROP TRUE
#define DEFAULT_PROP_DROP TRUE
#define DEFAULT_CONFIG_INTERVAL (0)
enum
@ -79,12 +84,15 @@ static void gst_mpeg4vparse_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_mpeg4vparse_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static gboolean gst_mpeg4vparse_event (GstBaseParse * parse, GstEvent * event);
static gboolean gst_mpeg4vparse_src_event (GstBaseParse * parse,
GstEvent * event);
static void
gst_mpeg4vparse_set_property (GObject * object, guint property_id,
const GValue * value, GParamSpec * pspec)
{
GstMpeg4VParse *parse = GST_MPEG4VIDEOPARSE (object);
GstMpeg4VParse *parse = GST_MPEG4VIDEO_PARSE (object);
switch (property_id) {
case PROP_DROP:
@ -102,7 +110,7 @@ static void
gst_mpeg4vparse_get_property (GObject * object, guint property_id,
GValue * value, GParamSpec * pspec)
{
GstMpeg4VParse *parse = GST_MPEG4VIDEOPARSE (object);
GstMpeg4VParse *parse = GST_MPEG4VIDEO_PARSE (object);
switch (property_id) {
case PROP_DROP:
@ -120,8 +128,8 @@ static void
gst_mpeg4vparse_class_init (GstMpeg4VParseClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
GstBaseParseClass *parse_class = GST_BASE_PARSE_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
parent_class = g_type_class_peek_parent (klass);
@ -142,16 +150,19 @@ gst_mpeg4vparse_class_init (GstMpeg4VParseClass * klass)
0, 3600, DEFAULT_CONFIG_INTERVAL,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gst_element_class_add_pad_template (gstelement_class,
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_template));
gst_element_class_add_pad_template (gstelement_class,
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_template));
gst_element_class_set_details_simple (gstelement_class,
gst_element_class_set_details_simple (element_class,
"MPEG 4 video elementary stream parser", "Codec/Parser/Video",
"Parses MPEG-4 Part 2 elementary video streams",
"Julien Moutte <julien@fluendo.com>");
GST_DEBUG_CATEGORY_INIT (mpeg4v_parse_debug, "mpeg4videoparse", 0,
"MPEG-4 video parser");
/* Override BaseParse vfuncs */
parse_class->start = GST_DEBUG_FUNCPTR (gst_mpeg4vparse_start);
parse_class->stop = GST_DEBUG_FUNCPTR (gst_mpeg4vparse_stop);
@ -162,6 +173,8 @@ gst_mpeg4vparse_class_init (GstMpeg4VParseClass * klass)
GST_DEBUG_FUNCPTR (gst_mpeg4vparse_pre_push_frame);
parse_class->set_sink_caps = GST_DEBUG_FUNCPTR (gst_mpeg4vparse_set_caps);
parse_class->get_sink_caps = GST_DEBUG_FUNCPTR (gst_mpeg4vparse_get_caps);
parse_class->event = GST_DEBUG_FUNCPTR (gst_mpeg4vparse_event);
parse_class->src_event = GST_DEBUG_FUNCPTR (gst_mpeg4vparse_src_event);
}
static void
@ -177,25 +190,28 @@ gst_mpeg4vparse_reset_frame (GstMpeg4VParse * mp4vparse)
/* done parsing; reset state */
mp4vparse->last_sc = -1;
mp4vparse->vop_offset = -1;
mp4vparse->vos_offset = -1;
mp4vparse->vo_offset = -1;
mp4vparse->vo_found = FALSE;
mp4vparse->vol_offset = -1;
}
static void
gst_mpeg4vparse_reset (GstMpeg4VParse * mp4vparse)
{
gst_mpeg4vparse_reset_frame (mp4vparse);
mp4vparse->profile = 0;
mp4vparse->update_caps = TRUE;
mp4vparse->profile = NULL;
mp4vparse->level = NULL;
mp4vparse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
mp4vparse->force_key_unit_event = NULL;
gst_buffer_replace (&mp4vparse->config, NULL);
memset (&mp4vparse->params, 0, sizeof (mp4vparse->params));
memset (&mp4vparse->vol, 0, sizeof (mp4vparse->vol));
}
static gboolean
gst_mpeg4vparse_start (GstBaseParse * parse)
{
GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEOPARSE (parse);
GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
GST_DEBUG_OBJECT (parse, "start");
@ -209,7 +225,7 @@ gst_mpeg4vparse_start (GstBaseParse * parse)
static gboolean
gst_mpeg4vparse_stop (GstBaseParse * parse)
{
GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEOPARSE (parse);
GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
GST_DEBUG_OBJECT (parse, "stop");
@ -219,29 +235,41 @@ gst_mpeg4vparse_stop (GstBaseParse * parse)
}
static gboolean
gst_mpeg4vparse_process_config (GstMpeg4VParse * mp4vparse, const guint8 * data,
gsize size)
gst_mpeg4vparse_process_config (GstMpeg4VParse * mp4vparse,
const guint8 * data, guint offset, gsize size)
{
/* only do stuff if something new */
if (mp4vparse->config && size == gst_buffer_get_size (mp4vparse->config) &&
gst_buffer_memcmp (mp4vparse->config, 0, data, size) == 0)
if (!gst_buffer_memcmp (mp4vparse->config, offset, data, size))
return TRUE;
if (!gst_mpeg4_params_parse_config (&mp4vparse->params, data, size)) {
GST_DEBUG_OBJECT (mp4vparse, "failed to parse config data (size %"
G_GSSIZE_FORMAT ")", size);
if (mp4vparse->vol_offset < 0) {
GST_WARNING ("No video object Layer parsed in this frame, cannot accept "
"config");
return FALSE;
}
/* If the parsing fails, we accept the config only if we don't have
* any config yet. */
if (gst_mpeg4_parse_video_object_layer (&mp4vparse->vol,
NULL, data + mp4vparse->vol_offset,
size - mp4vparse->vol_offset) != GST_MPEG4_PARSER_OK &&
mp4vparse->config)
return FALSE;
GST_LOG_OBJECT (mp4vparse, "Width/Height: %u/%u, "
"time increment resolution: %u fixed time increment: %u",
mp4vparse->vol.width, mp4vparse->vol.height,
mp4vparse->vol.vop_time_increment_resolution,
mp4vparse->vol.fixed_vop_time_increment);
GST_LOG_OBJECT (mp4vparse, "accepting parsed config size %" G_GSSIZE_FORMAT,
size);
/* parsing ok, so accept it as new config */
if (mp4vparse->config != NULL)
gst_buffer_unref (mp4vparse->config);
mp4vparse->config = gst_buffer_new_and_alloc (size);
gst_buffer_fill (mp4vparse->config, 0, data, size);
mp4vparse->config = gst_buffer_new_wrapped (g_memdup (data, size), size);
/* trigger src caps update */
mp4vparse->update_caps = TRUE;
@ -251,74 +279,80 @@ gst_mpeg4vparse_process_config (GstMpeg4VParse * mp4vparse, const guint8 * data,
/* caller guarantees at least start code in @buf at @off */
static gboolean
gst_mpeg4vparse_process_sc (GstMpeg4VParse * mp4vparse, GstBuffer * buf,
gint off)
gst_mpeg4vparse_process_sc (GstMpeg4VParse * mp4vparse, GstMpeg4Packet * packet,
gsize size)
{
guint8 *data;
gsize size;
guint code;
g_return_val_if_fail (buf && gst_buffer_get_size (buf) >= off + 4, FALSE);
data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
code = data[off + 3];
GST_LOG_OBJECT (mp4vparse, "process startcode %x", code);
GST_LOG_OBJECT (mp4vparse, "process startcode %x", packet->type);
/* if we found a VOP, next start code ends it,
* except for final VOS end sequence code included in last VOP-frame */
if (mp4vparse->vop_offset >= 0 && code != MPEG4_VOS_ENDCODE) {
if (G_LIKELY (size > mp4vparse->vop_offset + 4)) {
if (mp4vparse->vop_offset >= 0 &&
packet->type != GST_MPEG4_VISUAL_OBJ_SEQ_END) {
if (G_LIKELY (size > mp4vparse->vop_offset + 1)) {
mp4vparse->intra_frame =
((data[mp4vparse->vop_offset + 4] >> 6 & 0x3) == 0);
((packet->data[mp4vparse->vop_offset + 1] >> 6 & 0x3) == 0);
} else {
GST_WARNING_OBJECT (mp4vparse, "no data following VOP startcode");
mp4vparse->intra_frame = FALSE;
}
GST_LOG_OBJECT (mp4vparse, "ending frame of size %d, is intra %d", off,
mp4vparse->intra_frame);
gst_buffer_unmap (buf, data, size);
GST_LOG_OBJECT (mp4vparse, "ending frame of size %d, is intra %d",
packet->offset - 3, mp4vparse->intra_frame);
return TRUE;
}
switch (code) {
case MPEG4_VOP_STARTCODE:
case MPEG4_GOP_STARTCODE:
switch (packet->type) {
case GST_MPEG4_VIDEO_OBJ_PLANE:
case GST_MPEG4_GROUP_OF_VOP:
{
gint offset;
if (code == MPEG4_VOP_STARTCODE) {
if (packet->type == GST_MPEG4_VIDEO_OBJ_PLANE) {
GST_LOG_OBJECT (mp4vparse, "startcode is VOP");
mp4vparse->vop_offset = off;
mp4vparse->vop_offset = packet->offset;
} else {
GST_LOG_OBJECT (mp4vparse, "startcode is GOP");
}
/* parse config data ending here if proper startcodes found earlier;
* preferably start at VOS (visual object sequence),
* otherwise at VO (video object) */
offset = mp4vparse->vos_offset >= 0 ?
mp4vparse->vos_offset : mp4vparse->vo_offset;
if (offset >= 0) {
gst_mpeg4vparse_process_config (mp4vparse, data, off);
if (mp4vparse->vo_found) {
/* Do not take the startcode into account */
gst_mpeg4vparse_process_config (mp4vparse,
packet->data, packet->offset, packet->offset - 3);
/* avoid accepting again for a VOP sc following a GOP sc */
mp4vparse->vos_offset = -1;
mp4vparse->vo_offset = -1;
mp4vparse->vo_found = FALSE;
}
break;
}
case MPEG4_VOS_STARTCODE:
GST_LOG_OBJECT (mp4vparse, "startcode is VOS");
mp4vparse->vos_offset = off;
case GST_MPEG4_VISUAL_OBJ_SEQ_START:
GST_LOG_OBJECT (mp4vparse, "Visual Sequence Start");
mp4vparse->vo_found = TRUE;
mp4vparse->profile = gst_codec_utils_mpeg4video_get_profile (packet->data
+ packet->offset + 1, packet->offset);
mp4vparse->level = gst_codec_utils_mpeg4video_get_level (packet->data
+ packet->offset + 1, packet->offset);
break;
case GST_MPEG4_VISUAL_OBJ:
GST_LOG_OBJECT (mp4vparse, "Visual Object");
default:
/* VO (video object) cases */
if (code <= 0x1f) {
GST_LOG_OBJECT (mp4vparse, "startcode is VO");
mp4vparse->vo_offset = off;
if (packet->type >= GST_MPEG4_VIDEO_LAYER_FIRST &&
packet->type <= GST_MPEG4_VIDEO_LAYER_LAST) {
GST_LOG_OBJECT (mp4vparse, "Video Object Layer");
/* we keep track of the offset to parse it later on */
if (mp4vparse->vol_offset < 0)
mp4vparse->vol_offset = packet->offset;
/* VO (video object) cases */
} else if (packet->type <= GST_MPEG4_VIDEO_OBJ_LAST) {
GST_LOG_OBJECT (mp4vparse, "Video object");
mp4vparse->vo_found = TRUE;
}
break;
}
gst_buffer_unmap (buf, data, size);
/* at least need to have a VOP in a frame */
return FALSE;
@ -332,22 +366,19 @@ static gboolean
gst_mpeg4vparse_check_valid_frame (GstBaseParse * parse,
GstBaseParseFrame * frame, guint * framesize, gint * skipsize)
{
GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEOPARSE (parse);
GstBuffer *buf = frame->buffer;
GstByteReader reader;
GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
GstMpeg4Packet packet;
guint8 *data = NULL;
gsize size;
gint off = 0;
gboolean ret = FALSE;
guint code;
guint8 *data;
gsize size;
data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
gst_byte_reader_init (&reader, data, size);
data = gst_buffer_map (frame->buffer, &size, NULL, GST_MAP_READ);
retry:
/* at least start code and subsequent byte */
if (G_UNLIKELY (size - off < 5))
goto done;
goto out;
/* avoid stale cached parsing state */
if (!(frame->flags & GST_BASE_PARSE_FRAME_FLAG_PARSING)) {
@ -364,36 +395,33 @@ retry:
goto next;
}
off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffff00, 0x00000100,
off, size - off);
GST_LOG_OBJECT (mp4vparse, "possible sync at buffer offset %d", off);
/* didn't find anything that looks like a sync word, skip */
if (G_UNLIKELY (off < 0)) {
*skipsize = size - 3;
goto done;
switch (gst_mpeg4_parse (&packet, TRUE, NULL, data, off, size)) {
case (GST_MPEG4_PARSER_NO_PACKET):
case (GST_MPEG4_PARSER_ERROR):
*skipsize = size - 3;
goto out;
default:
break;
}
off = packet.offset;
/* possible frame header, but not at offset 0? skip bytes before sync */
if (G_UNLIKELY (off > 0)) {
*skipsize = off;
goto done;
if (G_UNLIKELY (off > 3)) {
*skipsize = off - 3;
goto out;
}
/* ensure start code looks like a real starting start code */
code = data[3];
switch (code) {
case MPEG4_VOP_STARTCODE:
case MPEG4_VOS_STARTCODE:
case MPEG4_GOP_STARTCODE:
switch (packet.type) {
case GST_MPEG4_GROUP_OF_VOP:
case GST_MPEG4_VISUAL_OBJ_SEQ_START:
case GST_MPEG4_VIDEO_OBJ_PLANE:
break;
default:
if (code <= 0x1f)
if (packet.type <= GST_MPEG4_VIDEO_OBJ_LAST)
break;
/* undesirable sc */
GST_LOG_OBJECT (mp4vparse, "start code is no VOS, VO, VOP or GOP");
off++;
goto retry;
}
@ -401,44 +429,52 @@ retry:
mp4vparse->last_sc = 0;
/* examine start code, which should not end frame at present */
gst_mpeg4vparse_process_sc (mp4vparse, buf, 0);
gst_mpeg4vparse_process_sc (mp4vparse, &packet, size);
next:
GST_LOG_OBJECT (mp4vparse, "Looking for frame end");
/* start is fine as of now */
*skipsize = 0;
/* position a bit further than last sc */
off++;
/* so now we have start code at start of data; locate next start code */
off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffff00, 0x00000100,
off, size - off);
GST_LOG_OBJECT (mp4vparse, "next start code at %d", off);
if (off < 0) {
/* if draining, take all */
if (GST_BASE_PARSE_DRAINING (parse)) {
off = size;
ret = TRUE;
} else {
/* resume scan where we left it */
mp4vparse->last_sc = size - 4;
/* request best next available */
*framesize = G_MAXUINT;
goto done;
}
} else {
/* decide whether this startcode ends a frame */
ret = gst_mpeg4vparse_process_sc (mp4vparse, buf, off);
/* so now we have start code at start of data; locate next packet */
switch (gst_mpeg4_parse (&packet, TRUE, NULL, data, off, size)) {
case (GST_MPEG4_PARSER_NO_PACKET_END):
ret = gst_mpeg4vparse_process_sc (mp4vparse, &packet, size);
if (ret)
break;
case (GST_MPEG4_PARSER_NO_PACKET):
case (GST_MPEG4_PARSER_ERROR):
/* if draining, take all */
if (GST_BASE_PARSE_DRAINING (parse)) {
*framesize = size;
ret = TRUE;
} else {
/* resume scan where we left it */
mp4vparse->last_sc = size - 3;
/* request best next available */
*framesize = G_MAXUINT;
}
goto out;
break;
default:
/* decide whether this startcode ends a frame */
ret = gst_mpeg4vparse_process_sc (mp4vparse, &packet, size);
break;
}
off = packet.offset;
if (ret) {
*framesize = off;
*framesize = off - 3;
} else {
goto next;
}
done:
gst_buffer_unmap (buf, data, size);
out:
gst_buffer_unmap (frame->buffer, data, size);
return ret;
}
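
The rewritten scan hands start-code hunting to the shared codecparsers library: gst_mpeg4_parse() fills a GstMpeg4Packet whose type names the syntax element and whose offset points just past the 3-byte start code, which is why "- 3" keeps appearing above when converting back to buffer positions. Its calling pattern, reduced to a skeleton of the branches used here:

  GstMpeg4Packet packet;
  GstMpeg4ParseResult res;

  /* TRUE skips user-data packets; NULL means no VOP structure to fill */
  res = gst_mpeg4_parse (&packet, TRUE, NULL, data, offset, size);
  switch (res) {
    case GST_MPEG4_PARSER_OK:            /* complete packet found */
    case GST_MPEG4_PARSER_NO_PACKET_END: /* start found, end not yet seen */
      /* packet.type is VOS/VO/VOL/GOP/VOP...; packet.offset is just
       * past the 00 00 01 start code */
      break;
    case GST_MPEG4_PARSER_NO_PACKET:
    case GST_MPEG4_PARSER_ERROR:
    default:
      /* no usable sync word in the given range */
      break;
  }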
@ -447,6 +483,8 @@ gst_mpeg4vparse_update_src_caps (GstMpeg4VParse * mp4vparse)
{
GstCaps *caps = NULL;
GST_LOG_OBJECT (mp4vparse, "Updating caps");
/* only update if no src caps yet or explicitly triggered */
if (G_LIKELY (gst_pad_has_current_caps (GST_BASE_PARSE_SRC_PAD (mp4vparse)) &&
!mp4vparse->update_caps))
@ -455,7 +493,9 @@ gst_mpeg4vparse_update_src_caps (GstMpeg4VParse * mp4vparse)
/* carry over input caps as much as possible; override with our own stuff */
caps = gst_pad_get_current_caps (GST_BASE_PARSE_SINK_PAD (mp4vparse));
if (caps) {
caps = gst_caps_make_writable (caps);
GstCaps *tmp = gst_caps_copy (caps);
gst_caps_unref (caps);
caps = tmp;
} else {
caps = gst_caps_new_simple ("video/mpeg",
"mpegversion", G_TYPE_INT, 4, NULL);
@ -464,14 +504,9 @@ gst_mpeg4vparse_update_src_caps (GstMpeg4VParse * mp4vparse)
gst_caps_set_simple (caps, "systemstream", G_TYPE_BOOLEAN, FALSE,
"parsed", G_TYPE_BOOLEAN, TRUE, NULL);
if (mp4vparse->profile != 0) {
gchar *profile = NULL;
/* FIXME does it make sense to expose the profile in the caps ? */
profile = g_strdup_printf ("%d", mp4vparse->profile);
gst_caps_set_simple (caps, "profile-level-id",
G_TYPE_STRING, profile, NULL);
g_free (profile);
if (mp4vparse->profile && mp4vparse->level) {
gst_caps_set_simple (caps, "profile", G_TYPE_STRING, mp4vparse->profile,
"level", G_TYPE_STRING, mp4vparse->level, NULL);
}
if (mp4vparse->config != NULL) {
@ -479,15 +514,15 @@ gst_mpeg4vparse_update_src_caps (GstMpeg4VParse * mp4vparse)
GST_TYPE_BUFFER, mp4vparse->config, NULL);
}
if (mp4vparse->params.width > 0 && mp4vparse->params.height > 0) {
gst_caps_set_simple (caps, "width", G_TYPE_INT, mp4vparse->params.width,
"height", G_TYPE_INT, mp4vparse->params.height, NULL);
if (mp4vparse->vol.width > 0 && mp4vparse->vol.height > 0) {
gst_caps_set_simple (caps, "width", G_TYPE_INT, mp4vparse->vol.width,
"height", G_TYPE_INT, mp4vparse->vol.height, NULL);
}
/* perhaps we have a framerate */
if (mp4vparse->params.fixed_time_increment != 0) {
gint fps_num = mp4vparse->params.time_increment_resolution;
gint fps_den = mp4vparse->params.fixed_time_increment;
if (mp4vparse->vol.fixed_vop_time_increment != 0) {
gint fps_num = mp4vparse->vol.vop_time_increment_resolution;
gint fps_den = mp4vparse->vol.fixed_vop_time_increment;
GstClockTime latency = gst_util_uint64_scale (GST_SECOND, fps_den, fps_num);
gst_caps_set_simple (caps, "framerate",
@ -498,11 +533,10 @@ gst_mpeg4vparse_update_src_caps (GstMpeg4VParse * mp4vparse)
}
/* or pixel-aspect-ratio */
if (mp4vparse->params.aspect_ratio_width > 0 &&
mp4vparse->params.aspect_ratio_height > 0) {
if (mp4vparse->vol.par_width > 0 && mp4vparse->vol.par_height > 0) {
gst_caps_set_simple (caps, "pixel-aspect-ratio",
GST_TYPE_FRACTION, mp4vparse->params.aspect_ratio_width,
mp4vparse->params.aspect_ratio_height, NULL);
GST_TYPE_FRACTION, mp4vparse->vol.par_width,
mp4vparse->vol.par_height, NULL);
}
gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (mp4vparse), caps);
@ -512,7 +546,7 @@ gst_mpeg4vparse_update_src_caps (GstMpeg4VParse * mp4vparse)
static GstFlowReturn
gst_mpeg4vparse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEOPARSE (parse);
GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
GstBuffer *buffer = frame->buffer;
gst_mpeg4vparse_update_src_caps (mp4vparse);
@ -523,34 +557,95 @@ gst_mpeg4vparse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
if (G_UNLIKELY (mp4vparse->drop && !mp4vparse->config)) {
GST_DEBUG_OBJECT (mp4vparse, "dropping frame as no config yet");
GST_LOG_OBJECT (mp4vparse, "dropping frame as no config yet");
return GST_BASE_PARSE_FLOW_DROPPED;
} else
return GST_FLOW_OK;
}
static gint
compare_buffers (GstBuffer * buf1, GstBuffer * buf2)
static GstEvent *
check_pending_key_unit_event (GstEvent * pending_event, GstSegment * segment,
GstClockTime timestamp, guint flags, GstClockTime pending_key_unit_ts)
{
gpointer data;
gsize size;
gint ret;
GstClockTime running_time, stream_time;
gboolean all_headers;
guint count;
GstEvent *event = NULL;
data = gst_buffer_map (buf2, &size, NULL, GST_MAP_READ);
ret = gst_buffer_memcmp (buf1, 0, data, size);
gst_buffer_unmap (buf2, data, size);
g_return_val_if_fail (segment != NULL, NULL);
return ret;
if (pending_event == NULL)
goto out;
if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
timestamp == GST_CLOCK_TIME_NONE)
goto out;
running_time = gst_segment_to_running_time (segment,
GST_FORMAT_TIME, timestamp);
GST_INFO ("now %" GST_TIME_FORMAT " wanted %" GST_TIME_FORMAT,
GST_TIME_ARGS (running_time), GST_TIME_ARGS (pending_key_unit_ts));
if (GST_CLOCK_TIME_IS_VALID (pending_key_unit_ts) &&
running_time < pending_key_unit_ts)
goto out;
if (flags & GST_BUFFER_FLAG_DELTA_UNIT) {
GST_DEBUG ("pending force key unit, waiting for keyframe");
goto out;
}
stream_time = gst_segment_to_stream_time (segment,
GST_FORMAT_TIME, timestamp);
gst_video_event_parse_upstream_force_key_unit (pending_event,
NULL, &all_headers, &count);
event =
gst_video_event_new_downstream_force_key_unit (timestamp, stream_time,
running_time, all_headers, count);
gst_event_set_seqnum (event, gst_event_get_seqnum (pending_event));
out:
return event;
}
static void
gst_mpeg4vparse_prepare_key_unit (GstMpeg4VParse * parse, GstEvent * event)
{
GstClockTime running_time;
guint count;
parse->pending_key_unit_ts = GST_CLOCK_TIME_NONE;
gst_event_replace (&parse->force_key_unit_event, NULL);
gst_video_event_parse_downstream_force_key_unit (event,
NULL, NULL, &running_time, NULL, &count);
GST_INFO_OBJECT (parse, "pushing downstream force-key-unit event %d "
"%" GST_TIME_FORMAT " count %d", gst_event_get_seqnum (event),
GST_TIME_ARGS (running_time), count);
gst_pad_push_event (GST_BASE_PARSE_SRC_PAD (parse), event);
}
static GstFlowReturn
gst_mpeg4vparse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
{
GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEOPARSE (parse);
GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
GstBuffer *buffer = frame->buffer;
gboolean push_codec = FALSE;
GstEvent *event = NULL;
/* periodic SPS/PPS sending */
if (mp4vparse->interval > 0) {
if ((event = check_pending_key_unit_event (mp4vparse->force_key_unit_event,
&parse->segment, GST_BUFFER_TIMESTAMP (buffer),
GST_BUFFER_FLAGS (buffer), mp4vparse->pending_key_unit_ts))) {
gst_mpeg4vparse_prepare_key_unit (mp4vparse, event);
push_codec = TRUE;
}
/* periodic config sending */
if (mp4vparse->interval > 0 || push_codec) {
GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer);
guint64 diff;
@ -572,23 +667,30 @@ gst_mpeg4vparse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
GST_LOG_OBJECT (mp4vparse,
"interval since last config %" GST_TIME_FORMAT, GST_TIME_ARGS (diff));
if (GST_TIME_AS_SECONDS (diff) >= mp4vparse->interval) {
if (GST_TIME_AS_SECONDS (diff) >= mp4vparse->interval || push_codec) {
guint8 *cdata;
gsize csize;
gboolean diffconf;
/* we need to send config now first */
GST_LOG_OBJECT (parse, "inserting config in stream");
GST_INFO_OBJECT (parse, "inserting config in stream");
cdata = gst_buffer_map (mp4vparse->config, &csize, NULL, GST_MAP_READ);
diffconf = (gst_buffer_get_size (buffer) < csize)
|| gst_buffer_memcmp (buffer, 0, cdata, csize);
gst_buffer_unmap (mp4vparse->config, cdata, csize);
/* avoid inserting duplicate config */
if ((gst_buffer_get_size (buffer) <
gst_buffer_get_size (mp4vparse->config))
|| compare_buffers (buffer, mp4vparse->config)) {
if (diffconf) {
GstBuffer *superbuf;
/* insert header */
superbuf = gst_buffer_merge (mp4vparse->config, buffer);
gst_buffer_copy_into (superbuf, buffer, GST_BUFFER_COPY_ALL, 0, -1);
gst_buffer_copy_into (superbuf, buffer, GST_BUFFER_COPY_METADATA, 0,
csize);
gst_buffer_replace (&frame->buffer, superbuf);
gst_buffer_unref (superbuf);
} else {
GST_LOG_OBJECT (parse, "... but avoiding duplication");
GST_INFO_OBJECT (parse, "... but avoiding duplication");
}
if (G_UNLIKELY (timestamp != -1)) {
@ -604,10 +706,15 @@ gst_mpeg4vparse_pre_push_frame (GstBaseParse * parse, GstBaseParseFrame * frame)
static gboolean
gst_mpeg4vparse_set_caps (GstBaseParse * parse, GstCaps * caps)
{
GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEOPARSE (parse);
GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
GstStructure *s;
const GValue *value;
GstBuffer *buf;
guint8 *data;
gsize size;
GstMpeg4Packet packet;
GstMpeg4ParseResult res;
GST_DEBUG_OBJECT (parse, "setcaps called with %" GST_PTR_FORMAT, caps);
@ -615,14 +722,23 @@ gst_mpeg4vparse_set_caps (GstBaseParse * parse, GstCaps * caps)
if ((value = gst_structure_get_value (s, "codec_data")) != NULL
&& (buf = gst_value_get_buffer (value))) {
guint8 *data;
gsize size;
data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
/* best possible parse attempt,
* src caps are based on sink caps so it will end up in there
* whether successful or not */
gst_mpeg4vparse_process_config (mp4vparse, data, size);
data = gst_buffer_map (buf, &size, NULL, GST_MAP_READ);
res = gst_mpeg4_parse (&packet, TRUE, NULL, data, 0, size);
while (res == GST_MPEG4_PARSER_OK || res == GST_MPEG4_PARSER_NO_PACKET_END) {
if (packet.type >= GST_MPEG4_VIDEO_LAYER_FIRST &&
packet.type <= GST_MPEG4_VIDEO_LAYER_LAST)
mp4vparse->vol_offset = packet.offset;
res = gst_mpeg4_parse (&packet, TRUE, NULL, data, packet.offset, size);
}
/* And take it as config */
gst_mpeg4vparse_process_config (mp4vparse, data, 3, size);
gst_buffer_unmap (buf, data, size);
}
@ -630,6 +746,7 @@ gst_mpeg4vparse_set_caps (GstBaseParse * parse, GstCaps * caps)
return TRUE;
}
static GstCaps *
gst_mpeg4vparse_get_caps (GstBaseParse * parse, GstCaps * filter)
{
@ -660,24 +777,94 @@ gst_mpeg4vparse_get_caps (GstBaseParse * parse, GstCaps * filter)
(parse)));
}
if (filter) {
GstCaps *tmp = gst_caps_intersect_full (res, filter,
GST_CAPS_INTERSECT_FIRST);
gst_caps_unref (res);
res = tmp;
}
return res;
}
static gboolean
plugin_init (GstPlugin * plugin)
gst_mpeg4vparse_event (GstBaseParse * parse, GstEvent * event)
{
GST_DEBUG_CATEGORY_INIT (mpeg4v_parse_debug, "mpeg4videoparse", 0,
"MPEG-4 video parser");
gboolean handled = FALSE;
GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
if (!gst_element_register (plugin, "mpeg4videoparse", GST_RANK_PRIMARY + 1,
gst_mpeg4vparse_get_type ()))
return FALSE;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CUSTOM_DOWNSTREAM:
{
GstClockTime timestamp, stream_time, running_time;
gboolean all_headers;
guint count;
return TRUE;
if (!gst_video_event_is_force_key_unit (event))
break;
gst_video_event_parse_downstream_force_key_unit (event,
&timestamp, &stream_time, &running_time, &all_headers, &count);
GST_INFO_OBJECT (mp4vparse, "received downstream force key unit event, "
"seqnum %d running_time %" GST_TIME_FORMAT " all_headers %d count %d",
gst_event_get_seqnum (event), GST_TIME_ARGS (running_time),
all_headers, count);
handled = TRUE;
if (mp4vparse->force_key_unit_event) {
GST_INFO_OBJECT (mp4vparse, "ignoring force key unit event "
"as one is already queued");
break;
}
mp4vparse->pending_key_unit_ts = running_time;
gst_event_replace (&mp4vparse->force_key_unit_event, event);
break;
}
default:
break;
}
return handled;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"mpeg4videoparse",
"MPEG-4 video parser",
plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
static gboolean
gst_mpeg4vparse_src_event (GstBaseParse * parse, GstEvent * event)
{
gboolean handled = FALSE;
GstMpeg4VParse *mp4vparse = GST_MPEG4VIDEO_PARSE (parse);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CUSTOM_UPSTREAM:
{
GstClockTime running_time;
gboolean all_headers;
guint count;
if (!gst_video_event_is_force_key_unit (event))
break;
gst_video_event_parse_upstream_force_key_unit (event,
&running_time, &all_headers, &count);
GST_INFO_OBJECT (mp4vparse, "received upstream force-key-unit event, "
"seqnum %d running_time %" GST_TIME_FORMAT " all_headers %d count %d",
gst_event_get_seqnum (event), GST_TIME_ARGS (running_time),
all_headers, count);
if (!all_headers)
break;
mp4vparse->pending_key_unit_ts = running_time;
gst_event_replace (&mp4vparse->force_key_unit_event, event);
/* leave handled = FALSE so that the event gets propagated upstream */
break;
}
default:
break;
}
return handled;
}
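/* Minimal usage sketch (assumed application code, not part of this change):
* a force-key-unit request like the one handled above can be created with
* the gst-plugins-base video event API and sent to the pipeline:
*
*   GstEvent *ev = gst_video_event_new_upstream_force_key_unit (
*       GST_CLOCK_TIME_NONE, TRUE, 1);
*   gst_element_send_event (GST_ELEMENT (pipeline), ev);
*
* where TRUE requests all headers and 1 is the key-unit count.
*/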
View file
@@ -16,28 +16,28 @@
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __MPEG4VIDEO_PARSE_H__
#define __MPEG4VIDEO_PARSE_H__
#include <gst/gst.h>
#include <gst/base/gstbaseparse.h>
#include "mpeg4parse.h"
#include <gst/codecparsers/gstmpeg4parser.h>
G_BEGIN_DECLS
#define GST_TYPE_MPEG4VIDEO_PARSE (gst_mpeg4vparse_get_type())
#define GST_MPEG4VIDEO_PARSE(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),\
GST_TYPE_MPEG4VIDEO_PARSE, GstMpeg4VParse))
#define GST_MPEG4VIDEO_PARSE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),\
GST_TYPE_MPEG4VIDEO_PARSE, GstMpeg4VParseClass))
#define GST_MPEG4VIDEO_PARSE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj),\
GST_TYPE_MPEG4VIDEO_PARSE, GstMpeg4VParseClass))
#define GST_IS_MPEG4VIDEO_PARSE(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),\
GST_TYPE_MPEG4VIDEO_PARSE))
#define GST_IS_MPEG4VIDEO_PARSE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),\
GST_TYPE_MPEG4VIDEO_PARSE))
typedef struct _GstMpeg4VParse GstMpeg4VParse;
typedef struct _GstMpeg4VParseClass GstMpeg4VParseClass;
@@ -50,18 +50,21 @@ struct _GstMpeg4VParse {
/* parse state */
gint last_sc;
gint vop_offset;
gint vos_offset;
gint vo_offset;
gboolean vo_found;
gboolean intra_frame;
gboolean update_caps;
GstBuffer *config;
GstMpeg4VideoObjectLayer vol;
gint vol_offset;
const gchar *profile;
const gchar *level;
/* properties */
gboolean drop;
guint interval;
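/* pending force-key-unit state (see the sink and src event handlers) */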
GstClockTime pending_key_unit_ts;
GstEvent *force_key_unit_event;
};
struct _GstMpeg4VParseClass {
@@ -72,4 +75,4 @@ GType gst_mpeg4vparse_get_type (void);
G_END_DECLS
#endif /* __MPEG4VIDEO_PARSE_H__ */
View file
@@ -26,6 +26,7 @@
#include "gsth264parse.h"
#include "gstdiracparse.h"
#include "gstmpegvideoparse.h"
#include "gstmpeg4videoparse.h"
static gboolean
plugin_init (GstPlugin * plugin)
@@ -40,6 +41,8 @@ plugin_init (GstPlugin * plugin)
GST_RANK_NONE, GST_TYPE_DIRAC_PARSE);
ret |= gst_element_register (plugin, "mpegvideoparse",
GST_RANK_PRIMARY + 1, GST_TYPE_MPEGVIDEO_PARSE);
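/* mpeg4videoparse is now registered from the videoparsers plugin, keeping
* the GST_RANK_PRIMARY + 1 rank it had as a standalone plugin */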
ret |= gst_element_register (plugin, "mpeg4videoparse",
GST_RANK_PRIMARY + 1, GST_TYPE_MPEG4VIDEO_PARSE);
return ret;
}
View file
@@ -7,7 +7,5 @@ Name: GStreamer Bad Plugin libraries
Description: Currently includes the photography interface library
Requires: gstreamer-@GST_MAJORMINOR@ gstreamer-base-@GST_MAJORMINOR@
Version: @VERSION@
Libs: -L${libdir} -lgstphotography-@GST_MAJORMINOR@
Cflags: -I${includedir}
View file
@@ -1 +1 @@
af az bg ca cs da de el en_GB es eu fi fr gl hu id it ja ky lt lv mt nb nl or pl pt_BR ro ru sk sl sq sr sv tr uk vi zh_CN
af az bg ca cs da de el en_GB eo es eu fi fr gl hu id it ja ky lt lv mt nb nl or pl pt_BR ro ru sk sl sq sr sv tr uk vi zh_CN
po/eo.po (new file, 71 lines)
View file
@@ -0,0 +1,71 @@
# Esperanto translation for gst-plugins-bad.
# Copyright (C) 2011 Free Software Foundation, Inc.
# This file is distributed under the same license as the gst-plugins-bad package.
# Kristjan SCHMIDT <kristjan.schmidt@googlemail.com>, 2011.
#
msgid ""
msgstr ""
"Project-Id-Version: gst-plugins-bad 0.10.21.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
"POT-Creation-Date: 2011-12-11 00:54+0000\n"
"PO-Revision-Date: 2011-06-04 22:18+0200\n"
"Last-Translator: Kristjan SCHMIDT <kristjan.schmidt@googlemail.com>\n"
"Language-Team: Esperanto <translation-team-eo@lists.sourceforge.net>\n"
"Language: eo\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n != 1)\n"
msgid "Could not read title information for DVD."
msgstr "Ne eblis legi la titol-informojn de la DVD."
#, c-format
msgid "Failed to open DVD device '%s'."
msgstr "Fiaskis malfermi la DVD-aparaton \"%s\"."
msgid "Failed to set PGC based seeking."
msgstr "PGC-bazita serĉo fiaskis."
msgid ""
"Could not read DVD. This may be because the DVD is encrypted and a DVD "
"decryption library is not installed."
msgstr ""
"Ne eblis legi la DVD-n. Eble la DVD estas ĉifrita sed biblioteko por "
"malĉifrado ne estas instalite."
msgid "Could not read DVD."
msgstr "Ne eblis legi la DVD-n."
msgid "No file name specified for writing."
msgstr "Neniu dosiernomo estas specifite por skribi."
#, c-format
msgid "Could not open file \"%s\" for writing."
msgstr "Ne eblis malfermi la dosieron \"%s\" por skribi."
msgid "Internal data stream error."
msgstr "Interna datum-flu-eraro."
#, c-format
msgid "Could not write to file \"%s\"."
msgstr "Ne eblis skribi al dosiero \"%s\"."
msgid "Internal data flow error."
msgstr "Interna datum-flu-eraro."
#, c-format
msgid "Device \"%s\" does not exist."
msgstr "Aparato \"%s\" ne ekzistas."
#, fuzzy, c-format
msgid "Could not open frontend device \"%s\"."
msgstr "Ne eblis malfermi la \"Frontend\"-aparaton \"%s\"."
#, fuzzy, c-format
msgid "Could not get settings from frontend device \"%s\"."
msgstr "Ne eblis akiri la agordojn de la \"Frontend\"-aparato \"%s\"."
#, c-format
msgid "Could not open file \"%s\" for reading."
msgstr "Ne eblis malfermi la dosieron \"%s\" por legi."
po/es.po (369 lines changed)
View file
@@ -1,21 +1,21 @@
# translation of gst-plugins-bad-0.10.13.2.po to Español
# translation of gst-plugins-bad-0.10.21.2.po to Español
# spanish translation for gst-plugins-bad
# This file is put in the public domain.
# Jorge González <jorgegonz@svn.gnome.org>, 2007, 2008, 2009, 2011.
#
# Jorge González González <aloriel@gmail.com>, 2007, 2008, 2009.
msgid ""
msgstr ""
"Project-Id-Version: gst-plugins-bad 0.10.13.2\n"
"Project-Id-Version: gst-plugins-bad 0.10.21.2\n"
"Report-Msgid-Bugs-To: http://bugzilla.gnome.org/\n"
"POT-Creation-Date: 2010-10-19 23:33+0100\n"
"PO-Revision-Date: 2009-09-29 23:12+0200\n"
"POT-Creation-Date: 2011-12-11 00:54+0000\n"
"PO-Revision-Date: 2011-10-02 15:47+0200\n"
"Last-Translator: Jorge González González <aloriel@gmail.com>\n"
"Language-Team: Spanish <es@li.org>\n"
"Language: es\n"
"MIME-Version: 1.0\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: 8bit\n"
"Plural-Forms: nplurals=2; plural=(n != 1);\n"
"Plural-Forms: nplurals=2; plural=(n!=1);\n"
msgid "Could not read title information for DVD."
msgstr "No se pudo leer la información del título para el DVD."
@@ -31,10 +31,11 @@ msgid ""
"Could not read DVD. This may be because the DVD is encrypted and a DVD "
"decryption library is not installed."
msgstr ""
"No se pudo leer el DVD. Puede ser debido a que el DVD está cifrado y la "
"biblioteca de descifrado del DVD no está instalada."
#, fuzzy
msgid "Could not read DVD."
msgstr "No se pudo leer la información del título para el DVD."
msgstr "No se pudo leer el DVD."
msgid "No file name specified for writing."
msgstr "No se especificó un nombre de archivo para su escritura."
@@ -69,11 +70,359 @@ msgstr "No se pudieron obtener los ajustes del dispositivo frontend «%s»."
msgid "Could not open file \"%s\" for reading."
msgstr "No se pudo abrir el archivo «%s» para leer."
#~ msgid "Internal clock error."
#~ msgstr "Error en el reloj interno."
#~ msgid "Could not open audio device for mixer control handling."
#~ msgstr ""
#~ "No se pudo abrir el dispositivo de sonido para manejar el control del "
#~ "mezclador."
#~ msgid ""
#~ "Could not open audio device for mixer control handling. This version of "
#~ "the Open Sound System is not supported by this element."
#~ msgstr ""
#~ "No se pudo abrir el dispositivo para manejar el control del mezclador. "
#~ "Este elemento no soporta esta versión del Open Sound System."
#~ msgid "Volume"
#~ msgstr "Volumen"
#~ msgid "Master"
#~ msgstr "Maestro"
#~ msgid "Front"
#~ msgstr "Frontal"
#~ msgid "Rear"
#~ msgstr "Trasero"
#~ msgid "Headphones"
#~ msgstr "Auriculares"
#~ msgid "Center"
#~ msgstr "Centrado"
#~ msgid "LFE"
#~ msgstr "LFE"
#~ msgid "Surround"
#~ msgstr "Sonido envolvente"
#~ msgid "Side"
#~ msgstr "Lateral"
#~ msgid "Built-in Speaker"
#~ msgstr "Altavoz integrado"
#~ msgid "AUX 1 Out"
#~ msgstr "Salida auxiliar 1"
#~ msgid "AUX 2 Out"
#~ msgstr "Salida auxiliar 2"
#~ msgid "AUX Out"
#~ msgstr "Salida auxiliar"
#~ msgid "Bass"
#~ msgstr "Bajos"
#~ msgid "Treble"
#~ msgstr "Agudos"
#~ msgid "3D Depth"
#~ msgstr "Profundidad 3D"
#~ msgid "3D Center"
#~ msgstr "Centro 3D"
#~ msgid "3D Enhance"
#~ msgstr "Mejora 3D"
#~ msgid "Telephone"
#~ msgstr "Teléfono"
#~ msgid "Microphone"
#~ msgstr "Micrófono"
#~ msgid "Line Out"
#~ msgstr "Línea de salida"
#~ msgid "Line In"
#~ msgstr "Línea de entrada"
#~ msgid "Internal CD"
#~ msgstr "CD interno"
#~ msgid "Video In"
#~ msgstr "Entrada de vídeo"
#~ msgid "AUX 1 In"
#~ msgstr "Entrada auxiliar 1"
#~ msgid "AUX 2 In"
#~ msgstr "Entrada auxiliar 2"
#~ msgid "AUX In"
#~ msgstr "Entrada auxiliar"
#~ msgid "PCM"
#~ msgstr "PCM"
#~ msgid "Record Gain"
#~ msgstr "Ganancia de grabación"
#~ msgid "Output Gain"
#~ msgstr "Salida de grabación"
#~ msgid "Microphone Boost"
#~ msgstr "Aumento del micrófono"
#~ msgid "Loopback"
#~ msgstr "Bucle local"
#~ msgid "Diagnostic"
#~ msgstr "Diagnóstico"
#~ msgid "Bass Boost"
#~ msgstr "Aumento de bajos"
#~ msgid "Playback Ports"
#~ msgstr "Puertos de reproducción"
#~ msgid "Input"
#~ msgstr "Entrada"
#~ msgid "Record Source"
#~ msgstr "Origen de la grabación"
#~ msgid "Monitor Source"
#~ msgstr "Origen del monitor"
#~ msgid "Keyboard Beep"
#~ msgstr "Pitido del teclado"
#~ msgid "Monitor"
#~ msgstr "Monitor"
#~ msgid "Simulate Stereo"
#~ msgstr "Simular estéreo"
#~ msgid "Stereo"
#~ msgstr "Estéreo"
#~ msgid "Surround Sound"
#~ msgstr "Sonido envolvente"
#~ msgid "Microphone Gain"
#~ msgstr "Ganancia del micrófono"
#~ msgid "Speaker Source"
#~ msgstr "Origen de los altavoces"
#~ msgid "Microphone Source"
#~ msgstr "Origen del micrófono"
#~ msgid "Jack"
#~ msgstr "Jack"
#~ msgid "Center / LFE"
#~ msgstr "Centrado / LFE"
#~ msgid "Stereo Mix"
#~ msgstr "Mezclador estéreo"
#~ msgid "Mono Mix"
#~ msgstr "Mezclador mono"
#~ msgid "Input Mix"
#~ msgstr "Mezclador de entrada"
#~ msgid "SPDIF In"
#~ msgstr "Entrada S/PDIF"
#~ msgid "SPDIF Out"
#~ msgstr "Salida S/PDIF"
#~ msgid "Microphone 1"
#~ msgstr "Micrófono 1"
#~ msgid "Microphone 2"
#~ msgstr "Micrófono 2"
#~ msgid "Digital Out"
#~ msgstr "Salida digital"
#~ msgid "Digital In"
#~ msgstr "Entrada digital"
#~ msgid "HDMI"
#~ msgstr "HDMI"
#~ msgid "Modem"
#~ msgstr "Módem"
#~ msgid "Handset"
#~ msgstr "Auriculares"
#~ msgid "Other"
#~ msgstr "Otro"
#~ msgid "None"
#~ msgstr "Ninguno"
#~ msgid "On"
#~ msgstr "Encendido"
#~ msgid "Off"
#~ msgstr "Apagado"
#~ msgid "Mute"
#~ msgstr "Silenciar"
#~ msgid "Fast"
#~ msgstr "Rápido"
#~ msgid "Very Low"
#~ msgstr "Muy bajo"
#~ msgid "Low"
#~ msgstr "Bajo"
#~ msgid "Medium"
#~ msgstr "Medio"
#~ msgid "High"
#~ msgstr "Alto"
#~ msgid "Very High"
#~ msgstr "Muy alto"
#~ msgid "Production"
#~ msgstr "Producción"
#~ msgid "Front Panel Microphone"
#~ msgstr "Micrófono del panel frontal"
#~ msgid "Front Panel Line In"
#~ msgstr "Línea de entrada del panel frontal"
#~ msgid "Front Panel Headphones"
#~ msgstr "Auriculares del panel frontal"
#~ msgid "Front Panel Line Out"
#~ msgstr "Línea de salida del panel frontal"
#~ msgid "Green Connector"
#~ msgstr "Conector verde"
#~ msgid "Pink Connector"
#~ msgstr "Conector rosa"
#~ msgid "Blue Connector"
#~ msgstr "Conector azul"
#~ msgid "White Connector"
#~ msgstr "Conector blanco"
#~ msgid "Black Connector"
#~ msgstr "Conector negro"
#~ msgid "Gray Connector"
#~ msgstr "Conector gris"
#~ msgid "Orange Connector"
#~ msgstr "Conector naranja"
#~ msgid "Red Connector"
#~ msgstr "Conector rojo"
#~ msgid "Yellow Connector"
#~ msgstr "Conector amarillo"
#~ msgid "Green Front Panel Connector"
#~ msgstr "Conector verde del panel frontal"
#~ msgid "Pink Front Panel Connector"
#~ msgstr "Conector rosa del panel frontal"
#~ msgid "Blue Front Panel Connector"
#~ msgstr "Conector azul del panel frontal"
#~ msgid "White Front Panel Connector"
#~ msgstr "Conector blanco del panel frontal"
#~ msgid "Black Front Panel Connector"
#~ msgstr "Conector negro del panel frontal"
#~ msgid "Gray Front Panel Connector"
#~ msgstr "Conector gris del panel frontal"
#~ msgid "Orange Front Panel Connector"
#~ msgstr "Conector naranja del panel frontal"
#~ msgid "Red Front Panel Connector"
#~ msgstr "Conector rojo del panel frontal"
#~ msgid "Yellow Front Panel Connector"
#~ msgstr "Conector amarillo del panel frontal"
#~ msgid "Spread Output"
#~ msgstr "Expandir salida"
#~ msgid "Downmix"
#~ msgstr "Reducción de canales"
#~ msgid "Virtual Mixer Input"
#~ msgstr "Entrada del mezclador virtual"
#~ msgid "Virtual Mixer Output"
#~ msgstr "Salida del mezclador virtual"
#~ msgid "Virtual Mixer Channels"
#~ msgstr "Canales del mezclador virtual"
#~ msgid "%s Function"
#~ msgstr "Función %s"
#~ msgid "%s %d"
#~ msgstr "%s %d"
#~ msgid "Internal clock error."
#~ msgstr "Error en el reloj interno."
#~ msgid ""
#~ "Could not open audio device for playback. Device is being used by another "
#~ "application."
#~ msgstr ""
#~ "No se pudo abrir el dispositivo de sonido para reproducir. Otra "
#~ "aplicación está usando el dispositivo."
#~ msgid ""
#~ "Could not open audio device for playback. You don't have permission to "
#~ "open the device."
#~ msgstr ""
#~ "No se pudo abrir el dispositivo de sonido para reproducir. No tiene "
#~ "permiso para abrir el dispositivo."
#~ msgid "Could not open audio device for playback."
#~ msgstr "No se pudo abrir el dispositivo de sonido para reproducción."
#~ msgid ""
#~ "Could not open audio device for playback. This version of the Open Sound "
#~ "System is not supported by this element."
#~ msgstr ""
#~ "No se pudo abrir el dispositivo para reproducir. Este elemento no soporta "
#~ "esta versión del Open Sound System."
#~ msgid "Playback is not supported by this audio device."
#~ msgstr "Este dispositivo de sonido no soporta reproducción."
#~ msgid "Audio playback error."
#~ msgstr "Error en la reproducción del sonido."
#~ msgid "Recording is not supported by this audio device."
#~ msgstr "Este dispositivo de sonido no soporta grabación."
#~ msgid "Error recording from audio device."
#~ msgstr "Error al grabar desde el dispositivo de sonido."
#~ msgid "PCM 1"
#~ msgstr "PCM 1"
Some files were not shown because too many files have changed in this diff.