Merge branch 'master' into 0.11

Conflicts:
	common
	configure.ac
	gst/colorspace/colorspace.c
	gst/colorspace/colorspace.h
	gst/colorspace/gstcolorspace.c
This commit is contained in:
Wim Taymans 2011-08-04 09:36:07 +02:00
commit 01b9b5002f
168 changed files with 13949 additions and 3425 deletions

1
.gitignore vendored
View file

@ -49,3 +49,4 @@ gst*orc.h
/tests/check/orc /tests/check/orc
/tests/examples/shapewipe/shapewipe-example /tests/examples/shapewipe/shapewipe-example
/tests/examples/jack/jack_client /tests/examples/jack/jack_client
/tests/examples/opencv/gstmotioncells_dynamic_test

2
common

@ -1 +1 @@
Subproject commit 50b34abb468b6572a92f6700552f6f541c655be8 Subproject commit 605cd9a65ed61505f24b840d3fe8e252be72b151

View file

@ -339,6 +339,7 @@ AG_GST_CHECK_PLUGIN(h264parse)
AG_GST_CHECK_PLUGIN(hdvparse) AG_GST_CHECK_PLUGIN(hdvparse)
AG_GST_CHECK_PLUGIN(hls) AG_GST_CHECK_PLUGIN(hls)
AG_GST_CHECK_PLUGIN(id3tag) AG_GST_CHECK_PLUGIN(id3tag)
AG_GST_CHECK_PLUGIN(inter)
AG_GST_CHECK_PLUGIN(interlace) AG_GST_CHECK_PLUGIN(interlace)
AG_GST_CHECK_PLUGIN(ivfparse) AG_GST_CHECK_PLUGIN(ivfparse)
AG_GST_CHECK_PLUGIN(jp2kdecimator) AG_GST_CHECK_PLUGIN(jp2kdecimator)
@ -480,16 +481,15 @@ AG_GST_CHECK_FEATURE(DIRECT3D, [Direct3D plug-in], direct3dsink, [
save_LIBS="$LIBS" save_LIBS="$LIBS"
CFLAGS="$CFLAGS $DIRECTX_CFLAGS" CFLAGS="$CFLAGS $DIRECTX_CFLAGS"
LDFLAGS="$LDFLAGS $DIRECTX_LDFLAGS" LDFLAGS="$LDFLAGS $DIRECTX_LDFLAGS"
LIBS="$LIBS -ld3d -lgdi32" LIBS="$LIBS -ld3d9 -lgdi32"
AC_MSG_CHECKING(for Direct3D LDFLAGS) AC_MSG_CHECKING(for Direct3D LDFLAGS)
AC_LINK_IFELSE([ AC_LINK_IFELSE([
#include <windows.h> #include <windows.h>
#include <d3d.h> #include <d3d9.h>
int main () int main ()
{ {
GetStockObject(0); Direct3DCreate9(D3D_SDK_VERSION);
Direct3DCreate(NULL, NULL, NULL);
return 0; return 0;
} }
@ -502,8 +502,7 @@ int main ()
LIBS=$save_LIBS LIBS=$save_LIBS
if test "x$HAVE_DIRECT3D" = "xyes"; then if test "x$HAVE_DIRECT3D" = "xyes"; then
dnl this is much more than we want DIRECT3D_LIBS="-lgdi32"
DIRECT3D_LIBS="-ld3d -ldxguid -lgdi32"
AC_SUBST(DIRECT3D_LIBS) AC_SUBST(DIRECT3D_LIBS)
fi fi
AC_SUBST(HAVE_DIRECT3D) AC_SUBST(HAVE_DIRECT3D)
@ -1403,6 +1402,19 @@ AG_GST_CHECK_FEATURE(OPENCV, [opencv plugins], opencv, [
AC_SUBST(OPENCV_LIBS) AC_SUBST(OPENCV_LIBS)
]) ])
dnl *** Opus ***
translit(dnm, m, l) AM_CONDITIONAL(USE_OPUS, true)
AG_GST_CHECK_FEATURE(OPUS, [opus], opus, [
PKG_CHECK_MODULES(OPUS, opus >= 0.9.4, [
AC_DEFINE([HAVE_OPUS], 1, [Define if Opus >= 0.9.4 is installed])
HAVE_OPUS="yes"
], [
HAVE_OPUS="no"
])
AC_SUBST(OPUS_CFLAGS)
AC_SUBST(OPUS_LIBS)
])
dnl *** rsvg *** dnl *** rsvg ***
translit(dnm, m, l) AM_CONDITIONAL(USE_RSVG, true) translit(dnm, m, l) AM_CONDITIONAL(USE_RSVG, true)
AG_GST_CHECK_FEATURE(RSVG, [rsvg decoder], rsvg, [ AG_GST_CHECK_FEATURE(RSVG, [rsvg decoder], rsvg, [
@ -1605,7 +1617,9 @@ translit(dnm, m, l) AM_CONDITIONAL(USE_WININET, true)
AG_GST_CHECK_FEATURE(WININET, [Windows internet library], wininet, [ AG_GST_CHECK_FEATURE(WININET, [Windows internet library], wininet, [
AC_MSG_CHECKING([Checking for windows internet support]) AC_MSG_CHECKING([Checking for windows internet support])
AC_CHECK_HEADERS([windows.h wininet.h], AC_CHECK_HEADERS([windows.h wininet.h],
[HAVE_WININET="yes"], [HAVE_WININET="no"]) [HAVE_WININET="yes"], [HAVE_WININET="no"],
[AC_INCLUDES_DEFAULT
#include <windows.h>])
]) ])
dnl *** acm *** dnl *** acm ***
@ -1766,6 +1780,7 @@ AM_CONDITIONAL(USE_NEON, false)
AM_CONDITIONAL(USE_OFA, false) AM_CONDITIONAL(USE_OFA, false)
AM_CONDITIONAL(USE_OPENAL, false) AM_CONDITIONAL(USE_OPENAL, false)
AM_CONDITIONAL(USE_OPENCV, false) AM_CONDITIONAL(USE_OPENCV, false)
AM_CONDITIONAL(USE_OPUS, false)
AM_CONDITIONAL(USE_RSVG, false) AM_CONDITIONAL(USE_RSVG, false)
AM_CONDITIONAL(USE_TIMIDITY, false) AM_CONDITIONAL(USE_TIMIDITY, false)
AM_CONDITIONAL(USE_WILDMIDI, false) AM_CONDITIONAL(USE_WILDMIDI, false)
@ -1890,6 +1905,7 @@ gst/h264parse/Makefile
gst/hdvparse/Makefile gst/hdvparse/Makefile
gst/hls/Makefile gst/hls/Makefile
gst/id3tag/Makefile gst/id3tag/Makefile
gst/inter/Makefile
gst/interlace/Makefile gst/interlace/Makefile
gst/ivfparse/Makefile gst/ivfparse/Makefile
gst/jp2kdecimator/Makefile gst/jp2kdecimator/Makefile
@ -1972,6 +1988,7 @@ tests/examples/camerabin2/Makefile
tests/examples/directfb/Makefile tests/examples/directfb/Makefile
tests/examples/mxf/Makefile tests/examples/mxf/Makefile
tests/examples/scaletempo/Makefile tests/examples/scaletempo/Makefile
tests/examples/opencv/Makefile
tests/icles/Makefile tests/icles/Makefile
ext/voamrwbenc/Makefile ext/voamrwbenc/Makefile
ext/voaacenc/Makefile ext/voaacenc/Makefile
@ -2009,6 +2026,7 @@ ext/neon/Makefile
ext/ofa/Makefile ext/ofa/Makefile
ext/openal/Makefile ext/openal/Makefile
ext/opencv/Makefile ext/opencv/Makefile
ext/opus/Makefile
ext/rsvg/Makefile ext/rsvg/Makefile
ext/resindvd/Makefile ext/resindvd/Makefile
ext/rtmp/Makefile ext/rtmp/Makefile

View file

@ -139,6 +139,7 @@ EXTRA_HFILES = \
$(top_srcdir)/gst/audiovisualizers/gstsynaescope.h \ $(top_srcdir)/gst/audiovisualizers/gstsynaescope.h \
$(top_srcdir)/gst/audiovisualizers/gstwavescope.h \ $(top_srcdir)/gst/audiovisualizers/gstwavescope.h \
$(top_srcdir)/gst/camerabin/gstcamerabin.h \ $(top_srcdir)/gst/camerabin/gstcamerabin.h \
$(top_srcdir)/gst/camerabin2/gstcamerabin2.h \
$(top_srcdir)/gst/coloreffects/gstcoloreffects.h \ $(top_srcdir)/gst/coloreffects/gstcoloreffects.h \
$(top_srcdir)/gst/dataurisrc/gstdataurisrc.h \ $(top_srcdir)/gst/dataurisrc/gstdataurisrc.h \
$(top_srcdir)/gst/dccp/gstdccpclientsink.h \ $(top_srcdir)/gst/dccp/gstdccpclientsink.h \

View file

@ -24,6 +24,7 @@
<xi:include href="xml/element-bulge.xml" /> <xi:include href="xml/element-bulge.xml" />
<xi:include href="xml/element-burn.xml" /> <xi:include href="xml/element-burn.xml" />
<xi:include href="xml/element-camerabin.xml" /> <xi:include href="xml/element-camerabin.xml" />
<xi:include href="xml/element-camerabin2.xml" />
<xi:include href="xml/element-celtdec.xml" /> <xi:include href="xml/element-celtdec.xml" />
<xi:include href="xml/element-celtenc.xml" /> <xi:include href="xml/element-celtenc.xml" />
<xi:include href="xml/element-chromium.xml" /> <xi:include href="xml/element-chromium.xml" />
@ -89,7 +90,9 @@
<xi:include href="xml/element-pyramidsegment.xml" /> <xi:include href="xml/element-pyramidsegment.xml" />
<xi:include href="xml/element-rsvgdec.xml" /> <xi:include href="xml/element-rsvgdec.xml" />
<xi:include href="xml/element-rsvgoverlay.xml" /> <xi:include href="xml/element-rsvgoverlay.xml" />
<xi:include href="xml/element-rtpdtmfdepay.xml" /> <xi:include href="xml/element-rtmpsink.xml" />
<xi:include href="xml/element-rtmpsrc.xml" />
<xi:include href="xml/element-rtpmux.xml" />
<xi:include href="xml/element-rtpdtmfsrc.xml" /> <xi:include href="xml/element-rtpdtmfsrc.xml" />
<xi:include href="xml/element-rtpdtmfmux.xml" /> <xi:include href="xml/element-rtpdtmfmux.xml" />
<xi:include href="xml/element-rtpmux.xml" /> <xi:include href="xml/element-rtpmux.xml" />
@ -140,6 +143,7 @@
<xi:include href="xml/plugin-bayer.xml" /> <xi:include href="xml/plugin-bayer.xml" />
<xi:include href="xml/plugin-bz2.xml" /> <xi:include href="xml/plugin-bz2.xml" />
<xi:include href="xml/plugin-camerabin.xml" /> <xi:include href="xml/plugin-camerabin.xml" />
<xi:include href="xml/plugin-camerabin2.xml" />
<xi:include href="xml/plugin-cdaudio.xml" /> <xi:include href="xml/plugin-cdaudio.xml" />
<xi:include href="xml/plugin-cdxaparse.xml" /> <xi:include href="xml/plugin-cdxaparse.xml" />
<xi:include href="xml/plugin-celt.xml" /> <xi:include href="xml/plugin-celt.xml" />
@ -196,6 +200,7 @@
<xi:include href="xml/plugin-real.xml" /> <xi:include href="xml/plugin-real.xml" />
<xi:include href="xml/plugin-rfbsrc.xml" /> <xi:include href="xml/plugin-rfbsrc.xml" />
<xi:include href="xml/plugin-rsvg.xml" /> <xi:include href="xml/plugin-rsvg.xml" />
<xi:include href="xml/plugin-rtmp.xml" />
<xi:include href="xml/plugin-rtpmux.xml" /> <xi:include href="xml/plugin-rtpmux.xml" />
<xi:include href="xml/plugin-scaletempo.xml" /> <xi:include href="xml/plugin-scaletempo.xml" />
<xi:include href="xml/plugin-sdl.xml" /> <xi:include href="xml/plugin-sdl.xml" />

View file

@ -27,34 +27,6 @@ GstAiffParseState
gst_aiff_parse_get_type gst_aiff_parse_get_type
</SECTION> </SECTION>
<SECTION>
<FILE>element-voaacenc</FILE>
<TITLE>voaacenc</TITLE>
GstVoAacEnc
<SUBSECTION Standard>
GstVoAacEncClass
GST_VOAACENC
GST_VOAACENC_CLASS
GST_IS_VOAACENC
GST_IS_VOAACENC_CLASS
GST_TYPE_VOAACENC
gst_voaacenc_get_type
</SECTION>
<SECTION>
<FILE>element-voamrwbenc</FILE>
<TITLE>voamrwbenc</TITLE>
GstVoAmrwbEnc
<SUBSECTION Standard>
GstVoAmrwbEncClass
GST_VOAMRWBENC
GST_VOAMRWBENC_CLASS
GST_IS_VOAMRWBENC
GST_IS_VOAMRWBENC_CLASS
GST_TYPE_VOAMRWBENC
gst_voamrwbenc_get_type
</SECTION>
<SECTION> <SECTION>
<FILE>element-assrender</FILE> <FILE>element-assrender</FILE>
<TITLE>assrender</TITLE> <TITLE>assrender</TITLE>
@ -143,6 +115,20 @@ GST_IS_CAMERABIN_CLASS
gst_camerabin_get_type gst_camerabin_get_type
</SECTION> </SECTION>
<SECTION>
<FILE>element-camerabin2</FILE>
<TITLE>camerabin2</TITLE>
GstCameraBin2
<SUBSECTION Standard>
GstCameraBin2Class
GST_CAMERABIN2
GST_IS_CAMERABIN2
GST_TYPE_CAMERABIN2
GST_CAMERABIN2_CLASS
GST_IS_CAMERABIN2_CLASS
gst_camerabin2_get_type
</SECTION>
<SECTION> <SECTION>
<FILE>element-celtdec</FILE> <FILE>element-celtdec</FILE>
<TITLE>celtdec</TITLE> <TITLE>celtdec</TITLE>
@ -1160,6 +1146,34 @@ GST_TYPE_RSVG_DEC
gst_rsvg_dec_get_type gst_rsvg_dec_get_type
</SECTION> </SECTION>
<SECTION>
<FILE>element-rtmpsink</FILE>
<TITLE>rtmpsink</TITLE>
GstRTMPSink
<SUBSECTION Standard>
GstRTMPSinkClass
GST_RTMP_SINK
GST_IS_RTMP_SINK
GST_TYPE_RTMP_SINK
gst_rtmp_sink_get_type
GST_RTMP_SINK_CLASS
GST_IS_RTMP_SINK_CLASS
</SECTION>
<SECTION>
<FILE>element-rtmpsrc</FILE>
<TITLE>rtmpsrc</TITLE>
GstRTMPSrc
<SUBSECTION Standard>
GstRTMPSrcClass
GST_RTMP_SRC
GST_IS_RTMP_SRC
GST_TYPE_RTMP_SRC
gst_rtmp_src_get_type
GST_RTMP_SRC_CLASS
GST_IS_RTMP_SRC_CLASS
</SECTION>
<SECTION> <SECTION>
<FILE>element-rtpdtmfdepay</FILE> <FILE>element-rtpdtmfdepay</FILE>
<TITLE>rtpdtmfdepay</TITLE> <TITLE>rtpdtmfdepay</TITLE>
@ -1639,6 +1653,34 @@ GST_IS_WILDMIDI_CLASS
GST_TYPE_WILDMIDI GST_TYPE_WILDMIDI
</SECTION> </SECTION>
<SECTION>
<FILE>element-voaacenc</FILE>
<TITLE>voaacenc</TITLE>
GstVoAacEnc
<SUBSECTION Standard>
GstVoAacEncClass
GST_VOAACENC
GST_VOAACENC_CLASS
GST_IS_VOAACENC
GST_IS_VOAACENC_CLASS
GST_TYPE_VOAACENC
gst_voaacenc_get_type
</SECTION>
<SECTION>
<FILE>element-voamrwbenc</FILE>
<TITLE>voamrwbenc</TITLE>
GstVoAmrwbEnc
<SUBSECTION Standard>
GstVoAmrwbEncClass
GST_VOAMRWBENC
GST_VOAMRWBENC_CLASS
GST_IS_VOAMRWBENC
GST_IS_VOAMRWBENC_CLASS
GST_TYPE_VOAMRWBENC
gst_voamrwbenc_get_type
</SECTION>
<SECTION> <SECTION>
<FILE>element-vp8dec</FILE> <FILE>element-vp8dec</FILE>
<TITLE>vp8dec</TITLE> <TITLE>vp8dec</TITLE>

View file

@ -1701,7 +1701,7 @@
<ARG> <ARG>
<NAME>GstDvbSrc::diseqc-source</NAME> <NAME>GstDvbSrc::diseqc-source</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>[-1,7]</RANGE> <RANGE>[G_MAXULONG,7]</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>diseqc source</NICK> <NICK>diseqc source</NICK>
<BLURB>DISEqC selected source (-1 disabled) (DVB-S).</BLURB> <BLURB>DISEqC selected source (-1 disabled) (DVB-S).</BLURB>
@ -17155,7 +17155,7 @@
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Path where to search for RealPlayer codecs</NICK> <NICK>Path where to search for RealPlayer codecs</NICK>
<BLURB>Path where to search for RealPlayer codecs.</BLURB> <BLURB>Path where to search for RealPlayer codecs.</BLURB>
<DEFAULT>"/usr/lib/win32:/usr/lib/codecs:/usr/local/RealPlayer/codecs:/usr/local/lib/win32:/usr/local/lib/codecs"</DEFAULT> <DEFAULT>"/usr/lib64/win32:/usr/lib64/codecs:/usr/local/lib64/win32:/usr/local/lib64/codecs"</DEFAULT>
</ARG> </ARG>
<ARG> <ARG>
@ -17195,7 +17195,7 @@
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Path where to search for RealPlayer codecs</NICK> <NICK>Path where to search for RealPlayer codecs</NICK>
<BLURB>Path where to search for RealPlayer codecs.</BLURB> <BLURB>Path where to search for RealPlayer codecs.</BLURB>
<DEFAULT>"/usr/lib/win32:/usr/lib/codecs:/usr/local/RealPlayer/codecs:/usr/local/lib/win32:/usr/local/lib/codecs"</DEFAULT> <DEFAULT>"/usr/lib64/win32:/usr/lib64/codecs:/usr/local/lib64/win32:/usr/local/lib64/codecs"</DEFAULT>
</ARG> </ARG>
<ARG> <ARG>
@ -17851,7 +17851,7 @@
<ARG> <ARG>
<NAME>DvbBaseBin::diseqc-source</NAME> <NAME>DvbBaseBin::diseqc-source</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>[-1,7]</RANGE> <RANGE>[G_MAXULONG,7]</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>diseqc source</NICK> <NICK>diseqc source</NICK>
<BLURB>DISEqC selected source (-1 disabled) (DVB-S).</BLURB> <BLURB>DISEqC selected source (-1 disabled) (DVB-S).</BLURB>
@ -22026,7 +22026,7 @@
<ARG> <ARG>
<NAME>GstDCCPClientSrc::sockfd</NAME> <NAME>GstDCCPClientSrc::sockfd</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>>= -1</RANGE> <RANGE>>= G_MAXULONG</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Socket fd</NICK> <NICK>Socket fd</NICK>
<BLURB>The socket file descriptor.</BLURB> <BLURB>The socket file descriptor.</BLURB>
@ -22066,7 +22066,7 @@
<ARG> <ARG>
<NAME>GstDCCPServerSink::sockfd</NAME> <NAME>GstDCCPServerSink::sockfd</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>>= -1</RANGE> <RANGE>>= G_MAXULONG</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Socket fd</NICK> <NICK>Socket fd</NICK>
<BLURB>The client socket file descriptor.</BLURB> <BLURB>The client socket file descriptor.</BLURB>
@ -22126,7 +22126,7 @@
<ARG> <ARG>
<NAME>GstDCCPClientSink::sockfd</NAME> <NAME>GstDCCPClientSink::sockfd</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>>= -1</RANGE> <RANGE>>= G_MAXULONG</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Socket fd</NICK> <NICK>Socket fd</NICK>
<BLURB>The socket file descriptor.</BLURB> <BLURB>The socket file descriptor.</BLURB>
@ -22186,7 +22186,7 @@
<ARG> <ARG>
<NAME>GstDCCPServerSrc::sockfd</NAME> <NAME>GstDCCPServerSrc::sockfd</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>>= -1</RANGE> <RANGE>>= G_MAXULONG</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Socket fd</NICK> <NICK>Socket fd</NICK>
<BLURB>The client socket file descriptor.</BLURB> <BLURB>The client socket file descriptor.</BLURB>
@ -22246,7 +22246,7 @@
<ARG> <ARG>
<NAME>GstMpegTSDemux::program-number</NAME> <NAME>GstMpegTSDemux::program-number</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>>= -1</RANGE> <RANGE>>= G_MAXULONG</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Program Number</NICK> <NICK>Program Number</NICK>
<BLURB>Program number to demux for (-1 to ignore).</BLURB> <BLURB>Program number to demux for (-1 to ignore).</BLURB>
@ -22306,7 +22306,7 @@
<ARG> <ARG>
<NAME>GstPcapParse::dst-port</NAME> <NAME>GstPcapParse::dst-port</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>[-1,65535]</RANGE> <RANGE>[G_MAXULONG,65535]</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Destination port</NICK> <NICK>Destination port</NICK>
<BLURB>Destination port to restrict to.</BLURB> <BLURB>Destination port to restrict to.</BLURB>
@ -22326,7 +22326,7 @@
<ARG> <ARG>
<NAME>GstPcapParse::src-port</NAME> <NAME>GstPcapParse::src-port</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>[-1,65535]</RANGE> <RANGE>[G_MAXULONG,65535]</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Source port</NICK> <NICK>Source port</NICK>
<BLURB>Source port to restrict to.</BLURB> <BLURB>Source port to restrict to.</BLURB>
@ -23356,7 +23356,7 @@
<ARG> <ARG>
<NAME>GstRTPDTMFSrc::seqnum-offset</NAME> <NAME>GstRTPDTMFSrc::seqnum-offset</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>>= -1</RANGE> <RANGE>>= G_MAXULONG</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Sequence number Offset</NICK> <NICK>Sequence number Offset</NICK>
<BLURB>Offset to add to all outgoing seqnum (-1 = random).</BLURB> <BLURB>Offset to add to all outgoing seqnum (-1 = random).</BLURB>
@ -23386,7 +23386,7 @@
<ARG> <ARG>
<NAME>GstRTPDTMFSrc::timestamp-offset</NAME> <NAME>GstRTPDTMFSrc::timestamp-offset</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>>= -1</RANGE> <RANGE>>= G_MAXULONG</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Timestamp Offset</NICK> <NICK>Timestamp Offset</NICK>
<BLURB>Offset to add to all outgoing timestamps (-1 = random).</BLURB> <BLURB>Offset to add to all outgoing timestamps (-1 = random).</BLURB>
@ -23436,7 +23436,7 @@
<ARG> <ARG>
<NAME>GstRTPMux::seqnum-offset</NAME> <NAME>GstRTPMux::seqnum-offset</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>>= -1</RANGE> <RANGE>>= G_MAXULONG</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Sequence number Offset</NICK> <NICK>Sequence number Offset</NICK>
<BLURB>Offset to add to all outgoing seqnum (-1 = random).</BLURB> <BLURB>Offset to add to all outgoing seqnum (-1 = random).</BLURB>
@ -23456,7 +23456,7 @@
<ARG> <ARG>
<NAME>GstRTPMux::timestamp-offset</NAME> <NAME>GstRTPMux::timestamp-offset</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>>= -1</RANGE> <RANGE>>= G_MAXULONG</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Timestamp Offset</NICK> <NICK>Timestamp Offset</NICK>
<BLURB>Offset to add to all outgoing timestamps (-1 = random).</BLURB> <BLURB>Offset to add to all outgoing timestamps (-1 = random).</BLURB>
@ -27930,7 +27930,7 @@
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>bitrate</NICK> <NICK>bitrate</NICK>
<BLURB>bitrate.</BLURB> <BLURB>bitrate.</BLURB>
<DEFAULT>0</DEFAULT> <DEFAULT>13824000</DEFAULT>
</ARG> </ARG>
<ARG> <ARG>
@ -28030,7 +28030,7 @@
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>enable_multiquant</NICK> <NICK>enable_multiquant</NICK>
<BLURB>enable_multiquant.</BLURB> <BLURB>enable_multiquant.</BLURB>
<DEFAULT>FALSE</DEFAULT> <DEFAULT>TRUE</DEFAULT>
</ARG> </ARG>
<ARG> <ARG>
@ -28116,11 +28116,11 @@
<ARG> <ARG>
<NAME>GstSchroEnc::horiz-slices</NAME> <NAME>GstSchroEnc::horiz-slices</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>>= 0</RANGE> <RANGE>>= 1</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>horiz_slices</NICK> <NICK>horiz_slices</NICK>
<BLURB>horiz_slices.</BLURB> <BLURB>horiz_slices.</BLURB>
<DEFAULT>0</DEFAULT> <DEFAULT>8</DEFAULT>
</ARG> </ARG>
<ARG> <ARG>
@ -28130,7 +28130,7 @@
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>inter_wavelet</NICK> <NICK>inter_wavelet</NICK>
<BLURB>inter_wavelet.</BLURB> <BLURB>inter_wavelet.</BLURB>
<DEFAULT>desl_dubuc_9_7</DEFAULT> <DEFAULT>le_gall_5_3</DEFAULT>
</ARG> </ARG>
<ARG> <ARG>
@ -28220,7 +28220,7 @@
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>magic_chroma_lambda_scale</NICK> <NICK>magic_chroma_lambda_scale</NICK>
<BLURB>magic_chroma_lambda_scale.</BLURB> <BLURB>magic_chroma_lambda_scale.</BLURB>
<DEFAULT>0.1</DEFAULT> <DEFAULT>0.01</DEFAULT>
</ARG> </ARG>
<ARG> <ARG>
@ -28320,7 +28320,7 @@
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>magic_scene_change_threshold</NICK> <NICK>magic_scene_change_threshold</NICK>
<BLURB>magic_scene_change_threshold.</BLURB> <BLURB>magic_scene_change_threshold.</BLURB>
<DEFAULT>3</DEFAULT> <DEFAULT>0.2</DEFAULT>
</ARG> </ARG>
<ARG> <ARG>
@ -28490,17 +28490,17 @@
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>transform_depth</NICK> <NICK>transform_depth</NICK>
<BLURB>transform_depth.</BLURB> <BLURB>transform_depth.</BLURB>
<DEFAULT>3</DEFAULT> <DEFAULT>4</DEFAULT>
</ARG> </ARG>
<ARG> <ARG>
<NAME>GstSchroEnc::vert-slices</NAME> <NAME>GstSchroEnc::vert-slices</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>>= 0</RANGE> <RANGE>>= 1</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>vert_slices</NICK> <NICK>vert_slices</NICK>
<BLURB>vert_slices.</BLURB> <BLURB>vert_slices.</BLURB>
<DEFAULT>0</DEFAULT> <DEFAULT>6</DEFAULT>
</ARG> </ARG>
<ARG> <ARG>
@ -46096,7 +46096,7 @@
<ARG> <ARG>
<NAME>GstVideoMaxRate::average-period</NAME> <NAME>GstVideoMaxRate::average-period</NAME>
<TYPE>guint64</TYPE> <TYPE>guint64</TYPE>
<RANGE>[1,G_MAXINT64]</RANGE> <RANGE>[1,G_MAXLONG]</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Period over which to average</NICK> <NICK>Period over which to average</NICK>
<BLURB>Period over which to average the framerate (in ns).</BLURB> <BLURB>Period over which to average the framerate (in ns).</BLURB>
@ -46946,7 +46946,7 @@
<ARG> <ARG>
<NAME>GstJP2kDecimator::max-decomposition-levels</NAME> <NAME>GstJP2kDecimator::max-decomposition-levels</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>[-1,32]</RANGE> <RANGE>[G_MAXULONG,32]</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Maximum Number of Decomposition Levels</NICK> <NICK>Maximum Number of Decomposition Levels</NICK>
<BLURB>Maximum number of decomposition levels to keep (-1 == all).</BLURB> <BLURB>Maximum number of decomposition levels to keep (-1 == all).</BLURB>
@ -47336,7 +47336,7 @@
<ARG> <ARG>
<NAME>GstTSDemux::program-number</NAME> <NAME>GstTSDemux::program-number</NAME>
<TYPE>gint</TYPE> <TYPE>gint</TYPE>
<RANGE>>= -1</RANGE> <RANGE>>= G_MAXULONG</RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Program number</NICK> <NICK>Program number</NICK>
<BLURB>Program Number to demux for (-1 to ignore).</BLURB> <BLURB>Program Number to demux for (-1 to ignore).</BLURB>
@ -47779,8 +47779,8 @@
<RANGE></RANGE> <RANGE></RANGE>
<FLAGS>rw</FLAGS> <FLAGS>rw</FLAGS>
<NICK>Location</NICK> <NICK>Location</NICK>
<BLURB>Location to save the captured files. A %d might be used on thefilename as a placeholder for a numeric index of the capture.Default for images is img_%d and vid_%d for videos.</BLURB> <BLURB>Location to save the captured files. A %d might be used on thefilename as a placeholder for a numeric index of the capture.Default is cap_%d.</BLURB>
<DEFAULT>"img_%d"</DEFAULT> <DEFAULT>"cap_%d"</DEFAULT>
</ARG> </ARG>
<ARG> <ARG>
@ -57493,3 +57493,163 @@
<DEFAULT>FALSE</DEFAULT> <DEFAULT>FALSE</DEFAULT>
</ARG> </ARG>
<ARG>
<NAME>GstCompare::meta</NAME>
<TYPE>GstBufferCopyFlags</TYPE>
<RANGE></RANGE>
<FLAGS>rw</FLAGS>
<NICK>Compare Meta</NICK>
<BLURB>Indicates which metadata should be compared.</BLURB>
<DEFAULT>GST_BUFFER_COPY_FLAGS|GST_BUFFER_COPY_TIMESTAMPS|GST_BUFFER_COPY_CAPS</DEFAULT>
</ARG>
<ARG>
<NAME>GstCompare::method</NAME>
<TYPE>GstCompareMethod</TYPE>
<RANGE></RANGE>
<FLAGS>rw</FLAGS>
<NICK>Content Compare Method</NICK>
<BLURB>Method to compare buffer content.</BLURB>
<DEFAULT>Memory</DEFAULT>
</ARG>
<ARG>
<NAME>GstCompare::offset-ts</NAME>
<TYPE>gboolean</TYPE>
<RANGE></RANGE>
<FLAGS>rw</FLAGS>
<NICK>Offsets Timestamps</NICK>
<BLURB>Consider OFFSET and OFFSET_END part of timestamp metadata.</BLURB>
<DEFAULT>FALSE</DEFAULT>
</ARG>
<ARG>
<NAME>GstCompare::threshold</NAME>
<TYPE>gdouble</TYPE>
<RANGE>>= 0</RANGE>
<FLAGS>rw</FLAGS>
<NICK>Content Threshold</NICK>
<BLURB>Threshold beyond which to consider content different as determined by content-method.</BLURB>
<DEFAULT>0</DEFAULT>
</ARG>
<ARG>
<NAME>GstCompare::upper</NAME>
<TYPE>gboolean</TYPE>
<RANGE></RANGE>
<FLAGS>rw</FLAGS>
<NICK>Threshold Upper Bound</NICK>
<BLURB>Whether threshold value is upper bound or lower bound for difference measure.</BLURB>
<DEFAULT>TRUE</DEFAULT>
</ARG>
<ARG>
<NAME>GstOpenalSrc::device</NAME>
<TYPE>gchar*</TYPE>
<RANGE></RANGE>
<FLAGS>rw</FLAGS>
<NICK>Device</NICK>
<BLURB>Specific capture device to open, NULL indicate default device.</BLURB>
<DEFAULT>NULL</DEFAULT>
</ARG>
<ARG>
<NAME>GstOpenalSrc::device-name</NAME>
<TYPE>gchar*</TYPE>
<RANGE></RANGE>
<FLAGS>r</FLAGS>
<NICK>Device name</NICK>
<BLURB>Readable name of device.</BLURB>
<DEFAULT>NULL</DEFAULT>
</ARG>
<ARG>
<NAME>GstOpenALSink::context-handle</NAME>
<TYPE>gpointer</TYPE>
<RANGE></RANGE>
<FLAGS>rw</FLAGS>
<NICK>ALCcontext</NICK>
<BLURB>Custom playback context.</BLURB>
<DEFAULT></DEFAULT>
</ARG>
<ARG>
<NAME>GstOpenALSink::device</NAME>
<TYPE>gchar*</TYPE>
<RANGE></RANGE>
<FLAGS>rw</FLAGS>
<NICK>Device</NICK>
<BLURB>OpenAL device string.</BLURB>
<DEFAULT>NULL</DEFAULT>
</ARG>
<ARG>
<NAME>GstOpenALSink::device-handle</NAME>
<TYPE>gpointer</TYPE>
<RANGE></RANGE>
<FLAGS>rw</FLAGS>
<NICK>ALCdevice</NICK>
<BLURB>Custom playback device.</BLURB>
<DEFAULT></DEFAULT>
</ARG>
<ARG>
<NAME>GstOpenALSink::device-name</NAME>
<TYPE>gchar*</TYPE>
<RANGE></RANGE>
<FLAGS>r</FLAGS>
<NICK>Device name</NICK>
<BLURB>Opened OpenAL device name.</BLURB>
<DEFAULT>""</DEFAULT>
</ARG>
<ARG>
<NAME>GstOpenALSink::source-id</NAME>
<TYPE>guint</TYPE>
<RANGE></RANGE>
<FLAGS>rw</FLAGS>
<NICK>Source ID</NICK>
<BLURB>Custom playback sID.</BLURB>
<DEFAULT>0</DEFAULT>
</ARG>
<ARG>
<NAME>GstRTMPSink::location</NAME>
<TYPE>gchar*</TYPE>
<RANGE></RANGE>
<FLAGS>rw</FLAGS>
<NICK>File Location</NICK>
<BLURB>Location of the file to read.</BLURB>
<DEFAULT>NULL</DEFAULT>
</ARG>
<ARG>
<NAME>GstDecklinkSrc::connection</NAME>
<TYPE>GstDecklinkConnection</TYPE>
<RANGE></RANGE>
<FLAGS>rwx</FLAGS>
<NICK>Connection</NICK>
<BLURB>Connection.</BLURB>
<DEFAULT>sdi</DEFAULT>
</ARG>
<ARG>
<NAME>GstDecklinkSrc::mode</NAME>
<TYPE>GstDecklinkModes</TYPE>
<RANGE></RANGE>
<FLAGS>rwx</FLAGS>
<NICK>Mode</NICK>
<BLURB>Mode.</BLURB>
<DEFAULT>ntsc</DEFAULT>
</ARG>
<ARG>
<NAME>GstDecklinkSink::mode</NAME>
<TYPE>GstDecklinkModes</TYPE>
<RANGE></RANGE>
<FLAGS>rwx</FLAGS>
<NICK>Mode</NICK>
<BLURB>Mode.</BLURB>
<DEFAULT>ntsc</DEFAULT>
</ARG>

View file

@ -1,4 +1,5 @@
GObject GObject
GstAdapter
GstColorBalanceChannel GstColorBalanceChannel
GstObject GstObject
GstBus GstBus
@ -34,21 +35,26 @@ GObject
GstBaseAudioSink GstBaseAudioSink
GstAudioSink GstAudioSink
GstApExSink GstApExSink
GstNasSink
GstSDLAudioSink GstSDLAudioSink
GstChecksumSink GstChecksumSink
GstCurlSink
GstDCCPClientSink GstDCCPClientSink
GstDCCPServerSink GstDCCPServerSink
GstFBDEVSink GstFBDEVSink
GstInterAudioSink
GstInterVideoSink
GstLinsysSdiSink GstLinsysSdiSink
GstSFSink GstSFSink
GstShmSink GstShmSink
GstVideoSink GstVideoSink
GstDfbVideoSink
GstSDLVideoSink GstSDLVideoSink
VdpSink
GstBaseSrc GstBaseSrc
GstDTMFSrc GstDTMFSrc
GstDataURISrc GstDataURISrc
GstFliteTestSrc
GstInterAudioSrc
GstInterVideoSrc
GstLinsysSdiSrc GstLinsysSdiSrc
GstPushSrc GstPushSrc
GstDCCPClientSrc GstDCCPClientSrc
@ -56,8 +62,6 @@ GObject
GstDc1394 GstDc1394
GstDvbSrc GstDvbSrc
GstMMS GstMMS
GstNeonhttpSrc
GstRTMPSrc
GstRfbSrc GstRfbSrc
GstShmSrc GstShmSrc
GstVCDSrc GstVCDSrc
@ -109,23 +113,11 @@ GObject
GstMirror GstMirror
GstRotate GstRotate
GstSquare GstSquare
GstOpencvVideoFilter
GstCvDilateErode
GstCvDilate
GstCvErode
GstCvEqualizeHist
GstCvLaplace
GstCvSmooth
GstCvSobel
Gstfacedetect
GstRsvgOverlay GstRsvgOverlay
GstSolarize GstSolarize
GstVideo3DConvert
GstVideo3DPresent
GstVideoAnalyse GstVideoAnalyse
GstVideoDetect GstVideoDetect
GstVideoMark GstVideoMark
GstZBar
GstVideoFilter2 GstVideoFilter2
GstSceneChange GstSceneChange
GstZebraStripe GstZebraStripe
@ -133,11 +125,9 @@ GObject
GstBaseVideoCodec GstBaseVideoCodec
GstBaseVideoDecoder GstBaseVideoDecoder
GstSchroDec GstSchroDec
GstVP8Dec
GstBaseVideoEncoder GstBaseVideoEncoder
GstDiracEnc GstDiracEnc
GstSchroEnc GstSchroEnc
GstVP8Enc
GstBin GstBin
DvbBaseBin DvbBaseBin
GstAutoConvert GstAutoConvert
@ -146,12 +136,6 @@ GObject
GstWrapperCameraBinSrc GstWrapperCameraBinSrc
GstFPSDisplaySink GstFPSDisplaySink
GstFaceOverlay GstFaceOverlay
GstGSettingsSwitchSink
GstGSettingsAudioSink
GstGSettingsVideoSink
GstGSettingsSwitchSrc
GstGSettingsAudioSrc
GstGSettingsVideoSrc
GstPipeline GstPipeline
GstCameraBin GstCameraBin
GstCameraBin2 GstCameraBin2
@ -165,12 +149,12 @@ GObject
GstCeltDec GstCeltDec
GstCeltEnc GstCeltEnc
GstChopMyData GstChopMyData
GstCompare
GstDVBSubOverlay GstDVBSubOverlay
GstDVDSpu GstDVDSpu
GstDecklinkSink GstDecklinkSink
GstDecklinkSrc GstDecklinkSrc
GstDtsDec GstDtsDec
GstFaac
GstFaad GstFaad
GstFestival GstFestival
GstFieldAnalysis GstFieldAnalysis
@ -182,7 +166,6 @@ GObject
GstId3BaseMux GstId3BaseMux
GstId3Mux GstId3Mux
GstInterlace GstInterlace
GstInvtelecine
GstIvfParse GstIvfParse
GstJP2kDecimator GstJP2kDecimator
GstJifMux GstJifMux
@ -196,16 +179,16 @@ GObject
GstMSE GstMSE
GstMXFDemux GstMXFDemux
GstMXFMux GstMXFMux
GstMimDec
GstMimEnc
GstModPlug GstModPlug
GstMpegPSDemux GstMpegPSDemux
GstMpegTSDemux GstMpegTSDemux
GstMplex
GstMusepackDec GstMusepackDec
GstMveDemux GstMveDemux
GstMveMux GstMveMux
GstNsfDec GstNsfDec
GstNuvDemux GstNuvDemux
GstOpencvTextOverlay
GstPcapParse GstPcapParse
GstPitch GstPitch
GstPnmdec GstPnmdec
@ -225,494 +208,27 @@ GObject
GstAudioSegmentClip GstAudioSegmentClip
GstVideoSegmentClip GstVideoSegmentClip
GstSignalProcessor GstSignalProcessor
calf-sourceforge-net-plugins-BassEnhancer
calf-sourceforge-net-plugins-Compressor
calf-sourceforge-net-plugins-Deesser
calf-sourceforge-net-plugins-Equalizer12Band
calf-sourceforge-net-plugins-Equalizer5Band
calf-sourceforge-net-plugins-Equalizer8Band
calf-sourceforge-net-plugins-Exciter
calf-sourceforge-net-plugins-Filter
calf-sourceforge-net-plugins-Filterclavier
calf-sourceforge-net-plugins-Flanger
calf-sourceforge-net-plugins-Fluidsynth
calf-sourceforge-net-plugins-Gate
calf-sourceforge-net-plugins-Monosynth
calf-sourceforge-net-plugins-MultiChorus
calf-sourceforge-net-plugins-Multibandcompressor
calf-sourceforge-net-plugins-Organ
calf-sourceforge-net-plugins-Phaser
calf-sourceforge-net-plugins-Pulsator
calf-sourceforge-net-plugins-Reverb
calf-sourceforge-net-plugins-RotarySpeaker
calf-sourceforge-net-plugins-Saturator
calf-sourceforge-net-plugins-Sidechaincompressor
calf-sourceforge-net-plugins-Sidechaingate
calf-sourceforge-net-plugins-VintageDelay
calf-sourceforge-net-plugins-Wavetable
invadarecords-com-plugins-lv2-compressor-mono
invadarecords-com-plugins-lv2-compressor-stereo
invadarecords-com-plugins-lv2-delay-mono
invadarecords-com-plugins-lv2-delay-sum
invadarecords-com-plugins-lv2-erreverb-mono
invadarecords-com-plugins-lv2-erreverb-sum
invadarecords-com-plugins-lv2-filter-hpf-mono
invadarecords-com-plugins-lv2-filter-hpf-stereo
invadarecords-com-plugins-lv2-filter-lpf-mono
invadarecords-com-plugins-lv2-filter-lpf-stereo
invadarecords-com-plugins-lv2-input
invadarecords-com-plugins-lv2-meter
invadarecords-com-plugins-lv2-phaser-mono
invadarecords-com-plugins-lv2-phaser-stereo
invadarecords-com-plugins-lv2-phaser-sum
invadarecords-com-plugins-lv2-testtone
invadarecords-com-plugins-lv2-tube-mono
invadarecords-com-plugins-lv2-tube-stereo
ladspa-AWfilt
ladspa-Accumulate
ladspa-Ambisonics-11-cube-decoder
ladspa-Ambisonics-11-hexagon-decoder
ladspa-Ambisonics-11-mono-panner
ladspa-Ambisonics-11-rotator
ladspa-Ambisonics-11-square-decoder
ladspa-Ambisonics-11-stereo-panner
ladspa-Ambisonics-21-panner
ladspa-Ambisonics-21-rotator
ladspa-Ambisonics-22-panner
ladspa-Ambisonics-22-rotator
ladspa-Ambisonics-31-panner
ladspa-Ambisonics-31-rotator
ladspa-Ambisonics-33-panner
ladspa-Ambisonics-33-rotator
ladspa-AmpIII
ladspa-AmpIV
ladspa-AmpV
ladspa-AmpVTS
ladspa-AutoWah
ladspa-BassEnhancer
ladspa-BoosterM
ladspa-BoosterS
ladspa-CEO
ladspa-CVFreq
ladspa-CabinetI
ladspa-CabinetII
ladspa-Chorus1
ladspa-Chorus1-2x2
ladspa-Chorus2
ladspa-ChorusI
ladspa-ChorusII
ladspa-Click
ladspa-Clip
ladspa-Compress
ladspa-Compressor
ladspa-Deesser
ladspa-Dirac
ladspa-Eq
ladspa-Eq2x2
ladspa-Equalizer12Band
ladspa-Equalizer5Band
ladspa-Equalizer8Band
ladspa-Exaggerate
ladspa-Exciter
ladspa-Filter
ladspa-Filterclavier
ladspa-Flanger
ladspa-G2reverb
ladspa-Gate
ladspa-HRTF
ladspa-JVRev
ladspa-Lorenz
ladspa-MUSIC
ladspa-MUSICDrum
ladspa-MultiChorus
ladspa-Multibandcompressor
ladspa-Mvchpf-1
ladspa-Mvclpf-1
ladspa-Mvclpf-2
ladspa-Mvclpf-3
ladspa-Mvclpf-4
ladspa-NoisifierM
ladspa-NoisifierS
ladspa-PSG
ladspa-Pan
ladspa-Parametric1
ladspa-Phaser
ladspa-Phaser1
ladspa-Phaser1+LFO
ladspa-PhaserI
ladspa-PhaserII
ladspa-Plate
ladspa-Plate2x2
ladspa-PreampIII
ladspa-PreampIV
ladspa-Pulsator
ladspa-Pulse-VCO
ladspa-Rec-VCO
ladspa-Reverb
ladspa-Roessler
ladspa-RotarySpeaker
ladspa-SCC
ladspa-SID
ladspa-Saturator
ladspa-Saw-VCO
ladspa-Scape
ladspa-Sidechaincompressor
ladspa-Sidechaingate
ladspa-Sin
ladspa-SooperLooper
ladspa-StereoChorusI
ladspa-StereoChorusII
ladspa-SweepVFI
ladspa-SweepVFII
ladspa-Sync-Rect-VCO
ladspa-Sync-Saw-VCO
ladspa-Sync-Tri-VCO
ladspa-ToneStack
ladspa-ToneStackLT
ladspa-Transpose
ladspa-Tricardioid-to-AMB
ladspa-TripleChorus
ladspa-VCOd
ladspa-VCOs
ladspa-VariNoiseM
ladspa-VariNoiseS
ladspa-VintageDelay
ladspa-Virtualmic
ladspa-White
ladspa-XShaperM
ladspa-XShaperS
ladspa-adenv
ladspa-adenv-lvl
ladspa-adsr
ladspa-adsr-g+t
ladspa-alias
ladspa-alienwah-mono
ladspa-alienwah-stereo
ladspa-allpass-c
ladspa-allpass-l
ladspa-allpass-n
ladspa-am
ladspa-amPitchshift
ladspa-amp
ladspa-amp-gaia-oa
ladspa-amp-gcia-oa
ladspa-amp-mono ladspa-amp-mono
ladspa-amp-stereo ladspa-amp-stereo
ladspa-analogue
ladspa-analogueOsc
ladspa-artificialLatency
ladspa-autoPhaser
ladspa-bandpass-a-iir
ladspa-bandpass-iir
ladspa-bf-rotate-z
ladspa-bf2cube
ladspa-bf2quad
ladspa-bf2stereo
ladspa-bodeShifter
ladspa-bodeShifterCV
ladspa-branch-ia-oaoa
ladspa-branch-ic-ococ
ladspa-butthigh-iir
ladspa-buttlow-iir
ladspa-bwxover-iir
ladspa-canyon-delay
ladspa-chebstortion
ladspa-clipper
ladspa-comb
ladspa-comb-c
ladspa-comb-l
ladspa-comb-n
ladspa-combSplitter
ladspa-comp-aa
ladspa-comp-ac
ladspa-compress-peak
ladspa-compress-rms
ladspa-const
ladspa-crossoverDist
ladspa-dahdsr-cg+t-control
ladspa-dahdsr-fexp
ladspa-dahdsr-g+t-audio
ladspa-dahdsr-g+t-control
ladspa-dahdsr-hexp
ladspa-dcRemove
ladspa-decay
ladspa-decimator
ladspa-declip
ladspa-delay-0-01s
ladspa-delay-0-1s
ladspa-delay-1s
ladspa-delay-5s ladspa-delay-5s
ladspa-delay-60s
ladspa-delay-c
ladspa-delay-l
ladspa-delay-n
ladspa-delayorama
ladspa-difference-iama-oa
ladspa-difference-iamc-oa
ladspa-difference-icma-oa
ladspa-difference-icmc-oc
ladspa-diode
ladspa-disintegrator
ladspa-divider
ladspa-dj-eq
ladspa-dj-eq-mono
ladspa-djFlanger
ladspa-dysonCompress
ladspa-eir
ladspa-encode-bformat
ladspa-encode-fmh
ladspa-expand-peak
ladspa-expand-rms
ladspa-fadDelay
ladspa-fast-xfade
ladspa-fastLookaheadLimiter
ladspa-fbdelay-0-01s
ladspa-fbdelay-0-1s
ladspa-fbdelay-1s
ladspa-fbdelay-5s
ladspa-fbdelay-60s
ladspa-flanger
ladspa-floatNoise
ladspa-fmOsc
ladspa-fmh-rotate-z
ladspa-fmh2bf
ladspa-fmh2oct
ladspa-fmod-fama-oa
ladspa-fmod-famc-oa
ladspa-fmod-fcma-oa
ladspa-fmod-fcmc-oc
ladspa-foldover
ladspa-foo-chop-liver
ladspa-foo-driver
ladspa-foo-limiter
ladspa-foo-limiter-v2
ladspa-foo-saturator
ladspa-foo-transients
ladspa-foo-transients-mono
ladspa-formant-vc
ladspa-fourByFourPole
ladspa-foverdrive
ladspa-freeverb3
ladspa-freqTracker
ladspa-gate
ladspa-giantFlange
ladspa-gong
ladspa-gongBeater
ladspa-grain-scatter
ladspa-gsm
ladspa-gverb
ladspa-hard-gate
ladspa-hardLimiter
ladspa-harmonicGen
ladspa-hermesFilter
ladspa-highpass-iir
ladspa-hilbert
ladspa-hpf ladspa-hpf
ladspa-hz-voct-ar
ladspa-hz-voct-cr
ladspa-identity-audio
ladspa-identity-control
ladspa-imp
ladspa-impulse-fc
ladspa-intNoise
ladspa-interpolator
ladspa-inv
ladspa-karaoke
ladspa-lcrDelay
ladspa-leet-equalizer-bw2x2
ladspa-leet-equalizer-bw2x2-1
ladspa-lfoPhaser
ladspa-limit-peak
ladspa-limit-rms
ladspa-lofi
ladspa-logistic
ladspa-lowpass-iir
ladspa-lp4pole-faraia-oa
ladspa-lp4pole-fcrcia-oa
ladspa-lpf ladspa-lpf
ladspa-lsFilter
ladspa-matched
ladspa-matrixMSSt
ladspa-matrixSpatialiser
ladspa-matrixStMS
ladspa-mbeq
ladspa-mixer
ladspa-modDelay
ladspa-multivoiceChorus
ladspa-mux-ar
ladspa-mux-cr
ladspa-noise-source-white
ladspa-noise-white ladspa-noise-white
ladspa-notch-iir
ladspa-null-ai
ladspa-null-ao
ladspa-null-ci
ladspa-null-co
ladspa-organ
ladspa-peak
ladspa-phasemod
ladspa-pink-full-frequency
ladspa-pink-interpolated-audio
ladspa-pink-sh
ladspa-pitchScale
ladspa-pitchScaleHQ
ladspa-plate
ladspa-pointerCastDistortion
ladspa-power
ladspa-power-cr
ladspa-preamp
ladspa-prob-switch-ar
ladspa-prob-switch-cr
ladspa-product-iaia-oa
ladspa-product-iaic-oa
ladspa-product-icic-oc
ladspa-pulse-fapa-oa
ladspa-pulse-fapc-oa
ladspa-pulse-fcpa-oa
ladspa-pulse-fcpc-oa
ladspa-quantiser100
ladspa-quantiser20
ladspa-quantiser50
ladspa-random-fasa-oa
ladspa-random-fasc-oa
ladspa-random-fcsa-oa
ladspa-random-fcsc-oa
ladspa-range-trans-ar
ladspa-range-trans-cr
ladspa-rateShifter
ladspa-ratio-nada-oa
ladspa-ratio-nadc-oa
ladspa-ratio-ncda-oa
ladspa-ratio-ncdc-oc
ladspa-retroFlange
ladspa-revdelay
ladspa-ringmod-1i1o1l
ladspa-ringmod-2i1o
ladspa-rissetScales
ladspa-rubberband-pitchshifter-mono
ladspa-rubberband-pitchshifter-stereo
ladspa-satanMaximiser
ladspa-sawtooth-fa-oa
ladspa-sawtooth-fc-oa
ladspa-sc1
ladspa-sc2
ladspa-sc3
ladspa-sc4
ladspa-sc4m
ladspa-se4
ladspa-sequencer16
ladspa-sequencer32
ladspa-sequencer64
ladspa-sh-ar
ladspa-sh-cr
ladspa-shaper
ladspa-sifter
ladspa-signal-abs-ar
ladspa-signal-abs-cr
ladspa-sinCos
ladspa-sine-faaa ladspa-sine-faaa
ladspa-sine-faac ladspa-sine-faac
ladspa-sine-fcaa ladspa-sine-fcaa
ladspa-sine-fcac ladspa-sine-fcac
ladspa-singlePara
ladspa-sinusWavewrapper
ladspa-sledgehammer
ladspa-slew-limiter-ra
ladspa-slew-limiter-rc
ladspa-slide-ta
ladspa-slide-tc
ladspa-smoothDecimate
ladspa-split
ladspa-square-fa-oa
ladspa-square-fc-oa
ladspa-ssm-masher
ladspa-stepMuxer
ladspa-sum-iaia-oa
ladspa-sum-iaic-oa
ladspa-sum-icic-oc
ladspa-super-60
ladspa-surroundEncoder
ladspa-svf
ladspa-syncpulse-fapaga-oa
ladspa-syncpulse-fcpcga-oa
ladspa-syncsquare-faga-oa
ladspa-syncsquare-fcga-oa
ladspa-syndrum
ladspa-tap-autopan
ladspa-tap-chorusflanger
ladspa-tap-deesser
ladspa-tap-doubler
ladspa-tap-dynamics-m
ladspa-tap-dynamics-st
ladspa-tap-equalizer
ladspa-tap-equalizer-bw
ladspa-tap-limiter
ladspa-tap-pinknoise
ladspa-tap-pitch
ladspa-tap-reflector
ladspa-tap-reverb
ladspa-tap-rotspeak
ladspa-tap-sigmoid
ladspa-tap-stereo-echo
ladspa-tap-tremolo
ladspa-tap-tubewarmth
ladspa-tap-vibrato
ladspa-tapeDelay
ladspa-track-max-peak
ladspa-track-max-rms
ladspa-track-peak
ladspa-track-rms
ladspa-tracker-gaaadaia-oa
ladspa-tracker-gaacdcia-oa
ladspa-transient
ladspa-triangle-fasa-oa
ladspa-triangle-fasc-oa
ladspa-triangle-fcsa-oa
ladspa-triangle-fcsc-oa
ladspa-trigger
ladspa-triplePara
ladspa-unmatched
ladspa-valve
ladspa-valveRect
ladspa-vcf-bp1
ladspa-vcf-bp2
ladspa-vcf-hp
ladspa-vcf-hshelf
ladspa-vcf-lp
ladspa-vcf-lshelf
ladspa-vcf-notch
ladspa-vcf-peakeq
ladspa-vcf-reslp
ladspa-vcf303
ladspa-vlevel-mono
ladspa-vlevel-stereo
ladspa-vocoder
ladspa-vynil
ladspa-waveTerrain
ladspa-wg-mesh-cr
ladspa-wshape-sine
ladspa-xfade
ladspa-xfade4
ladspa-zm1
GstSirenDec GstSirenDec
GstSirenEnc GstSirenEnc
GstSpeed GstSpeed
GstSrtEnc GstSrtEnc
GstTRM GstTRM
GstTemplateMatch
GstTimidity
GstTtaDec GstTtaDec
GstTtaParse GstTtaParse
GstVMncDec GstVMncDec
GstVcdParse GstVcdParse
GstVdpVideoPostProcess GstWildmidi
GstVideo3DMerge
GstXvidDec
GstXvidEnc
GstY4mDec GstY4mDec
Gstedgedetect
Gstfaceblur
Gstpyramidsegment
MpegPsMux MpegPsMux
MpegTSBase MpegTSBase
GstTSDemux GstTSDemux
@ -720,14 +236,7 @@ GObject
MpegTSParse MpegTSParse
MpegTsMux MpegTsMux
MpegVideoParse MpegVideoParse
SatBaseVideoDecoder
GstVdpDecoder
GstVdpH264Dec
GstVdpMpeg4Dec
GstVdpMpegDec
GstPad GstPad
GstVdpOutputSrcPad
GstVdpVideoSrcPad
GstPadTemplate GstPadTemplate
GstSignalProcessorPadTemplate GstSignalProcessorPadTemplate
GstPlugin GstPlugin
@ -741,7 +250,6 @@ GObject
GstTask GstTask
GstTaskPool GstTaskPool
GstSignalObject GstSignalObject
GstVdpDevice
MpegTsPatInfo MpegTsPatInfo
MpegTsPmtInfo MpegTsPmtInfo
GInterface GInterface

View file

@ -33,6 +33,7 @@ GstMpeg2enc GstPreset
GstMythtvSrc GstURIHandler GstMythtvSrc GstURIHandler
GstNeonhttpSrc GstURIHandler GstNeonhttpSrc GstURIHandler
GstPipeline GstChildProxy GstPipeline GstChildProxy
GstRTMPSink GstURIHandler
GstRTMPSrc GstURIHandler GstRTMPSrc GstURIHandler
GstSDLVideoSink GstImplementsInterface GstXOverlay GstNavigation GstSDLVideoSink GstImplementsInterface GstXOverlay GstNavigation
GstSDPDemux GstChildProxy GstSDPDemux GstChildProxy

View file

@ -51,6 +51,33 @@
</caps> </caps>
</pads> </pads>
</element> </element>
<element>
<name>compare</name>
<longname>Compare buffers</longname>
<class>Filter/Debug</class>
<description>Compares incoming buffers</description>
<author>Mark Nauwelaerts &lt;mark.nauwelaerts@collabora.co.uk&gt;</author>
<pads>
<caps>
<name>check</name>
<direction>sink</direction>
<presence>always</presence>
<details>ANY</details>
</caps>
<caps>
<name>sink</name>
<direction>sink</direction>
<presence>always</presence>
<details>ANY</details>
</caps>
<caps>
<name>src</name>
<direction>source</direction>
<presence>always</presence>
<details>ANY</details>
</caps>
</pads>
</element>
<element> <element>
<name>debugspy</name> <name>debugspy</name>
<longname>DebugSpy</longname> <longname>DebugSpy</longname>

View file

@ -26,7 +26,7 @@
<name>videosink</name> <name>videosink</name>
<direction>sink</direction> <direction>sink</direction>
<presence>always</presence> <presence>always</presence>
<details>video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)30000/1001, interlaced=(boolean)true</details> <details>video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)30000/1001, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)24000/1001, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)576, framerate=(fraction)25/1, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)30000/1001, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)576, framerate=(fraction)25/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)24000/1001, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)24/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)25/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30000/1001, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)25/1, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30000/1001, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30/1, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)50/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, 
height=(int)1080, framerate=(fraction)60000/1001, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)60/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)50/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)60000/1001, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)60/1, interlaced=(boolean)false</details>
</caps> </caps>
</pads> </pads>
</element> </element>
@ -47,7 +47,7 @@
<name>videosrc</name> <name>videosrc</name>
<direction>source</direction> <direction>source</direction>
<presence>always</presence> <presence>always</presence>
<details>video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)30000/1001, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)24000/1001, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)576, framerate=(fraction)25/1, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)24000/1001, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)24/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)25/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30000/1001, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)25/1, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30000/1001, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30/1, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)50/1, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)60000/1001, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)60/1, interlaced=(boolean)true</details> <details>video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)30000/1001, interlaced=(boolean)true, pixel-aspect-ratio=(fraction)10/11, 
color-matrix=(string)sdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)24000/1001, interlaced=(boolean)true, pixel-aspect-ratio=(fraction)10/11, color-matrix=(string)sdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)576, framerate=(fraction)25/1, interlaced=(boolean)true, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)sdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)30000/1001, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)10/11, color-matrix=(string)sdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)576, framerate=(fraction)25/1, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)sdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)24000/1001, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)24/1, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)25/1, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30000/1001, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30/1, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, 
chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)25/1, interlaced=(boolean)true, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30000/1001, interlaced=(boolean)true, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30/1, interlaced=(boolean)true, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)50/1, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30000/1001, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)60/1, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)50/1, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)60000/1001, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)60/1, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2</details>
</caps> </caps>
</pads> </pads>
</element> </element>

View file

@ -3,7 +3,7 @@
<description>Decodes DTS audio streams</description> <description>Decodes DTS audio streams</description>
<filename>../../ext/dts/.libs/libgstdtsdec.so</filename> <filename>../../ext/dts/.libs/libgstdtsdec.so</filename>
<basename>libgstdtsdec.so</basename> <basename>libgstdtsdec.so</basename>
<version>0.10.19.1</version> <version>0.10.22.1</version>
<license>GPL</license> <license>GPL</license>
<source>gst-plugins-bad</source> <source>gst-plugins-bad</source>
<package>GStreamer Bad Plug-ins git</package> <package>GStreamer Bad Plug-ins git</package>

View file

@ -0,0 +1,28 @@
<plugin>
<name>flite</name>
<description>Flite speech synthesizer plugin</description>
<filename>../../ext/flite/.libs/libgstflite.so</filename>
<basename>libgstflite.so</basename>
<version>0.10.22.1</version>
<license>LGPL</license>
<source>gst-plugins-bad</source>
<package>GStreamer Bad Plug-ins git</package>
<origin>Unknown package origin</origin>
<elements>
<element>
<name>flitetestsrc</name>
<longname>Flite speech test source</longname>
<class>Source/Audio</class>
<description>Creates audio test signals identifying channels</description>
<author>David Schleef &lt;ds@schleef.org&gt;</author>
<pads>
<caps>
<name>src</name>
<direction>source</direction>
<presence>always</presence>
<details>audio/x-raw-int, endianness=(int)1234, signed=(boolean)true, width=(int)16, depth=(int)16, rate=(int)48000, channels=(int)[ 1, 8 ]</details>
</caps>
</pads>
</element>
</elements>
</plugin>

View file

@ -0,0 +1,73 @@
<plugin>
<name>inter</name>
<description>plugin for inter-pipeline communication</description>
<filename>../../gst/inter/.libs/libgstinter.so</filename>
<basename>libgstinter.so</basename>
<version>0.10.22.1</version>
<license>LGPL</license>
<source>gst-plugins-bad</source>
<package>GStreamer Bad Plug-ins</package>
<origin>Unknown package origin</origin>
<elements>
<element>
<name>interaudiosink</name>
<longname>FIXME Long name</longname>
<class>Generic</class>
<description>FIXME Description</description>
<author>FIXME &lt;fixme@example.com&gt;</author>
<pads>
<caps>
<name>sink</name>
<direction>sink</direction>
<presence>always</presence>
<details>audio/x-raw-int, endianness=(int)1234, signed=(boolean)true, width=(int)16, depth=(int)16, rate=(int)[ 1, 2147483647 ], channels=(int)[ 1, 2 ]</details>
</caps>
</pads>
</element>
<element>
<name>interaudiosrc</name>
<longname>FIXME Long name</longname>
<class>Generic</class>
<description>FIXME Description</description>
<author>FIXME &lt;fixme@example.com&gt;</author>
<pads>
<caps>
<name>src</name>
<direction>source</direction>
<presence>always</presence>
<details>audio/x-raw-int, endianness=(int)1234, signed=(boolean)true, width=(int)16, depth=(int)16, rate=(int)[ 1, 2147483647 ], channels=(int)[ 1, 2 ]</details>
</caps>
</pads>
</element>
<element>
<name>intervideosink</name>
<longname>FIXME Long name</longname>
<class>Generic</class>
<description>FIXME Description</description>
<author>FIXME &lt;fixme@example.com&gt;</author>
<pads>
<caps>
<name>sink</name>
<direction>sink</direction>
<presence>always</presence>
<details>video/x-raw-yuv, format=(fourcc)I420, width=(int)[ 1, 2147483647 ], height=(int)[ 1, 2147483647 ], framerate=(fraction)[ 0/1, 2147483647/1 ]</details>
</caps>
</pads>
</element>
<element>
<name>intervideosrc</name>
<longname>FIXME Long name</longname>
<class>Generic</class>
<description>FIXME Description</description>
<author>FIXME &lt;fixme@example.com&gt;</author>
<pads>
<caps>
<name>src</name>
<direction>source</direction>
<presence>always</presence>
<details>video/x-raw-yuv, format=(fourcc)I420, width=(int)[ 1, 2147483647 ], height=(int)[ 1, 2147483647 ], framerate=(fraction)[ 0/1, 2147483647/1 ]</details>
</caps>
</pads>
</element>
</elements>
</plugin>

View file

@ -1,12 +1,12 @@
<plugin> <plugin>
<name>modplug</name> <name>modplug</name>
<description>.MOD audio decoding</description> <description>.MOD audio decoding</description>
<filename>../../gst/modplug/.libs/libgstmodplug.so</filename> <filename>../../ext/modplug/.libs/libgstmodplug.so</filename>
<basename>libgstmodplug.so</basename> <basename>libgstmodplug.so</basename>
<version>0.10.10.1</version> <version>0.10.22.1</version>
<license>LGPL</license> <license>LGPL</license>
<source>gst-plugins-bad</source> <source>gst-plugins-bad</source>
<package>GStreamer Bad Plug-ins CVS/prerelease</package> <package>GStreamer Bad Plug-ins git</package>
<origin>Unknown package origin</origin> <origin>Unknown package origin</origin>
<elements> <elements>
<element> <element>
@ -26,7 +26,7 @@
<name>src</name> <name>src</name>
<direction>source</direction> <direction>source</direction>
<presence>always</presence> <presence>always</presence>
<details>audio/x-raw-int, endianness=(int)1234, signed=(boolean)true, width=(int)16, depth=(int)16, rate=(int){ 8000, 11025, 22050, 44100 }, channels=(int)2; audio/x-raw-int, endianness=(int)1234, signed=(boolean)false, width=(int)8, depth=(int)8, rate=(int){ 8000, 11025, 22050, 44100 }, channels=(int)[ 1, 2 ]</details> <details>audio/x-raw-int, endianness=(int)1234, signed=(boolean)true, width=(int)32, depth=(int)32, rate=(int){ 8000, 11025, 22050, 44100 }, channels=(int)[ 1, 2 ]; audio/x-raw-int, endianness=(int)1234, signed=(boolean)true, width=(int)16, depth=(int)16, rate=(int){ 8000, 11025, 22050, 44100 }, channels=(int)[ 1, 2 ]; audio/x-raw-int, endianness=(int)1234, signed=(boolean)false, width=(int)8, depth=(int)8, rate=(int){ 8000, 11025, 22050, 44100 }, channels=(int)[ 1, 2 ]</details>
</caps> </caps>
</pads> </pads>
</element> </element>

View file

@ -1,6 +1,6 @@
<plugin> <plugin>
<name>rtmpsrc</name> <name>rtmp</name>
<description>RTMP source</description> <description>RTMP source and sink</description>
<filename>../../ext/rtmp/.libs/libgstrtmp.so</filename> <filename>../../ext/rtmp/.libs/libgstrtmp.so</filename>
<basename>libgstrtmp.so</basename> <basename>libgstrtmp.so</basename>
<version>0.10.22.1</version> <version>0.10.22.1</version>
@ -9,6 +9,21 @@
<package>GStreamer Bad Plug-ins git</package> <package>GStreamer Bad Plug-ins git</package>
<origin>Unknown package origin</origin> <origin>Unknown package origin</origin>
<elements> <elements>
<element>
<name>rtmpsink</name>
<longname>RTMP output sink</longname>
<class>Sink/Network</class>
<description>Sends FLV content to a server via RTMP</description>
<author>Jan Schmidt &lt;thaytan@noraisin.net&gt;</author>
<pads>
<caps>
<name>sink</name>
<direction>sink</direction>
<presence>always</presence>
<details>video/x-flv</details>
</caps>
</pads>
</element>
<element> <element>
<name>rtmpsrc</name> <name>rtmpsrc</name>
<longname>RTMP Source</longname> <longname>RTMP Source</longname>

View file

@ -83,7 +83,7 @@
<name>sink</name> <name>sink</name>
<direction>sink</direction> <direction>sink</direction>
<presence>always</presence> <presence>always</presence>
<details>video/mpeg, mpegversion=(int)4, parsed=(boolean)false, systemstream=(boolean)false</details> <details>video/mpeg, mpegversion=(int)[ 1, 2 ], parsed=(boolean)false, systemstream=(boolean)false</details>
</caps> </caps>
<caps> <caps>
<name>src</name> <name>src</name>

View file

@ -262,6 +262,12 @@ else
OPENCV_DIR= OPENCV_DIR=
endif endif
if USE_OPUS
OPUS_DIR=opus
else
OPUS_DIR=
endif
if USE_RSVG if USE_RSVG
RSVG_DIR=rsvg RSVG_DIR=rsvg
else else
@ -419,6 +425,7 @@ SUBDIRS=\
$(OFA_DIR) \ $(OFA_DIR) \
$(OPENAL_DIR) \ $(OPENAL_DIR) \
$(OPENCV_DIR) \ $(OPENCV_DIR) \
$(OPUS_DIR) \
$(RSVG_DIR) \ $(RSVG_DIR) \
$(SCHRO_DIR) \ $(SCHRO_DIR) \
$(SDL_DIR) \ $(SDL_DIR) \
@ -471,6 +478,7 @@ DIST_SUBDIRS = \
ofa \ ofa \
openal \ openal \
opencv \ opencv \
opus \
rsvg \ rsvg \
resindvd \ resindvd \
schroedinger \ schroedinger \

View file

@ -570,10 +570,13 @@ blit_i420 (GstAssRender * render, ASS_Image * ass_image, GstBuffer * buffer)
const guint8 *src; const guint8 *src;
guint8 *dst_y, *dst_u, *dst_v; guint8 *dst_y, *dst_u, *dst_v;
gint x, y, w, h; gint x, y, w, h;
/* FIXME ignoring source image stride might be wrong here */
#if 0
gint w2; gint w2;
gint src_stride;
#endif
gint width = render->width; gint width = render->width;
gint height = render->height; gint height = render->height;
gint src_stride;
gint y_offset, y_stride; gint y_offset, y_stride;
gint u_offset, u_stride; gint u_offset, u_stride;
gint v_offset, v_stride; gint v_offset, v_stride;
@ -609,9 +612,11 @@ blit_i420 (GstAssRender * render, ASS_Image * ass_image, GstBuffer * buffer)
w = MIN (ass_image->w, width - ass_image->dst_x); w = MIN (ass_image->w, width - ass_image->dst_x);
h = MIN (ass_image->h, height - ass_image->dst_y); h = MIN (ass_image->h, height - ass_image->dst_y);
#if 0
w2 = (w + 1) / 2; w2 = (w + 1) / 2;
src_stride = ass_image->stride; src_stride = ass_image->stride;
#endif
src = ass_image->bitmap; src = ass_image->bitmap;
dst_y = dst_y =

View file

@ -520,12 +520,11 @@ cog_virt_frame_render_resample_vert_1tap (CogFrame * frame, void *_dest,
int n_src; int n_src;
int scale = frame->param1; int scale = frame->param1;
int acc; int acc;
int x;
int src_i; int src_i;
acc = scale * i; acc = scale * i;
src_i = acc >> 8; src_i = acc >> 8;
x = acc & 0xff; /* x = acc & 0xff; */
n_src = frame->virt_frame1->components[component].height; n_src = frame->virt_frame1->components[component].height;
src1 = cog_virt_frame_get_line (frame->virt_frame1, component, src1 = cog_virt_frame_get_line (frame->virt_frame1, component,
@ -634,10 +633,9 @@ cog_virt_frame_render_resample_horiz_1tap (CogFrame * frame, void *_dest,
{ {
uint8_t *dest = _dest; uint8_t *dest = _dest;
uint8_t *src; uint8_t *src;
int n_src;
int scale = frame->param1; int scale = frame->param1;
n_src = frame->virt_frame1->components[component].width; /* n_src = frame->virt_frame1->components[component].width; */
src = cog_virt_frame_get_line (frame->virt_frame1, component, i); src = cog_virt_frame_get_line (frame->virt_frame1, component, i);
cogorc_resample_horiz_1tap (dest, src, 0, scale, cogorc_resample_horiz_1tap (dest, src, 0, scale,
@ -650,10 +648,9 @@ cog_virt_frame_render_resample_horiz_2tap (CogFrame * frame, void *_dest,
{ {
uint8_t *dest = _dest; uint8_t *dest = _dest;
uint8_t *src; uint8_t *src;
int n_src;
int scale = frame->param1; int scale = frame->param1;
n_src = frame->virt_frame1->components[component].width; /* n_src = frame->virt_frame1->components[component].width; */
src = cog_virt_frame_get_line (frame->virt_frame1, component, i); src = cog_virt_frame_get_line (frame->virt_frame1, component, i);
cogorc_resample_horiz_2tap (dest, src, 0, scale, cogorc_resample_horiz_2tap (dest, src, 0, scale,

View file

@ -24,6 +24,7 @@
#include <gst/gst.h> #include <gst/gst.h>
#include <gst/video/video.h> #include <gst/video/video.h>
#include <gst/video/gstbasevideoencoder.h> #include <gst/video/gstbasevideoencoder.h>
#include <gst/video/gstbasevideoutils.h>
#include <string.h> #include <string.h>
#include <libdirac_encoder/dirac_encoder.h> #include <libdirac_encoder/dirac_encoder.h>
#include <math.h> #include <math.h>
@ -149,7 +150,7 @@ static gboolean gst_dirac_enc_set_format (GstBaseVideoEncoder *
base_video_encoder, GstVideoState * state); base_video_encoder, GstVideoState * state);
static gboolean gst_dirac_enc_start (GstBaseVideoEncoder * base_video_encoder); static gboolean gst_dirac_enc_start (GstBaseVideoEncoder * base_video_encoder);
static gboolean gst_dirac_enc_stop (GstBaseVideoEncoder * base_video_encoder); static gboolean gst_dirac_enc_stop (GstBaseVideoEncoder * base_video_encoder);
static gboolean gst_dirac_enc_finish (GstBaseVideoEncoder * base_video_encoder); static GstFlowReturn gst_dirac_enc_finish (GstBaseVideoEncoder * base_video_encoder);
static GstFlowReturn gst_dirac_enc_handle_frame (GstBaseVideoEncoder * static GstFlowReturn gst_dirac_enc_handle_frame (GstBaseVideoEncoder *
base_video_encoder, GstVideoFrame * frame); base_video_encoder, GstVideoFrame * frame);
static GstFlowReturn gst_dirac_enc_shape_output (GstBaseVideoEncoder * static GstFlowReturn gst_dirac_enc_shape_output (GstBaseVideoEncoder *
@ -223,13 +224,11 @@ static void
gst_dirac_enc_class_init (GstDiracEncClass * klass) gst_dirac_enc_class_init (GstDiracEncClass * klass)
{ {
GObjectClass *gobject_class; GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstBaseVideoEncoderClass *basevideoencoder_class; GstBaseVideoEncoderClass *basevideoencoder_class;
//int i; //int i;
gobject_class = G_OBJECT_CLASS (klass); gobject_class = G_OBJECT_CLASS (klass);
gstelement_class = GST_ELEMENT_CLASS (klass);
basevideoencoder_class = GST_BASE_VIDEO_ENCODER_CLASS (klass); basevideoencoder_class = GST_BASE_VIDEO_ENCODER_CLASS (klass);
gobject_class->set_property = gst_dirac_enc_set_property; gobject_class->set_property = gst_dirac_enc_set_property;
@ -843,7 +842,7 @@ gst_dirac_enc_stop (GstBaseVideoEncoder * base_video_encoder)
return TRUE; return TRUE;
} }
static gboolean static GstFlowReturn
gst_dirac_enc_finish (GstBaseVideoEncoder * base_video_encoder) gst_dirac_enc_finish (GstBaseVideoEncoder * base_video_encoder)
{ {
GstDiracEnc *dirac_enc = GST_DIRAC_ENC (base_video_encoder); GstDiracEnc *dirac_enc = GST_DIRAC_ENC (base_video_encoder);
@ -852,7 +851,7 @@ gst_dirac_enc_finish (GstBaseVideoEncoder * base_video_encoder)
gst_dirac_enc_process (dirac_enc, TRUE); gst_dirac_enc_process (dirac_enc, TRUE);
return TRUE; return GST_FLOW_OK;
} }
static GstFlowReturn static GstFlowReturn
@ -1136,7 +1135,6 @@ gst_dirac_enc_process (GstDiracEnc * dirac_enc, gboolean end_sequence)
{ {
GstBuffer *outbuf; GstBuffer *outbuf;
GstFlowReturn ret; GstFlowReturn ret;
int presentation_frame;
int parse_code; int parse_code;
int state; int state;
GstVideoFrame *frame; GstVideoFrame *frame;
@ -1192,8 +1190,6 @@ gst_dirac_enc_process (GstDiracEnc * dirac_enc, gboolean end_sequence)
dirac_enc->pull_frame_num++; dirac_enc->pull_frame_num++;
parse_code = ((guint8 *) GST_BUFFER_DATA (outbuf))[4]; parse_code = ((guint8 *) GST_BUFFER_DATA (outbuf))[4];
/* FIXME */
presentation_frame = 0;
if (DIRAC_PARSE_CODE_IS_SEQ_HEADER (parse_code)) { if (DIRAC_PARSE_CODE_IS_SEQ_HEADER (parse_code)) {
frame->is_sync_point = TRUE; frame->is_sync_point = TRUE;
@ -1230,7 +1226,6 @@ gst_dirac_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame) GstVideoFrame * frame)
{ {
GstDiracEnc *dirac_enc; GstDiracEnc *dirac_enc;
int dpn;
int delay; int delay;
int dist; int dist;
int pt; int pt;
@ -1241,8 +1236,6 @@ gst_dirac_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder,
dirac_enc = GST_DIRAC_ENC (base_video_encoder); dirac_enc = GST_DIRAC_ENC (base_video_encoder);
dpn = frame->decode_frame_number;
pt = frame->presentation_frame_number * 2 + dirac_enc->granule_offset; pt = frame->presentation_frame_number * 2 + dirac_enc->granule_offset;
dt = frame->decode_frame_number * 2 + dirac_enc->granule_offset; dt = frame->decode_frame_number * 2 + dirac_enc->granule_offset;
delay = pt - dt; delay = pt - dt;

View file

@ -2223,8 +2223,8 @@ gst_dfbvideosink_init (GstDfbVideoSink * dfbvideosink)
{ {
dfbvideosink->pool_lock = g_mutex_new (); dfbvideosink->pool_lock = g_mutex_new ();
dfbvideosink->buffer_pool = NULL; dfbvideosink->buffer_pool = NULL;
dfbvideosink->video_height = dfbvideosink->out_width = 0; dfbvideosink->video_height = dfbvideosink->out_height = 0;
dfbvideosink->video_width = dfbvideosink->out_height = 0; dfbvideosink->video_width = dfbvideosink->out_width = 0;
dfbvideosink->fps_d = 0; dfbvideosink->fps_d = 0;
dfbvideosink->fps_n = 0; dfbvideosink->fps_n = 0;

View file

@ -68,11 +68,6 @@ static GstStaticPadTemplate gst_jasper_dec_src_template =
GST_VIDEO_CAPS_YUV ("{ I420, YV12, YUY2, UYVY, Y41B, Y42B, v308 }")) GST_VIDEO_CAPS_YUV ("{ I420, YV12, YUY2, UYVY, Y41B, Y42B, v308 }"))
); );
static void gst_jasper_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_jasper_dec_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_jasper_dec_reset (GstJasperDec * dec); static void gst_jasper_dec_reset (GstJasperDec * dec);
static GstStateChangeReturn gst_jasper_dec_change_state (GstElement * element, static GstStateChangeReturn gst_jasper_dec_change_state (GstElement * element,
GstStateChange transition); GstStateChange transition);
@ -114,18 +109,13 @@ gst_jasper_dec_base_init (gpointer g_class)
static void static void
gst_jasper_dec_class_init (GstJasperDecClass * klass) gst_jasper_dec_class_init (GstJasperDecClass * klass)
{ {
GObjectClass *gobject_class;
GstElementClass *gstelement_class; GstElementClass *gstelement_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass; gstelement_class = (GstElementClass *) klass;
GST_DEBUG_CATEGORY_INIT (gst_jasper_dec_debug, "jp2kdec", 0, GST_DEBUG_CATEGORY_INIT (gst_jasper_dec_debug, "jp2kdec", 0,
"Jasper JPEG2000 decoder"); "Jasper JPEG2000 decoder");
gobject_class->set_property = gst_jasper_dec_set_property;
gobject_class->get_property = gst_jasper_dec_get_property;
gstelement_class->change_state = gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_jasper_dec_change_state); GST_DEBUG_FUNCPTR (gst_jasper_dec_change_state);
} }
@ -819,36 +809,6 @@ invalid_bytes_segment:
} }
} }
static void
gst_jasper_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstJasperDec *filter;
filter = GST_JASPER_DEC (object);
switch (prop_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_jasper_dec_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstJasperDec *filter;
filter = GST_JASPER_DEC (object);
switch (prop_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static GstStateChangeReturn static GstStateChangeReturn
gst_jasper_dec_change_state (GstElement * element, GstStateChange transition) gst_jasper_dec_change_state (GstElement * element, GstStateChange transition)
{ {

View file

@ -65,11 +65,6 @@ static GstStaticPadTemplate gst_jasper_enc_src_template =
"image/jp2") "image/jp2")
); );
static void gst_jasper_enc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_jasper_enc_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_jasper_enc_reset (GstJasperEnc * enc); static void gst_jasper_enc_reset (GstJasperEnc * enc);
static GstStateChangeReturn gst_jasper_enc_change_state (GstElement * element, static GstStateChangeReturn gst_jasper_enc_change_state (GstElement * element,
GstStateChange transition); GstStateChange transition);
@ -118,18 +113,13 @@ gst_jasper_enc_base_init (gpointer g_class)
static void static void
gst_jasper_enc_class_init (GstJasperEncClass * klass) gst_jasper_enc_class_init (GstJasperEncClass * klass)
{ {
GObjectClass *gobject_class;
GstElementClass *gstelement_class; GstElementClass *gstelement_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass; gstelement_class = (GstElementClass *) klass;
GST_DEBUG_CATEGORY_INIT (gst_jasper_enc_debug, "jp2kenc", 0, GST_DEBUG_CATEGORY_INIT (gst_jasper_enc_debug, "jp2kenc", 0,
"Jasper JPEG2000 encoder"); "Jasper JPEG2000 encoder");
gobject_class->set_property = gst_jasper_enc_set_property;
gobject_class->get_property = gst_jasper_enc_get_property;
/* FIXME add some encoder properties */ /* FIXME add some encoder properties */
gstelement_class->change_state = gstelement_class->change_state =
@ -535,36 +525,6 @@ not_negotiated:
} }
} }
static void
gst_jasper_enc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstJasperEnc *filter;
filter = GST_JASPER_ENC (object);
switch (prop_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_jasper_enc_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstJasperEnc *filter;
filter = GST_JASPER_ENC (object);
switch (prop_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static GstStateChangeReturn static GstStateChangeReturn
gst_jasper_enc_change_state (GstElement * element, GstStateChange transition) gst_jasper_enc_change_state (GstElement * element, GstStateChange transition)
{ {

View file

@ -622,7 +622,6 @@ gst_lv2_setup (GstSignalProcessor * gsp, GstCaps * caps)
GstLV2Group *group = NULL; GstLV2Group *group = NULL;
GstAudioChannelPosition *positions = NULL; GstAudioChannelPosition *positions = NULL;
GstPad *pad; GstPad *pad;
GstCaps *pad_caps;
gsp_class = GST_SIGNAL_PROCESSOR_GET_CLASS (gsp); gsp_class = GST_SIGNAL_PROCESSOR_GET_CLASS (gsp);
lv2 = (GstLV2 *) gsp; lv2 = (GstLV2 *) gsp;
@ -655,7 +654,6 @@ gst_lv2_setup (GstSignalProcessor * gsp, GstCaps * caps)
slv2_value_as_string (group->symbol)))) { slv2_value_as_string (group->symbol)))) {
GST_INFO_OBJECT (lv2, "set audio channel positions on sink pad %s", GST_INFO_OBJECT (lv2, "set audio channel positions on sink pad %s",
slv2_value_as_string (group->symbol)); slv2_value_as_string (group->symbol));
pad_caps = GST_PAD_CAPS (pad);
s = gst_caps_get_structure (caps, 0); s = gst_caps_get_structure (caps, 0);
gst_audio_set_channel_positions (s, positions); gst_audio_set_channel_positions (s, positions);
gst_object_unref (pad); gst_object_unref (pad);
@ -674,7 +672,6 @@ gst_lv2_setup (GstSignalProcessor * gsp, GstCaps * caps)
slv2_value_as_string (group->symbol)))) { slv2_value_as_string (group->symbol)))) {
GST_INFO_OBJECT (lv2, "set audio channel positions on src pad %s", GST_INFO_OBJECT (lv2, "set audio channel positions on src pad %s",
slv2_value_as_string (group->symbol)); slv2_value_as_string (group->symbol));
pad_caps = GST_PAD_CAPS (pad);
s = gst_caps_get_structure (caps, 0); s = gst_caps_get_structure (caps, 0);
gst_audio_set_channel_positions (s, positions); gst_audio_set_channel_positions (s, positions);
gst_object_unref (pad); gst_object_unref (pad);

View file

@ -370,15 +370,20 @@ gst_modplug_src_event (GstPad * pad, GstEvent * event)
GstSeekType cur_type, stop_type; GstSeekType cur_type, stop_type;
gboolean flush; gboolean flush;
gint64 cur, stop; gint64 cur, stop;
/* FIXME timestamp is set but not used */
#if 0
guint64 timestamp; guint64 timestamp;
#endif
if (modplug->frequency == 0) { if (modplug->frequency == 0) {
GST_DEBUG_OBJECT (modplug, "no song loaded yet"); GST_DEBUG_OBJECT (modplug, "no song loaded yet");
break; break;
} }
#if 0
timestamp = gst_util_uint64_scale_int (modplug->offset, GST_SECOND, timestamp = gst_util_uint64_scale_int (modplug->offset, GST_SECOND,
modplug->frequency); modplug->frequency);
#endif
gst_event_parse_seek (event, &rate, &format, &flags, gst_event_parse_seek (event, &rate, &format, &flags,
&cur_type, &cur, &stop_type, &stop); &cur_type, &cur, &stop_type, &stop);

View file

@ -98,6 +98,7 @@ static gboolean gst_neonhttp_src_get_size (GstBaseSrc * bsrc, guint64 * size);
static gboolean gst_neonhttp_src_is_seekable (GstBaseSrc * bsrc); static gboolean gst_neonhttp_src_is_seekable (GstBaseSrc * bsrc);
static gboolean gst_neonhttp_src_do_seek (GstBaseSrc * bsrc, static gboolean gst_neonhttp_src_do_seek (GstBaseSrc * bsrc,
GstSegment * segment); GstSegment * segment);
static gboolean gst_neonhttp_src_query (GstBaseSrc * bsrc, GstQuery * query);
static gboolean gst_neonhttp_src_set_proxy (GstNeonhttpSrc * src, static gboolean gst_neonhttp_src_set_proxy (GstNeonhttpSrc * src,
const gchar * uri); const gchar * uri);
@ -268,6 +269,7 @@ gst_neonhttp_src_class_init (GstNeonhttpSrcClass * klass)
gstbasesrc_class->is_seekable = gstbasesrc_class->is_seekable =
GST_DEBUG_FUNCPTR (gst_neonhttp_src_is_seekable); GST_DEBUG_FUNCPTR (gst_neonhttp_src_is_seekable);
gstbasesrc_class->do_seek = GST_DEBUG_FUNCPTR (gst_neonhttp_src_do_seek); gstbasesrc_class->do_seek = GST_DEBUG_FUNCPTR (gst_neonhttp_src_do_seek);
gstbasesrc_class->query = GST_DEBUG_FUNCPTR (gst_neonhttp_src_query);
gstpushsrc_class->create = GST_DEBUG_FUNCPTR (gst_neonhttp_src_create); gstpushsrc_class->create = GST_DEBUG_FUNCPTR (gst_neonhttp_src_create);
@ -777,6 +779,28 @@ gst_neonhttp_src_do_seek (GstBaseSrc * bsrc, GstSegment * segment)
return FALSE; return FALSE;
} }
static gboolean
gst_neonhttp_src_query (GstBaseSrc * bsrc, GstQuery * query)
{
GstNeonhttpSrc *src = GST_NEONHTTP_SRC (bsrc);
gboolean ret;
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_URI:
gst_query_set_uri (query, src->location);
ret = TRUE;
break;
default:
ret = FALSE;
break;
}
if (!ret)
ret = GST_BASE_SRC_CLASS (parent_class)->query (bsrc, query);
return ret;
}
static gboolean static gboolean
gst_neonhttp_src_set_location (GstNeonhttpSrc * src, const gchar * uri) gst_neonhttp_src_set_location (GstNeonhttpSrc * src, const gchar * uri)
{ {

View file

@ -16,7 +16,12 @@ libgstopencv_la_SOURCES = gstopencv.c \
gstfacedetect.c \ gstfacedetect.c \
gstpyramidsegment.c \ gstpyramidsegment.c \
gsttemplatematch.c \ gsttemplatematch.c \
gsttextoverlay.c gsttextoverlay.c \
gstmotioncells.c \
motioncells_wrapper.cpp \
MotionCells.cpp
libgstopencv_la_CXXFLAGS = $(GST_CXXFLAGS) $(OPENCV_CFLAGS)
# flags used to compile this facedetect # flags used to compile this facedetect
# add other _CFLAGS and _LIBS as needed # add other _CFLAGS and _LIBS as needed
@ -46,4 +51,7 @@ noinst_HEADERS = gstopencvvideofilter.h gstopencvutils.h \
gstfacedetect.h \ gstfacedetect.h \
gstpyramidsegment.h \ gstpyramidsegment.h \
gsttemplatematch.h \ gsttemplatematch.h \
gsttextoverlay.h gsttextoverlay.h \
gstmotioncells.h \
motioncells_wrapper.h \
MotionCells.h

593
ext/opencv/MotionCells.cpp Normal file
View file

@ -0,0 +1,593 @@
/*
* GStreamer
* Copyright (C) 2011 Robert Jobbagy <jobbagy.robert@gmail.com>
* Copyright (C) 2011 Nicola Murino <nicola.murino@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Alternatively, the contents of this file may be used under the
* GNU Lesser General Public License Version 2.1 (the "LGPL"), in
* which case the following provisions apply instead of the ones
* mentioned above:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include <cstdlib>
#include <errno.h>
#include <math.h>
#include <gst/gst.h>
#include <arpa/inet.h>
#include "MotionCells.h"
uint64_t ntohl64 (uint64_t val);
uint64_t htonl64 (uint64_t val);

/* ntohl64:
 * Convert a 64-bit integer from network (big-endian) to host byte order.
 *
 * Fix: ntohl() is the identity on big-endian hosts, so the value is
 * already in host order there and must be returned untouched.  The
 * previous code swapped the two 32-bit halves unconditionally, which
 * corrupted every value on big-endian machines.
 */
uint64_t
ntohl64 (uint64_t val)
{
  uint32_t low, high;

  /* Big-endian host: ntohl() is a no-op, so no swapping is needed. */
  if (ntohl (1) == 1)
    return val;

  low = (uint32_t) (val & 0x00000000FFFFFFFFLL);
  high = (uint32_t) ((val & 0xFFFFFFFF00000000LL) >> 32);
  /* Byte-swap each half and exchange the halves: a full 64-bit swap. */
  return (((uint64_t) ntohl (low)) << 32) | (uint64_t) ntohl (high);
}
/* htonl64:
 * Convert a 64-bit integer from host to network (big-endian) byte order.
 *
 * Fix: htonl() is the identity on big-endian hosts, so the value must be
 * returned unchanged there.  The previous code swapped the two 32-bit
 * halves unconditionally, which corrupted every value on big-endian
 * machines.
 */
uint64_t
htonl64 (uint64_t val)
{
  uint32_t low, high;

  /* Big-endian host: htonl() is a no-op, so no swapping is needed. */
  if (htonl (1) == 1)
    return val;

  low = (uint32_t) (val & 0x00000000FFFFFFFFLL);
  high = (uint32_t) ((val & 0xFFFFFFFF00000000LL) >> 32);
  /* Byte-swap each half and exchange the halves: a full 64-bit swap. */
  return (((uint64_t) htonl (low)) << 32) | (uint64_t) htonl (high);
}
/* Constructor: put every pointer member into a known NULL state and
 * reset counters, so the destructor and the detection loop can safely
 * test them before the first frame has been processed. */
MotionCells::MotionCells ()
{
  m_framecnt = 0;
  m_motioncells_idx_count = 0;
  m_motioncellsidxcstr = NULL;
  m_saveInDatafile = false;
  mc_savefile = NULL;
  m_pcurFrame = NULL;
  m_pprevFrame = NULL;
  transparencyimg = NULL;
  m_pdifferenceImage = NULL;
  m_pbwImage = NULL;
  /* Fix: m_pCells is null-tested in performDetectionMotionCells() but
   * was never initialized here, so that test could read an
   * indeterminate pointer (NOTE(review): assumes setMotionCells() is
   * the only other writer — confirm against MotionCells.h). */
  m_pCells = NULL;
  /* Buffers for the errno message forwarded on the GStreamer bus. */
  m_initdatafilefailed = new char[BUSMSGLEN];
  m_savedatafilefailed = new char[BUSMSGLEN];
  m_initerrorcode = 0;
  m_saveerrorcode = 0;
  /* Blend weights used by blendImages() for the motion overlay. */
  m_alpha = 0.5;
  m_beta = 0.5;
}
/* Destructor: close the data file and release every buffer and OpenCV
 * image the instance may still own. */
MotionCells::~MotionCells ()
{
  /* Close the motion-cell data file if one is still open. */
  if (mc_savefile != NULL) {
    fclose (mc_savefile);
    mc_savefile = NULL;
  }

  /* delete[] on a NULL pointer is a no-op, so no guards are needed. */
  delete[] m_initdatafilefailed;
  delete[] m_savedatafilefailed;
  delete[] m_motioncellsidxcstr;

  /* Release whatever OpenCV images are still alive. */
  if (m_pcurFrame != NULL)
    cvReleaseImage (&m_pcurFrame);
  if (m_pprevFrame != NULL)
    cvReleaseImage (&m_pprevFrame);
  if (transparencyimg != NULL)
    cvReleaseImage (&transparencyimg);
  if (m_pdifferenceImage != NULL)
    cvReleaseImage (&m_pdifferenceImage);
  if (m_pbwImage != NULL)
    cvReleaseImage (&m_pbwImage);
}
/* Run one motion-detection pass over @p_frame.
 *
 * The frame is downscaled by 2 (cvPyrDown), converted to grey and
 * differenced against the previously analysed frame; the thresholded
 * difference image is evaluated per grid cell (m_gridx x m_gridy).
 * Cells with motion are optionally drawn onto @p_frame (outline, or
 * alpha-blended fill when @p_useAlpha), serialized into
 * m_motioncellsidxcstr and, when a data file is configured, appended
 * to it via saveMotionCells().
 *
 * Returns 0 on success, -2 when this frame was skipped by the
 * frame-rate based frame dropping, and a non-zero error code when the
 * data file could not be (re)opened or written.
 */
int
MotionCells::performDetectionMotionCells (IplImage * p_frame,
    double p_sensitivity, double p_framerate, int p_gridx, int p_gridy,
    gint64 timestamp_millisec, bool p_isVisible, bool p_useAlpha,
    int motionmaskcoord_count, motionmaskcoordrect * motionmaskcoords,
    int motionmaskcells_count, motioncellidx * motionmaskcellsidx,
    cellscolor motioncellscolor, int motioncells_count,
    motioncellidx * motioncellsidx, gint64 starttime, char *p_datafile,
    bool p_changed_datafile, int p_thickness)
{
  int sumframecnt = 0;
  int ret = 0;
  /* Derive from the frame rate how many incoming frames make up one
   * analysis step: 1..5 fps -> every frame, ..., 21..25 fps -> every
   * 5th frame; anything else analyses every frame. */
  p_framerate >= 1 ? p_framerate <= 5 ? sumframecnt = 1
      : p_framerate <= 10 ? sumframecnt = 2
      : p_framerate <= 15 ? sumframecnt = 3
      : p_framerate <= 20 ? sumframecnt = 4
      : p_framerate <= 25 ? sumframecnt = 5 : sumframecnt = 0 : sumframecnt = 0;
  m_framecnt++;
  m_changed_datafile = p_changed_datafile;
  if (m_framecnt >= sumframecnt) {
    m_useAlpha = p_useAlpha;
    m_gridx = p_gridx;
    m_gridy = p_gridy;
    if (m_changed_datafile) {
      /* Data-file property changed: (re)open it before analysing. */
      ret = initDataFile (p_datafile, starttime);
      if (ret != 0)
        return ret;
    }
    /* Work at half resolution (cvPyrDown output size). */
    m_frameSize = cvGetSize (p_frame);
    m_frameSize.width /= 2;
    m_frameSize.height /= 2;
    setMotionCells (m_frameSize.width, m_frameSize.height);
    m_sensitivity = 1 - p_sensitivity;
    m_isVisible = p_isVisible;
    m_pcurFrame = cvCloneImage (p_frame);
    IplImage *m_pcurgreyImage = cvCreateImage (m_frameSize, IPL_DEPTH_8U, 1);
    IplImage *m_pprevgreyImage = cvCreateImage (m_frameSize, IPL_DEPTH_8U, 1);
    IplImage *m_pgreyImage = cvCreateImage (m_frameSize, IPL_DEPTH_8U, 1);
    IplImage *m_pcurDown =
        cvCreateImage (m_frameSize, m_pcurFrame->depth, m_pcurFrame->nChannels);
    /* NOTE(review): m_pprevFrame is dereferenced here; the caller
     * apparently must guarantee a previous frame exists before the
     * first detection pass — confirm against the element wrapper. */
    IplImage *m_pprevDown = cvCreateImage (m_frameSize, m_pprevFrame->depth,
        m_pprevFrame->nChannels);
    m_pbwImage = cvCreateImage (m_frameSize, IPL_DEPTH_8U, 1);
    cvPyrDown (m_pprevFrame, m_pprevDown);
    cvCvtColor (m_pprevDown, m_pprevgreyImage, CV_RGB2GRAY);
    if (m_pprevFrame)
      cvReleaseImage (&m_pprevFrame);
    cvPyrDown (m_pcurFrame, m_pcurDown);
    cvCvtColor (m_pcurDown, m_pcurgreyImage, CV_RGB2GRAY);
    m_pdifferenceImage = cvCloneImage (m_pcurgreyImage);
    //cvSmooth(m_pcurgreyImage, m_pcurgreyImage, CV_GAUSSIAN, 3, 0);//TODO camera noise reduce,something smoothing, and rethink runningavg weights

    /* Absolute difference between the previous and current grey frame. */
    cvAbsDiff (m_pprevgreyImage, m_pcurgreyImage, m_pdifferenceImage);
    /* Convert the difference image to black and white. */
    cvAdaptiveThreshold (m_pdifferenceImage, m_pbwImage, 255,
        CV_ADAPTIVE_THRESH_GAUSSIAN_C, CV_THRESH_BINARY_INV, 7);
    /* Dilate and erode to get object blobs. */
    cvDilate (m_pbwImage, m_pbwImage, NULL, 2);
    cvErode (m_pbwImage, m_pbwImage, NULL, 2);

    /* Mask out regions that must not trigger detection. */
    if (motionmaskcoord_count > 0)
      performMotionMaskCoords (motionmaskcoords, motionmaskcoord_count);
    if (motionmaskcells_count > 0)
      performMotionMask (motionmaskcellsidx, motionmaskcells_count);

    if (getIsNonZero (m_pbwImage)) {    //detect Motion
      GST_DEBUG ("DETECT MOTION \n");
      if (m_MotionCells.size () > 0)    //it contains previous motioncells what we used when frames dropped
        m_MotionCells.clear ();
      if (transparencyimg)
        cvReleaseImage (&transparencyimg);
      /* Evaluate either the user-selected cells or the whole grid. */
      (motioncells_count > 0) ?
          calculateMotionPercentInMotionCells (motioncellsidx,
          motioncells_count)
          : calculateMotionPercentInMotionCells (motionmaskcellsidx, 0);

      transparencyimg = cvCreateImage (cvGetSize (p_frame), p_frame->depth, 3);
      cvSetZero (transparencyimg);
      if (m_motioncellsidxcstr)
        delete[]m_motioncellsidxcstr;
      /* One motion cell idx: (lin idx : col idx,) — at most MSGLEN
       * characters per cell in the serialized list. */
      m_motioncells_idx_count = m_MotionCells.size () * MSGLEN;
      m_motioncellsidxcstr = new char[m_motioncells_idx_count];
      char *tmpstr = new char[MSGLEN];
      for (int i = 0; i < MSGLEN; i++)
        tmpstr[i] = ' ';
      for (unsigned int i = 0; i < m_MotionCells.size (); i++) {
        CvPoint pt1, pt2;
        /* Cell rectangles are in half resolution; scale back up. */
        pt1.x = m_MotionCells.at (i).cell_pt1.x * 2;
        pt1.y = m_MotionCells.at (i).cell_pt1.y * 2;
        pt2.x = m_MotionCells.at (i).cell_pt2.x * 2;
        pt2.y = m_MotionCells.at (i).cell_pt2.y * 2;
        if (m_useAlpha && m_isVisible) {
          cvRectangle (transparencyimg,
              pt1,
              pt2,
              CV_RGB (motioncellscolor.B_channel_value,
                  motioncellscolor.G_channel_value,
                  motioncellscolor.R_channel_value), CV_FILLED);
        } else if (m_isVisible) {
          cvRectangle (p_frame,
              pt1,
              pt2,
              CV_RGB (motioncellscolor.B_channel_value,
                  motioncellscolor.G_channel_value,
                  motioncellscolor.R_channel_value), p_thickness);
        }
        /* "line:col" entries, comma-separated except after the last. */
        if (i < m_MotionCells.size () - 1) {
          snprintf (tmpstr, MSGLEN, "%d:%d,", m_MotionCells.at (i).lineidx,
              m_MotionCells.at (i).colidx);
        } else {
          snprintf (tmpstr, MSGLEN, "%d:%d", m_MotionCells.at (i).lineidx,
              m_MotionCells.at (i).colidx);
        }
        if (i == 0)
          strncpy (m_motioncellsidxcstr, tmpstr, m_motioncells_idx_count);
        else
          strcat (m_motioncellsidxcstr, tmpstr);
      }
      if (m_MotionCells.size () == 0)
        strncpy (m_motioncellsidxcstr, " ", m_motioncells_idx_count);

      if (m_useAlpha && m_isVisible) {
        if (m_MotionCells.size () > 0)
          blendImages (p_frame, transparencyimg, m_alpha, m_beta);
      }

      delete[]tmpstr;

      if (mc_savefile && m_saveInDatafile) {
        ret = saveMotionCells (timestamp_millisec);
        if (ret != 0)
          return ret;
      }
    } else {
      /* No motion: reset serialized state and drop the overlay. */
      m_motioncells_idx_count = 0;
      if (m_MotionCells.size () > 0)
        m_MotionCells.clear ();
      if (transparencyimg)
        cvReleaseImage (&transparencyimg);
    }

    /* Keep the current frame as reference for the next pass, then free
     * all per-pass images and the per-cell grid. */
    m_pprevFrame = cvCloneImage (m_pcurFrame);
    m_framecnt = 0;
    if (m_pcurFrame)
      cvReleaseImage (&m_pcurFrame);
    if (m_pdifferenceImage)
      cvReleaseImage (&m_pdifferenceImage);
    if (m_pcurgreyImage)
      cvReleaseImage (&m_pcurgreyImage);
    if (m_pprevgreyImage)
      cvReleaseImage (&m_pprevgreyImage);
    if (m_pgreyImage)
      cvReleaseImage (&m_pgreyImage);
    if (m_pbwImage)
      cvReleaseImage (&m_pbwImage);
    if (m_pprevDown)
      cvReleaseImage (&m_pprevDown);
    if (m_pcurDown)
      cvReleaseImage (&m_pcurDown);
    if (m_pCells) {
      for (int i = 0; i < m_gridy; ++i) {
        delete[]m_pCells[i];
      }
      delete[]m_pCells;
    }
    if (p_framerate <= 5) {
      /* At low frame rates no cells are kept for redrawing on dropped
       * frames. */
      if (m_MotionCells.size () > 0)
        m_MotionCells.clear ();
      if (transparencyimg)
        cvReleaseImage (&transparencyimg);
    }
  } else {                      //we do frame drop
    /* Dropped frame: just redraw the cells from the last analysis. */
    m_motioncells_idx_count = 0;
    ret = -2;
    for (unsigned int i = 0; i < m_MotionCells.size (); i++) {
      CvPoint pt1, pt2;
      pt1.x = m_MotionCells.at (i).cell_pt1.x * 2;
      pt1.y = m_MotionCells.at (i).cell_pt1.y * 2;
      pt2.x = m_MotionCells.at (i).cell_pt2.x * 2;
      pt2.y = m_MotionCells.at (i).cell_pt2.y * 2;
      if (m_useAlpha && m_isVisible) {
        cvRectangle (transparencyimg,
            pt1,
            pt2,
            CV_RGB (motioncellscolor.B_channel_value,
                motioncellscolor.G_channel_value,
                motioncellscolor.R_channel_value), CV_FILLED);
      } else if (m_isVisible) {
        cvRectangle (p_frame,
            pt1,
            pt2,
            CV_RGB (motioncellscolor.B_channel_value,
                motioncellscolor.G_channel_value,
                motioncellscolor.R_channel_value), p_thickness);
      }
    }
    if (m_useAlpha && m_isVisible) {
      if (m_MotionCells.size () > 0)
        blendImages (p_frame, transparencyimg, m_alpha, m_beta);
    }
  }
  return ret;
}
/* (Re)open the motion-cell data file and fill in the file header.
 *
 * @p_datafile: path of the data file; a single space means "no file".
 * @starttime: capture start time, stored in network byte order.
 *
 * Returns 0 on success, 1 when the file could not be opened (errno and
 * its message are kept in m_initerrorcode / m_initdatafilefailed for
 * the bus message).
 */
int
MotionCells::initDataFile (char *p_datafile, gint64 starttime)  //p_date is increased with difference between current and previous buffer ts
{
  MotionCellData mcd;

  /* " " (single space) is the sentinel for "no data file". */
  if (strncmp (p_datafile, " ", 1)) {
    mc_savefile = fopen (p_datafile, "w");
    if (mc_savefile == NULL) {
      /* Fix: strncpy() does not NUL-terminate on truncation — ensure a
       * terminator so the bus message is always a valid string. */
      strncpy (m_initdatafilefailed, strerror (errno), BUSMSGLEN - 1);
      m_initdatafilefailed[BUSMSGLEN - 1] = '\0';
      m_initerrorcode = errno;
      return 1;
    } else {
      m_saveInDatafile = true;
    }
  } else {
    mc_savefile = NULL;
  }

  /* Fix: memset() instead of the legacy bzero() (removed from POSIX). */
  memset (&m_header, 0, sizeof (MotionCellHeader));
  m_header.headersize = htonl (MC_HEADER);
  m_header.type = htonl (MC_TYPE);
  m_header.version = htonl (MC_VERSION);
  /* Per item: the cell bitmap rounded up to a multiple of 4 bytes plus
   * the timestamp. */
  m_header.itemsize =
      htonl ((int) ceil (ceil (m_gridx * m_gridy / 8.0) / 4.0) * 4 +
      sizeof (mcd.timestamp));
  m_header.gridx = htonl (m_gridx);
  m_header.gridy = htonl (m_gridy);
  m_header.starttime = htonl64 (starttime);
  snprintf (m_header.name, sizeof (m_header.name), "%s %dx%d", MC_VERSIONTEXT,
      ntohl (m_header.gridx), ntohl (m_header.gridy));
  m_changed_datafile = false;
  return 0;
}
/* Append one record (timestamp + cell bitmap) for the current analysis
 * pass to the data file; writes the file header first when the file is
 * still empty.
 *
 * Returns 0 on success (or when no data file is configured) and -1 on a
 * write/allocation failure (errno and its message are kept in
 * m_saveerrorcode / m_savedatafilefailed).
 */
int
MotionCells::saveMotionCells (gint64 timestamp_millisec)
{
  MotionCellData mc_data;

  /* NOTE(review): timestamp_millisec is gint64 but htonl() converts
   * only 32 bits — confirm MotionCellData.timestamp is meant to be a
   * 32-bit field. */
  mc_data.timestamp = htonl (timestamp_millisec);
  mc_data.data = NULL;
  /* There is no datafile configured; nothing to do. */
  if (mc_savefile == NULL)
    return 0;

  if (ftello (mc_savefile) == 0) {
    /* File is still empty: write the header first. */
    if (fwrite (&m_header, sizeof (MotionCellHeader), 1, mc_savefile) != 1) {
      strncpy (m_savedatafilefailed, strerror (errno), BUSMSGLEN - 1);
      m_savedatafilefailed[BUSMSGLEN - 1] = '\0';      /* fix: terminate */
      m_saveerrorcode = errno;
      return -1;
    }
  }

  /* Zeroed bitmap sized by the header's item size minus the timestamp. */
  mc_data.data =
      (char *) calloc (1,
      ntohl (m_header.itemsize) - sizeof (mc_data.timestamp));
  if (mc_data.data == NULL) {
    strncpy (m_savedatafilefailed, strerror (errno), BUSMSGLEN - 1);
    m_savedatafilefailed[BUSMSGLEN - 1] = '\0';
    m_saveerrorcode = errno;
    return -1;
  }

  /* Set one bit per detected cell: bit index = line * gridx + column. */
  for (unsigned int i = 0; i < m_MotionCells.size (); i++) {
    int bitnum =
        m_MotionCells.at (i).lineidx * ntohl (m_header.gridx) +
        m_MotionCells.at (i).colidx;
    int bytenum = (int) floor (bitnum / 8.0);
    int shift = bitnum - bytenum * 8;
    mc_data.data[bytenum] = mc_data.data[bytenum] | (1 << shift);
  }

  if (fwrite (&mc_data.timestamp, sizeof (mc_data.timestamp), 1,
          mc_savefile) != 1) {
    strncpy (m_savedatafilefailed, strerror (errno), BUSMSGLEN - 1);
    m_savedatafilefailed[BUSMSGLEN - 1] = '\0';
    m_saveerrorcode = errno;
    free (mc_data.data);        /* fix: was leaked on this error path */
    return -1;
  }
  if (fwrite (mc_data.data,
          ntohl (m_header.itemsize) - sizeof (mc_data.timestamp), 1,
          mc_savefile) != 1) {
    strncpy (m_savedatafilefailed, strerror (errno), BUSMSGLEN - 1);
    m_savedatafilefailed[BUSMSGLEN - 1] = '\0';
    m_saveerrorcode = errno;
    free (mc_data.data);        /* fix: was leaked on this error path */
    return -1;
  }
  free (mc_data.data);
  return 0;
}
/* Compute the fraction of moving (non-zero) pixels of the thresholded
 * difference image m_pbwImage inside grid cell (@p_row, @p_col).
 *
 * @p_cellarea: out — total pixel count of the cell.
 * @p_motionarea: out — moving pixel count (only written on the two
 *   early-exit paths, as in the original contract).
 *
 * Two early exits keep the scan cheap: once the moving-pixel count
 * reaches the sensitivity threshold the result is already decided, and
 * once the remaining pixels can no longer reach the threshold the cell
 * is reported as motion-free.
 */
double
MotionCells::calculateMotionPercentInCell (int p_row, int p_col,
    double *p_cellarea, double *p_motionarea)
{
  double cntpixelsnum = 0;
  double cntmotionpixelnum = 0;

  /* Cell bounds in (downscaled) pixel coordinates. */
  int ybegin = floor ((double) p_row * m_cellheight);
  int yend = floor ((double) (p_row + 1) * m_cellheight);
  int xbegin = floor ((double) (p_col) * m_cellwidth);
  int xend = floor ((double) (p_col + 1) * m_cellwidth);
  int cellw = xend - xbegin;
  int cellh = yend - ybegin;
  int cellarea = cellw * cellh;
  *p_cellarea = cellarea;
  int thresholdmotionpixelnum = floor ((double) cellarea * m_sensitivity);

  for (int i = ybegin; i < yend; i++) {
    for (int j = xbegin; j < xend; j++) {
      cntpixelsnum++;
      if ((((uchar *) (m_pbwImage->imageData +
                  m_pbwImage->widthStep * i))[j]) > 0) {
        cntmotionpixelnum++;
        if (cntmotionpixelnum >= thresholdmotionpixelnum) {     /* threshold reached: no need to scan further */
          *p_motionarea = cntmotionpixelnum;
          return (cntmotionpixelnum / cntpixelsnum);
        }
      }
      int remainingpixelsnum = cellarea - cntpixelsnum;
      if ((cntmotionpixelnum + remainingpixelsnum) < thresholdmotionpixelnum) { /* threshold can no longer be reached */
        *p_motionarea = 0;
        return 0;
      }
    }
  }

  /* NOTE(review): if the cell is empty (yend <= ybegin or
   * xend <= xbegin) this divides 0 by 0 — presumably the grid never
   * produces empty cells; confirm against setMotionCells(). */
  return (cntmotionpixelnum / cntpixelsnum);
}
/* Evaluate the motion percentage per grid cell and collect every cell
 * whose motion exceeds the sensitivity into m_MotionCells.
 *
 * When @p_motioncells_count is 0 the whole m_gridx x m_gridy grid is
 * scanned; otherwise only the cells listed in @p_motioncellsidx.
 */
void
MotionCells::calculateMotionPercentInMotionCells (motioncellidx *
    p_motioncellsidx, int p_motioncells_count)
{
  if (p_motioncells_count == 0) {
    /* Scan the whole grid. */
    for (int i = 0; i < m_gridy; i++) {
      for (int j = 0; j < m_gridx; j++) {
        m_pCells[i][j].MotionPercent = calculateMotionPercentInCell (i, j,
            &m_pCells[i][j].CellArea, &m_pCells[i][j].MotionArea);
        m_pCells[i][j].hasMotion =
            m_sensitivity < m_pCells[i][j].MotionPercent ? true : false;
        if (m_pCells[i][j].hasMotion) {
          MotionCellsIdx mci;
          mci.lineidx = i;
          mci.colidx = j;
          /* Cell rectangle in (downscaled) pixel coordinates. */
          mci.cell_pt1.x = floor ((double) j * m_cellwidth);
          mci.cell_pt1.y = floor ((double) i * m_cellheight);
          mci.cell_pt2.x = floor ((double) (j + 1) * m_cellwidth);
          mci.cell_pt2.y = floor ((double) (i + 1) * m_cellheight);
          int w = mci.cell_pt2.x - mci.cell_pt1.x;
          int h = mci.cell_pt2.y - mci.cell_pt1.y;
          mci.motioncell = cvRect (mci.cell_pt1.x, mci.cell_pt1.y, w, h);
          m_MotionCells.push_back (mci);
        }
      }
    }
  } else {
    /* Scan only the explicitly selected cells. */
    for (int k = 0; k < p_motioncells_count; ++k) {
      int i = p_motioncellsidx[k].lineidx;
      int j = p_motioncellsidx[k].columnidx;
      m_pCells[i][j].MotionPercent =
          calculateMotionPercentInCell (i, j,
          &m_pCells[i][j].CellArea, &m_pCells[i][j].MotionArea);
      m_pCells[i][j].hasMotion =
          m_pCells[i][j].MotionPercent > m_sensitivity ? true : false;
      if (m_pCells[i][j].hasMotion) {
        MotionCellsIdx mci;
        mci.lineidx = p_motioncellsidx[k].lineidx;
        mci.colidx = p_motioncellsidx[k].columnidx;
        mci.cell_pt1.x = floor ((double) j * m_cellwidth);
        mci.cell_pt1.y = floor ((double) i * m_cellheight);
        mci.cell_pt2.x = floor ((double) (j + 1) * m_cellwidth);
        mci.cell_pt2.y = floor ((double) (i + 1) * m_cellheight);
        int w = mci.cell_pt2.x - mci.cell_pt1.x;
        int h = mci.cell_pt2.y - mci.cell_pt1.y;
        mci.motioncell = cvRect (mci.cell_pt1.x, mci.cell_pt1.y, w, h);
        m_MotionCells.push_back (mci);
      }
    }
  }
}
/* Black out the given pixel-coordinate rectangles in the thresholded
 * difference image so they cannot contribute to motion detection. */
void
MotionCells::performMotionMaskCoords (motionmaskcoordrect * p_motionmaskcoords,
    int p_motionmaskcoords_count)
{
  for (int idx = 0; idx < p_motionmaskcoords_count; idx++) {
    CvPoint tl, br;

    tl.x = p_motionmaskcoords[idx].upper_left_x;
    tl.y = p_motionmaskcoords[idx].upper_left_y;
    br.x = p_motionmaskcoords[idx].lower_right_x;
    br.y = p_motionmaskcoords[idx].lower_right_y;
    cvRectangle (m_pbwImage, tl, br, CV_RGB (0, 0, 0), CV_FILLED);
  }
}
/* Zero every pixel of the thresholded difference image that belongs to
 * a masked grid cell so it cannot contribute to motion detection. */
void
MotionCells::performMotionMask (motioncellidx * p_motionmaskcellsidx,
    int p_motionmaskcells_count)
{
  for (int cell = 0; cell < p_motionmaskcells_count; cell++) {
    /* Cell bounds in pixels; the original truncating conversions are
     * kept exactly as-is. */
    int row0 = p_motionmaskcellsidx[cell].lineidx * m_cellheight;
    int col0 = p_motionmaskcellsidx[cell].columnidx * m_cellwidth;
    int col1 =
        (double) p_motionmaskcellsidx[cell].columnidx * m_cellwidth +
        m_cellwidth;
    int row1 =
        (double) p_motionmaskcellsidx[cell].lineidx * m_cellheight +
        m_cellheight;

    for (int row = row0; row < row1; row++) {
      uchar *line =
          (uchar *) (m_pbwImage->imageData + m_pbwImage->widthStep * row);

      for (int col = col0; col < col1; col++)
        line[col] = 0;
    }
  }
}
/* Alpha-blend the cell overlay onto the current frame, per channel, only
 * where the overlay is non-zero.
 * Channel order is BGR when used with plain OpenCV, RGB inside GStreamer. */
void
MotionCells::blendImages (IplImage * p_actFrame, IplImage * p_cellsFrame,
    float p_alpha, float p_beta)
{
  uchar *dst = (uchar *) p_actFrame->imageData;
  uchar *src = (uchar *) p_cellsFrame->imageData;
  int dststep = p_actFrame->widthStep / sizeof (uchar);
  int srcstep = p_cellsFrame->widthStep / sizeof (uchar);
  int nch = p_actFrame->nChannels;

  for (int row = 0; row < p_actFrame->height; row++) {
    for (int col = 0; col < p_actFrame->width; col++) {
      for (int ch = 0; ch < nch; ch++) {
        int si = row * srcstep + col * nch + ch;
        int di = row * dststep + col * nch + ch;
        if (src[si] > 0) {
          /* weighted sum of frame and overlay, rounded to nearest */
          dst[di] =
              round ((double) dst[di] * p_alpha + ((double) src[si] * p_beta));
        }
      }
    }
  }
}

259
ext/opencv/MotionCells.h Normal file
View file

@ -0,0 +1,259 @@
/*
* GStreamer
* Copyright (C) 2011 Robert Jobbagy <jobbagy.robert@gmail.com>
* Copyright (C) 2011 Nicola Murino <nicola.murino@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Alternatively, the contents of this file may be used under the
* GNU Lesser General Public License Version 2.1 (the "LGPL"), in
* which case the following provisions apply instead of the ones
* mentioned above:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef MOTIONCELLS_H_
#define MOTIONCELLS_H_
#include <cv.h> // includes OpenCV definitions
#include <highgui.h> // includes highGUI definitions
#include <iostream>
#include <fstream>
#include <vector>
#include <cstdio>
#include <cmath>
#include <glib.h>
//MotionCells defines
#define MC_HEADER 64
#define MC_TYPE 1
#define MC_VERSION 1
#define MC_VERSIONTEXT "MotionCells-1"
#define MSGLEN 6
#define BUSMSGLEN 20
using namespace std;
/* On-disk header written at the start of a motion-cells data file.
 * name[] fills the header up to MC_HEADER (64) bytes; the 7 fixed fields
 * before it occupy 32 bytes, hence MC_HEADER - 32. */
struct MotionCellHeader{
  gint32 headersize;
  gint32 type;
  gint32 version;
  gint32 itemsize;
  gint32 gridx;
  gint32 gridy;
  gint64 starttime;
  char name[MC_HEADER - 32];
};

/* One record in the data file: a timestamp plus an opaque payload whose
 * size is given by MotionCellHeader.itemsize. */
struct MotionCellData{
  gint32 timestamp;
  char *data;
};
/* Pixel-coordinate rectangle describing a motion-mask region. */
typedef struct {
  int upper_left_x;
  int upper_left_y;
  int lower_right_x;
  int lower_right_y;
} motionmaskcoordrect;

/* RGB color used when drawing detected motion cells. */
typedef struct {
  int R_channel_value;
  int G_channel_value;
  int B_channel_value;
} cellscolor;

/* Grid position of a single cell: row (line) and column index. */
typedef struct {
  int lineidx;
  int columnidx;
} motioncellidx;

/* Per-grid-cell statistics computed for one frame. */
struct Cell
{
  double MotionArea;
  double CellArea;
  double MotionPercent;
  bool hasMotion;
};

/* A cell that currently has motion, with its pixel rectangle. */
struct MotionCellsIdx
{
  CvRect motioncell;
  //Points for the edges of the rectangle.
  CvPoint cell_pt1;
  CvPoint cell_pt2;
  int lineidx;
  int colidx;
};

/* Rectangle of an overlay drawn on the output frame. */
struct OverlayRegions
{
  CvPoint upperleft;
  CvPoint lowerright;
};
/* Grid-based motion detection engine: splits each frame into gridx x gridy
 * cells, measures per-cell motion against the previous frame, and can draw
 * overlays and log results to a data file. */
class MotionCells
{
public:
  MotionCells ();
  virtual ~ MotionCells ();
  /* Run one detection pass on p_frame; returns an error code (see the
   * get*ErrorCode accessors). All mask/cell arrays are caller-owned. */
  int performDetectionMotionCells (IplImage * p_frame, double p_sensitivity,
      double p_framerate, int p_gridx, int p_gridy, gint64 timestamp_millisec,
      bool p_isVisble, bool p_useAlpha, int motionmaskcoord_count,
      motionmaskcoordrect * motionmaskcoords, int motionmaskcells_count,
      motioncellidx * motionmaskcellsidx, cellscolor motioncellscolor,
      int motioncells_count, motioncellidx * motioncellsidx, gint64 starttime,
      char *datafile, bool p_changed_datafile, int p_thickness);
  /* Stores a deep copy of the given frame as the comparison baseline. */
  void setPrevFrame (IplImage * p_prevframe)
  {
    m_pprevFrame = cvCloneImage (p_prevframe);
  }
  /* Comma-separated "line:column" string of cells with motion (internal
   * buffer; do not free). */
  char *getMotionCellsIdx ()
  {
    return m_motioncellsidxcstr;
  }
  int getMotionCellsIdxCount ()
  {
    return m_motioncells_idx_count;
  }
  bool getChangedDataFile ()
  {
    return m_changed_datafile;
  }
  /* Name of the data file whose initialization failed, or NULL. */
  char *getDatafileInitFailed ()
  {
    return m_initdatafilefailed;
  }
  char *getDatafileSaveFailed ()
  {
    return m_savedatafilefailed;
  }
  int getInitErrorCode ()
  {
    return m_initerrorcode;
  }
  int getSaveErrorCode ()
  {
    return m_saveerrorcode;
  }
  /* Closes the data file (if open) and disables further saving. */
  void freeDataFile ()
  {
    if (mc_savefile) {
      fclose (mc_savefile);
      mc_savefile = NULL;
      m_saveInDatafile = false;
    }
  }
private:
  double calculateMotionPercentInCell (int p_row, int p_col, double *p_cellarea,
      double *p_motionarea);
  void performMotionMaskCoords (motionmaskcoordrect * p_motionmaskcoords,
      int p_motionmaskcoords_count);
  void performMotionMask (motioncellidx * p_motionmaskcellsidx,
      int p_motionmaskcells_count);
  void calculateMotionPercentInMotionCells (motioncellidx *
      p_motionmaskcellsidx, int p_motionmaskcells_count = 0);
  int saveMotionCells (gint64 timestamp_millisec);
  int initDataFile (char *p_datafile, gint64 starttime);
  void blendImages (IplImage * p_actFrame, IplImage * p_cellsFrame,
      float p_alpha, float p_beta);
  /* Write one pixel of a single-channel image. */
  void setData (IplImage * img, int lin, int col, uchar valor)
  {
    ((uchar *) (img->imageData + img->widthStep * lin))[col] = valor;
  }
  /* Read one pixel of a single-channel image. */
  uchar getData (IplImage * img, int lin, int col)
  {
    return ((uchar *) (img->imageData + img->widthStep * lin))[col];
  }
  /* True if any pixel of the image is non-zero. */
  bool getIsNonZero (IplImage * img)
  {
    for (int lin = 0; lin < img->height; lin++)
      for (int col = 0; col < img->width; col++) {
        if ((((uchar *) (img->imageData + img->widthStep * lin))[col]) > 0)
          return true;
      }
    return false;
  }
  /* Allocate and zero the m_gridy x m_gridx cell matrix and derive the
   * per-cell pixel dimensions from the frame size. */
  void setMotionCells (int p_frameWidth, int p_frameHeight)
  {
    m_cellwidth = (double) p_frameWidth / (double) m_gridx;
    m_cellheight = (double) p_frameHeight / (double) m_gridy;
    m_pCells = new Cell *[m_gridy];
    for (int i = 0; i < m_gridy; i++)
      m_pCells[i] = new Cell[m_gridx];
    //init cells
    for (int i = 0; i < m_gridy; i++)
      for (int j = 0; j < m_gridx; j++) {
        m_pCells[i][j].MotionArea = 0;
        m_pCells[i][j].CellArea = 0;
        m_pCells[i][j].MotionPercent = 0;
        m_pCells[i][j].hasMotion = false;
      }
  }
  IplImage *m_pcurFrame, *m_pprevFrame, *m_pdifferenceImage,
      *m_pbwImage,*transparencyimg;
  CvSize m_frameSize;
  bool m_isVisible, m_changed_datafile, m_useAlpha, m_saveInDatafile;
  Cell **m_pCells;
  vector < MotionCellsIdx > m_MotionCells;
  vector < OverlayRegions > m_OverlayRegions;
  int m_gridx, m_gridy;
  double m_cellwidth, m_cellheight;
  double m_alpha, m_beta;
  double m_thresholdBoundingboxArea, m_cellArea, m_sensitivity;
  int m_framecnt, m_motioncells_idx_count, m_initerrorcode, m_saveerrorcode;
  char *m_motioncellsidxcstr, *m_initdatafilefailed, *m_savedatafilefailed;
  FILE *mc_savefile;
  MotionCellHeader m_header;
};
#endif /* MOTIONCELLS_H_ */

1109
ext/opencv/gstmotioncells.c Normal file

File diff suppressed because it is too large Load diff

124
ext/opencv/gstmotioncells.h Normal file
View file

@ -0,0 +1,124 @@
/*
* GStreamer
* Copyright (C) 2011 Robert Jobbagy <jobbagy.robert@gmail.com>
* Copyright (C) 2011 Nicola Murino <nicola.murino@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Alternatively, the contents of this file may be used under the
* GNU Lesser General Public License Version 2.1 (the "LGPL"), in
* which case the following provisions apply instead of the ones
* mentioned above:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_MOTIONCELLS_H__
#define __GST_MOTIONCELLS_H__
#include <gst/gst.h>
#include <cv.h>
G_BEGIN_DECLS
/* #defines don't like whitespacey bits */
#define GST_TYPE_MOTIONCELLS \
(gst_motion_cells_get_type())
#define gst_motion_cells(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MOTIONCELLS,GstMotioncells))
#define gst_motion_cells_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MOTIONCELLS,GstMotioncellsClass))
#define GST_IS_MOTIONCELLS(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MOTIONCELLS))
#define GST_IS_MOTIONCELLS_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MOTIONCELLS))
typedef struct _GstMotioncells GstMotioncells;
typedef struct _GstMotioncellsClass GstMotioncellsClass;
typedef struct {
int upper_left_x;
int upper_left_y;
int lower_right_x;
int lower_right_y;
} motionmaskcoordrect;
typedef struct {
int R_channel_value;
int G_channel_value;
int B_channel_value;
} cellscolor;
typedef struct {
int lineidx;
int columnidx;
} motioncellidx;
/* Instance structure of the motioncells element: configuration set via
 * properties, per-stream state, and bookkeeping for motion timestamps. */
struct _GstMotioncells
{
  GstElement element;
  GstPad *sinkpad, *srcpad;
  GstState state;
  /* property-driven flags and change trackers */
  gboolean display, calculate_motion, firstgridx, firstgridy, changed_gridx,
      changed_gridy, changed_startime;
  gboolean previous_motion, changed_datafile, postallmotion, usealpha,
      firstdatafile, firstframe;
  /* ensure init/save error bus messages are posted only once */
  gboolean sent_init_error_msg, sent_save_error_msg;
  gchar *prev_datafile, *cur_datafile, *basename_datafile, *datafile_extension;
  gint prevgridx, gridx, prevgridy, gridy, id;
  gdouble sensitivity, threshold;
  IplImage *cvImage;
  /* caller-configured masks and cell lists (see matching *_count fields) */
  motionmaskcoordrect *motionmaskcoords;
  cellscolor *motioncellscolor;
  motioncellidx *motioncellsidx, *motionmaskcellsidx;
  int motionmaskcoord_count, motioncells_count, motionmaskcells_count;
  int gap, thickness, datafileidx, postnomotion, minimum_motion_frames;
  /* timestamps used to decide when to post motion/no-motion messages */
  guint64 motion_begin_timestamp, last_motion_timestamp, motion_timestamp,
      last_nomotion_notified, prev_buff_timestamp, cur_buff_timestamp;
  gint64 diff_timestamp, starttime;
  guint64 consecutive_motion;
  gint width, height;
  //time stuff
  struct timeval tv;
  /* serializes property access against the streaming thread */
  GMutex *propset_mutex;
  double framerate;
};

struct _GstMotioncellsClass
{
  GstElementClass parent_class;
};
GType gst_motion_cells_get_type (void);
gboolean gst_motioncells_plugin_init (GstPlugin * plugin);
G_END_DECLS
#endif /* __GST_MOTION_CELLS_H__ */

View file

@ -32,6 +32,7 @@
#include "gstedgedetect.h" #include "gstedgedetect.h"
#include "gstfaceblur.h" #include "gstfaceblur.h"
#include "gstfacedetect.h" #include "gstfacedetect.h"
#include "gstmotioncells.h"
#include "gstpyramidsegment.h" #include "gstpyramidsegment.h"
#include "gsttemplatematch.h" #include "gsttemplatematch.h"
#include "gsttextoverlay.h" #include "gsttextoverlay.h"
@ -66,6 +67,9 @@ plugin_init (GstPlugin * plugin)
if (!gst_facedetect_plugin_init (plugin)) if (!gst_facedetect_plugin_init (plugin))
return FALSE; return FALSE;
if (!gst_motioncells_plugin_init (plugin))
return FALSE;
if (!gst_pyramidsegment_plugin_init (plugin)) if (!gst_pyramidsegment_plugin_init (plugin))
return FALSE; return FALSE;

View file

@ -0,0 +1,213 @@
/*
* GStreamer
* Copyright (C) 2011 Robert Jobbagy <jobbagy.robert@gmail.com>
* Copyright (C) 2011 Nicola Murino <nicola.murino@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Alternatively, the contents of this file may be used under the
* GNU Lesser General Public License Version 2.1 (the "LGPL"), in
* which case the following provisions apply instead of the ones
* mentioned above:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include <stdio.h>
#include <limits.h>
#include "motioncells_wrapper.h"
extern int instanceCounter;
extern bool element_id_was_max;
MotionCells *mc;
char p_str[] = "idx failed";
/* Create a new MotionCells engine and register it in the global instance
 * vector under the current instanceCounter id, then advance the counter.
 * Freed ids (from motion_cells_free) are recycled once the counter has
 * reached INT_MAX. */
void
motion_cells_init ()
{
  mc = new MotionCells ();
  instanceOfMC tmpmc;
  tmpmc.id = instanceCounter;
  tmpmc.mc = mc;
  motioncellsvector.push_back (tmpmc);
  if ((instanceCounter < INT_MAX) && !element_id_was_max) {
    instanceCounter++;
  } else if (!motioncellsfreeids.empty ()) {
    element_id_was_max = true;
    instanceCounter = motioncellsfreeids.back ();
    motioncellsfreeids.pop_back ();
  } else {
    /* Counter exhausted and no freed ids to recycle: calling back() on an
     * empty vector would be undefined behavior, so keep the current id.
     * Duplicate ids are possible only after INT_MAX live instances. */
    element_id_was_max = true;
  }
}
/* C entry point: forward a detection request to the MotionCells instance
 * registered under p_id. Returns the engine's error code, or -1 if no
 * instance with that id exists (previously an unchecked .at(-1) would have
 * thrown std::out_of_range). */
int
perform_detection_motion_cells (IplImage * p_image, double p_sensitivity,
    double p_framerate, int p_gridx, int p_gridy, long int p_timestamp_millisec,
    bool p_isVisible, bool p_useAlpha, int motionmaskcoord_count,
    motionmaskcoordrect * motionmaskcoords, int motionmaskcells_count,
    motioncellidx * motionmaskcellsidx, cellscolor motioncellscolor,
    int motioncells_count, motioncellidx * motioncellsidx, gint64 starttime,
    char *p_datafile, bool p_changed_datafile, int p_thickness, int p_id)
{
  int idx = searchIdx (p_id);
  if (idx < 0)
    return -1;
  return motioncellsvector.at (idx).mc->performDetectionMotionCells (p_image,
      p_sensitivity, p_framerate, p_gridx, p_gridy, p_timestamp_millisec,
      p_isVisible, p_useAlpha, motionmaskcoord_count, motionmaskcoords,
      motionmaskcells_count, motionmaskcellsidx, motioncellscolor,
      motioncells_count, motioncellsidx, starttime, p_datafile,
      p_changed_datafile, p_thickness);
}
/* Store p_prevFrame as the comparison baseline for instance p_id.
 * A missing id is silently ignored (previously .at(-1) would have thrown). */
void
setPrevFrame (IplImage * p_prevFrame, int p_id)
{
  int idx = searchIdx (p_id);
  if (idx > -1)
    motioncellsvector.at (idx).mc->setPrevFrame (p_prevFrame);
}
/* Return the "line:column" motion-cell list for instance p_id, or the
 * shared "idx failed" string when the id is unknown. */
char *
getMotionCellsIdx (int p_id)
{
  int pos = searchIdx (p_id);
  if (pos < 0)
    return p_str;
  return motioncellsvector.at (pos).mc->getMotionCellsIdx ();
}
/* Number of cells with motion for instance p_id; 0 when the id is unknown. */
int
getMotionCellsIdxCnt (int p_id)
{
  int pos = searchIdx (p_id);
  if (pos < 0)
    return 0;
  return motioncellsvector.at (pos).mc->getMotionCellsIdxCount ();
}
/* Whether the data file of instance p_id changed; false for unknown ids. */
bool
getChangedDataFile (int p_id)
{
  int pos = searchIdx (p_id);
  if (pos < 0)
    return false;
  return motioncellsvector.at (pos).mc->getChangedDataFile ();
}
/* Linear search of the global instance vector; returns the vector position
 * of the instance with id p_id, or -1 when not registered. */
int
searchIdx (int p_id)
{
  for (unsigned int pos = 0; pos < motioncellsvector.size (); pos++) {
    if (motioncellsvector.at (pos).id == p_id)
      return pos;
  }
  return -1;
}
/* Name of the data file whose init failed for instance p_id, or the shared
 * "idx failed" string when the id is unknown. */
char *
getInitDataFileFailed (int p_id)
{
  int pos = searchIdx (p_id);
  if (pos < 0)
    return p_str;
  return motioncellsvector.at (pos).mc->getDatafileInitFailed ();
}
/* Name of the data file whose save failed for instance p_id, or the shared
 * "idx failed" string when the id is unknown. */
char *
getSaveDataFileFailed (int p_id)
{
  int pos = searchIdx (p_id);
  if (pos < 0)
    return p_str;
  return motioncellsvector.at (pos).mc->getDatafileSaveFailed ();
}
/* Data-file init error code of instance p_id; -1 when the id is unknown. */
int
getInitErrorCode (int p_id)
{
  int pos = searchIdx (p_id);
  if (pos < 0)
    return -1;
  return motioncellsvector.at (pos).mc->getInitErrorCode ();
}
/* Data-file save error code of instance p_id; -1 when the id is unknown. */
int
getSaveErrorCode (int p_id)
{
  int pos = searchIdx (p_id);
  if (pos < 0)
    return -1;
  return motioncellsvector.at (pos).mc->getSaveErrorCode ();
}
/* Destroy the MotionCells instance registered under p_id, remove it from
 * the global vector, and remember the id for recycling. Unknown ids are
 * ignored. */
void
motion_cells_free (int p_id)
{
  int pos = searchIdx (p_id);
  if (pos < 0)
    return;
  delete motioncellsvector.at (pos).mc;
  motioncellsvector.erase (motioncellsvector.begin () + pos);
  motioncellsfreeids.push_back (p_id);
}
/* Close the data file of instance p_id without destroying the instance. */
void
motion_cells_free_resources (int p_id)
{
  int pos = searchIdx (p_id);
  if (pos > -1)
    motioncellsvector.at (pos).mc->freeDataFile ();
}

View file

@ -0,0 +1,89 @@
/*
* GStreamer
* Copyright (C) 2011 Robert Jobbagy <jobbagy.robert@gmail.com>
* Copyright (C) 2011 Nicola Murino <nicola.murino@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a
* copy of this software and associated documentation files (the "Software"),
* to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense,
* and/or sell copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*
* Alternatively, the contents of this file may be used under the
* GNU Lesser General Public License Version 2.1 (the "LGPL"), in
* which case the following provisions apply instead of the ones
* mentioned above:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/* C wrapper around the C++ MotionCells engine, keyed by integer ids so the
 * plain-C GStreamer element can drive multiple instances. */
#ifndef MOTIONCELLS_WRAPPER_H
#define MOTIONCELLS_WRAPPER_H
#include <stdbool.h>
#ifdef __cplusplus
#include "MotionCells.h"
/* Pairs a numeric element id with its MotionCells engine instance. */
struct instanceOfMC
{
  int id;
  MotionCells *mc;
};
/* NOTE(review): these globals are *defined* (not declared extern) in a
 * header; including this header from more than one C++ translation unit
 * would produce duplicate-symbol errors — confirm it is included only from
 * motioncells_wrapper.cpp. */
vector < instanceOfMC > motioncellsvector;
vector < int >motioncellsfreeids;
/* Returns the vector position for an id, or -1 when not registered. */
int searchIdx (int p_id);
extern "C"
{
#endif
/* Create and register a new engine instance. */
  void motion_cells_init ();
/* Run one detection pass on the instance registered under p_id. */
  int perform_detection_motion_cells (IplImage * p_image, double p_sensitivity,
      double p_framerate, int p_gridx, int p_gridy,
      long int p_timestamp_millisec, bool p_isVisible, bool p_useAlpha,
      int motionmaskcoord_count, motionmaskcoordrect * motionmaskcoords,
      int motionmaskcells_count, motioncellidx * motionmaskcellsidx,
      cellscolor motioncellscolor, int motioncells_count,
      motioncellidx * motioncellsidx, gint64 starttime, char *datafile,
      bool p_changed_datafile, int p_thickness, int p_id);
  void setPrevFrame (IplImage * p_prevFrame, int p_id);
/* Destroy an instance / close its data file. */
  void motion_cells_free (int p_id);
  void motion_cells_free_resources (int p_id);
/* Per-instance accessors; all tolerate unknown ids. */
  char *getMotionCellsIdx (int p_id);
  int getMotionCellsIdxCnt (int p_id);
  bool getChangedDataFile (int p_id);
  char *getInitDataFileFailed (int p_id);
  char *getSaveDataFileFailed (int p_id);
  int getInitErrorCode (int p_id);
  int getSaveErrorCode (int p_id);
#ifdef __cplusplus
}
#endif
#endif /* MOTIONCELLS_WRAPPER_H */

16
ext/opus/Makefile.am Normal file
View file

@ -0,0 +1,16 @@
# Build rules for the Opus audio plugin (opusenc / opusdec elements).
plugin_LTLIBRARIES = libgstopus.la
libgstopus_la_SOURCES = gstopus.c gstopusdec.c gstopusenc.c
libgstopus_la_CFLAGS = \
	$(GST_PLUGINS_BASE_CFLAGS) \
	$(GST_CFLAGS) \
	$(OPUS_CFLAGS)
# gsttag is needed for gst_tag_register_musicbrainz_tags () in gstopus.c
libgstopus_la_LIBADD = \
	$(GST_PLUGINS_BASE_LIBS) -lgsttag-$(GST_MAJORMINOR) \
	$(GST_BASE_LIBS) \
	$(GST_LIBS) \
	$(OPUS_LIBS)
libgstopus_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) $(LIBM)
libgstopus_la_LIBTOOLFLAGS = --tag=disable-static
noinst_HEADERS = gstopusenc.h gstopusdec.h

50
ext/opus/gstopus.c Normal file
View file

@ -0,0 +1,50 @@
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
* Copyright (C) <2008> Sebastian Dröge <sebastian.droege@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include "gstopusdec.h"
#include "gstopusenc.h"
#include <gst/tag/tag.h>
/* Plugin entry point: register the opusenc and opusdec elements and the
 * MusicBrainz tags used by the decoder's tag handling. */
static gboolean
plugin_init (GstPlugin * plugin)
{
  if (!gst_element_register (plugin, "opusenc", GST_RANK_NONE,
          GST_TYPE_OPUS_ENC))
    return FALSE;

  /* PRIMARY rank: preferred decoder for audio/x-opus */
  if (!gst_element_register (plugin, "opusdec", GST_RANK_PRIMARY,
          GST_TYPE_OPUS_DEC))
    return FALSE;

  gst_tag_register_musicbrainz_tags ();

  return TRUE;
}
/* Export the plugin descriptor picked up by the GStreamer registry. */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    "opus",
    "OPUS plugin library",
    plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)

865
ext/opus/gstopusdec.c Normal file
View file

@ -0,0 +1,865 @@
/* GStreamer
* Copyright (C) 2004 Wim Taymans <wim@fluendo.com>
* Copyright (C) 2006 Tim-Philipp Müller <tim centricular net>
* Copyright (C) 2008 Sebastian Dröge <sebastian.droege@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/*
* Based on the speexdec element.
*/
/**
* SECTION:element-opusdec
* @see_also: opusenc, oggdemux
*
* This element decodes a OPUS stream to raw integer audio.
*
* <refsect2>
* <title>Example pipelines</title>
* |[
* gst-launch -v filesrc location=opus.ogg ! oggdemux ! opusdec ! audioconvert ! audioresample ! alsasink
* ]| Decode an Ogg/Opus file. To create an Ogg/Opus file refer to the documentation of opusenc.
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
# include "config.h"
#endif
#include "gstopusdec.h"
#include <string.h>
#include <gst/tag/tag.h>
GST_DEBUG_CATEGORY_STATIC (opusdec_debug);
#define GST_CAT_DEFAULT opusdec_debug

/* Upper bound on decoded samples handled per packet. */
#define DEC_MAX_FRAME_SIZE 2000

/* Source pad: 16-bit native-endian PCM.
 * NOTE(review): rate range 32000-64000 looks unusual for Opus (output is
 * normally 48 kHz) — confirm against the negotiated caps downstream. */
static GstStaticPadTemplate opus_dec_src_factory =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-raw-int, "
        "rate = (int) [ 32000, 64000 ], "
        "channels = (int) [ 1, 2 ], "
        "endianness = (int) BYTE_ORDER, "
        "signed = (boolean) true, " "width = (int) 16, " "depth = (int) 16")
    );

/* Sink pad: encoded Opus packets. */
static GstStaticPadTemplate opus_dec_sink_factory =
GST_STATIC_PAD_TEMPLATE ("sink",
    GST_PAD_SINK,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-opus")
    );

GST_BOILERPLATE (GstOpusDec, gst_opus_dec, GstElement, GST_TYPE_ELEMENT);

/* Forward declarations for the pad/element callbacks wired up in init. */
static gboolean opus_dec_sink_event (GstPad * pad, GstEvent * event);
static GstFlowReturn opus_dec_chain (GstPad * pad, GstBuffer * buf);
static gboolean opus_dec_sink_setcaps (GstPad * pad, GstCaps * caps);
static GstStateChangeReturn opus_dec_change_state (GstElement * element,
    GstStateChange transition);

static gboolean opus_dec_src_event (GstPad * pad, GstEvent * event);
static gboolean opus_dec_src_query (GstPad * pad, GstQuery * query);
static gboolean opus_dec_sink_query (GstPad * pad, GstQuery * query);
static const GstQueryType *opus_get_src_query_types (GstPad * pad);
static const GstQueryType *opus_get_sink_query_types (GstPad * pad);
static gboolean opus_dec_convert (GstPad * pad,
    GstFormat src_format, gint64 src_value,
    GstFormat * dest_format, gint64 * dest_value);

static GstFlowReturn opus_dec_chain_parse_data (GstOpusDec * dec,
    GstBuffer * buf, GstClockTime timestamp, GstClockTime duration);
static GstFlowReturn opus_dec_chain_parse_header (GstOpusDec * dec,
    GstBuffer * buf);
#if 0
static GstFlowReturn opus_dec_chain_parse_comments (GstOpusDec * dec,
    GstBuffer * buf);
#endif
/* GObject base_init: install the pad templates and element metadata shown
 * by gst-inspect. */
static void
gst_opus_dec_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&opus_dec_src_factory));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&opus_dec_sink_factory));
  gst_element_class_set_details_simple (element_class, "Opus audio decoder",
      "Codec/Decoder/Audio",
      "decode opus streams to audio",
      "Sebastian Dröge <sebastian.droege@collabora.co.uk>");
}
/* GObject class_init: hook the state-change handler and set up the debug
 * category used throughout this file. */
static void
gst_opus_dec_class_init (GstOpusDecClass * klass)
{
  GstElementClass *gstelement_class;

  gstelement_class = (GstElementClass *) klass;

  gstelement_class->change_state = GST_DEBUG_FUNCPTR (opus_dec_change_state);

  GST_DEBUG_CATEGORY_INIT (opusdec_debug, "opusdec", 0,
      "opus decoding element");
}
/* Reset decoder state to its pre-stream defaults: clear segment/position
 * tracking, destroy the libopus decoder, and drop any cached headers. */
static void
gst_opus_dec_reset (GstOpusDec * dec)
{
  gst_segment_init (&dec->segment, GST_FORMAT_UNDEFINED);
  dec->granulepos = -1;
  dec->packetno = 0;
  dec->frame_size = 0;
  /* 960 samples = 20 ms at 48 kHz, the default Opus frame */
  dec->frame_samples = 960;
  dec->frame_duration = 0;
  if (dec->state) {
    opus_decoder_destroy (dec->state);
    dec->state = NULL;
  }
#if 0
  if (dec->mode) {
    opus_mode_destroy (dec->mode);
    dec->mode = NULL;
  }
#endif

  gst_buffer_replace (&dec->streamheader, NULL);
  gst_buffer_replace (&dec->vorbiscomment, NULL);
  /* extra_headers holds refs taken in setcaps; drop them all */
  g_list_foreach (dec->extra_headers, (GFunc) gst_mini_object_unref, NULL);
  g_list_free (dec->extra_headers);
  dec->extra_headers = NULL;

#if 0
  memset (&dec->header, 0, sizeof (dec->header));
#endif
}
/* Instance init: create sink/src pads, wire up all pad functions declared
 * above, and put the decoder into its default reset state. */
static void
gst_opus_dec_init (GstOpusDec * dec, GstOpusDecClass * g_class)
{
  dec->sinkpad =
      gst_pad_new_from_static_template (&opus_dec_sink_factory, "sink");
  gst_pad_set_chain_function (dec->sinkpad, GST_DEBUG_FUNCPTR (opus_dec_chain));
  gst_pad_set_event_function (dec->sinkpad,
      GST_DEBUG_FUNCPTR (opus_dec_sink_event));
  gst_pad_set_query_type_function (dec->sinkpad,
      GST_DEBUG_FUNCPTR (opus_get_sink_query_types));
  gst_pad_set_query_function (dec->sinkpad,
      GST_DEBUG_FUNCPTR (opus_dec_sink_query));
  gst_pad_set_setcaps_function (dec->sinkpad,
      GST_DEBUG_FUNCPTR (opus_dec_sink_setcaps));
  gst_element_add_pad (GST_ELEMENT (dec), dec->sinkpad);

  dec->srcpad = gst_pad_new_from_static_template (&opus_dec_src_factory, "src");
  /* output caps are fixed once negotiated */
  gst_pad_use_fixed_caps (dec->srcpad);
  gst_pad_set_event_function (dec->srcpad,
      GST_DEBUG_FUNCPTR (opus_dec_src_event));
  gst_pad_set_query_type_function (dec->srcpad,
      GST_DEBUG_FUNCPTR (opus_get_src_query_types));
  gst_pad_set_query_function (dec->srcpad,
      GST_DEBUG_FUNCPTR (opus_dec_src_query));
  gst_element_add_pad (GST_ELEMENT (dec), dec->srcpad);

  dec->sample_rate = 48000;
  dec->n_channels = 2;

  gst_opus_dec_reset (dec);
}
/* Sink setcaps: parse the Opus stream header out of the caps' streamheader
 * array (buffer 0 = ident header; buffers 2..n = extra headers kept for
 * later pushing). Returns FALSE when the ident header fails to parse —
 * previously the failure path still returned TRUE, silently accepting
 * caps whose header could not be used. */
static gboolean
opus_dec_sink_setcaps (GstPad * pad, GstCaps * caps)
{
  GstOpusDec *dec = GST_OPUS_DEC (gst_pad_get_parent (pad));
  gboolean ret = TRUE;
  GstStructure *s;
  const GValue *streamheader;

  s = gst_caps_get_structure (caps, 0);
  if ((streamheader = gst_structure_get_value (s, "streamheader")) &&
      G_VALUE_HOLDS (streamheader, GST_TYPE_ARRAY) &&
      gst_value_array_get_size (streamheader) >= 2) {
    const GValue *header;
    GstBuffer *buf;
    GstFlowReturn res = GST_FLOW_OK;

    header = gst_value_array_get_value (streamheader, 0);
    if (header && G_VALUE_HOLDS (header, GST_TYPE_BUFFER)) {
      buf = gst_value_get_buffer (header);
      res = opus_dec_chain_parse_header (dec, buf);
      if (res != GST_FLOW_OK) {
        /* header unusable: reject the caps instead of claiming success */
        ret = FALSE;
        goto done;
      }
      gst_buffer_replace (&dec->streamheader, buf);
    }
#if 0
    vorbiscomment = gst_value_array_get_value (streamheader, 1);
    if (vorbiscomment && G_VALUE_HOLDS (vorbiscomment, GST_TYPE_BUFFER)) {
      buf = gst_value_get_buffer (vorbiscomment);
      res = opus_dec_chain_parse_comments (dec, buf);
      if (res != GST_FLOW_OK)
        goto done;
      gst_buffer_replace (&dec->vorbiscomment, buf);
    }
#endif

    /* drop any previously cached extra headers before re-collecting */
    g_list_foreach (dec->extra_headers, (GFunc) gst_mini_object_unref, NULL);
    g_list_free (dec->extra_headers);
    dec->extra_headers = NULL;

    if (gst_value_array_get_size (streamheader) > 2) {
      gint i, n;

      n = gst_value_array_get_size (streamheader);
      for (i = 2; i < n; i++) {
        header = gst_value_array_get_value (streamheader, i);
        buf = gst_value_get_buffer (header);
        dec->extra_headers =
            g_list_prepend (dec->extra_headers, gst_buffer_ref (buf));
      }
    }
  }

done:
  gst_object_unref (dec);
  return ret;
}
/* Convert src_value between TIME, DEFAULT (samples) and BYTES formats using
 * the decoder's sample rate and channel count.
 * Returns FALSE when no packet has been processed yet (packetno < 1, so the
 * stream parameters are not reliable), or when the requested conversion is
 * not supported on the given pad. */
static gboolean
opus_dec_convert (GstPad * pad,
    GstFormat src_format, gint64 src_value,
    GstFormat * dest_format, gint64 * dest_value)
{
  gboolean res = TRUE;
  GstOpusDec *dec;
  guint64 scale = 1;

  dec = GST_OPUS_DEC (gst_pad_get_parent (pad));

  /* need at least one packet before conversions make sense */
  if (dec->packetno < 1) {
    res = FALSE;
    goto cleanup;
  }

  /* identity conversion */
  if (src_format == *dest_format) {
    *dest_value = src_value;
    res = TRUE;
    goto cleanup;
  }

  /* BYTES on the sink pad are compressed Opus data: there is no linear
   * mapping to or from time/samples */
  if (pad == dec->sinkpad &&
      (src_format == GST_FORMAT_BYTES || *dest_format == GST_FORMAT_BYTES)) {
    res = FALSE;
    goto cleanup;
  }

  switch (src_format) {
    case GST_FORMAT_TIME:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          scale = sizeof (gint16) * dec->n_channels;
          /* fallthrough: bytes = samples * (bytes per sample frame);
           * with scale == 1 the same formula yields samples */
        case GST_FORMAT_DEFAULT:
          *dest_value =
              gst_util_uint64_scale_int (scale * src_value,
              dec->sample_rate, GST_SECOND);
          break;
        default:
          res = FALSE;
          break;
      }
      break;
    case GST_FORMAT_DEFAULT:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          *dest_value = src_value * sizeof (gint16) * dec->n_channels;
          break;
        case GST_FORMAT_TIME:
          *dest_value =
              gst_util_uint64_scale_int (src_value, GST_SECOND,
              dec->sample_rate);
          break;
        default:
          res = FALSE;
          break;
      }
      break;
    case GST_FORMAT_BYTES:
      switch (*dest_format) {
        case GST_FORMAT_DEFAULT:
          *dest_value = src_value / (sizeof (gint16) * dec->n_channels);
          break;
        case GST_FORMAT_TIME:
          *dest_value = gst_util_uint64_scale_int (src_value, GST_SECOND,
              dec->sample_rate * sizeof (gint16) * dec->n_channels);
          break;
        default:
          res = FALSE;
          break;
      }
      break;
    default:
      res = FALSE;
      break;
  }

cleanup:
  gst_object_unref (dec);
  return res;
}
/* Report the query types the sink pad handles itself: only CONVERT. */
static const GstQueryType *
opus_get_sink_query_types (GstPad * pad)
{
  static const GstQueryType types[] = { GST_QUERY_CONVERT, 0 };

  return types;
}
/* Query handler for the sink pad: answers CONVERT queries via
 * opus_dec_convert() and forwards everything else to the default handler. */
static gboolean
opus_dec_sink_query (GstPad * pad, GstQuery * query)
{
  GstOpusDec *dec = GST_OPUS_DEC (gst_pad_get_parent (pad));
  gboolean handled;

  if (GST_QUERY_TYPE (query) == GST_QUERY_CONVERT) {
    GstFormat from_fmt, to_fmt;
    gint64 from_val, to_val;

    gst_query_parse_convert (query, &from_fmt, &from_val, &to_fmt, &to_val);
    handled = opus_dec_convert (pad, from_fmt, from_val, &to_fmt, &to_val);
    if (handled)
      gst_query_set_convert (query, from_fmt, from_val, to_fmt, to_val);
  } else {
    handled = gst_pad_query_default (pad, query);
  }

  gst_object_unref (dec);
  return handled;
}
/* Report the query types the src pad handles itself: position and duration. */
static const GstQueryType *
opus_get_src_query_types (GstPad * pad)
{
  static const GstQueryType types[] = {
    GST_QUERY_POSITION, GST_QUERY_DURATION, 0
  };

  return types;
}
/* Query handler for the src pad.
 * POSITION is answered from the decoder's segment (converted to the
 * requested format); DURATION is obtained from the upstream peer in TIME
 * and then converted. Everything else goes to the default handler. */
static gboolean
opus_dec_src_query (GstPad * pad, GstQuery * query)
{
  GstOpusDec *dec;
  gboolean res = FALSE;

  dec = GST_OPUS_DEC (gst_pad_get_parent (pad));

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_POSITION:{
      GstSegment segment;
      GstFormat format;
      gint64 cur;

      gst_query_parse_position (query, &format, NULL);

      /* take a consistent snapshot of the segment under the stream lock */
      GST_PAD_STREAM_LOCK (dec->sinkpad);
      segment = dec->segment;
      GST_PAD_STREAM_UNLOCK (dec->sinkpad);

      if (segment.format != GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (dec, "segment not initialised yet");
        break;
      }

      if ((res = opus_dec_convert (dec->srcpad, GST_FORMAT_TIME,
                  segment.last_stop, &format, &cur))) {
        gst_query_set_position (query, format, cur);
      }
      break;
    }
    case GST_QUERY_DURATION:{
      GstFormat format = GST_FORMAT_TIME;
      gint64 dur;

      /* get duration from demuxer */
      if (!gst_pad_query_peer_duration (dec->sinkpad, &format, &dur))
        break;

      gst_query_parse_duration (query, &format, NULL);

      /* and convert it into the requested format */
      if ((res = opus_dec_convert (dec->srcpad, GST_FORMAT_TIME,
                  dur, &format, &dur))) {
        gst_query_set_duration (query, format, dur);
      }
      break;
    }
    default:
      res = gst_pad_query_default (pad, query);
      break;
  }

  gst_object_unref (dec);
  return res;
}
/* Event handler for the src pad.
 * SEEK events are translated to TIME format (we cannot map src formats to
 * granulepos ourselves) and pushed upstream on the sink pad; the original
 * event is then released. Other events use the default handler. */
static gboolean
opus_dec_src_event (GstPad * pad, GstEvent * event)
{
  gboolean res = FALSE;
  GstOpusDec *dec = GST_OPUS_DEC (gst_pad_get_parent (pad));

  GST_LOG_OBJECT (dec, "handling %s event", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:{
      GstFormat format, tformat;
      gdouble rate;
      GstEvent *real_seek;
      GstSeekFlags flags;
      GstSeekType cur_type, stop_type;
      gint64 cur, stop;
      gint64 tcur, tstop;

      gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur,
          &stop_type, &stop);

      /* we have to ask our peer to seek to time here as we know
       * nothing about how to generate a granulepos from the src
       * formats or anything.
       *
       * First bring the requested format to time
       */
      tformat = GST_FORMAT_TIME;
      if (!(res = opus_dec_convert (pad, format, cur, &tformat, &tcur)))
        break;
      if (!(res = opus_dec_convert (pad, format, stop, &tformat, &tstop)))
        break;

      /* then seek with time on the peer */
      real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME,
          flags, cur_type, tcur, stop_type, tstop);

      GST_LOG_OBJECT (dec, "seek to %" GST_TIME_FORMAT, GST_TIME_ARGS (tcur));

      res = gst_pad_push_event (dec->sinkpad, real_seek);
      /* the original event was replaced by real_seek; drop our reference */
      gst_event_unref (event);
      break;
    }
    default:
      res = gst_pad_event_default (pad, event);
      break;
  }

  gst_object_unref (dec);
  return res;
}
/* Event handler for the sink pad.
 * NEWSEGMENT (TIME only, positive rate only) updates dec->segment; on a
 * segment update that skips ahead of last_stop, concealment audio is
 * synthesized to fill the gap before the new values are applied. */
static gboolean
opus_dec_sink_event (GstPad * pad, GstEvent * event)
{
  GstOpusDec *dec;
  gboolean ret = FALSE;

  dec = GST_OPUS_DEC (gst_pad_get_parent (pad));

  GST_LOG_OBJECT (dec, "handling %s event", GST_EVENT_TYPE_NAME (event));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_NEWSEGMENT:{
      GstFormat format;
      gdouble rate, arate;
      gint64 start, stop, time;
      gboolean update;

      gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
          &start, &stop, &time);

      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;

      if (rate <= 0.0)
        goto newseg_wrong_rate;

      if (update) {
        /* time progressed without data, see if we can fill the gap with
         * some concealment data */
        if (dec->segment.last_stop < start) {
          GstClockTime duration;

          duration = start - dec->segment.last_stop;
          /* NULL buffer triggers packet-loss concealment in the decoder */
          opus_dec_chain_parse_data (dec, NULL, dec->segment.last_stop,
              duration);
        }
      }

      /* now configure the values */
      gst_segment_set_newsegment_full (&dec->segment, update,
          rate, arate, GST_FORMAT_TIME, start, stop, time);

      /* granulepos must be re-derived from the new segment position */
      dec->granulepos = -1;

      GST_DEBUG_OBJECT (dec, "segment now: cur = %" GST_TIME_FORMAT " [%"
          GST_TIME_FORMAT " - %" GST_TIME_FORMAT "]",
          GST_TIME_ARGS (dec->segment.last_stop),
          GST_TIME_ARGS (dec->segment.start),
          GST_TIME_ARGS (dec->segment.stop));

      ret = gst_pad_push_event (dec->srcpad, event);
      break;
    }
    default:
      ret = gst_pad_event_default (pad, event);
      break;
  }

  gst_object_unref (dec);
  return ret;

  /* ERRORS */
newseg_wrong_format:
  {
    GST_DEBUG_OBJECT (dec, "received non TIME newsegment");
    gst_object_unref (dec);
    return FALSE;
  }
newseg_wrong_rate:
  {
    GST_DEBUG_OBJECT (dec, "negative rates not supported yet");
    gst_object_unref (dec);
    return FALSE;
  }
}
/* Parse the Opus stream header buffer: create the decoder instance,
 * compute the per-frame duration, and set fixed S16 caps on the src pad.
 * The #if 0 regions are leftovers from the CELT-era API (mode objects,
 * header validation) kept for reference.
 * NOTE(review): @buf is currently unused by the live code path — the
 * decoder is created purely from dec->sample_rate / dec->n_channels. */
static GstFlowReturn
opus_dec_chain_parse_header (GstOpusDec * dec, GstBuffer * buf)
{
  GstCaps *caps;
  //gint error = OPUS_OK;

#if 0
  dec->samples_per_frame = opus_packet_get_samples_per_frame (
      (const unsigned char *) GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
#endif

#if 0
  if (memcmp (dec->header.codec_id, "OPUS    ", 8) != 0)
    goto invalid_header;
#endif

#if 0
#ifdef HAVE_OPUS_0_7
  dec->mode =
      opus_mode_create (dec->sample_rate, dec->header.frame_size, &error);
#else
  dec->mode =
      opus_mode_create (dec->sample_rate, dec->header.nb_channels,
      dec->header.frame_size, &error);
#endif
  if (!dec->mode)
    goto mode_init_failed;

  /* initialize the decoder */
#ifdef HAVE_OPUS_0_11
  dec->state =
      opus_decoder_create_custom (dec->mode, dec->header.nb_channels, &error);
#else
#ifdef HAVE_OPUS_0_7
  dec->state = opus_decoder_create (dec->mode, dec->header.nb_channels, &error);
#else
  dec->state = opus_decoder_create (dec->mode);
#endif
#endif
#endif
  dec->state = opus_decoder_create (dec->sample_rate, dec->n_channels);
  if (!dec->state)
    goto init_failed;

#if 0
#ifdef HAVE_OPUS_0_8
  dec->frame_size = dec->header.frame_size;
#else
  opus_mode_info (dec->mode, OPUS_GET_FRAME_SIZE, &dec->frame_size);
#endif
#endif

  /* duration of one decoded frame in nanoseconds */
  dec->frame_duration = gst_util_uint64_scale_int (dec->frame_size,
      GST_SECOND, dec->sample_rate);

  /* set caps */
  caps = gst_caps_new_simple ("audio/x-raw-int",
      "rate", G_TYPE_INT, dec->sample_rate,
      "channels", G_TYPE_INT, dec->n_channels,
      "signed", G_TYPE_BOOLEAN, TRUE,
      "endianness", G_TYPE_INT, G_BYTE_ORDER,
      "width", G_TYPE_INT, 16, "depth", G_TYPE_INT, 16, NULL);

  GST_DEBUG_OBJECT (dec, "rate=%d channels=%d frame-size=%d",
      dec->sample_rate, dec->n_channels, dec->frame_size);

  if (!gst_pad_set_caps (dec->srcpad, caps))
    goto nego_failed;

  gst_caps_unref (caps);
  return GST_FLOW_OK;

  /* ERRORS */
#if 0
invalid_header:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
        (NULL), ("Invalid header"));
    return GST_FLOW_ERROR;
  }
mode_init_failed:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
        (NULL), ("Mode initialization failed: %d", error));
    return GST_FLOW_ERROR;
  }
#endif
init_failed:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
        (NULL), ("couldn't initialize decoder"));
    return GST_FLOW_ERROR;
  }
nego_failed:
  {
    GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE,
        (NULL), ("couldn't negotiate format"));
    gst_caps_unref (caps);
    return GST_FLOW_NOT_NEGOTIATED;
  }
}
#if 0
/* Parse a vorbiscomment buffer into a tag list and post it on the src pad.
 * Currently disabled (#if 0): Opus comment handling is not wired up yet. */
static GstFlowReturn
opus_dec_chain_parse_comments (GstOpusDec * dec, GstBuffer * buf)
{
  GstTagList *list;
  gchar *encoder = NULL;

  list = gst_tag_list_from_vorbiscomment_buffer (buf, NULL, 0, &encoder);

  if (!list) {
    GST_WARNING_OBJECT (dec, "couldn't decode comments");
    list = gst_tag_list_new ();
  }

  if (encoder) {
    gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
        GST_TAG_ENCODER, encoder, NULL);
  }

  gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
      GST_TAG_AUDIO_CODEC, "Opus", NULL);

  if (dec->header.bytes_per_packet > 0) {
    gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
        GST_TAG_BITRATE, (guint) dec->header.bytes_per_packet * 8, NULL);
  }

  GST_INFO_OBJECT (dec, "tags: %" GST_PTR_FORMAT, list);

  gst_element_found_tags_for_pad (GST_ELEMENT (dec), dec->srcpad, list);

  g_free (encoder);
  /* FIX: removed stray g_free (ver); 'ver' was never declared and would
   * not compile if this block were re-enabled. */

  return GST_FLOW_OK;
}
#endif
/* Decode one Opus packet, or synthesize concealment audio when @buf is NULL,
 * and push the resulting S16 audio buffer downstream.
 * @timestamp/@duration: stamps for the output; when invalid they are derived
 * from the granulepos and the frame duration respectively.
 * Returns the flow result of the downstream push (or an allocation/decode
 * error). */
static GstFlowReturn
opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buf,
    GstClockTime timestamp, GstClockTime duration)
{
  GstFlowReturn res = GST_FLOW_OK;
  gint size;
  guint8 *data;
  GstBuffer *outbuf;
  gint16 *out_data;
  int n;

  if (timestamp != -1) {
    dec->segment.last_stop = timestamp;
    dec->granulepos = -1;
  }

  if (dec->state == NULL) {
    GstCaps *caps;

    /* no header seen yet: create the decoder from the configured defaults */
    dec->state = opus_decoder_create (dec->sample_rate, dec->n_channels);

    /* set caps */
    caps = gst_caps_new_simple ("audio/x-raw-int",
        "rate", G_TYPE_INT, dec->sample_rate,
        "channels", G_TYPE_INT, dec->n_channels,
        "signed", G_TYPE_BOOLEAN, TRUE,
        "endianness", G_TYPE_INT, G_BYTE_ORDER,
        "width", G_TYPE_INT, 16, "depth", G_TYPE_INT, 16, NULL);

    GST_DEBUG_OBJECT (dec, "rate=%d channels=%d frame-size=%d",
        dec->sample_rate, dec->n_channels, dec->frame_size);

    if (!gst_pad_set_caps (dec->srcpad, caps))
      GST_ERROR ("nego failure");

    gst_caps_unref (caps);
  }

  if (buf) {
    data = GST_BUFFER_DATA (buf);
    size = GST_BUFFER_SIZE (buf);

    GST_DEBUG_OBJECT (dec, "received buffer of size %d", size);

    /* FIX: only inspect the packet when we actually have one; these calls
     * previously ran with data == NULL on the concealment path */
    GST_DEBUG ("bandwidth %d", opus_packet_get_bandwidth (data));
    GST_DEBUG ("samples_per_frame %d", opus_packet_get_samples_per_frame (data,
            48000));
    GST_DEBUG ("channels %d", opus_packet_get_nb_channels (data));
  } else {
    /* concealment data, pass NULL as the bits parameters */
    GST_DEBUG_OBJECT (dec, "creating concealment data");
    data = NULL;
    size = 0;
  }

  res = gst_pad_alloc_buffer_and_set_caps (dec->srcpad,
      GST_BUFFER_OFFSET_NONE, dec->frame_samples * dec->n_channels * 2,
      GST_PAD_CAPS (dec->srcpad), &outbuf);

  if (res != GST_FLOW_OK) {
    GST_DEBUG_OBJECT (dec, "buf alloc flow: %s", gst_flow_get_name (res));
    return res;
  }

  out_data = (gint16 *) GST_BUFFER_DATA (outbuf);

  GST_LOG_OBJECT (dec, "decoding frame");

  n = opus_decode (dec->state, data, size, out_data, dec->frame_samples, TRUE);
  if (n < 0) {
    /* FIX: release the output buffer on decode errors (was leaked) */
    gst_buffer_unref (outbuf);
    GST_ELEMENT_ERROR (dec, STREAM, DECODE, ("Decoding error: %d", n), (NULL));
    return GST_FLOW_ERROR;
  }

  if (!GST_CLOCK_TIME_IS_VALID (timestamp)) {
    timestamp = gst_util_uint64_scale_int (dec->granulepos - dec->frame_size,
        GST_SECOND, dec->sample_rate);
  }
  if (!GST_CLOCK_TIME_IS_VALID (duration))
    duration = dec->frame_duration;

  GST_DEBUG_OBJECT (dec, "timestamp=%" GST_TIME_FORMAT,
      GST_TIME_ARGS (timestamp));

  /* FIX: stamp the output from the computed values instead of reading
   * GST_BUFFER_TIMESTAMP/DURATION from @buf, which is NULL on the
   * concealment path and was dereferenced here */
  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
  GST_BUFFER_DURATION (outbuf) = duration;

  if (dec->discont) {
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT);
    dec->discont = 0;
  }

  dec->segment.last_stop += dec->frame_duration;

  GST_LOG_OBJECT (dec, "pushing buffer with ts=%" GST_TIME_FORMAT ", dur=%"
      GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)),
      GST_TIME_ARGS (dec->frame_duration));

  res = gst_pad_push (dec->srcpad, outbuf);

  if (res != GST_FLOW_OK)
    GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res));

  return res;
}
/* Chain function for the sink pad: track discontinuities, hand the packet
 * to the parser/decoder, and count it. */
static GstFlowReturn
opus_dec_chain (GstPad * pad, GstBuffer * buf)
{
  GstOpusDec *dec = GST_OPUS_DEC (gst_pad_get_parent (pad));
  GstFlowReturn flow;

  /* remember the discont so the next output buffer can carry the flag */
  if (GST_BUFFER_IS_DISCONT (buf))
    dec->discont = TRUE;

  flow = opus_dec_chain_parse_data (dec, buf, GST_BUFFER_TIMESTAMP (buf),
      GST_BUFFER_DURATION (buf));

  dec->packetno++;

  gst_buffer_unref (buf);
  gst_object_unref (dec);
  return flow;
}
/* State-change handler: chains up to the parent class, then resets the
 * decoder when leaving PAUSED for READY. */
static GstStateChangeReturn
opus_dec_change_state (GstElement * element, GstStateChange transition)
{
  GstStateChangeReturn ret;
  GstOpusDec *dec = GST_OPUS_DEC (element);

  /* nothing special to do on upward transitions (yet) */
  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
    case GST_STATE_CHANGE_READY_TO_PAUSED:
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
    default:
      break;
  }

  ret = parent_class->change_state (element, transition);
  /* NOTE(review): this returns early for any non-SUCCESS result, including
   * ASYNC/NO_PREROLL, which would skip the downward cleanup below — confirm
   * that is the intended behavior (most elements only bail on FAILURE). */
  if (ret != GST_STATE_CHANGE_SUCCESS)
    return ret;

  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      gst_opus_dec_reset (dec);
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
    default:
      break;
  }

  return ret;
}

77
ext/opus/gstopusdec.h Normal file
View file

@ -0,0 +1,77 @@
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
* Copyright (C) <2008> Sebastian Dröge <sebastian.droege@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_OPUS_DEC_H__
#define __GST_OPUS_DEC_H__
#include <gst/gst.h>
#include <opus/opus.h>
G_BEGIN_DECLS
#define GST_TYPE_OPUS_DEC \
(gst_opus_dec_get_type())
#define GST_OPUS_DEC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_OPUS_DEC,GstOpusDec))
#define GST_OPUS_DEC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_OPUS_DEC,GstOpusDecClass))
#define GST_IS_OPUS_DEC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_OPUS_DEC))
#define GST_IS_OPUS_DEC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_OPUS_DEC))
typedef struct _GstOpusDec GstOpusDec;
typedef struct _GstOpusDecClass GstOpusDecClass;
struct _GstOpusDec {
  GstElement element;

  /* pads */
  GstPad *sinkpad;
  GstPad *srcpad;

  OpusDecoder *state;           /* libopus decoder; NULL until created */

  int frame_samples;            /* samples per frame passed to opus_decode() */

  gint frame_size;
  GstClockTime frame_duration;  /* duration of one frame, derived from
                                 * frame_size and sample_rate */
  guint64 packetno;             /* number of packets processed so far */

  GstSegment segment;           /* STREAM LOCK */
  gint64 granulepos;            /* -1 = needs to be set from current time */
  gboolean discont;             /* TRUE after a discontinuous input buffer */

  GstBuffer *streamheader;      /* cached header buffer from the caps */
  GstBuffer *vorbiscomment;     /* cached comment buffer (currently unused) */
  GList *extra_headers;         /* GstBuffer list of additional headers */

  int sample_rate;
  int n_channels;
};
struct _GstOpusDecClass {
GstElementClass parent_class;
};
GType gst_opus_dec_get_type (void);
G_END_DECLS
#endif /* __GST_OPUS_DEC_H__ */

1198
ext/opus/gstopusenc.c Normal file

File diff suppressed because it is too large Load diff

105
ext/opus/gstopusenc.h Normal file
View file

@ -0,0 +1,105 @@
/* GStreamer Opus Encoder
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
* Copyright (C) <2008> Sebastian Dröge <sebastian.droege@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_OPUS_ENC_H__
#define __GST_OPUS_ENC_H__
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include <opus/opus.h>
G_BEGIN_DECLS
#define GST_TYPE_OPUS_ENC \
(gst_opus_enc_get_type())
#define GST_OPUS_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_OPUS_ENC,GstOpusEnc))
#define GST_OPUS_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_OPUS_ENC,GstOpusEncClass))
#define GST_IS_OPUS_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_OPUS_ENC))
#define GST_IS_OPUS_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_OPUS_ENC))
#define MAX_FRAME_SIZE 2000*2
#define MAX_FRAME_BYTES 2000
typedef struct _GstOpusEnc GstOpusEnc;
typedef struct _GstOpusEncClass GstOpusEncClass;
struct _GstOpusEnc {
GstElement element;
/* pads */
GstPad *sinkpad;
GstPad *srcpad;
//OpusHeader header;
//OpusMode *mode;
OpusEncoder *state;
GstAdapter *adapter;
/* properties */
gboolean audio_or_voip;
gint bitrate;
gint bandwidth;
gint frame_size;
gboolean cbr;
gboolean constrained_vbr;
gint complexity;
gboolean inband_fec;
gboolean dtx;
gint packet_loss_percentage;
int frame_samples;
gint n_channels;
gint sample_rate;
gboolean setup;
gboolean header_sent;
gboolean eos;
guint64 samples_in;
guint64 bytes_out;
guint64 frameno;
guint64 frameno_out;
GstClockTime start_ts;
GstClockTime next_ts;
guint64 granulepos_offset;
};
struct _GstOpusEncClass {
GstElementClass parent_class;
/* signals */
void (*frame_encoded) (GstElement *element);
};
GType gst_opus_enc_get_type (void);
G_END_DECLS
#endif /* __GST_OPUS_ENC_H__ */

View file

@ -101,7 +101,6 @@ gst_pes_filter_parse (GstPESFilter * filter)
GstFlowReturn ret; GstFlowReturn ret;
guint32 start_code; guint32 start_code;
gboolean STD_buffer_bound_scale;
guint16 STD_buffer_size_bound; guint16 STD_buffer_size_bound;
const guint8 *data; const guint8 *data;
gint avail, datalen; gint avail, datalen;
@ -213,7 +212,7 @@ gst_pes_filter_parse (GstPESFilter * filter)
if (datalen < 3) if (datalen < 3)
goto need_more_data; goto need_more_data;
STD_buffer_bound_scale = *data & 0x20; /* STD_buffer_bound_scale = *data & 0x20; */
STD_buffer_size_bound = ((guint16) (*data++ & 0x1F)) << 8; STD_buffer_size_bound = ((guint16) (*data++ & 0x1F)) << 8;
STD_buffer_size_bound |= *data++; STD_buffer_size_bound |= *data++;

View file

@ -424,7 +424,6 @@ ignore:
} }
static void rsn_stream_selector_dispose (GObject * object); static void rsn_stream_selector_dispose (GObject * object);
static void rsn_stream_selector_finalize (GObject * object);
static void rsn_stream_selector_init (RsnStreamSelector * sel); static void rsn_stream_selector_init (RsnStreamSelector * sel);
static void rsn_stream_selector_base_init (RsnStreamSelectorClass * klass); static void rsn_stream_selector_base_init (RsnStreamSelectorClass * klass);
@ -497,7 +496,6 @@ rsn_stream_selector_class_init (RsnStreamSelectorClass * klass)
parent_class = g_type_class_peek_parent (klass); parent_class = g_type_class_peek_parent (klass);
gobject_class->dispose = rsn_stream_selector_dispose; gobject_class->dispose = rsn_stream_selector_dispose;
gobject_class->finalize = rsn_stream_selector_finalize;
gobject_class->set_property = gobject_class->set_property =
GST_DEBUG_FUNCPTR (rsn_stream_selector_set_property); GST_DEBUG_FUNCPTR (rsn_stream_selector_set_property);
@ -545,16 +543,6 @@ rsn_stream_selector_dispose (GObject * object)
G_OBJECT_CLASS (parent_class)->dispose (object); G_OBJECT_CLASS (parent_class)->dispose (object);
} }
static void
rsn_stream_selector_finalize (GObject * object)
{
RsnStreamSelector *sel;
sel = RSN_STREAM_SELECTOR (object);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void static void
rsn_stream_selector_set_property (GObject * object, guint prop_id, rsn_stream_selector_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec) const GValue * value, GParamSpec * pspec)
@ -653,11 +641,8 @@ rsn_stream_selector_getcaps (GstPad * pad)
static gboolean static gboolean
rsn_stream_selector_is_active_sinkpad (RsnStreamSelector * sel, GstPad * pad) rsn_stream_selector_is_active_sinkpad (RsnStreamSelector * sel, GstPad * pad)
{ {
RsnSelectorPad *selpad;
gboolean res; gboolean res;
selpad = GST_SELECTOR_PAD_CAST (pad);
GST_OBJECT_LOCK (sel); GST_OBJECT_LOCK (sel);
res = (pad == sel->active_sinkpad); res = (pad == sel->active_sinkpad);
GST_OBJECT_UNLOCK (sel); GST_OBJECT_UNLOCK (sel);

View file

@ -1,8 +1,8 @@
plugin_LTLIBRARIES = libgstrtmp.la plugin_LTLIBRARIES = libgstrtmp.la
libgstrtmp_la_SOURCES = gstrtmpsrc.c libgstrtmp_la_SOURCES = gstrtmpsrc.c gstrtmpsink.c gstrtmp.c
noinst_HEADERS = gstrtmpsrc.h noinst_HEADERS = gstrtmpsrc.h gstrtmpsink.h
libgstrtmp_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(RTMP_CFLAGS) libgstrtmp_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(RTMP_CFLAGS)
libgstrtmp_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_LIBS) $(RTMP_LIBS) libgstrtmp_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_LIBS) $(RTMP_LIBS)
libgstrtmp_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) libgstrtmp_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)

54
ext/rtmp/gstrtmp.c Normal file
View file

@ -0,0 +1,54 @@
/* GStreamer
* Copyright (C) 1999,2000 Erik Walthinsen <omega@cse.ogi.edu>
* 2000 Wim Taymans <wtay@chello.be>
* 2002 Kristian Rietveld <kris@gtk.org>
* 2002,2003 Colin Walters <walters@gnu.org>
* 2001,2010 Bastien Nocera <hadess@hadess.net>
* 2010 Sebastian Dröge <sebastian.droege@collabora.co.uk>
* 2010 Jan Schmidt <thaytan@noraisin.net>
*
* rtmpsrc.c:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include "gstrtmpsrc.h"
#include "gstrtmpsink.h"
/* Register the rtmpsrc and rtmpsink elements; both registrations are
 * attempted and the result is TRUE only if both succeed. */
static gboolean
plugin_init (GstPlugin * plugin)
{
  gboolean ok;

  ok = gst_element_register (plugin, "rtmpsrc", GST_RANK_PRIMARY,
      GST_TYPE_RTMP_SRC);
  if (!gst_element_register (plugin, "rtmpsink", GST_RANK_PRIMARY,
          GST_TYPE_RTMP_SINK))
    ok = FALSE;

  return ok;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"rtmp",
"RTMP source and sink",
plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);

347
ext/rtmp/gstrtmpsink.c Normal file
View file

@ -0,0 +1,347 @@
/*
* GStreamer
* Copyright (C) 2010 Jan Schmidt <thaytan@noraisin.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/**
* SECTION:element-rtmpsink
*
* This element delivers data to a streaming server via RTMP. It uses
* librtmp, and supports any protocols/urls that librtmp supports.
* The URL/location can contain extra connection or session parameters
* for librtmp, such as 'flashver=version'. See the librtmp documentation
* for more detail
*
* <refsect2>
* <title>Example launch line</title>
* |[
* gst-launch -v videotestsrc ! ffenc_flv ! flvmux ! rtmpsink location='rtmp://localhost/path/to/stream live=1'
* ]| Encode a test video stream to FLV video format and stream it via RTMP.
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include "gstrtmpsink.h"
GST_DEBUG_CATEGORY_STATIC (gst_rtmp_sink_debug);
#define GST_CAT_DEFAULT gst_rtmp_sink_debug
/* Filter signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
{
PROP_0,
PROP_LOCATION
};
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-flv")
);
static void gst_rtmp_sink_uri_handler_init (gpointer g_iface,
gpointer iface_data);
static void gst_rtmp_sink_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_rtmp_sink_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static gboolean gst_rtmp_sink_stop (GstBaseSink * sink);
static gboolean gst_rtmp_sink_start (GstBaseSink * sink);
static GstFlowReturn gst_rtmp_sink_render (GstBaseSink * sink, GstBuffer * buf);
/* Type-system hook run by GST_BOILERPLATE_FULL: adds the GstURIHandler
 * interface and initializes the element's debug category. */
static void
_do_init (GType gtype)
{
  static const GInterfaceInfo urihandler_info = {
    gst_rtmp_sink_uri_handler_init, NULL, NULL
  };

  g_type_add_interface_static (gtype, GST_TYPE_URI_HANDLER, &urihandler_info);

  GST_DEBUG_CATEGORY_INIT (gst_rtmp_sink_debug, "rtmpsink", 0,
      "RTMP server element");
}
GST_BOILERPLATE_FULL (GstRTMPSink, gst_rtmp_sink, GstBaseSink,
GST_TYPE_BASE_SINK, _do_init);
/* base_init: describe the element and register its FLV sink pad template. */
static void
gst_rtmp_sink_base_init (gpointer klass)
{
  GstElementClass *eclass = GST_ELEMENT_CLASS (klass);

  gst_element_class_set_details_simple (eclass,
      "RTMP output sink",
      "Sink/Network", "Sends FLV content to a server via RTMP",
      "Jan Schmidt <thaytan@noraisin.net>");

  gst_element_class_add_pad_template (eclass,
      gst_static_pad_template_get (&sink_template));
}
/* initialize the plugin's class */
/* class_init: wire up property handling, the GstBaseSink virtual methods,
 * and the "location" property (PROP_LOCATION). */
static void
gst_rtmp_sink_class_init (GstRTMPSinkClass * klass)
{
  GObjectClass *gobject_class;
  GstBaseSinkClass *gstbasesink_class = (GstBaseSinkClass *) klass;

  gobject_class = (GObjectClass *) klass;
  gobject_class->set_property = gst_rtmp_sink_set_property;
  gobject_class->get_property = gst_rtmp_sink_get_property;

  gstbasesink_class->start = GST_DEBUG_FUNCPTR (gst_rtmp_sink_start);
  gstbasesink_class->stop = GST_DEBUG_FUNCPTR (gst_rtmp_sink_stop);
  gstbasesink_class->render = GST_DEBUG_FUNCPTR (gst_rtmp_sink_render);

  /* installs the standard read/write "location" string property */
  gst_element_class_install_std_props (GST_ELEMENT_CLASS (klass),
      "location", PROP_LOCATION, G_PARAM_READWRITE, NULL);
}
/* initialize the new element
* initialize instance structure
*/
static void
gst_rtmp_sink_init (GstRTMPSink * sink, GstRTMPSinkClass * klass)
{
  /* nothing to do: all members are zero-initialized by the GObject
   * allocator and set up lazily in start()/set_uri() */
}
/* GstBaseSink::start — allocate and connect the librtmp session.
 * Requires the "location" URI to have been set. On any failure the RTMP
 * handle and the URI copy are released again.
 * Returns FALSE (posting an element error) when no URI is set, the URL
 * cannot be parsed, or the connection cannot be established. */
static gboolean
gst_rtmp_sink_start (GstBaseSink * basesink)
{
  GstRTMPSink *sink = GST_RTMP_SINK (basesink);

  if (!sink->uri) {
    GST_ELEMENT_ERROR (sink, RESOURCE, OPEN_WRITE,
        ("Please set URI for RTMP output"), ("No URI set before starting"));
    return FALSE;
  }

  /* keep a private copy: RTMP_SetupURL keeps a reference to the string
   * for the lifetime of the session */
  sink->rtmp_uri = g_strdup (sink->uri);
  sink->rtmp = RTMP_Alloc ();
  RTMP_Init (sink->rtmp);
  if (!RTMP_SetupURL (sink->rtmp, sink->rtmp_uri)) {
    GST_ELEMENT_ERROR (sink, RESOURCE, OPEN_WRITE, (NULL),
        ("Failed to setup URL '%s'", sink->uri));
    RTMP_Free (sink->rtmp);
    sink->rtmp = NULL;
    g_free (sink->rtmp_uri);
    sink->rtmp_uri = NULL;
    return FALSE;
  }
  GST_DEBUG_OBJECT (sink, "Created RTMP object");

  /* Mark this as an output connection */
  RTMP_EnableWrite (sink->rtmp);

  /* open the connection */
  if (!RTMP_IsConnected (sink->rtmp)) {
    if (!RTMP_Connect (sink->rtmp, NULL) || !RTMP_ConnectStream (sink->rtmp, 0)) {
      GST_ELEMENT_ERROR (sink, RESOURCE, OPEN_WRITE, (NULL),
          ("Could not connect to RTMP stream \"%s\" for writing", sink->uri));
      RTMP_Free (sink->rtmp);
      sink->rtmp = NULL;
      g_free (sink->rtmp_uri);
      sink->rtmp_uri = NULL;
      return FALSE;
    }
    GST_DEBUG_OBJECT (sink, "Opened connection to %s", sink->rtmp_uri);
  }

  /* the first rendered buffer (FLV header) is cached, see render() */
  sink->first = TRUE;

  return TRUE;
}
/* GstBaseSink::stop — drop the cached first buffer and tear down the
 * librtmp session. Always returns TRUE. */
static gboolean
gst_rtmp_sink_stop (GstBaseSink * basesink)
{
  GstRTMPSink *sink = GST_RTMP_SINK (basesink);

  gst_buffer_replace (&sink->cache, NULL);

  if (sink->rtmp) {
    RTMP_Close (sink->rtmp);
    RTMP_Free (sink->rtmp);
    sink->rtmp = NULL;
  }

  /* g_free() is NULL-safe, so the previous 'if (sink->rtmp_uri)' guard
   * was redundant */
  g_free (sink->rtmp_uri);
  sink->rtmp_uri = NULL;

  return TRUE;
}
/* GstBaseSink::render — send one FLV buffer to the server.
 * The very first buffer (assumed to be the FLV header) is cached and
 * concatenated with the second buffer before writing, since RTMP_Write
 * needs header and first packet together. */
static GstFlowReturn
gst_rtmp_sink_render (GstBaseSink * bsink, GstBuffer * buf)
{
  GstRTMPSink *sink = GST_RTMP_SINK (bsink);
  GstBuffer *reffed_buf = NULL;

  if (sink->first) {
    /* FIXME: Parse the first buffer and see if it contains a header plus a packet instead
     * of just assuming it's only the header */
    GST_LOG_OBJECT (sink, "Caching first buffer of size %d for concatenation",
        GST_BUFFER_SIZE (buf));
    gst_buffer_replace (&sink->cache, buf);
    sink->first = FALSE;
    return GST_FLOW_OK;
  }

  if (sink->cache) {
    GST_LOG_OBJECT (sink, "Joining 2nd buffer of size %d to cached buf",
        GST_BUFFER_SIZE (buf));
    /* gst_buffer_join consumes both references; keep one extra ref on the
     * joined result so we can unref it after writing */
    gst_buffer_ref (buf);
    reffed_buf = buf = gst_buffer_join (sink->cache, buf);
    sink->cache = NULL;
  }

  GST_LOG_OBJECT (sink, "Sending %d bytes to RTMP server",
      GST_BUFFER_SIZE (buf));

  if (!RTMP_Write (sink->rtmp,
          (char *) GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf))) {
    GST_ELEMENT_ERROR (sink, RESOURCE, WRITE, (NULL), ("Failed to write data"));
    if (reffed_buf)
      gst_buffer_unref (reffed_buf);
    return GST_FLOW_ERROR;
  }

  if (reffed_buf)
    gst_buffer_unref (reffed_buf);

  return GST_FLOW_OK;
}
/*
* URI interface support.
*/
/* GstURIHandler: this element consumes data, so it is a sink-type handler. */
static GstURIType
gst_rtmp_sink_uri_get_type (void)
{
  return GST_URI_SINK;
}
/* GstURIHandler: list of URI schemes understood by librtmp. */
static gchar **
gst_rtmp_sink_uri_get_protocols (void)
{
  static gchar *schemes[] = {
    (char *) "rtmp", (char *) "rtmpt", (char *) "rtmps", (char *) "rtmpe",
    (char *) "rtmfp", (char *) "rtmpte", (char *) "rtmpts", NULL
  };

  return schemes;
}
/* GstURIHandler: return the currently configured URI (may be NULL). */
static const gchar *
gst_rtmp_sink_uri_get_uri (GstURIHandler * handler)
{
  return GST_RTMP_SINK (handler)->uri;
}
/* GstURIHandler: validate and store a new URI.
 * Rejected when the element is already PAUSED or above. A NULL @uri clears
 * the stored location. Validation is parse-only: RTMP_ParseURL must succeed
 * and yield a host and a playpath; no connection is attempted here. */
static gboolean
gst_rtmp_sink_uri_set_uri (GstURIHandler * handler, const gchar * uri)
{
  GstRTMPSink *sink = GST_RTMP_SINK (handler);

  /* refuse to change the target while running */
  if (GST_STATE (sink) >= GST_STATE_PAUSED)
    return FALSE;

  g_free (sink->uri);
  sink->uri = NULL;

  if (uri != NULL) {
    int protocol;
    AVal host;
    unsigned int port;
    AVal playpath, app;

    /* NOTE(review): the parsed AVals are only used for validation here;
     * presumably they point into the input string and need no freeing —
     * confirm against the librtmp version in use */
    if (!RTMP_ParseURL (uri, &protocol, &host, &port, &playpath, &app) ||
        !host.av_len || !playpath.av_len) {
      GST_ELEMENT_ERROR (sink, RESOURCE, OPEN_WRITE,
          ("Failed to parse URI %s", uri), (NULL));
      return FALSE;
    }
    sink->uri = g_strdup (uri);
  }

  GST_DEBUG_OBJECT (sink, "Changed URI to %s", GST_STR_NULL (uri));

  return TRUE;
}
/* Fill in the GstURIHandler interface vtable. */
static void
gst_rtmp_sink_uri_handler_init (gpointer g_iface, gpointer iface_data)
{
  GstURIHandlerInterface *iface = (GstURIHandlerInterface *) g_iface;

  iface->get_type = gst_rtmp_sink_uri_get_type;
  iface->get_protocols = gst_rtmp_sink_uri_get_protocols;
  iface->get_uri = gst_rtmp_sink_uri_get_uri;
  iface->set_uri = gst_rtmp_sink_uri_set_uri;
}
/* GObject property setter: only "location" is supported, routed through
 * the URI-handler setter so validation applies. */
static void
gst_rtmp_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstRTMPSink *self = GST_RTMP_SINK (object);

  if (prop_id == PROP_LOCATION) {
    gst_rtmp_sink_uri_set_uri (GST_URI_HANDLER (self),
        g_value_get_string (value));
  } else {
    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
  }
}
/* GObject property getter.
 * Only "location" is readable; it mirrors the URI stored by
 * gst_rtmp_sink_uri_set_uri(). */
static void
gst_rtmp_sink_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstRTMPSink *self = GST_RTMP_SINK (object);

  switch (prop_id) {
    case PROP_LOCATION:
      /* g_value_set_string() copies, so self->uri stays owned by us. */
      g_value_set_string (value, self->uri);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

68
ext/rtmp/gstrtmpsink.h Normal file
View file

@ -0,0 +1,68 @@
/*
* GStreamer
* Copyright (C) 2010 Jan Schmidt <thaytan@noraisin.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_RTMP_SINK_H__
#define __GST_RTMP_SINK_H__

#include <gst/gst.h>
#include <gst/base/gstbasesink.h>

#include <librtmp/rtmp.h>
#include <librtmp/log.h>
#include <librtmp/amf.h>

G_BEGIN_DECLS

/* Standard GObject type/cast/check boilerplate for GstRTMPSink. */
#define GST_TYPE_RTMP_SINK \
  (gst_rtmp_sink_get_type())
#define GST_RTMP_SINK(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTMP_SINK,GstRTMPSink))
#define GST_RTMP_SINK_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTMP_SINK,GstRTMPSinkClass))
#define GST_IS_RTMP_SINK(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTMP_SINK))
#define GST_IS_RTMP_SINK_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTMP_SINK))

typedef struct _GstRTMPSink GstRTMPSink;
typedef struct _GstRTMPSinkClass GstRTMPSinkClass;

/* An RTMP sink element, derived from GstBaseSink. */
struct _GstRTMPSink {
  GstBaseSink parent;

  /* < private > */
  gchar *uri;                   /* URI as set via the location property / URI handler */
  RTMP *rtmp;                   /* librtmp session handle */
  gchar *rtmp_uri;              /* copy of url for librtmp */
  GstBuffer *cache;             /* Cached buffer */
  gboolean first;               /* presumably TRUE until the first buffer is rendered — confirm in gstrtmpsink.c */
};

struct _GstRTMPSinkClass {
  GstBaseSinkClass parent_class;
};

GType gst_rtmp_sink_get_type (void);

G_END_DECLS

#endif /* __GST_RTMP_SINK_H__ */

View file

@ -98,6 +98,8 @@ _do_init (GType gtype)
}; };
g_type_add_interface_static (gtype, GST_TYPE_URI_HANDLER, &urihandler_info); g_type_add_interface_static (gtype, GST_TYPE_URI_HANDLER, &urihandler_info);
GST_DEBUG_CATEGORY_INIT (rtmpsrc_debug, "rtmpsrc", 0, "RTMP Source");
} }
GST_BOILERPLATE_FULL (GstRTMPSrc, gst_rtmp_src, GstPushSrc, GST_TYPE_PUSH_SRC, GST_BOILERPLATE_FULL (GstRTMPSrc, gst_rtmp_src, GstPushSrc, GST_TYPE_PUSH_SRC,
@ -581,18 +583,3 @@ gst_rtmp_src_stop (GstBaseSrc * basesrc)
return TRUE; return TRUE;
} }
static gboolean
plugin_init (GstPlugin * plugin)
{
GST_DEBUG_CATEGORY_INIT (rtmpsrc_debug, "rtmpsrc", 0, "RTMP Source");
return gst_element_register (plugin, "rtmpsrc", GST_RANK_PRIMARY,
GST_TYPE_RTMP_SRC);
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"rtmpsrc",
"RTMP source",
plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);

View file

@ -78,10 +78,6 @@ enum
}; };
static void gst_schro_dec_finalize (GObject * object); static void gst_schro_dec_finalize (GObject * object);
static void gst_schro_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_schro_dec_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static gboolean gst_schro_dec_sink_query (GstPad * pad, GstQuery * query); static gboolean gst_schro_dec_sink_query (GstPad * pad, GstQuery * query);
@ -137,8 +133,6 @@ gst_schro_dec_class_init (GstSchroDecClass * klass)
gobject_class = G_OBJECT_CLASS (klass); gobject_class = G_OBJECT_CLASS (klass);
base_video_decoder_class = GST_BASE_VIDEO_DECODER_CLASS (klass); base_video_decoder_class = GST_BASE_VIDEO_DECODER_CLASS (klass);
gobject_class->set_property = gst_schro_dec_set_property;
gobject_class->get_property = gst_schro_dec_get_property;
gobject_class->finalize = gst_schro_dec_finalize; gobject_class->finalize = gst_schro_dec_finalize;
base_video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_schro_dec_start); base_video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_schro_dec_start);
@ -172,21 +166,16 @@ static gint64
granulepos_to_frame (gint64 granulepos) granulepos_to_frame (gint64 granulepos)
{ {
guint64 pt; guint64 pt;
int dist_h;
int dist_l;
int dist;
int delay;
guint64 dt;
if (granulepos == -1) if (granulepos == -1)
return -1; return -1;
pt = ((granulepos >> 22) + (granulepos & OGG_DIRAC_GRANULE_LOW_MASK)) >> 9; pt = ((granulepos >> 22) + (granulepos & OGG_DIRAC_GRANULE_LOW_MASK)) >> 9;
dist_h = (granulepos >> 22) & 0xff; /* dist_h = (granulepos >> 22) & 0xff;
dist_l = granulepos & 0xff; * dist_l = granulepos & 0xff;
dist = (dist_h << 8) | dist_l; * dist = (dist_h << 8) | dist_l;
delay = (granulepos >> 9) & 0x1fff; * delay = (granulepos >> 9) & 0x1fff;
dt = pt - delay; * dt = pt - delay; */
return pt >> 1; return pt >> 1;
} }
@ -308,38 +297,6 @@ gst_schro_dec_finalize (GObject * object)
G_OBJECT_CLASS (parent_class)->finalize (object); G_OBJECT_CLASS (parent_class)->finalize (object);
} }
static void
gst_schro_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstSchroDec *src;
g_return_if_fail (GST_IS_SCHRO_DEC (object));
src = GST_SCHRO_DEC (object);
GST_DEBUG ("gst_schro_dec_set_property");
switch (prop_id) {
default:
break;
}
}
static void
gst_schro_dec_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstSchroDec *src;
g_return_if_fail (GST_IS_SCHRO_DEC (object));
src = GST_SCHRO_DEC (object);
switch (prop_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void static void
parse_sequence_header (GstSchroDec * schro_dec, guint8 * data, int size) parse_sequence_header (GstSchroDec * schro_dec, guint8 * data, int size)
{ {
@ -642,7 +599,6 @@ gst_schro_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
GstVideoFrame * frame) GstVideoFrame * frame)
{ {
GstSchroDec *schro_dec; GstSchroDec *schro_dec;
int schro_ret;
SchroBuffer *input_buffer; SchroBuffer *input_buffer;
schro_dec = GST_SCHRO_DEC (base_video_decoder); schro_dec = GST_SCHRO_DEC (base_video_decoder);
@ -654,7 +610,7 @@ gst_schro_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
input_buffer->tag = schro_tag_new (frame, NULL); input_buffer->tag = schro_tag_new (frame, NULL);
schro_ret = schro_decoder_autoparse_push (schro_dec->decoder, input_buffer); schro_decoder_autoparse_push (schro_dec->decoder, input_buffer);
return gst_schro_dec_process (schro_dec, FALSE); return gst_schro_dec_process (schro_dec, FALSE);
} }

View file

@ -24,6 +24,7 @@
#include <gst/gst.h> #include <gst/gst.h>
#include <gst/video/video.h> #include <gst/video/video.h>
#include <gst/video/gstbasevideoencoder.h> #include <gst/video/gstbasevideoencoder.h>
#include <gst/video/gstbasevideoutils.h>
#include <string.h> #include <string.h>
#include <schroedinger/schro.h> #include <schroedinger/schro.h>
@ -107,7 +108,8 @@ static gboolean gst_schro_enc_set_format (GstBaseVideoEncoder *
base_video_encoder, GstVideoState * state); base_video_encoder, GstVideoState * state);
static gboolean gst_schro_enc_start (GstBaseVideoEncoder * base_video_encoder); static gboolean gst_schro_enc_start (GstBaseVideoEncoder * base_video_encoder);
static gboolean gst_schro_enc_stop (GstBaseVideoEncoder * base_video_encoder); static gboolean gst_schro_enc_stop (GstBaseVideoEncoder * base_video_encoder);
static gboolean gst_schro_enc_finish (GstBaseVideoEncoder * base_video_encoder); static GstFlowReturn gst_schro_enc_finish (GstBaseVideoEncoder *
base_video_encoder);
static GstFlowReturn gst_schro_enc_handle_frame (GstBaseVideoEncoder * static GstFlowReturn gst_schro_enc_handle_frame (GstBaseVideoEncoder *
base_video_encoder, GstVideoFrame * frame); base_video_encoder, GstVideoFrame * frame);
static GstFlowReturn gst_schro_enc_shape_output (GstBaseVideoEncoder * static GstFlowReturn gst_schro_enc_shape_output (GstBaseVideoEncoder *
@ -439,7 +441,7 @@ gst_schro_enc_stop (GstBaseVideoEncoder * base_video_encoder)
return TRUE; return TRUE;
} }
static gboolean static GstFlowReturn
gst_schro_enc_finish (GstBaseVideoEncoder * base_video_encoder) gst_schro_enc_finish (GstBaseVideoEncoder * base_video_encoder)
{ {
GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder); GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder);
@ -449,7 +451,7 @@ gst_schro_enc_finish (GstBaseVideoEncoder * base_video_encoder)
schro_encoder_end_of_stream (schro_enc->encoder); schro_encoder_end_of_stream (schro_enc->encoder);
gst_schro_enc_process (schro_enc); gst_schro_enc_process (schro_enc);
return TRUE; return GST_FLOW_OK;
} }
static GstFlowReturn static GstFlowReturn
@ -612,7 +614,6 @@ gst_schro_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame) GstVideoFrame * frame)
{ {
GstSchroEnc *schro_enc; GstSchroEnc *schro_enc;
int dpn;
int delay; int delay;
int dist; int dist;
int pt; int pt;
@ -623,8 +624,6 @@ gst_schro_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder,
schro_enc = GST_SCHRO_ENC (base_video_encoder); schro_enc = GST_SCHRO_ENC (base_video_encoder);
dpn = frame->decode_frame_number;
pt = frame->presentation_frame_number * 2 + schro_enc->granule_offset; pt = frame->presentation_frame_number * 2 + schro_enc->granule_offset;
dt = frame->decode_frame_number * 2 + schro_enc->granule_offset; dt = frame->decode_frame_number * 2 + schro_enc->granule_offset;
delay = pt - dt; delay = pt - dt;

View file

@ -200,7 +200,10 @@ gst_sf_src_create (GstBaseSrc * bsrc, guint64 offset, guint length,
{ {
GstSFSrc *this; GstSFSrc *this;
GstBuffer *buf; GstBuffer *buf;
/* FIXME discont is set but not used */
#if 0
gboolean discont = FALSE; gboolean discont = FALSE;
#endif
sf_count_t bytes_read; sf_count_t bytes_read;
this = GST_SF_SRC (bsrc); this = GST_SF_SRC (bsrc);
@ -221,7 +224,9 @@ gst_sf_src_create (GstBaseSrc * bsrc, guint64 offset, guint length,
goto seek_failed; goto seek_failed;
this->offset = offset; this->offset = offset;
#if 0
discont = TRUE; discont = TRUE;
#endif
} }
buf = gst_buffer_new_and_alloc (length); buf = gst_buffer_new_and_alloc (length);

View file

@ -108,12 +108,9 @@ gst_timidity_base_init (gpointer gclass)
static void static void
gst_timidity_class_init (GstTimidityClass * klass) gst_timidity_class_init (GstTimidityClass * klass)
{ {
GObjectClass *gobject_class;
GstElementClass *gstelement_class; GstElementClass *gstelement_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass; gstelement_class = (GstElementClass *) klass;
gstelement_class->change_state = gst_timidity_change_state; gstelement_class->change_state = gst_timidity_change_state;
} }

View file

@ -443,7 +443,10 @@ gst_wildmidi_do_seek (GstWildmidi * wildmidi, GstEvent * event)
GstSeekFlags flags; GstSeekFlags flags;
GstSeekType start_type, stop_type; GstSeekType start_type, stop_type;
gint64 start, stop; gint64 start, stop;
gboolean flush, update, accurate; gboolean flush, update;
#ifdef HAVE_WILDMIDI_0_2_2
gboolean accurate;
#endif
gboolean res; gboolean res;
unsigned long int sample; unsigned long int sample;
GstSegment *segment; GstSegment *segment;
@ -472,7 +475,9 @@ gst_wildmidi_do_seek (GstWildmidi * wildmidi, GstEvent * event)
return res; return res;
flush = ((flags & GST_SEEK_FLAG_FLUSH) == GST_SEEK_FLAG_FLUSH); flush = ((flags & GST_SEEK_FLAG_FLUSH) == GST_SEEK_FLAG_FLUSH);
#ifdef HAVE_WILDMIDI_0_2_2
accurate = ((flags & GST_SEEK_FLAG_ACCURATE) == GST_SEEK_FLAG_ACCURATE); accurate = ((flags & GST_SEEK_FLAG_ACCURATE) == GST_SEEK_FLAG_ACCURATE);
#endif
if (flush) { if (flush) {
GST_DEBUG ("performing flush"); GST_DEBUG ("performing flush");

View file

@ -405,8 +405,10 @@ gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder, GstVideoFrame * frame)
state->width = stream_info.w; state->width = stream_info.w;
state->height = stream_info.h; state->height = stream_info.h;
state->format = GST_VIDEO_FORMAT_I420; state->format = GST_VIDEO_FORMAT_I420;
if (state->par_n == 0 || state->par_d == 0) {
state->par_n = 1; state->par_n = 1;
state->par_d = 1; state->par_d = 1;
}
gst_vp8_dec_send_tags (dec); gst_vp8_dec_send_tags (dec);
gst_base_video_decoder_set_src_caps (decoder); gst_base_video_decoder_set_src_caps (decoder);

View file

@ -65,6 +65,24 @@ typedef struct
GList *invisible; GList *invisible;
} GstVP8EncCoderHook; } GstVP8EncCoderHook;
static void
_gst_mini_object_unref0 (GstMiniObject * obj)
{
if (obj)
gst_mini_object_unref (obj);
}
static void
gst_vp8_enc_coder_hook_free (GstVP8EncCoderHook * hook)
{
if (hook->image)
g_slice_free (vpx_image_t, hook->image);
g_list_foreach (hook->invisible, (GFunc) _gst_mini_object_unref0, NULL);
g_list_free (hook->invisible);
g_slice_free (GstVP8EncCoderHook, hook);
}
#define DEFAULT_BITRATE 0 #define DEFAULT_BITRATE 0
#define DEFAULT_MODE VPX_VBR #define DEFAULT_MODE VPX_VBR
#define DEFAULT_MIN_QUANTIZER 0 #define DEFAULT_MIN_QUANTIZER 0
@ -283,7 +301,7 @@ gst_vp8_enc_class_init (GstVP8EncClass * klass)
g_object_class_install_property (gobject_class, PROP_SPEED, g_object_class_install_property (gobject_class, PROP_SPEED,
g_param_spec_int ("speed", "Speed", g_param_spec_int ("speed", "Speed",
"Speed", "Speed",
0, 2, DEFAULT_SPEED, 0, 7, DEFAULT_SPEED,
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)));
g_object_class_install_property (gobject_class, PROP_THREADS, g_object_class_install_property (gobject_class, PROP_THREADS,
@ -586,7 +604,9 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
return FALSE; return FALSE;
} }
status = vpx_codec_control (&encoder->encoder, VP8E_SET_CPUUSED, 0); /* FIXME move this to a set_speed() function */
status = vpx_codec_control (&encoder->encoder, VP8E_SET_CPUUSED,
(encoder->speed == 0) ? 0 : (encoder->speed - 1));
if (status != VPX_CODEC_OK) { if (status != VPX_CODEC_OK) {
GST_WARNING_OBJECT (encoder, "Failed to set VP8E_SET_CPUUSED to 0: %s", GST_WARNING_OBJECT (encoder, "Failed to set VP8E_SET_CPUUSED to 0: %s",
gst_vpx_error_name (status)); gst_vpx_error_name (status));
@ -779,7 +799,7 @@ gst_vp8_enc_process (GstVP8Enc * encoder)
return ret; return ret;
} }
static gboolean static GstFlowReturn
gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder) gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder)
{ {
GstVP8Enc *encoder; GstVP8Enc *encoder;
@ -796,7 +816,7 @@ gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder)
if (status != 0) { if (status != 0) {
GST_ERROR_OBJECT (encoder, "encode returned %d %s", status, GST_ERROR_OBJECT (encoder, "encode returned %d %s", status,
gst_vpx_error_name (status)); gst_vpx_error_name (status));
return FALSE; return GST_FLOW_ERROR;
} }
/* dispatch remaining frames */ /* dispatch remaining frames */
@ -815,7 +835,7 @@ gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder)
} }
} }
return TRUE; return GST_FLOW_OK;
} }
static vpx_image_t * static vpx_image_t *
@ -823,9 +843,6 @@ gst_vp8_enc_buffer_to_image (GstVP8Enc * enc, GstBuffer * buffer)
{ {
vpx_image_t *image = g_slice_new (vpx_image_t); vpx_image_t *image = g_slice_new (vpx_image_t);
guint8 *data = GST_BUFFER_DATA (buffer); guint8 *data = GST_BUFFER_DATA (buffer);
const GstVideoState *state;
state = gst_base_video_encoder_get_state (GST_BASE_VIDEO_ENCODER (enc));
memcpy (image, &enc->image, sizeof (*image)); memcpy (image, &enc->image, sizeof (*image));
@ -837,12 +854,6 @@ gst_vp8_enc_buffer_to_image (GstVP8Enc * enc, GstBuffer * buffer)
return image; return image;
} }
static const int speed_table[] = {
VPX_DL_BEST_QUALITY,
VPX_DL_GOOD_QUALITY,
VPX_DL_REALTIME,
};
static GstFlowReturn static GstFlowReturn
gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder, gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame) GstVideoFrame * frame)
@ -853,6 +864,7 @@ gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
int flags = 0; int flags = 0;
vpx_image_t *image; vpx_image_t *image;
GstVP8EncCoderHook *hook; GstVP8EncCoderHook *hook;
int quality;
GST_DEBUG_OBJECT (base_video_encoder, "handle_frame"); GST_DEBUG_OBJECT (base_video_encoder, "handle_frame");
@ -869,13 +881,17 @@ gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
hook = g_slice_new0 (GstVP8EncCoderHook); hook = g_slice_new0 (GstVP8EncCoderHook);
hook->image = image; hook->image = image;
frame->coder_hook = hook; frame->coder_hook = hook;
frame->coder_hook_destroy_notify =
(GDestroyNotify) gst_vp8_enc_coder_hook_free;
if (frame->force_keyframe) { if (frame->force_keyframe) {
flags |= VPX_EFLAG_FORCE_KF; flags |= VPX_EFLAG_FORCE_KF;
} }
quality = (encoder->speed == 0) ? VPX_DL_BEST_QUALITY : VPX_DL_GOOD_QUALITY;
status = vpx_codec_encode (&encoder->encoder, image, status = vpx_codec_encode (&encoder->encoder, image,
encoder->n_frames, 1, flags, speed_table[encoder->speed]); encoder->n_frames, 1, flags, quality);
if (status != 0) { if (status != 0) {
GST_ELEMENT_ERROR (encoder, LIBRARY, ENCODE, GST_ELEMENT_ERROR (encoder, LIBRARY, ENCODE,
("Failed to encode frame"), ("%s", gst_vpx_error_name (status))); ("Failed to encode frame"), ("%s", gst_vpx_error_name (status)));
@ -900,13 +916,6 @@ _to_granulepos (guint64 frame_end_number, guint inv_count, guint keyframe_dist)
return granulepos; return granulepos;
} }
static void
_gst_mini_object_unref0 (GstMiniObject * obj)
{
if (obj)
gst_mini_object_unref (obj);
}
static GstFlowReturn static GstFlowReturn
gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder, gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame) GstVideoFrame * frame)
@ -939,6 +948,8 @@ gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
encoder->keyframe_distance++; encoder->keyframe_distance++;
} }
GST_BUFFER_TIMESTAMP (buf) = GST_BUFFER_TIMESTAMP (frame->src_buffer);
GST_BUFFER_DURATION (buf) = 0;
GST_BUFFER_OFFSET_END (buf) = GST_BUFFER_OFFSET_END (buf) =
_to_granulepos (frame->presentation_frame_number + 1, _to_granulepos (frame->presentation_frame_number + 1,
inv_count, encoder->keyframe_distance); inv_count, encoder->keyframe_distance);
@ -980,13 +991,6 @@ gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
} }
done: done:
if (hook) {
g_list_foreach (hook->invisible, (GFunc) _gst_mini_object_unref0, NULL);
g_list_free (hook->invisible);
g_slice_free (GstVP8EncCoderHook, hook);
frame->coder_hook = NULL;
}
return ret; return ret;
} }

View file

@ -277,7 +277,7 @@ gst_zbar_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
{ {
GstZBar *zbar = GST_ZBAR (base); GstZBar *zbar = GST_ZBAR (base);
guint8 *data; guint8 *data;
guint size, rowstride; guint rowstride;
zbar_image_t *image; zbar_image_t *image;
const zbar_symbol_t *symbol; const zbar_symbol_t *symbol;
int n; int n;
@ -286,7 +286,6 @@ gst_zbar_transform_ip (GstBaseTransform * base, GstBuffer * outbuf)
goto done; goto done;
data = GST_BUFFER_DATA (outbuf); data = GST_BUFFER_DATA (outbuf);
size = GST_BUFFER_SIZE (outbuf);
image = zbar_image_create (); image = zbar_image_create ();

View file

@ -222,24 +222,6 @@ gst_base_camera_src_setup_preview (GstBaseCameraSrc * self,
bclass->set_preview (self, preview_caps); bclass->set_preview (self, preview_caps);
} }
/**
* gst_base_camera_src_get_allowed_input_caps:
* @self: the camerasrc bin
*
* Retrieve caps from videosrc describing formats it supports
*
* Returns: caps object from videosrc
*/
GstCaps *
gst_base_camera_src_get_allowed_input_caps (GstBaseCameraSrc * self)
{
GstBaseCameraSrcClass *bclass = GST_BASE_CAMERA_SRC_GET_CLASS (self);
g_return_val_if_fail (bclass->get_allowed_input_caps, NULL);
return bclass->get_allowed_input_caps (self);
}
static void static void
gst_base_camera_src_start_capture (GstBaseCameraSrc * src) gst_base_camera_src_start_capture (GstBaseCameraSrc * src)
{ {
@ -476,6 +458,8 @@ gst_base_camera_src_change_state (GstElement * element,
case GST_STATE_CHANGE_READY_TO_PAUSED: case GST_STATE_CHANGE_READY_TO_PAUSED:
if (!setup_pipeline (self)) if (!setup_pipeline (self))
return GST_STATE_CHANGE_FAILURE; return GST_STATE_CHANGE_FAILURE;
/* without this the preview pipeline will not post buffer
* messages on the pipeline */
gst_element_set_state (self->preview_pipeline->pipeline, gst_element_set_state (self->preview_pipeline->pipeline,
GST_STATE_PLAYING); GST_STATE_PLAYING);
break; break;
@ -589,26 +573,19 @@ gst_base_camera_src_class_init (GstBaseCameraSrcClass * klass)
/* Signals */ /* Signals */
basecamerasrc_signals[START_CAPTURE_SIGNAL] = basecamerasrc_signals[START_CAPTURE_SIGNAL] =
g_signal_new ("start-capture", g_signal_new_class_handler ("start-capture",
G_TYPE_FROM_CLASS (klass), G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
G_STRUCT_OFFSET (GstBaseCameraSrcClass, private_start_capture), G_CALLBACK (gst_base_camera_src_start_capture),
NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0); NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
basecamerasrc_signals[STOP_CAPTURE_SIGNAL] = basecamerasrc_signals[STOP_CAPTURE_SIGNAL] =
g_signal_new ("stop-capture", g_signal_new_class_handler ("stop-capture",
G_TYPE_FROM_CLASS (klass), G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
G_STRUCT_OFFSET (GstBaseCameraSrcClass, private_stop_capture), G_CALLBACK (gst_base_camera_src_stop_capture),
NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0); NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
/* TODO these should be moved to a private struct
* that is allocated sequentially to the main struct as said at:
* http://library.gnome.org/devel/gobject/unstable/gobject-Type-Information.html#g-type-add-class-private
*/
klass->private_start_capture = gst_base_camera_src_start_capture;
klass->private_stop_capture = gst_base_camera_src_stop_capture;
gstelement_class->change_state = gst_base_camera_src_change_state; gstelement_class->change_state = gst_base_camera_src_change_state;
} }

View file

@ -92,7 +92,7 @@ struct _GstBaseCameraSrc
/** /**
* GstBaseCameraSrcClass: * GstBaseCameraSrcClass:
* @construct_pipeline: construct pipeline must be implemented by derived class * @construct_pipeline: construct pipeline
* @setup_pipeline: configure pipeline for the chosen settings * @setup_pipeline: configure pipeline for the chosen settings
* @set_zoom: set the zoom * @set_zoom: set the zoom
* @set_mode: set the mode * @set_mode: set the mode
@ -101,29 +101,27 @@ struct _GstBaseCameraSrcClass
{ {
GstBinClass parent; GstBinClass parent;
/* construct pipeline must be implemented by derived class */ /* Construct pipeline. (called in GST_STATE_CHANGE_NULL_TO_READY) Optional. */
gboolean (*construct_pipeline) (GstBaseCameraSrc *self); gboolean (*construct_pipeline) (GstBaseCameraSrc *self);
/* optional */ /* (called in GST_STATE_CHANGE_READY_TO_PAUSED). Optional. */
gboolean (*setup_pipeline) (GstBaseCameraSrc *self); gboolean (*setup_pipeline) (GstBaseCameraSrc *self);
/* set the zoom */ /* Set the zoom. If set, called when changing 'zoom' property. Optional. */
void (*set_zoom) (GstBaseCameraSrc *self, gfloat zoom); void (*set_zoom) (GstBaseCameraSrc *self, gfloat zoom);
/* set the mode */ /* Set the mode. If set, called when changing 'mode' property. Optional. */
gboolean (*set_mode) (GstBaseCameraSrc *self, gboolean (*set_mode) (GstBaseCameraSrc *self,
GstCameraBinMode mode); GstCameraBinMode mode);
/* set preview caps */ /* Set preview caps. If set, called called when setting new 'preview-caps'. Optional. */
gboolean (*set_preview) (GstBaseCameraSrc *self, gboolean (*set_preview) (GstBaseCameraSrc *self,
GstCaps *preview_caps); GstCaps *preview_caps);
/* */ /* Called by the handler for 'start-capture'. Mandatory. */
GstCaps * (*get_allowed_input_caps) (GstBaseCameraSrc * self);
void (*private_start_capture) (GstBaseCameraSrc * src);
void (*private_stop_capture) (GstBaseCameraSrc * src);
gboolean (*start_capture) (GstBaseCameraSrc * src); gboolean (*start_capture) (GstBaseCameraSrc * src);
/* Called by the handler for 'stop-capture'. Mandatory. */
void (*stop_capture) (GstBaseCameraSrc * src); void (*stop_capture) (GstBaseCameraSrc * src);
gpointer _gst_reserved[GST_PADDING_LARGE]; gpointer _gst_reserved[GST_PADDING_LARGE];
@ -140,7 +138,6 @@ GstColorBalance * gst_base_camera_src_get_color_balance (GstBaseCameraSrc *self)
gboolean gst_base_camera_src_set_mode (GstBaseCameraSrc *self, GstCameraBinMode mode); gboolean gst_base_camera_src_set_mode (GstBaseCameraSrc *self, GstCameraBinMode mode);
void gst_base_camera_src_setup_zoom (GstBaseCameraSrc * self); void gst_base_camera_src_setup_zoom (GstBaseCameraSrc * self);
void gst_base_camera_src_setup_preview (GstBaseCameraSrc * self, GstCaps * preview_caps); void gst_base_camera_src_setup_preview (GstBaseCameraSrc * self, GstCaps * preview_caps);
GstCaps * gst_base_camera_src_get_allowed_input_caps (GstBaseCameraSrc * self);
void gst_base_camera_src_finish_capture (GstBaseCameraSrc *self); void gst_base_camera_src_finish_capture (GstBaseCameraSrc *self);

View file

@ -75,18 +75,6 @@ bus_callback (GstBus * bus, GstMessage * message, gpointer user_data)
return TRUE; return TRUE;
} }
static GstFlowReturn
gst_camerabin_preview_pipeline_new_preroll (GstAppSink * appsink,
gpointer user_data)
{
GstBuffer *buffer;
buffer = gst_app_sink_pull_preroll (appsink);
gst_buffer_unref (buffer);
return GST_FLOW_OK;
}
static GstFlowReturn static GstFlowReturn
gst_camerabin_preview_pipeline_new_buffer (GstAppSink * appsink, gst_camerabin_preview_pipeline_new_buffer (GstAppSink * appsink,
gpointer user_data) gpointer user_data)
@ -175,7 +163,6 @@ gst_camerabin_create_preview_pipeline (GstElement * element,
goto error; goto error;
} }
callbacks.new_preroll = gst_camerabin_preview_pipeline_new_preroll;
callbacks.new_buffer = gst_camerabin_preview_pipeline_new_buffer; callbacks.new_buffer = gst_camerabin_preview_pipeline_new_buffer;
gst_app_sink_set_callbacks ((GstAppSink *) data->appsink, &callbacks, data, gst_app_sink_set_callbacks ((GstAppSink *) data->appsink, &callbacks, data,
NULL); NULL);

View file

@ -4,14 +4,15 @@ lib_LTLIBRARIES = libgstbasevideo-@GST_MAJORMINOR@.la
CLEANFILES = $(BUILT_SOURCES) CLEANFILES = $(BUILT_SOURCES)
libgstbasevideo_@GST_MAJORMINOR@_la_SOURCES = \ libgstbasevideo_@GST_MAJORMINOR@_la_SOURCES = \
gstbasevideoutils.c \
gstbasevideocodec.c \ gstbasevideocodec.c \
gstbasevideoutils.c \
gstbasevideodecoder.c \ gstbasevideodecoder.c \
gstbasevideoencoder.c gstbasevideoencoder.c
libgstbasevideo_@GST_MAJORMINOR@includedir = $(includedir)/gstreamer-@GST_MAJORMINOR@/gst/video libgstbasevideo_@GST_MAJORMINOR@includedir = $(includedir)/gstreamer-@GST_MAJORMINOR@/gst/video
libgstbasevideo_@GST_MAJORMINOR@include_HEADERS = \ libgstbasevideo_@GST_MAJORMINOR@include_HEADERS = \
gstbasevideocodec.h \ gstbasevideocodec.h \
gstbasevideoutils.h \
gstbasevideodecoder.h \ gstbasevideodecoder.h \
gstbasevideoencoder.h gstbasevideoencoder.h

View file

@ -106,14 +106,19 @@ gst_base_video_codec_reset (GstBaseVideoCodec * base_video_codec)
GST_DEBUG_OBJECT (base_video_codec, "reset"); GST_DEBUG_OBJECT (base_video_codec, "reset");
GST_OBJECT_LOCK (base_video_codec);
for (g = base_video_codec->frames; g; g = g_list_next (g)) { for (g = base_video_codec->frames; g; g = g_list_next (g)) {
gst_base_video_codec_free_frame ((GstVideoFrame *) g->data); gst_base_video_codec_free_frame ((GstVideoFrame *) g->data);
} }
g_list_free (base_video_codec->frames); g_list_free (base_video_codec->frames);
base_video_codec->frames = NULL; base_video_codec->frames = NULL;
GST_OBJECT_UNLOCK (base_video_codec);
base_video_codec->bytes = 0; base_video_codec->bytes = 0;
base_video_codec->time = 0; base_video_codec->time = 0;
gst_buffer_replace (&base_video_codec->state.codec_data, NULL);
gst_caps_replace (&base_video_codec->state.caps, NULL);
} }
static void static void
@ -163,7 +168,7 @@ gst_base_video_codec_new_frame (GstBaseVideoCodec * base_video_codec)
{ {
GstVideoFrame *frame; GstVideoFrame *frame;
frame = g_malloc0 (sizeof (GstVideoFrame)); frame = g_slice_new0 (GstVideoFrame);
frame->system_frame_number = base_video_codec->system_frame_number; frame->system_frame_number = base_video_codec->system_frame_number;
base_video_codec->system_frame_number++; base_video_codec->system_frame_number++;
@ -174,6 +179,8 @@ gst_base_video_codec_new_frame (GstBaseVideoCodec * base_video_codec)
void void
gst_base_video_codec_free_frame (GstVideoFrame * frame) gst_base_video_codec_free_frame (GstVideoFrame * frame)
{ {
g_return_if_fail (frame != NULL);
if (frame->sink_buffer) { if (frame->sink_buffer) {
gst_buffer_unref (frame->sink_buffer); gst_buffer_unref (frame->sink_buffer);
} }
@ -182,5 +189,11 @@ gst_base_video_codec_free_frame (GstVideoFrame * frame)
gst_buffer_unref (frame->src_buffer); gst_buffer_unref (frame->src_buffer);
} }
g_free (frame); g_list_foreach (frame->events, (GFunc) gst_event_unref, NULL);
g_list_free (frame->events);
if (frame->coder_hook_destroy_notify && frame->coder_hook)
frame->coder_hook_destroy_notify (frame->coder_hook);
g_slice_free (GstVideoFrame, frame);
} }

View file

@ -86,6 +86,7 @@ typedef struct _GstBaseVideoCodecClass GstBaseVideoCodecClass;
struct _GstVideoState struct _GstVideoState
{ {
GstCaps *caps;
GstVideoFormat format; GstVideoFormat format;
int width, height; int width, height;
int fps_n, fps_d; int fps_n, fps_d;
@ -125,9 +126,15 @@ struct _GstVideoFrame
int n_fields; int n_fields;
void *coder_hook; void *coder_hook;
GDestroyNotify coder_hook_destroy_notify;
GstClockTime deadline; GstClockTime deadline;
gboolean force_keyframe; gboolean force_keyframe;
/* Events that should be pushed downstream *before*
* the next src_buffer */
GList *events;
}; };
struct _GstBaseVideoCodec struct _GstBaseVideoCodec
@ -140,7 +147,7 @@ struct _GstBaseVideoCodec
guint64 system_frame_number; guint64 system_frame_number;
GList *frames; GList *frames; /* Protected with OBJECT_LOCK */
GstVideoState state; GstVideoState state;
GstSegment segment; GstSegment segment;
@ -168,17 +175,6 @@ GType gst_base_video_codec_get_type (void);
GstVideoFrame * gst_base_video_codec_new_frame (GstBaseVideoCodec *base_video_codec); GstVideoFrame * gst_base_video_codec_new_frame (GstBaseVideoCodec *base_video_codec);
void gst_base_video_codec_free_frame (GstVideoFrame *frame); void gst_base_video_codec_free_frame (GstVideoFrame *frame);
gboolean gst_base_video_rawvideo_convert (GstVideoState *state,
GstFormat src_format, gint64 src_value,
GstFormat * dest_format, gint64 *dest_value);
gboolean gst_base_video_encoded_video_convert (GstVideoState * state,
gint64 bytes, gint64 time, GstFormat src_format,
gint64 src_value, GstFormat * dest_format, gint64 * dest_value);
GstClockTime gst_video_state_get_timestamp (const GstVideoState *state,
GstSegment *segment, int frame_number);
G_END_DECLS G_END_DECLS
#endif #endif

View file

@ -128,6 +128,7 @@
#endif #endif
#include "gstbasevideodecoder.h" #include "gstbasevideodecoder.h"
#include "gstbasevideoutils.h"
#include <string.h> #include <string.h>
@ -168,7 +169,6 @@ static guint64 gst_base_video_decoder_get_field_duration (GstBaseVideoDecoder *
base_video_decoder, int n_fields); base_video_decoder, int n_fields);
static GstVideoFrame *gst_base_video_decoder_new_frame (GstBaseVideoDecoder * static GstVideoFrame *gst_base_video_decoder_new_frame (GstBaseVideoDecoder *
base_video_decoder); base_video_decoder);
static void gst_base_video_decoder_free_frame (GstVideoFrame * frame);
static void gst_base_video_decoder_clear_queues (GstBaseVideoDecoder * dec); static void gst_base_video_decoder_clear_queues (GstBaseVideoDecoder * dec);
@ -237,6 +237,32 @@ gst_base_video_decoder_init (GstBaseVideoDecoder * base_video_decoder,
base_video_decoder->sink_clipping = TRUE; base_video_decoder->sink_clipping = TRUE;
} }
static gboolean
gst_base_video_decoder_push_src_event (GstBaseVideoDecoder * decoder,
GstEvent * event)
{
/* Forward non-serialized events and EOS/FLUSH_STOP immediately.
* For EOS this is required because no buffer or serialized event
* will come after EOS and nothing could trigger another
* _finish_frame() call. *
* If the subclass handles sending of EOS manually it can return
* _DROPPED from ::finish() and all other subclasses should have
* decoded/flushed all remaining data before this
*
* For FLUSH_STOP this is required because it is expected
* to be forwarded immediately and no buffers are queued anyway.
*/
if (!GST_EVENT_IS_SERIALIZED (event)
|| GST_EVENT_TYPE (event) == GST_EVENT_EOS
|| GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP)
return gst_pad_push_event (decoder->base_video_codec.srcpad, event);
decoder->current_frame_events =
g_list_prepend (decoder->current_frame_events, event);
return TRUE;
}
static gboolean static gboolean
gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps) gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
{ {
@ -244,7 +270,7 @@ gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
GstBaseVideoDecoderClass *base_video_decoder_class; GstBaseVideoDecoderClass *base_video_decoder_class;
GstStructure *structure; GstStructure *structure;
const GValue *codec_data; const GValue *codec_data;
GstVideoState *state; GstVideoState state;
gboolean ret = TRUE; gboolean ret = TRUE;
base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
@ -253,37 +279,47 @@ gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
GST_DEBUG_OBJECT (base_video_decoder, "setcaps %" GST_PTR_FORMAT, caps); GST_DEBUG_OBJECT (base_video_decoder, "setcaps %" GST_PTR_FORMAT, caps);
state = &GST_BASE_VIDEO_CODEC (base_video_decoder)->state; memset (&state, 0, sizeof (state));
memset (state, 0, sizeof (GstVideoState)); state.caps = gst_caps_ref (caps);
structure = gst_caps_get_structure (caps, 0); structure = gst_caps_get_structure (caps, 0);
gst_video_format_parse_caps (caps, NULL, &state->width, &state->height); gst_video_format_parse_caps (caps, NULL, &state.width, &state.height);
/* this one fails if no framerate in caps */ /* this one fails if no framerate in caps */
if (!gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d)) { if (!gst_video_parse_caps_framerate (caps, &state.fps_n, &state.fps_d)) {
state->fps_n = 0; state.fps_n = 0;
state->fps_d = 1; state.fps_d = 1;
} }
/* but the p-a-r sets 1/1 instead, which is not quite informative ... */ /* but the p-a-r sets 1/1 instead, which is not quite informative ... */
if (!gst_structure_has_field (structure, "pixel-aspect-ratio") || if (!gst_structure_has_field (structure, "pixel-aspect-ratio") ||
!gst_video_parse_caps_pixel_aspect_ratio (caps, !gst_video_parse_caps_pixel_aspect_ratio (caps,
&state->par_n, &state->par_d)) { &state.par_n, &state.par_d)) {
state->par_n = 0; state.par_n = 0;
state->par_d = 1; state.par_d = 1;
} }
state->have_interlaced = state.have_interlaced =
gst_video_format_parse_caps_interlaced (caps, &state->interlaced); gst_video_format_parse_caps_interlaced (caps, &state.interlaced);
codec_data = gst_structure_get_value (structure, "codec_data"); codec_data = gst_structure_get_value (structure, "codec_data");
if (codec_data && G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) { if (codec_data && G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) {
state->codec_data = gst_value_get_buffer (codec_data); state.codec_data = GST_BUFFER (gst_value_dup_mini_object (codec_data));
} }
if (base_video_decoder_class->set_format) { if (base_video_decoder_class->set_format) {
ret = base_video_decoder_class->set_format (base_video_decoder, ret = base_video_decoder_class->set_format (base_video_decoder, &state);
&GST_BASE_VIDEO_CODEC (base_video_decoder)->state); }
if (ret) {
gst_buffer_replace (&GST_BASE_VIDEO_CODEC (base_video_decoder)->
state.codec_data, NULL);
gst_caps_replace (&GST_BASE_VIDEO_CODEC (base_video_decoder)->state.caps,
NULL);
GST_BASE_VIDEO_CODEC (base_video_decoder)->state = state;
} else {
gst_buffer_replace (&state.codec_data, NULL);
gst_caps_replace (&state.caps, NULL);
} }
g_object_unref (base_video_decoder); g_object_unref (base_video_decoder);
@ -323,6 +359,11 @@ gst_base_video_decoder_flush (GstBaseVideoDecoder * dec, gboolean hard)
GST_LOG_OBJECT (dec, "flush hard %d", hard); GST_LOG_OBJECT (dec, "flush hard %d", hard);
/* Inform subclass */
/* FIXME ? only if hard, or tell it if hard ? */
if (klass->reset)
klass->reset (dec);
/* FIXME make some more distinction between hard and soft, /* FIXME make some more distinction between hard and soft,
* but subclass may not be prepared for that */ * but subclass may not be prepared for that */
/* FIXME perhaps also clear pending frames ?, /* FIXME perhaps also clear pending frames ?,
@ -334,15 +375,13 @@ gst_base_video_decoder_flush (GstBaseVideoDecoder * dec, gboolean hard)
GST_FORMAT_UNDEFINED); GST_FORMAT_UNDEFINED);
gst_base_video_decoder_clear_queues (dec); gst_base_video_decoder_clear_queues (dec);
dec->error_count = 0; dec->error_count = 0;
g_list_foreach (dec->current_frame_events, (GFunc) gst_event_unref, NULL);
g_list_free (dec->current_frame_events);
dec->current_frame_events = NULL;
} }
/* and get (re)set for the sequel */ /* and get (re)set for the sequel */
gst_base_video_decoder_reset (dec, FALSE); gst_base_video_decoder_reset (dec, FALSE);
/* also inform subclass */
/* FIXME ? only if hard, or tell it if hard ? */
if (klass->reset)
klass->reset (dec);
return ret; return ret;
} }
@ -364,9 +403,9 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
switch (GST_EVENT_TYPE (event)) { switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS: case GST_EVENT_EOS:
{ {
if (!base_video_decoder->packetized) {
GstFlowReturn flow_ret; GstFlowReturn flow_ret;
;
if (!base_video_decoder->packetized) {
do { do {
flow_ret = flow_ret =
base_video_decoder_class->parse_data (base_video_decoder, TRUE); base_video_decoder_class->parse_data (base_video_decoder, TRUE);
@ -374,12 +413,13 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
} }
if (base_video_decoder_class->finish) { if (base_video_decoder_class->finish) {
base_video_decoder_class->finish (base_video_decoder); flow_ret = base_video_decoder_class->finish (base_video_decoder);
} else {
flow_ret = GST_FLOW_OK;
} }
ret = if (flow_ret == GST_FLOW_OK)
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder), ret = gst_base_video_decoder_push_src_event (base_video_decoder, event);
event);
} }
break; break;
case GST_EVENT_NEWSEGMENT: case GST_EVENT_NEWSEGMENT:
@ -441,9 +481,7 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
gst_segment_set_newsegment_full (segment, gst_segment_set_newsegment_full (segment,
update, rate, arate, format, start, stop, pos); update, rate, arate, format, start, stop, pos);
ret = ret = gst_base_video_decoder_push_src_event (base_video_decoder, event);
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
event);
break; break;
} }
case GST_EVENT_FLUSH_STOP: case GST_EVENT_FLUSH_STOP:
@ -453,9 +491,7 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
} }
default: default:
/* FIXME this changes the order of events */ /* FIXME this changes the order of events */
ret = ret = gst_base_video_decoder_push_src_event (base_video_decoder, event);
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
event);
break; break;
} }
@ -876,16 +912,16 @@ gst_base_video_decoder_clear_queues (GstBaseVideoDecoder * dec)
g_list_foreach (dec->gather, (GFunc) gst_mini_object_unref, NULL); g_list_foreach (dec->gather, (GFunc) gst_mini_object_unref, NULL);
g_list_free (dec->gather); g_list_free (dec->gather);
dec->gather = NULL; dec->gather = NULL;
g_list_foreach (dec->decode, (GFunc) gst_base_video_decoder_free_frame, NULL); g_list_foreach (dec->decode, (GFunc) gst_base_video_codec_free_frame, NULL);
g_list_free (dec->decode); g_list_free (dec->decode);
dec->decode = NULL; dec->decode = NULL;
g_list_foreach (dec->parse, (GFunc) gst_mini_object_unref, NULL); g_list_foreach (dec->parse, (GFunc) gst_mini_object_unref, NULL);
g_list_free (dec->parse); g_list_free (dec->parse);
dec->decode = NULL; dec->parse = NULL;
g_list_foreach (dec->parse_gather, (GFunc) gst_base_video_decoder_free_frame, g_list_foreach (dec->parse_gather, (GFunc) gst_base_video_codec_free_frame,
NULL); NULL);
g_list_free (dec->parse_gather); g_list_free (dec->parse_gather);
dec->decode = NULL; dec->parse_gather = NULL;
} }
static void static void
@ -917,7 +953,7 @@ gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder,
base_video_decoder->timestamps = NULL; base_video_decoder->timestamps = NULL;
if (base_video_decoder->current_frame) { if (base_video_decoder->current_frame) {
gst_base_video_decoder_free_frame (base_video_decoder->current_frame); gst_base_video_codec_free_frame (base_video_decoder->current_frame);
base_video_decoder->current_frame = NULL; base_video_decoder->current_frame = NULL;
} }
@ -1042,7 +1078,7 @@ gst_base_video_decoder_flush_decode (GstBaseVideoDecoder * dec)
next = g_list_next (walk); next = g_list_next (walk);
if (dec->current_frame) if (dec->current_frame)
gst_base_video_decoder_free_frame (dec->current_frame); gst_base_video_codec_free_frame (dec->current_frame);
dec->current_frame = frame; dec->current_frame = frame;
/* decode buffer, resulting data prepended to queue */ /* decode buffer, resulting data prepended to queue */
res = gst_base_video_decoder_have_frame_2 (dec); res = gst_base_video_decoder_have_frame_2 (dec);
@ -1203,9 +1239,7 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
GST_CLOCK_TIME_NONE, 0); GST_CLOCK_TIME_NONE, 0);
ret = ret = gst_base_video_decoder_push_src_event (base_video_decoder, event);
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
event);
if (!ret) { if (!ret) {
GST_ERROR_OBJECT (base_video_decoder, "new segment event ret=%d", ret); GST_ERROR_OBJECT (base_video_decoder, "new segment event ret=%d", ret);
return GST_FLOW_ERROR; return GST_FLOW_ERROR;
@ -1216,12 +1250,13 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
gint64 ts, index; gint64 ts, index;
GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer"); GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer");
gst_base_video_decoder_flush (base_video_decoder, FALSE);
/* track present position */ /* track present position */
ts = base_video_decoder->timestamp_offset; ts = base_video_decoder->timestamp_offset;
index = base_video_decoder->field_index; index = base_video_decoder->field_index;
gst_base_video_decoder_flush (base_video_decoder, FALSE);
/* buffer may claim DISCONT loudly, if it can't tell us where we are now, /* buffer may claim DISCONT loudly, if it can't tell us where we are now,
* we'll stick to where we were ... * we'll stick to where we were ...
* Particularly useful/needed for upstream BYTE based */ * Particularly useful/needed for upstream BYTE based */
@ -1268,6 +1303,10 @@ gst_base_video_decoder_change_state (GstElement * element,
base_video_decoder_class->stop (base_video_decoder); base_video_decoder_class->stop (base_video_decoder);
} }
gst_base_video_decoder_reset (base_video_decoder, TRUE); gst_base_video_decoder_reset (base_video_decoder, TRUE);
g_list_foreach (base_video_decoder->current_frame_events,
(GFunc) gst_event_unref, NULL);
g_list_free (base_video_decoder->current_frame_events);
base_video_decoder->current_frame_events = NULL;
break; break;
default: default:
break; break;
@ -1276,31 +1315,14 @@ gst_base_video_decoder_change_state (GstElement * element,
return ret; return ret;
} }
static void
gst_base_video_decoder_free_frame (GstVideoFrame * frame)
{
g_return_if_fail (frame != NULL);
if (frame->sink_buffer) {
gst_buffer_unref (frame->sink_buffer);
}
if (frame->src_buffer) {
gst_buffer_unref (frame->src_buffer);
}
g_free (frame);
}
static GstVideoFrame * static GstVideoFrame *
gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder) gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder)
{ {
GstVideoFrame *frame; GstVideoFrame *frame;
frame = g_malloc0 (sizeof (GstVideoFrame)); frame =
gst_base_video_codec_new_frame (GST_BASE_VIDEO_CODEC
frame->system_frame_number = (base_video_decoder));
GST_BASE_VIDEO_CODEC (base_video_decoder)->system_frame_number;
GST_BASE_VIDEO_CODEC (base_video_decoder)->system_frame_number++;
frame->decode_frame_number = frame->system_frame_number - frame->decode_frame_number = frame->system_frame_number -
base_video_decoder->reorder_depth; base_video_decoder->reorder_depth;
@ -1310,6 +1332,9 @@ gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder)
frame->presentation_duration = GST_CLOCK_TIME_NONE; frame->presentation_duration = GST_CLOCK_TIME_NONE;
frame->n_fields = 2; frame->n_fields = 2;
frame->events = base_video_decoder->current_frame_events;
base_video_decoder->current_frame_events = NULL;
return frame; return frame;
} }
@ -1332,17 +1357,46 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
GstVideoState *state = &GST_BASE_VIDEO_CODEC (base_video_decoder)->state; GstVideoState *state = &GST_BASE_VIDEO_CODEC (base_video_decoder)->state;
GstBuffer *src_buffer; GstBuffer *src_buffer;
GstFlowReturn ret = GST_FLOW_OK; GstFlowReturn ret = GST_FLOW_OK;
GList *l, *events = NULL;
GST_LOG_OBJECT (base_video_decoder, "finish frame"); GST_LOG_OBJECT (base_video_decoder, "finish frame");
#ifndef GST_DISABLE_GST_DEBUG
GST_OBJECT_LOCK (base_video_decoder);
GST_LOG_OBJECT (base_video_decoder, "n %d in %d out %d", GST_LOG_OBJECT (base_video_decoder, "n %d in %d out %d",
g_list_length (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames), g_list_length (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames),
gst_adapter_available (base_video_decoder->input_adapter), gst_adapter_available (base_video_decoder->input_adapter),
gst_adapter_available (base_video_decoder->output_adapter)); gst_adapter_available (base_video_decoder->output_adapter));
GST_OBJECT_UNLOCK (base_video_decoder);
#endif
GST_LOG_OBJECT (base_video_decoder, GST_LOG_OBJECT (base_video_decoder,
"finish frame sync=%d pts=%" GST_TIME_FORMAT, frame->is_sync_point, "finish frame sync=%d pts=%" GST_TIME_FORMAT, frame->is_sync_point,
GST_TIME_ARGS (frame->presentation_timestamp)); GST_TIME_ARGS (frame->presentation_timestamp));
/* Push all pending events that arrived before this frame */
GST_OBJECT_LOCK (base_video_decoder);
for (l = base_video_decoder->base_video_codec.frames; l; l = l->next) {
GstVideoFrame *tmp = l->data;
if (tmp->events) {
GList *k;
for (k = g_list_last (tmp->events); k; k = k->prev)
events = g_list_prepend (events, k->data);
g_list_free (tmp->events);
tmp->events = NULL;
}
if (tmp == frame)
break;
}
GST_OBJECT_UNLOCK (base_video_decoder);
for (l = g_list_last (events); l; l = l->next)
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
l->data);
g_list_free (events);
if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) { if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) {
if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) { if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) {
GST_DEBUG_OBJECT (base_video_decoder, GST_DEBUG_OBJECT (base_video_decoder,
@ -1508,9 +1562,11 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
} }
done: done:
GST_OBJECT_LOCK (base_video_decoder);
GST_BASE_VIDEO_CODEC (base_video_decoder)->frames = GST_BASE_VIDEO_CODEC (base_video_decoder)->frames =
g_list_remove (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames, frame); g_list_remove (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames, frame);
gst_base_video_decoder_free_frame (frame); GST_OBJECT_UNLOCK (base_video_decoder);
gst_base_video_codec_free_frame (frame);
return ret; return ret;
} }
@ -1673,8 +1729,10 @@ gst_base_video_decoder_have_frame_2 (GstBaseVideoDecoder * base_video_decoder)
GST_TIME_ARGS (frame->decode_timestamp)); GST_TIME_ARGS (frame->decode_timestamp));
GST_LOG_OBJECT (base_video_decoder, "dist %d", frame->distance_from_sync); GST_LOG_OBJECT (base_video_decoder, "dist %d", frame->distance_from_sync);
GST_OBJECT_LOCK (base_video_decoder);
GST_BASE_VIDEO_CODEC (base_video_decoder)->frames = GST_BASE_VIDEO_CODEC (base_video_decoder)->frames =
g_list_append (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames, frame); g_list_append (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames, frame);
GST_OBJECT_UNLOCK (base_video_decoder);
frame->deadline = frame->deadline =
gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC
@ -1756,7 +1814,9 @@ gst_base_video_decoder_get_oldest_frame (GstBaseVideoDecoder *
{ {
GList *g; GList *g;
GST_OBJECT_LOCK (base_video_decoder);
g = g_list_first (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames); g = g_list_first (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames);
GST_OBJECT_UNLOCK (base_video_decoder);
if (g == NULL) if (g == NULL)
return NULL; return NULL;
@ -1775,17 +1835,21 @@ gst_base_video_decoder_get_frame (GstBaseVideoDecoder * base_video_decoder,
int frame_number) int frame_number)
{ {
GList *g; GList *g;
GstVideoFrame *frame = NULL;
GST_OBJECT_LOCK (base_video_decoder);
for (g = g_list_first (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames); for (g = g_list_first (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames);
g; g = g_list_next (g)) { g; g = g_list_next (g)) {
GstVideoFrame *frame = g->data; GstVideoFrame *tmp = g->data;
if (frame->system_frame_number == frame_number) { if (frame->system_frame_number == frame_number) {
return frame; frame = tmp;
break;
} }
} }
GST_OBJECT_UNLOCK (base_video_decoder);
return NULL; return frame;
} }
/** /**

View file

@ -65,6 +65,13 @@ G_BEGIN_DECLS
**/ **/
#define GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS #define GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS
/**
* GST_BASE_VIDEO_DECODER_FLOW_DROPPED:
*
* Returned when the event/buffer should be dropped.
*/
#define GST_BASE_VIDEO_DECODER_FLOW_DROPPED GST_FLOW_CUSTOM_SUCCESS_1
typedef struct _GstBaseVideoDecoder GstBaseVideoDecoder; typedef struct _GstBaseVideoDecoder GstBaseVideoDecoder;
typedef struct _GstBaseVideoDecoderClass GstBaseVideoDecoderClass; typedef struct _GstBaseVideoDecoderClass GstBaseVideoDecoderClass;
@ -136,6 +143,8 @@ struct _GstBaseVideoDecoder
* only available during parsing */ * only available during parsing */
/* FIXME remove and add parameter to method */ /* FIXME remove and add parameter to method */
GstVideoFrame *current_frame; GstVideoFrame *current_frame;
/* events that should apply to the current frame */
GList *current_frame_events;
/* relative offset of input data */ /* relative offset of input data */
guint64 input_offset; guint64 input_offset;
/* relative offset of frame */ /* relative offset of frame */

View file

@ -106,6 +106,9 @@
#endif #endif
#include "gstbasevideoencoder.h" #include "gstbasevideoencoder.h"
#include "gstbasevideoutils.h"
#include <string.h>
GST_DEBUG_CATEGORY (basevideoencoder_debug); GST_DEBUG_CATEGORY (basevideoencoder_debug);
#define GST_CAT_DEFAULT basevideoencoder_debug #define GST_CAT_DEFAULT basevideoencoder_debug
@ -185,6 +188,11 @@ gst_base_video_encoder_reset (GstBaseVideoEncoder * base_video_encoder)
gst_event_unref (base_video_encoder->force_keyunit_event); gst_event_unref (base_video_encoder->force_keyunit_event);
base_video_encoder->force_keyunit_event = NULL; base_video_encoder->force_keyunit_event = NULL;
} }
g_list_foreach (base_video_encoder->current_frame_events,
(GFunc) gst_event_unref, NULL);
g_list_free (base_video_encoder->current_frame_events);
base_video_encoder->current_frame_events = NULL;
} }
static void static void
@ -236,9 +244,9 @@ gst_base_video_encoder_drain (GstBaseVideoEncoder * enc)
return TRUE; return TRUE;
} }
if (enc_class->finish) { if (enc_class->reset) {
GST_DEBUG_OBJECT (enc, "requesting subclass to finish"); GST_DEBUG_OBJECT (enc, "requesting subclass to finish");
ret = enc_class->finish (enc); ret = enc_class->reset (enc);
} }
/* everything should be away now */ /* everything should be away now */
if (codec->frames) { if (codec->frames) {
@ -262,11 +270,9 @@ gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
GstBaseVideoEncoder *base_video_encoder; GstBaseVideoEncoder *base_video_encoder;
GstBaseVideoEncoderClass *base_video_encoder_class; GstBaseVideoEncoderClass *base_video_encoder_class;
GstStructure *structure; GstStructure *structure;
GstVideoState *state; GstVideoState *state, tmp_state;
gboolean ret; gboolean ret;
gboolean changed = FALSE, u, v; gboolean changed = FALSE;
GstVideoFormat fmt;
gint w, h, num, den;
base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad)); base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
base_video_encoder_class = base_video_encoder_class =
@ -278,58 +284,49 @@ gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
GST_DEBUG_OBJECT (base_video_encoder, "setcaps %" GST_PTR_FORMAT, caps); GST_DEBUG_OBJECT (base_video_encoder, "setcaps %" GST_PTR_FORMAT, caps);
state = &GST_BASE_VIDEO_CODEC (base_video_encoder)->state; state = &GST_BASE_VIDEO_CODEC (base_video_encoder)->state;
memset (&tmp_state, 0, sizeof (tmp_state));
tmp_state.caps = gst_caps_ref (caps);
structure = gst_caps_get_structure (caps, 0); structure = gst_caps_get_structure (caps, 0);
ret = gst_video_format_parse_caps (caps, &fmt, &w, &h); ret =
gst_video_format_parse_caps (caps, &tmp_state.format, &tmp_state.width,
&tmp_state.height);
if (!ret) if (!ret)
goto exit; goto exit;
if (fmt != state->format || w != state->width || h != state->height) { changed = (tmp_state.format != state->format
changed = TRUE; || tmp_state.width != state->width || tmp_state.height != state->height);
state->format = fmt;
state->width = w;
state->height = h;
}
num = 0; if (!gst_video_parse_caps_framerate (caps, &tmp_state.fps_n,
den = 1; &tmp_state.fps_d)) {
gst_video_parse_caps_framerate (caps, &num, &den); tmp_state.fps_n = 0;
if (den == 0) { tmp_state.fps_d = 1;
num = 0;
den = 1;
}
if (num != state->fps_n || den != state->fps_d) {
changed = TRUE;
state->fps_n = num;
state->fps_d = den;
} }
changed = changed || (tmp_state.fps_n != state->fps_n
|| tmp_state.fps_d != state->fps_d);
num = 0; if (!gst_video_parse_caps_pixel_aspect_ratio (caps, &tmp_state.par_n,
den = 1; &tmp_state.par_d)) {
gst_video_parse_caps_pixel_aspect_ratio (caps, &num, &den); tmp_state.par_n = 1;
if (den == 0) { tmp_state.par_d = 1;
num = 0;
den = 1;
}
if (num != state->par_n || den != state->par_d) {
changed = TRUE;
state->par_n = num;
state->par_d = den;
} }
changed = changed || (tmp_state.par_n != state->par_n
|| tmp_state.par_d != state->par_d);
u = gst_structure_get_boolean (structure, "interlaced", &v); tmp_state.have_interlaced =
if (u != state->have_interlaced || v != state->interlaced) { gst_structure_get_boolean (structure, "interlaced",
changed = TRUE; &tmp_state.interlaced);
state->have_interlaced = u; changed = changed || (tmp_state.have_interlaced != state->have_interlaced
state->interlaced = v; || tmp_state.interlaced != state->interlaced);
}
state->bytes_per_picture = tmp_state.bytes_per_picture =
gst_video_format_get_size (state->format, state->width, state->height); gst_video_format_get_size (tmp_state.format, tmp_state.width,
state->clean_width = state->width; tmp_state.height);
state->clean_height = state->height; tmp_state.clean_width = tmp_state.width;
state->clean_offset_left = 0; tmp_state.clean_height = tmp_state.height;
state->clean_offset_top = 0; tmp_state.clean_offset_left = 0;
tmp_state.clean_offset_top = 0;
if (changed) { if (changed) {
/* arrange draining pending frames */ /* arrange draining pending frames */
@ -337,11 +334,17 @@ gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
/* and subclass should be ready to configure format at any time around */ /* and subclass should be ready to configure format at any time around */
if (base_video_encoder_class->set_format) if (base_video_encoder_class->set_format)
ret = base_video_encoder_class->set_format (base_video_encoder, state); ret =
base_video_encoder_class->set_format (base_video_encoder, &tmp_state);
if (ret) {
gst_caps_replace (&state->caps, NULL);
*state = tmp_state;
}
} else { } else {
/* no need to stir things up */ /* no need to stir things up */
GST_DEBUG_OBJECT (base_video_encoder, GST_DEBUG_OBJECT (base_video_encoder,
"new video format identical to configured format"); "new video format identical to configured format");
gst_caps_unref (tmp_state.caps);
ret = TRUE; ret = TRUE;
} }
@ -368,13 +371,26 @@ static gboolean
gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder, gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder,
GstEvent * event) GstEvent * event)
{ {
GstBaseVideoEncoderClass *base_video_encoder_class;
gboolean ret = FALSE; gboolean ret = FALSE;
base_video_encoder_class =
GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);
switch (GST_EVENT_TYPE (event)) { switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS: case GST_EVENT_EOS:
{ {
GstFlowReturn flow_ret;
base_video_encoder->a.at_eos = TRUE; base_video_encoder->a.at_eos = TRUE;
gst_base_video_encoder_drain (base_video_encoder);
if (base_video_encoder_class->finish) {
flow_ret = base_video_encoder_class->finish (base_video_encoder);
} else {
flow_ret = GST_FLOW_OK;
}
ret = (flow_ret == GST_BASE_VIDEO_ENCODER_FLOW_DROPPED);
break; break;
} }
case GST_EVENT_NEWSEGMENT: case GST_EVENT_NEWSEGMENT:
@ -453,8 +469,27 @@ gst_base_video_encoder_sink_event (GstPad * pad, GstEvent * event)
if (!handled) if (!handled)
handled = gst_base_video_encoder_sink_eventfunc (enc, event); handled = gst_base_video_encoder_sink_eventfunc (enc, event);
if (!handled) if (!handled) {
ret = gst_pad_event_default (pad, event); /* Forward non-serialized events and EOS/FLUSH_STOP immediately.
* For EOS this is required because no buffer or serialized event
* will come after EOS and nothing could trigger another
* _finish_frame() call. *
* If the subclass handles sending of EOS manually it can return
* _DROPPED from ::finish() and all other subclasses should have
* decoded/flushed all remaining data before this
*
* For FLUSH_STOP this is required because it is expected
* to be forwarded immediately and no buffers are queued anyway.
*/
if (!GST_EVENT_IS_SERIALIZED (event)
|| GST_EVENT_TYPE (event) == GST_EVENT_EOS
|| GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
ret = gst_pad_push_event (enc->base_video_codec.srcpad, event);
} else {
enc->current_frame_events =
g_list_prepend (enc->current_frame_events, event);
}
}
GST_DEBUG_OBJECT (enc, "event handled"); GST_DEBUG_OBJECT (enc, "event handled");
@ -630,6 +665,8 @@ gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf)
frame = frame =
gst_base_video_codec_new_frame (GST_BASE_VIDEO_CODEC gst_base_video_codec_new_frame (GST_BASE_VIDEO_CODEC
(base_video_encoder)); (base_video_encoder));
frame->events = base_video_encoder->current_frame_events;
base_video_encoder->current_frame_events = NULL;
frame->sink_buffer = buf; frame->sink_buffer = buf;
frame->presentation_timestamp = GST_BUFFER_TIMESTAMP (buf); frame->presentation_timestamp = GST_BUFFER_TIMESTAMP (buf);
frame->presentation_duration = GST_BUFFER_DURATION (buf); frame->presentation_duration = GST_BUFFER_DURATION (buf);
@ -639,8 +676,10 @@ gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf)
frame->force_keyframe = base_video_encoder->force_keyframe; frame->force_keyframe = base_video_encoder->force_keyframe;
base_video_encoder->force_keyframe = FALSE; base_video_encoder->force_keyframe = FALSE;
GST_OBJECT_LOCK (base_video_encoder);
GST_BASE_VIDEO_CODEC (base_video_encoder)->frames = GST_BASE_VIDEO_CODEC (base_video_encoder)->frames =
g_list_append (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame); g_list_append (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame);
GST_OBJECT_UNLOCK (base_video_encoder);
/* new data, more finish needed */ /* new data, more finish needed */
base_video_encoder->drained = FALSE; base_video_encoder->drained = FALSE;
@ -701,7 +740,8 @@ gst_base_video_encoder_change_state (GstElement * element,
* @frame: an encoded #GstVideoFrame * @frame: an encoded #GstVideoFrame
* *
* @frame must have a valid encoded data buffer, whose metadata fields * @frame must have a valid encoded data buffer, whose metadata fields
* are then appropriately set according to frame data. * are then appropriately set according to frame data or no buffer at
* all if the frame should be dropped.
* It is subsequently pushed downstream or provided to @shape_output. * It is subsequently pushed downstream or provided to @shape_output.
* In any case, the frame is considered finished and released. * In any case, the frame is considered finished and released.
* *
@ -711,10 +751,9 @@ GstFlowReturn
gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder, gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame) GstVideoFrame * frame)
{ {
GstFlowReturn ret; GstFlowReturn ret = GST_FLOW_OK;
GstBaseVideoEncoderClass *base_video_encoder_class; GstBaseVideoEncoderClass *base_video_encoder_class;
GList *l;
g_return_val_if_fail (frame->src_buffer != NULL, GST_FLOW_ERROR);
base_video_encoder_class = base_video_encoder_class =
GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder); GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);
@ -722,6 +761,86 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
GST_LOG_OBJECT (base_video_encoder, GST_LOG_OBJECT (base_video_encoder,
"finish frame fpn %d", frame->presentation_frame_number); "finish frame fpn %d", frame->presentation_frame_number);
/* FIXME get rid of this ?
* seems a roundabout way that adds little benefit to simply get
* and subsequently set. subclass is adult enough to set_caps itself ...
* so simply check/ensure/assert that src pad caps are set by now */
if (!base_video_encoder->set_output_caps) {
if (!GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder))) {
GstCaps *caps;
if (base_video_encoder_class->get_caps) {
caps = base_video_encoder_class->get_caps (base_video_encoder);
} else {
caps = gst_caps_new_simple ("video/unknown", NULL);
}
GST_DEBUG_OBJECT (base_video_encoder, "src caps %" GST_PTR_FORMAT, caps);
gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
caps);
gst_caps_unref (caps);
}
base_video_encoder->set_output_caps = TRUE;
}
/* Push all pending events that arrived before this frame */
for (l = base_video_encoder->base_video_codec.frames; l; l = l->next) {
GstVideoFrame *tmp = l->data;
if (tmp->events) {
GList *k;
for (k = g_list_last (tmp->events); k; k = k->prev)
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
k->data);
g_list_free (tmp->events);
tmp->events = NULL;
}
if (tmp == frame)
break;
}
if (frame->force_keyframe) {
GstClockTime stream_time;
GstClockTime running_time;
GstEvent *ev;
running_time =
gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC
(base_video_encoder)->segment, GST_FORMAT_TIME,
frame->presentation_timestamp);
stream_time =
gst_segment_to_stream_time (&GST_BASE_VIDEO_CODEC
(base_video_encoder)->segment, GST_FORMAT_TIME,
frame->presentation_timestamp);
/* re-use upstream event if any so it also conveys any additional
* info upstream arranged in there */
GST_OBJECT_LOCK (base_video_encoder);
if (base_video_encoder->force_keyunit_event) {
ev = base_video_encoder->force_keyunit_event;
base_video_encoder->force_keyunit_event = NULL;
} else {
ev = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
gst_structure_new ("GstForceKeyUnit", NULL));
}
GST_OBJECT_UNLOCK (base_video_encoder);
gst_structure_set (ev->structure,
"timestamp", G_TYPE_UINT64, frame->presentation_timestamp,
"stream-time", G_TYPE_UINT64, stream_time,
"running-time", G_TYPE_UINT64, running_time, NULL);
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), ev);
}
/* no buffer data means this frame is skipped/dropped */
if (!frame->src_buffer) {
GST_DEBUG_OBJECT (base_video_encoder, "skipping frame %" GST_TIME_FORMAT,
GST_TIME_ARGS (frame->presentation_timestamp));
goto done;
}
if (frame->is_sync_point) { if (frame->is_sync_point) {
GST_LOG_OBJECT (base_video_encoder, "key frame"); GST_LOG_OBJECT (base_video_encoder, "key frame");
base_video_encoder->distance_from_sync = 0; base_video_encoder->distance_from_sync = 0;
@ -763,64 +882,9 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
GST_BASE_VIDEO_CODEC (base_video_encoder)->discont = FALSE; GST_BASE_VIDEO_CODEC (base_video_encoder)->discont = FALSE;
} }
GST_BASE_VIDEO_CODEC (base_video_encoder)->frames =
g_list_remove (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame);
/* FIXME get rid of this ?
* seems a roundabout way that adds little benefit to simply get
* and subsequently set. subclass is adult enough to set_caps itself ...
* so simply check/ensure/assert that src pad caps are set by now */
if (!base_video_encoder->set_output_caps) {
GstCaps *caps;
if (base_video_encoder_class->get_caps) {
caps = base_video_encoder_class->get_caps (base_video_encoder);
} else {
caps = gst_caps_new_simple ("video/unknown", NULL);
}
GST_DEBUG_OBJECT (base_video_encoder, "src caps %" GST_PTR_FORMAT, caps);
gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), caps);
gst_caps_unref (caps);
base_video_encoder->set_output_caps = TRUE;
}
gst_buffer_set_caps (GST_BUFFER (frame->src_buffer), gst_buffer_set_caps (GST_BUFFER (frame->src_buffer),
GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder))); GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder)));
if (frame->force_keyframe) {
GstClockTime stream_time;
GstClockTime running_time;
GstEvent *ev;
running_time =
gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC
(base_video_encoder)->segment, GST_FORMAT_TIME,
frame->presentation_timestamp);
stream_time =
gst_segment_to_stream_time (&GST_BASE_VIDEO_CODEC
(base_video_encoder)->segment, GST_FORMAT_TIME,
frame->presentation_timestamp);
/* re-use upstream event if any so it also conveys any additional
* info upstream arranged in there */
GST_OBJECT_LOCK (base_video_encoder);
if (base_video_encoder->force_keyunit_event) {
ev = base_video_encoder->force_keyunit_event;
base_video_encoder->force_keyunit_event = NULL;
} else {
ev = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
gst_structure_new ("GstForceKeyUnit", NULL));
}
GST_OBJECT_UNLOCK (base_video_encoder);
gst_structure_set (ev->structure,
"timestamp", G_TYPE_UINT64, frame->presentation_timestamp,
"stream-time", G_TYPE_UINT64, stream_time,
"running-time", G_TYPE_UINT64, running_time, NULL);
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), ev);
}
if (base_video_encoder_class->shape_output) { if (base_video_encoder_class->shape_output) {
ret = base_video_encoder_class->shape_output (base_video_encoder, frame); ret = base_video_encoder_class->shape_output (base_video_encoder, frame);
} else { } else {
@ -828,9 +892,15 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
frame->src_buffer); frame->src_buffer);
} }
/* handed out */
frame->src_buffer = NULL; frame->src_buffer = NULL;
done:
/* handed out */
GST_OBJECT_LOCK (base_video_encoder);
GST_BASE_VIDEO_CODEC (base_video_encoder)->frames =
g_list_remove (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame);
GST_OBJECT_UNLOCK (base_video_encoder);
gst_base_video_codec_free_frame (frame); gst_base_video_codec_free_frame (frame);
return ret; return ret;
@ -906,7 +976,9 @@ gst_base_video_encoder_get_oldest_frame (GstBaseVideoEncoder *
{ {
GList *g; GList *g;
GST_OBJECT_LOCK (base_video_encoder);
g = g_list_first (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames); g = g_list_first (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames);
GST_OBJECT_UNLOCK (base_video_encoder);
if (g == NULL) if (g == NULL)
return NULL; return NULL;

View file

@ -58,6 +58,12 @@ G_BEGIN_DECLS
*/ */
#define GST_BASE_VIDEO_ENCODER_SRC_NAME "src" #define GST_BASE_VIDEO_ENCODER_SRC_NAME "src"
/**
* GST_BASE_VIDEO_ENCODER_FLOW_DROPPED:
*
* Returned when the event/buffer should be dropped.
*/
#define GST_BASE_VIDEO_ENCODER_FLOW_DROPPED GST_FLOW_CUSTOM_SUCCESS_1
typedef struct _GstBaseVideoEncoder GstBaseVideoEncoder; typedef struct _GstBaseVideoEncoder GstBaseVideoEncoder;
typedef struct _GstBaseVideoEncoderClass GstBaseVideoEncoderClass; typedef struct _GstBaseVideoEncoderClass GstBaseVideoEncoderClass;
@ -90,6 +96,7 @@ struct _GstBaseVideoEncoder
gint64 max_latency; gint64 max_latency;
GstEvent *force_keyunit_event; GstEvent *force_keyunit_event;
GList *current_frame_events;
union { union {
void *padding; void *padding;
@ -148,7 +155,8 @@ struct _GstBaseVideoEncoderClass
GstFlowReturn (*handle_frame) (GstBaseVideoEncoder *coder, GstFlowReturn (*handle_frame) (GstBaseVideoEncoder *coder,
GstVideoFrame *frame); GstVideoFrame *frame);
gboolean (*finish) (GstBaseVideoEncoder *coder); gboolean (*reset) (GstBaseVideoEncoder *coder);
GstFlowReturn (*finish) (GstBaseVideoEncoder *coder);
GstFlowReturn (*shape_output) (GstBaseVideoEncoder *coder, GstFlowReturn (*shape_output) (GstBaseVideoEncoder *coder,
GstVideoFrame *frame); GstVideoFrame *frame);

View file

@ -21,7 +21,7 @@
#include "config.h" #include "config.h"
#endif #endif
#include "gstbasevideocodec.h" #include "gstbasevideoutils.h"
#include <string.h> #include <string.h>

View file

@ -0,0 +1,46 @@
/* GStreamer
* Copyright (C) 2008 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_BASE_VIDEO_UTILS_H_
#define _GST_BASE_VIDEO_UTILS_H_
#ifndef GST_USE_UNSTABLE_API
#warning "GstBaseVideoCodec is unstable API and may change in future."
#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
#endif
#include <gst/gst.h>
#include <gst/video/video.h>
#include "gstbasevideocodec.h"
G_BEGIN_DECLS
gboolean gst_base_video_rawvideo_convert (GstVideoState *state,
GstFormat src_format, gint64 src_value,
GstFormat * dest_format, gint64 *dest_value);
gboolean gst_base_video_encoded_video_convert (GstVideoState * state,
gint64 bytes, gint64 time, GstFormat src_format,
gint64 src_value, GstFormat * dest_format, gint64 * dest_value);
GstClockTime gst_video_state_get_timestamp (const GstVideoState *state,
GstSegment *segment, int frame_number);
G_END_DECLS
#endif

View file

@ -3,6 +3,9 @@ plugin_LTLIBRARIES = libgstbayer.la
ORC_SOURCE=gstbayerorc ORC_SOURCE=gstbayerorc
include $(top_srcdir)/common/orc.mak include $(top_srcdir)/common/orc.mak
# orc-generated code creates warnings
ERROR_CFLAGS=
libgstbayer_la_SOURCES = \ libgstbayer_la_SOURCES = \
gstbayer.c \ gstbayer.c \
gstbayer2rgb.c \ gstbayer2rgb.c \

View file

@ -26,6 +26,8 @@ property and encodebin manages to instantiate the elements for the format.
* Previews * Previews
new "post-previews" property for enabling/disabling preview image posting new "post-previews" property for enabling/disabling preview image posting
set location=NULL to skip writing image to file but to receive the preview,
useful for scenarios that wants the image in memory.
* Configuring resolution and framerate * Configuring resolution and framerate
Camerabin2 has new GstCaps type properties for configuring capture and Camerabin2 has new GstCaps type properties for configuring capture and

View file

@ -20,10 +20,10 @@
/** /**
* SECTION:camerabingeneral * SECTION:camerabingeneral
* @short_description: helper functions for #GstCameraBin and it's modules * @short_description: helper functions for #GstCameraBin2 and it's modules
* *
* Common helper functions for #GstCameraBin, #GstCameraBinImage and * Common helper functions for #GstCameraBin2, #GstCameraBin2Image and
* #GstCameraBinVideo. * #GstCameraBin2Video.
* *
*/ */
#ifdef HAVE_CONFIG_H #ifdef HAVE_CONFIG_H

View file

@ -17,12 +17,23 @@
* Boston, MA 02111-1307, USA. * Boston, MA 02111-1307, USA.
*/ */
/** /**
* SECTION:element-gstcamerabin2 * SECTION:element-camerabin2
* *
* The gstcamerabin2 element does FIXME stuff. * GstCameraBin22 is a high-level camera object that encapsulates the gstreamer
* internals and provides a task based API for the application.
* *
* <note>
* Note that camerabin2 is still UNSTABLE, EXPERIMENTAL and under heavy * Note that camerabin2 is still UNSTABLE, EXPERIMENTAL and under heavy
* development. * development.
* </note>
*
* <refsect2>
* <title>Example launch line</title>
* |[
* gst-launch -v -m camerabin2
* ]|
* </refsect2>
*/ */
/* /*
@ -59,23 +70,23 @@
#include <gst/gst-i18n-plugin.h> #include <gst/gst-i18n-plugin.h>
#include <gst/pbutils/pbutils.h> #include <gst/pbutils/pbutils.h>
#define GST_CAMERA_BIN_PROCESSING_INC(c) \ #define GST_CAMERA_BIN2_PROCESSING_INC(c) \
{ \ { \
gint bef = g_atomic_int_exchange_and_add (&c->processing_counter, 1); \ gint bef = g_atomic_int_exchange_and_add (&c->processing_counter, 1); \
if (bef == 0) \ if (bef == 0) \
g_object_notify (G_OBJECT (c), "idle"); \ g_object_notify (G_OBJECT (c), "idle"); \
GST_DEBUG_OBJECT ((c), "Processing counter incremented to: %d", \ GST_DEBUG_OBJECT ((c), "Processing counter increModemented to: %d", \
bef + 1); \ bef + 1); \
} }
#define GST_CAMERA_BIN_PROCESSING_DEC(c) \ #define GST_CAMERA_BIN2_PROCESSING_DEC(c) \
{ \ { \
if (g_atomic_int_dec_and_test (&c->processing_counter)) \ if (g_atomic_int_dec_and_test (&c->processing_counter)) \
g_object_notify (G_OBJECT (c), "idle"); \ g_object_notify (G_OBJECT (c), "idle"); \
GST_DEBUG_OBJECT ((c), "Processing counter decremented"); \ GST_DEBUG_OBJECT ((c), "Processing counter decremented"); \
} }
#define GST_CAMERA_BIN_RESET_PROCESSING_COUNTER(c) \ #define GST_CAMERA_BIN2_RESET_PROCESSING_COUNTER(c) \
{ \ { \
g_atomic_int_set (&c->processing_counter, 0); \ g_atomic_int_set (&c->processing_counter, 0); \
GST_DEBUG_OBJECT ((c), "Processing counter reset"); \ GST_DEBUG_OBJECT ((c), "Processing counter reset"); \
@ -113,7 +124,8 @@ enum
PROP_ZOOM, PROP_ZOOM,
PROP_MAX_ZOOM, PROP_MAX_ZOOM,
PROP_IMAGE_ENCODING_PROFILE, PROP_IMAGE_ENCODING_PROFILE,
PROP_IDLE PROP_IDLE,
PROP_FLAGS
}; };
enum enum
@ -127,11 +139,11 @@ enum
static guint camerabin_signals[LAST_SIGNAL]; static guint camerabin_signals[LAST_SIGNAL];
#define DEFAULT_MODE MODE_IMAGE #define DEFAULT_MODE MODE_IMAGE
#define DEFAULT_VID_LOCATION "vid_%d" #define DEFAULT_LOCATION "cap_%d"
#define DEFAULT_IMG_LOCATION "img_%d"
#define DEFAULT_POST_PREVIEWS TRUE #define DEFAULT_POST_PREVIEWS TRUE
#define DEFAULT_MUTE_AUDIO FALSE #define DEFAULT_MUTE_AUDIO FALSE
#define DEFAULT_IDLE TRUE #define DEFAULT_IDLE TRUE
#define DEFAULT_FLAGS 0
#define DEFAULT_AUDIO_SRC "autoaudiosrc" #define DEFAULT_AUDIO_SRC "autoaudiosrc"
@ -141,9 +153,9 @@ static guint camerabin_signals[LAST_SIGNAL];
********************************/ ********************************/
static GstPipelineClass *parent_class; static GstPipelineClass *parent_class;
static void gst_camera_bin_class_init (GstCameraBinClass * klass); static void gst_camera_bin_class_init (GstCameraBin2Class * klass);
static void gst_camera_bin_base_init (gpointer klass); static void gst_camera_bin_base_init (gpointer klass);
static void gst_camera_bin_init (GstCameraBin * camera); static void gst_camera_bin_init (GstCameraBin2 * camera);
static void gst_camera_bin_dispose (GObject * object); static void gst_camera_bin_dispose (GObject * object);
static void gst_camera_bin_finalize (GObject * object); static void gst_camera_bin_finalize (GObject * object);
@ -151,8 +163,33 @@ static void gst_camera_bin_handle_message (GstBin * bin, GstMessage * message);
static gboolean gst_camera_bin_send_event (GstElement * element, static gboolean gst_camera_bin_send_event (GstElement * element,
GstEvent * event); GstEvent * event);
#define C_FLAGS(v) ((guint) v)
#define GST_TYPE_CAM_FLAGS (gst_cam_flags_get_type())
static GType
gst_cam_flags_get_type (void)
{
static const GFlagsValue values[] = {
{C_FLAGS (GST_CAM_FLAG_NO_AUDIO_CONVERSION), "Do not use audio conversion "
"elements", "no-audio-conversion"},
{C_FLAGS (GST_CAM_FLAG_NO_VIDEO_CONVERSION), "Do not use video conversion "
"elements", "no-video-conversion"},
{0, NULL, NULL}
};
static volatile GType id = 0;
if (g_once_init_enter ((gsize *) & id)) {
GType _id;
_id = g_flags_register_static ("GstCamFlags", values);
g_once_init_leave ((gsize *) & id, _id);
}
return id;
}
GType GType
gst_camera_bin_get_type (void) gst_camera_bin2_get_type (void)
{ {
static GType gst_camera_bin_type = 0; static GType gst_camera_bin_type = 0;
static const GInterfaceInfo camerabin_tagsetter_info = { static const GInterfaceInfo camerabin_tagsetter_info = {
@ -163,13 +200,13 @@ gst_camera_bin_get_type (void)
if (!gst_camera_bin_type) { if (!gst_camera_bin_type) {
static const GTypeInfo gst_camera_bin_info = { static const GTypeInfo gst_camera_bin_info = {
sizeof (GstCameraBinClass), sizeof (GstCameraBin2Class),
(GBaseInitFunc) gst_camera_bin_base_init, (GBaseInitFunc) gst_camera_bin_base_init,
NULL, NULL,
(GClassInitFunc) gst_camera_bin_class_init, (GClassInitFunc) gst_camera_bin_class_init,
NULL, NULL,
NULL, NULL,
sizeof (GstCameraBin), sizeof (GstCameraBin2),
0, 0,
(GInstanceInitFunc) gst_camera_bin_init, (GInstanceInitFunc) gst_camera_bin_init,
NULL NULL
@ -206,36 +243,64 @@ gst_camera_bin_new_event_renegotiate (void)
gst_structure_new ("renegotiate", NULL)); gst_structure_new ("renegotiate", NULL));
} }
static GstEvent *
gst_camera_bin_new_event_file_location (const gchar * location)
{
return gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
gst_structure_new ("new-location", "location", G_TYPE_STRING, location,
NULL));
}
static void static void
gst_camera_bin_start_capture (GstCameraBin * camerabin) gst_camera_bin_start_capture (GstCameraBin2 * camerabin)
{ {
const GstTagList *taglist; const GstTagList *taglist;
GST_DEBUG_OBJECT (camerabin, "Received start-capture"); GST_DEBUG_OBJECT (camerabin, "Received start-capture");
/* check that we have a valid location */ /* check that we have a valid location */
if ((camerabin->mode == MODE_VIDEO && camerabin->video_location == NULL) if (camerabin->mode == MODE_VIDEO && camerabin->location == NULL) {
|| (camerabin->mode == MODE_IMAGE && camerabin->image_location == NULL)) {
GST_ELEMENT_ERROR (camerabin, RESOURCE, OPEN_WRITE, GST_ELEMENT_ERROR (camerabin, RESOURCE, OPEN_WRITE,
(_("File location is set to NULL, please set it to a valid filename")), (_("File location is set to NULL, please set it to a valid filename")),
(NULL)); (NULL));
return; return;
} }
GST_CAMERA_BIN_PROCESSING_INC (camerabin); GST_CAMERA_BIN2_PROCESSING_INC (camerabin);
if (camerabin->mode == MODE_VIDEO) {
if (camerabin->audio_src) {
GstClock *clock = gst_pipeline_get_clock (GST_PIPELINE_CAST (camerabin));
/* FIXME We need to set audiosrc to null to make it resync the ringbuffer
* while bug https://bugzilla.gnome.org/show_bug.cgi?id=648359 isn't
* fixed */
gst_element_set_state (camerabin->audio_src, GST_STATE_NULL);
if (camerabin->mode == MODE_VIDEO && camerabin->audio_src) {
gst_element_set_state (camerabin->audio_src, GST_STATE_READY);
/* need to reset eos status (pads could be flushing) */ /* need to reset eos status (pads could be flushing) */
gst_element_set_state (camerabin->audio_queue, GST_STATE_READY);
gst_element_set_state (camerabin->audio_convert, GST_STATE_READY);
gst_element_set_state (camerabin->audio_capsfilter, GST_STATE_READY); gst_element_set_state (camerabin->audio_capsfilter, GST_STATE_READY);
gst_element_set_state (camerabin->audio_volume, GST_STATE_READY); gst_element_set_state (camerabin->audio_volume, GST_STATE_READY);
gst_element_sync_state_with_parent (camerabin->audio_queue);
gst_element_sync_state_with_parent (camerabin->audio_convert);
gst_element_sync_state_with_parent (camerabin->audio_capsfilter); gst_element_sync_state_with_parent (camerabin->audio_capsfilter);
gst_element_sync_state_with_parent (camerabin->audio_volume); gst_element_sync_state_with_parent (camerabin->audio_volume);
gst_element_set_state (camerabin->audio_src, GST_STATE_PAUSED);
gst_element_set_base_time (camerabin->audio_src,
gst_element_get_base_time (GST_ELEMENT_CAST (camerabin)));
if (clock) {
gst_element_set_clock (camerabin->audio_src, clock);
gst_object_unref (clock);
}
}
} else {
gchar *location = NULL;
/* store the next capture buffer filename */
if (camerabin->location)
location =
g_strdup_printf (camerabin->location, camerabin->capture_index++);
camerabin->image_location_list =
g_slist_append (camerabin->image_location_list, location);
} }
g_signal_emit_by_name (camerabin->src, "start-capture", NULL); g_signal_emit_by_name (camerabin->src, "start-capture", NULL);
@ -270,7 +335,7 @@ gst_camera_bin_start_capture (GstCameraBin * camerabin)
} }
static void static void
gst_camera_bin_stop_capture (GstCameraBin * camerabin) gst_camera_bin_stop_capture (GstCameraBin2 * camerabin)
{ {
GST_DEBUG_OBJECT (camerabin, "Received stop-capture"); GST_DEBUG_OBJECT (camerabin, "Received stop-capture");
if (camerabin->src) if (camerabin->src)
@ -282,7 +347,7 @@ gst_camera_bin_stop_capture (GstCameraBin * camerabin)
} }
static void static void
gst_camera_bin_change_mode (GstCameraBin * camerabin, gint mode) gst_camera_bin_change_mode (GstCameraBin2 * camerabin, gint mode)
{ {
if (mode == camerabin->mode) if (mode == camerabin->mode)
return; return;
@ -300,7 +365,7 @@ static void
gst_camera_bin_src_notify_readyforcapture (GObject * obj, GParamSpec * pspec, gst_camera_bin_src_notify_readyforcapture (GObject * obj, GParamSpec * pspec,
gpointer user_data) gpointer user_data)
{ {
GstCameraBin *camera = GST_CAMERA_BIN_CAST (user_data); GstCameraBin2 *camera = GST_CAMERA_BIN2_CAST (user_data);
gboolean ready; gboolean ready;
g_object_get (camera->src, "ready-for-capture", &ready, NULL); g_object_get (camera->src, "ready-for-capture", &ready, NULL);
@ -313,28 +378,13 @@ gst_camera_bin_src_notify_readyforcapture (GObject * obj, GParamSpec * pspec,
gst_element_set_state (camera->videosink, GST_STATE_NULL); gst_element_set_state (camera->videosink, GST_STATE_NULL);
gst_element_set_state (camera->video_encodebin, GST_STATE_NULL); gst_element_set_state (camera->video_encodebin, GST_STATE_NULL);
gst_element_set_state (camera->videobin_capsfilter, GST_STATE_NULL); gst_element_set_state (camera->videobin_capsfilter, GST_STATE_NULL);
gst_element_set_state (camera->videobin_queue, GST_STATE_NULL); location = g_strdup_printf (camera->location, camera->capture_index++);
location =
g_strdup_printf (camera->video_location, camera->video_index++);
GST_DEBUG_OBJECT (camera, "Switching videobin location to %s", location); GST_DEBUG_OBJECT (camera, "Switching videobin location to %s", location);
g_object_set (camera->videosink, "location", location, NULL); g_object_set (camera->videosink, "location", location, NULL);
g_free (location); g_free (location);
gst_element_set_state (camera->videosink, GST_STATE_PLAYING); gst_element_set_state (camera->videosink, GST_STATE_PLAYING);
gst_element_set_state (camera->video_encodebin, GST_STATE_PLAYING); gst_element_set_state (camera->video_encodebin, GST_STATE_PLAYING);
gst_element_set_state (camera->videobin_capsfilter, GST_STATE_PLAYING); gst_element_set_state (camera->videobin_capsfilter, GST_STATE_PLAYING);
gst_element_set_state (camera->videobin_queue, GST_STATE_PLAYING);
} else if (camera->mode == MODE_IMAGE) {
gst_element_set_state (camera->imagesink, GST_STATE_NULL);
gst_element_set_state (camera->image_encodebin, GST_STATE_NULL);
gst_element_set_state (camera->imagebin_queue, GST_STATE_NULL);
gst_element_set_state (camera->imagebin_capsfilter, GST_STATE_NULL);
GST_DEBUG_OBJECT (camera, "Switching imagebin location to %s", location);
g_object_set (camera->imagesink, "location", camera->image_location,
NULL);
gst_element_set_state (camera->imagesink, GST_STATE_PLAYING);
gst_element_set_state (camera->image_encodebin, GST_STATE_PLAYING);
gst_element_set_state (camera->imagebin_capsfilter, GST_STATE_PLAYING);
gst_element_set_state (camera->imagebin_queue, GST_STATE_PLAYING);
} }
} }
@ -343,10 +393,9 @@ gst_camera_bin_src_notify_readyforcapture (GObject * obj, GParamSpec * pspec,
static void static void
gst_camera_bin_dispose (GObject * object) gst_camera_bin_dispose (GObject * object)
{ {
GstCameraBin *camerabin = GST_CAMERA_BIN_CAST (object); GstCameraBin2 *camerabin = GST_CAMERA_BIN2_CAST (object);
g_free (camerabin->image_location); g_free (camerabin->location);
g_free (camerabin->video_location);
if (camerabin->src_capture_notify_id) if (camerabin->src_capture_notify_id)
g_signal_handler_disconnect (camerabin->src, g_signal_handler_disconnect (camerabin->src,
@ -363,10 +412,6 @@ gst_camera_bin_dispose (GObject * object)
if (camerabin->audio_capsfilter) if (camerabin->audio_capsfilter)
gst_object_unref (camerabin->audio_capsfilter); gst_object_unref (camerabin->audio_capsfilter);
if (camerabin->audio_queue)
gst_object_unref (camerabin->audio_queue);
if (camerabin->audio_convert)
gst_object_unref (camerabin->audio_convert);
if (camerabin->audio_volume) if (camerabin->audio_volume)
gst_object_unref (camerabin->audio_volume); gst_object_unref (camerabin->audio_volume);
@ -381,18 +426,10 @@ gst_camera_bin_dispose (GObject * object)
g_signal_handler_disconnect (camerabin->video_encodebin, g_signal_handler_disconnect (camerabin->video_encodebin,
camerabin->video_encodebin_signal_id); camerabin->video_encodebin_signal_id);
if (camerabin->videosink_probe) {
GstPad *pad = gst_element_get_static_pad (camerabin->videosink, "sink");
gst_pad_remove_data_probe (pad, camerabin->videosink_probe);
gst_object_unref (pad);
}
if (camerabin->videosink) if (camerabin->videosink)
gst_object_unref (camerabin->videosink); gst_object_unref (camerabin->videosink);
if (camerabin->video_encodebin) if (camerabin->video_encodebin)
gst_object_unref (camerabin->video_encodebin); gst_object_unref (camerabin->video_encodebin);
if (camerabin->videobin_queue)
gst_object_unref (camerabin->videobin_queue);
if (camerabin->videobin_capsfilter) if (camerabin->videobin_capsfilter)
gst_object_unref (camerabin->videobin_capsfilter); gst_object_unref (camerabin->videobin_capsfilter);
@ -454,7 +491,7 @@ gst_camera_bin_base_init (gpointer g_class)
} }
static void static void
gst_camera_bin_class_init (GstCameraBinClass * klass) gst_camera_bin_class_init (GstCameraBin2Class * klass)
{ {
GObjectClass *object_class; GObjectClass *object_class;
GstElementClass *element_class; GstElementClass *element_class;
@ -479,7 +516,7 @@ gst_camera_bin_class_init (GstCameraBinClass * klass)
klass->stop_capture = gst_camera_bin_stop_capture; klass->stop_capture = gst_camera_bin_stop_capture;
/** /**
* GstCameraBin:mode: * GstCameraBin2:mode:
* *
* Set the mode of operation: still image capturing or video recording. * Set the mode of operation: still image capturing or video recording.
*/ */
@ -493,8 +530,8 @@ gst_camera_bin_class_init (GstCameraBinClass * klass)
g_param_spec_string ("location", "Location", g_param_spec_string ("location", "Location",
"Location to save the captured files. A %d might be used on the" "Location to save the captured files. A %d might be used on the"
"filename as a placeholder for a numeric index of the capture." "filename as a placeholder for a numeric index of the capture."
"Default for images is img_%d and vid_%d for videos", "Default is cap_%d",
DEFAULT_IMG_LOCATION, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); DEFAULT_LOCATION, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (object_class, PROP_CAMERA_SRC, g_object_class_install_property (object_class, PROP_CAMERA_SRC,
g_param_spec_object ("camera-source", "Camera source", g_param_spec_object ("camera-source", "Camera source",
@ -662,7 +699,17 @@ gst_camera_bin_class_init (GstCameraBinClass * klass)
GST_TYPE_CAPS, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); GST_TYPE_CAPS, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
/** /**
* GstCameraBin::capture-start: * GstCameraBin:flags
*
* Control the behaviour of encodebin.
*/
g_object_class_install_property (object_class, PROP_FLAGS,
g_param_spec_flags ("flags", "Flags", "Flags to control behaviour",
GST_TYPE_CAM_FLAGS, DEFAULT_FLAGS,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/**
* GstCameraBin2::capture-start:
* @camera: the camera bin element * @camera: the camera bin element
* *
* Starts image capture or video recording depending on the Mode. * Starts image capture or video recording depending on the Mode.
@ -671,31 +718,31 @@ gst_camera_bin_class_init (GstCameraBinClass * klass)
g_signal_new ("start-capture", g_signal_new ("start-capture",
G_TYPE_FROM_CLASS (klass), G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
G_STRUCT_OFFSET (GstCameraBinClass, start_capture), G_STRUCT_OFFSET (GstCameraBin2Class, start_capture),
NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0); NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
/** /**
* GstCameraBin::capture-stop: * GstCameraBin2::capture-stop:
* @camera: the camera bin element * @camera: the camera bin element
*/ */
camerabin_signals[STOP_CAPTURE_SIGNAL] = camerabin_signals[STOP_CAPTURE_SIGNAL] =
g_signal_new ("stop-capture", g_signal_new ("stop-capture",
G_TYPE_FROM_CLASS (klass), G_TYPE_FROM_CLASS (klass),
G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION,
G_STRUCT_OFFSET (GstCameraBinClass, stop_capture), G_STRUCT_OFFSET (GstCameraBin2Class, stop_capture),
NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0); NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
} }
static void static void
gst_camera_bin_init (GstCameraBin * camera) gst_camera_bin_init (GstCameraBin2 * camera)
{ {
camera->post_previews = DEFAULT_POST_PREVIEWS; camera->post_previews = DEFAULT_POST_PREVIEWS;
camera->mode = DEFAULT_MODE; camera->mode = DEFAULT_MODE;
camera->video_location = g_strdup (DEFAULT_VID_LOCATION); camera->location = g_strdup (DEFAULT_LOCATION);
camera->image_location = g_strdup (DEFAULT_IMG_LOCATION);
camera->viewfinderbin = gst_element_factory_make ("viewfinderbin", "vf-bin"); camera->viewfinderbin = gst_element_factory_make ("viewfinderbin", "vf-bin");
camera->zoom = DEFAULT_ZOOM; camera->zoom = DEFAULT_ZOOM;
camera->max_zoom = MAX_ZOOM; camera->max_zoom = MAX_ZOOM;
camera->flags = DEFAULT_FLAGS;
/* capsfilters are created here as we proxy their caps properties and /* capsfilters are created here as we proxy their caps properties and
* this way we avoid having to store the caps while on NULL state to * this way we avoid having to store the caps while on NULL state to
@ -720,7 +767,7 @@ gst_camera_bin_init (GstCameraBin * camera)
} }
static void static void
gst_image_capture_bin_post_image_done (GstCameraBin * camera, gst_image_capture_bin_post_image_done (GstCameraBin2 * camera,
const gchar * filename) const gchar * filename)
{ {
GstMessage *msg; GstMessage *msg;
@ -744,10 +791,10 @@ gst_camera_bin_handle_message (GstBin * bin, GstMessage * message)
const gchar *filename; const gchar *filename;
if (gst_structure_has_name (structure, "GstMultiFileSink")) { if (gst_structure_has_name (structure, "GstMultiFileSink")) {
GST_CAMERA_BIN_PROCESSING_DEC (GST_CAMERA_BIN_CAST (bin)); GST_CAMERA_BIN2_PROCESSING_DEC (GST_CAMERA_BIN2_CAST (bin));
filename = gst_structure_get_string (structure, "filename"); filename = gst_structure_get_string (structure, "filename");
if (filename) { if (filename) {
gst_image_capture_bin_post_image_done (GST_CAMERA_BIN_CAST (bin), gst_image_capture_bin_post_image_done (GST_CAMERA_BIN2_CAST (bin),
filename); filename);
} }
} }
@ -760,15 +807,15 @@ gst_camera_bin_handle_message (GstBin * bin, GstMessage * message)
gst_message_parse_warning (message, &err, &debug); gst_message_parse_warning (message, &err, &debug);
if (err->domain == GST_RESOURCE_ERROR) { if (err->domain == GST_RESOURCE_ERROR) {
/* some capturing failed */ /* some capturing failed */
GST_CAMERA_BIN_PROCESSING_DEC (GST_CAMERA_BIN_CAST (bin)); GST_CAMERA_BIN2_PROCESSING_DEC (GST_CAMERA_BIN2_CAST (bin));
} }
} }
break; break;
case GST_MESSAGE_EOS:{ case GST_MESSAGE_EOS:{
GstElement *src = GST_ELEMENT (GST_MESSAGE_SRC (message)); GstElement *src = GST_ELEMENT (GST_MESSAGE_SRC (message));
if (src == GST_CAMERA_BIN_CAST (bin)->videosink) { if (src == GST_CAMERA_BIN2_CAST (bin)->videosink) {
GST_DEBUG_OBJECT (bin, "EOS from video branch"); GST_DEBUG_OBJECT (bin, "EOS from video branch");
GST_CAMERA_BIN_PROCESSING_DEC (GST_CAMERA_BIN_CAST (bin)); GST_CAMERA_BIN2_PROCESSING_DEC (GST_CAMERA_BIN2_CAST (bin));
} }
} }
break; break;
@ -789,9 +836,10 @@ gst_camera_bin_handle_message (GstBin * bin, GstMessage * message)
* Where current_filter and new_filter might or might not be NULL * Where current_filter and new_filter might or might not be NULL
*/ */
static void static void
gst_camera_bin_check_and_replace_filter (GstCameraBin * camera, gst_camera_bin_check_and_replace_filter (GstCameraBin2 * camera,
GstElement ** current_filter, GstElement * new_filter, GstElement ** current_filter, GstElement * new_filter,
GstElement * previous_element, GstElement * next_element) GstElement * previous_element, GstElement * next_element,
const gchar * prev_elem_pad)
{ {
if (*current_filter == new_filter) { if (*current_filter == new_filter) {
GST_DEBUG_OBJECT (camera, "Current filter is the same as the previous, " GST_DEBUG_OBJECT (camera, "Current filter is the same as the previous, "
@ -815,15 +863,27 @@ gst_camera_bin_check_and_replace_filter (GstCameraBin * camera,
if (new_filter) { if (new_filter) {
*current_filter = gst_object_ref (new_filter); *current_filter = gst_object_ref (new_filter);
gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (new_filter)); gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (new_filter));
gst_element_link_many (previous_element, new_filter, next_element, NULL); }
if (prev_elem_pad) {
if (new_filter) {
gst_element_link_pads (previous_element, prev_elem_pad, new_filter, NULL);
gst_element_link (new_filter, next_element);
} else { } else {
gst_element_link_pads (previous_element, prev_elem_pad, next_element,
NULL);
}
} else {
if (new_filter)
gst_element_link_many (previous_element, new_filter, next_element, NULL);
else
gst_element_link (previous_element, next_element); gst_element_link (previous_element, next_element);
} }
} }
static void static void
encodebin_element_added (GstElement * encodebin, GstElement * new_element, encodebin_element_added (GstElement * encodebin, GstElement * new_element,
GstCameraBin * camera) GstCameraBin2 * camera)
{ {
GstElementFactory *factory = gst_element_get_factory (new_element); GstElementFactory *factory = gst_element_get_factory (new_element);
@ -833,12 +893,18 @@ encodebin_element_added (GstElement * encodebin, GstElement * new_element,
g_object_set (new_element, "skip-to-first", TRUE, NULL); g_object_set (new_element, "skip-to-first", TRUE, NULL);
} }
} }
if (gst_element_implements_interface (new_element, GST_TYPE_TAG_SETTER)) {
GstTagSetter *tagsetter = GST_TAG_SETTER (new_element);
gst_tag_setter_set_tag_merge_mode (tagsetter, GST_TAG_MERGE_REPLACE);
}
} }
#define VIDEO_PAD 1 #define VIDEO_PAD 1
#define AUDIO_PAD 2 #define AUDIO_PAD 2
static GstPad * static GstPad *
encodebin_find_pad (GstCameraBin * camera, GstElement * encodebin, encodebin_find_pad (GstCameraBin2 * camera, GstElement * encodebin,
gint pad_type) gint pad_type)
{ {
GstPad *pad = NULL; GstPad *pad = NULL;
@ -903,7 +969,7 @@ encodebin_find_pad (GstCameraBin * camera, GstElement * encodebin,
} }
static gboolean static gboolean
gst_camera_bin_video_profile_has_audio (GstCameraBin * camera) gst_camera_bin_video_profile_has_audio (GstCameraBin2 * camera)
{ {
const GList *list; const GList *list;
@ -925,7 +991,7 @@ gst_camera_bin_video_profile_has_audio (GstCameraBin * camera)
} }
static GstPadLinkReturn static GstPadLinkReturn
gst_camera_bin_link_encodebin (GstCameraBin * camera, GstElement * encodebin, gst_camera_bin_link_encodebin (GstCameraBin2 * camera, GstElement * encodebin,
GstElement * element, gint padtype) GstElement * element, gint padtype)
{ {
GstPadLinkReturn ret; GstPadLinkReturn ret;
@ -955,18 +1021,85 @@ static void
gst_camera_bin_src_notify_max_zoom_cb (GObject * self, GParamSpec * pspec, gst_camera_bin_src_notify_max_zoom_cb (GObject * self, GParamSpec * pspec,
gpointer user_data) gpointer user_data)
{ {
GstCameraBin *camera = (GstCameraBin *) user_data; GstCameraBin2 *camera = (GstCameraBin2 *) user_data;
g_object_get (self, "max-zoom", &camera->max_zoom, NULL); g_object_get (self, "max-zoom", &camera->max_zoom, NULL);
GST_DEBUG_OBJECT (camera, "Max zoom updated to %f", camera->max_zoom); GST_DEBUG_OBJECT (camera, "Max zoom updated to %f", camera->max_zoom);
g_object_notify (G_OBJECT (camera), "max-zoom"); g_object_notify (G_OBJECT (camera), "max-zoom");
} }
static gboolean
gst_camera_bin_image_src_buffer_probe (GstPad * pad, GstBuffer * buf,
gpointer data)
{
gboolean ret = TRUE;
GstCameraBin2 *camerabin = data;
GstEvent *evt;
gchar *location = NULL;
GstPad *peer;
if (camerabin->image_location_list) {
location = camerabin->image_location_list->data;
camerabin->image_location_list =
g_slist_delete_link (camerabin->image_location_list,
camerabin->image_location_list);
GST_DEBUG_OBJECT (camerabin, "Sending image location change to '%s'",
location);
} else {
GST_DEBUG_OBJECT (camerabin, "No filename location change to send");
return ret;
}
if (location) {
evt = gst_camera_bin_new_event_file_location (location);
peer = gst_pad_get_peer (pad);
gst_pad_send_event (peer, evt);
gst_object_unref (peer);
g_free (location);
} else {
/* This means we don't have to encode the capture, it is used for
* signaling the application just wants the preview */
ret = FALSE;
GST_CAMERA_BIN2_PROCESSING_DEC (camerabin);
}
return ret;
}
static gboolean
gst_camera_bin_image_sink_event_probe (GstPad * pad, GstEvent * event,
gpointer data)
{
GstCameraBin2 *camerabin = data;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CUSTOM_DOWNSTREAM:{
if (gst_event_has_name (event, "new-location")) {
const GstStructure *structure = gst_event_get_structure (event);
const gchar *filename = gst_structure_get_string (structure,
"location");
gst_element_set_state (camerabin->imagesink, GST_STATE_NULL);
GST_DEBUG_OBJECT (camerabin, "Setting filename to imagesink: %s",
filename);
g_object_set (camerabin->imagesink, "location", filename, NULL);
gst_element_set_state (camerabin->imagesink, GST_STATE_PLAYING);
}
}
break;
default:
break;
}
return TRUE;
}
/** /**
* gst_camera_bin_create_elements: * gst_camera_bin_create_elements:
* @param camera: the #GstCameraBin * @param camera: the #GstCameraBin2
* *
* Creates all elements inside #GstCameraBin * Creates all elements inside #GstCameraBin2
* *
* Each of the pads on the camera source is linked as follows: * Each of the pads on the camera source is linked as follows:
* .pad ! queue ! capsfilter ! correspondingbin * .pad ! queue ! capsfilter ! correspondingbin
@ -975,18 +1108,20 @@ gst_camera_bin_src_notify_max_zoom_cb (GObject * self, GParamSpec * pspec,
* the camera source pad. * the camera source pad.
*/ */
static gboolean static gboolean
gst_camera_bin_create_elements (GstCameraBin * camera) gst_camera_bin_create_elements (GstCameraBin2 * camera)
{ {
gboolean new_src = FALSE; gboolean new_src = FALSE;
gboolean new_audio_src = FALSE; gboolean new_audio_src = FALSE;
gboolean has_audio; gboolean has_audio;
gboolean profile_switched = FALSE; gboolean profile_switched = FALSE;
const gchar *missing_element_name; const gchar *missing_element_name;
gint encbin_flags = 0;
if (!camera->elements_created) { if (!camera->elements_created) {
/* TODO check that elements created in _init were really created */ /* TODO check that elements created in _init were really created */
camera->video_encodebin = gst_element_factory_make ("encodebin", NULL); camera->video_encodebin =
gst_element_factory_make ("encodebin", "video-encodebin");
if (!camera->video_encodebin) { if (!camera->video_encodebin) {
missing_element_name = "encodebin"; missing_element_name = "encodebin";
goto missing_element; goto missing_element;
@ -995,18 +1130,19 @@ gst_camera_bin_create_elements (GstCameraBin * camera)
g_signal_connect (camera->video_encodebin, "element-added", g_signal_connect (camera->video_encodebin, "element-added",
(GCallback) encodebin_element_added, camera); (GCallback) encodebin_element_added, camera);
/* propagate the flags property by translating appropriate values
* to GstEncFlags values */
if (camera->flags & GST_CAM_FLAG_NO_AUDIO_CONVERSION)
encbin_flags |= (1 << 0);
if (camera->flags & GST_CAM_FLAG_NO_VIDEO_CONVERSION)
encbin_flags |= (1 << 1);
g_object_set (camera->video_encodebin, "flags", encbin_flags, NULL);
camera->videosink = camera->videosink =
gst_element_factory_make ("filesink", "videobin-filesink"); gst_element_factory_make ("filesink", "videobin-filesink");
g_object_set (camera->videosink, "async", FALSE, NULL); g_object_set (camera->videosink, "async", FALSE, NULL);
/* audio elements */ /* audio elements */
camera->audio_queue = gst_element_factory_make ("queue", "audio-queue");
camera->audio_convert = gst_element_factory_make ("audioconvert",
"audio-convert");
if (!camera->audio_convert) {
missing_element_name = "audioconvert";
goto missing_element;
}
if (!camera->audio_volume) { if (!camera->audio_volume) {
missing_element_name = "volume"; missing_element_name = "volume";
goto missing_element; goto missing_element;
@ -1041,7 +1177,8 @@ gst_camera_bin_create_elements (GstCameraBin * camera)
camera->video_profile_switch = TRUE; camera->video_profile_switch = TRUE;
} }
camera->image_encodebin = gst_element_factory_make ("encodebin", NULL); camera->image_encodebin =
gst_element_factory_make ("encodebin", "image-encodebin");
if (!camera->image_encodebin) { if (!camera->image_encodebin) {
missing_element_name = "encodebin"; missing_element_name = "encodebin";
goto missing_element; goto missing_element;
@ -1078,8 +1215,6 @@ gst_camera_bin_create_elements (GstCameraBin * camera)
camera->image_profile_switch = TRUE; camera->image_profile_switch = TRUE;
} }
camera->videobin_queue =
gst_element_factory_make ("queue", "videobin-queue");
camera->imagebin_queue = camera->imagebin_queue =
gst_element_factory_make ("queue", "imagebin-queue"); gst_element_factory_make ("queue", "imagebin-queue");
camera->viewfinderbin_queue = camera->viewfinderbin_queue =
@ -1089,20 +1224,16 @@ gst_camera_bin_create_elements (GstCameraBin * camera)
NULL); NULL);
g_object_set (camera->imagebin_queue, "max-size-time", (guint64) 0, g_object_set (camera->imagebin_queue, "max-size-time", (guint64) 0,
"silent", TRUE, NULL); "silent", TRUE, NULL);
g_object_set (camera->videobin_queue, "silent", TRUE, NULL);
gst_bin_add_many (GST_BIN_CAST (camera), gst_bin_add_many (GST_BIN_CAST (camera),
gst_object_ref (camera->video_encodebin), gst_object_ref (camera->video_encodebin),
gst_object_ref (camera->videosink), gst_object_ref (camera->videosink),
gst_object_ref (camera->image_encodebin), gst_object_ref (camera->image_encodebin),
gst_object_ref (camera->imagesink), gst_object_ref (camera->imagesink),
gst_object_ref (camera->videobin_queue),
gst_object_ref (camera->imagebin_queue), gst_object_ref (camera->imagebin_queue),
gst_object_ref (camera->viewfinderbin_queue), NULL); gst_object_ref (camera->viewfinderbin_queue), NULL);
/* Linking can be optimized TODO */ /* Linking can be optimized TODO */
gst_element_link_many (camera->videobin_queue, camera->videobin_capsfilter,
NULL);
gst_element_link (camera->video_encodebin, camera->videosink); gst_element_link (camera->video_encodebin, camera->videosink);
gst_element_link_many (camera->imagebin_queue, camera->imagebin_capsfilter, gst_element_link_many (camera->imagebin_queue, camera->imagebin_capsfilter,
@ -1110,6 +1241,19 @@ gst_camera_bin_create_elements (GstCameraBin * camera)
gst_element_link (camera->image_encodebin, camera->imagesink); gst_element_link (camera->image_encodebin, camera->imagesink);
gst_element_link_many (camera->viewfinderbin_queue, gst_element_link_many (camera->viewfinderbin_queue,
camera->viewfinderbin_capsfilter, camera->viewfinderbin, NULL); camera->viewfinderbin_capsfilter, camera->viewfinderbin, NULL);
{
/* set an event probe to watch for custom location changes */
GstPad *srcpad;
srcpad = gst_element_get_static_pad (camera->image_encodebin, "src");
gst_pad_add_event_probe (srcpad,
(GCallback) gst_camera_bin_image_sink_event_probe, camera);
gst_object_unref (srcpad);
}
/* /*
* Video can't get into playing as its internal filesink will open * Video can't get into playing as its internal filesink will open
* a file for writing and leave it empty if unused. * a file for writing and leave it empty if unused.
@ -1122,8 +1266,8 @@ gst_camera_bin_create_elements (GstCameraBin * camera)
gst_element_set_locked_state (camera->videosink, TRUE); gst_element_set_locked_state (camera->videosink, TRUE);
gst_element_set_locked_state (camera->imagesink, TRUE); gst_element_set_locked_state (camera->imagesink, TRUE);
g_object_set (camera->videosink, "location", camera->video_location, NULL); g_object_set (camera->videosink, "location", camera->location, NULL);
g_object_set (camera->imagesink, "location", camera->image_location, NULL); g_object_set (camera->imagesink, "location", camera->location, NULL);
} }
if (camera->video_profile_switch) { if (camera->video_profile_switch) {
@ -1192,6 +1336,8 @@ gst_camera_bin_create_elements (GstCameraBin * camera)
(GCallback) gst_camera_bin_src_notify_max_zoom_cb, camera); (GCallback) gst_camera_bin_src_notify_max_zoom_cb, camera);
} }
if (new_src) { if (new_src) {
GstPad *imgsrc = gst_element_get_static_pad (camera->src, "imgsrc");
gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (camera->src)); gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (camera->src));
camera->src_capture_notify_id = g_signal_connect (G_OBJECT (camera->src), camera->src_capture_notify_id = g_signal_connect (G_OBJECT (camera->src),
"notify::ready-for-capture", "notify::ready-for-capture",
@ -1200,19 +1346,27 @@ gst_camera_bin_create_elements (GstCameraBin * camera)
"sink"); "sink");
gst_element_link_pads (camera->src, "imgsrc", camera->imagebin_queue, gst_element_link_pads (camera->src, "imgsrc", camera->imagebin_queue,
"sink"); "sink");
gst_element_link_pads (camera->src, "vidsrc", camera->videobin_queue, if (!gst_element_link_pads (camera->src, "vidsrc",
"sink"); camera->videobin_capsfilter, "sink")) {
GST_ERROR_OBJECT (camera,
"Failed to link camera source's vidsrc pad to video bin capsfilter");
goto fail;
}
gst_pad_add_buffer_probe (imgsrc,
(GCallback) gst_camera_bin_image_src_buffer_probe, camera);
gst_object_unref (imgsrc);
} }
gst_camera_bin_check_and_replace_filter (camera, &camera->image_filter, gst_camera_bin_check_and_replace_filter (camera, &camera->image_filter,
camera->user_image_filter, camera->imagebin_queue, camera->user_image_filter, camera->imagebin_queue,
camera->imagebin_capsfilter); camera->imagebin_capsfilter, NULL);
gst_camera_bin_check_and_replace_filter (camera, &camera->video_filter, gst_camera_bin_check_and_replace_filter (camera, &camera->video_filter,
camera->user_video_filter, camera->videobin_queue, camera->user_video_filter, camera->src, camera->videobin_capsfilter,
camera->videobin_capsfilter); "vidsrc");
gst_camera_bin_check_and_replace_filter (camera, &camera->viewfinder_filter, gst_camera_bin_check_and_replace_filter (camera, &camera->viewfinder_filter,
camera->user_viewfinder_filter, camera->viewfinderbin_queue, camera->user_viewfinder_filter, camera->viewfinderbin_queue,
camera->viewfinderbin_capsfilter); camera->viewfinderbin_capsfilter, NULL);
/* check if we need to replace the camera audio src */ /* check if we need to replace the camera audio src */
has_audio = gst_camera_bin_video_profile_has_audio (camera); has_audio = gst_camera_bin_video_profile_has_audio (camera);
@ -1220,10 +1374,8 @@ gst_camera_bin_create_elements (GstCameraBin * camera)
if ((camera->user_audio_src && camera->user_audio_src != camera->audio_src) if ((camera->user_audio_src && camera->user_audio_src != camera->audio_src)
|| !has_audio) { || !has_audio) {
gst_bin_remove (GST_BIN_CAST (camera), camera->audio_src); gst_bin_remove (GST_BIN_CAST (camera), camera->audio_src);
gst_bin_remove (GST_BIN_CAST (camera), camera->audio_queue);
gst_bin_remove (GST_BIN_CAST (camera), camera->audio_volume); gst_bin_remove (GST_BIN_CAST (camera), camera->audio_volume);
gst_bin_remove (GST_BIN_CAST (camera), camera->audio_capsfilter); gst_bin_remove (GST_BIN_CAST (camera), camera->audio_capsfilter);
gst_bin_remove (GST_BIN_CAST (camera), camera->audio_convert);
gst_object_unref (camera->audio_src); gst_object_unref (camera->audio_src);
camera->audio_src = NULL; camera->audio_src = NULL;
} }
@ -1242,21 +1394,23 @@ gst_camera_bin_create_elements (GstCameraBin * camera)
} }
if (new_audio_src) { if (new_audio_src) {
if (g_object_class_find_property (G_OBJECT_GET_CLASS (camera->audio_src),
"provide-clock")) {
g_object_set (camera->audio_src, "provide-clock", FALSE, NULL);
}
gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (camera->audio_src)); gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (camera->audio_src));
gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (camera->audio_queue));
gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (camera->audio_volume)); gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (camera->audio_volume));
gst_bin_add (GST_BIN_CAST (camera), gst_bin_add (GST_BIN_CAST (camera),
gst_object_ref (camera->audio_capsfilter)); gst_object_ref (camera->audio_capsfilter));
gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (camera->audio_convert));
gst_element_link_many (camera->audio_src, camera->audio_queue, gst_element_link_many (camera->audio_src, camera->audio_volume,
camera->audio_volume, camera->audio_capsfilter, NULL);
camera->audio_capsfilter, camera->audio_convert, NULL);
} }
if ((profile_switched && has_audio) || new_audio_src) { if ((profile_switched && has_audio) || new_audio_src) {
if (GST_PAD_LINK_FAILED (gst_camera_bin_link_encodebin (camera, if (GST_PAD_LINK_FAILED (gst_camera_bin_link_encodebin (camera,
camera->video_encodebin, camera->audio_convert, AUDIO_PAD))) { camera->video_encodebin, camera->audio_capsfilter,
AUDIO_PAD))) {
goto fail; goto fail;
} }
} }
@ -1282,7 +1436,7 @@ static GstStateChangeReturn
gst_camera_bin_change_state (GstElement * element, GstStateChange trans) gst_camera_bin_change_state (GstElement * element, GstStateChange trans)
{ {
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS; GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
GstCameraBin *camera = GST_CAMERA_BIN_CAST (element); GstCameraBin2 *camera = GST_CAMERA_BIN2_CAST (element);
switch (trans) { switch (trans) {
case GST_STATE_CHANGE_NULL_TO_READY: case GST_STATE_CHANGE_NULL_TO_READY:
@ -1291,7 +1445,7 @@ gst_camera_bin_change_state (GstElement * element, GstStateChange trans)
} }
break; break;
case GST_STATE_CHANGE_READY_TO_PAUSED: case GST_STATE_CHANGE_READY_TO_PAUSED:
GST_CAMERA_BIN_RESET_PROCESSING_COUNTER (camera); GST_CAMERA_BIN2_RESET_PROCESSING_COUNTER (camera);
break; break;
case GST_STATE_CHANGE_PAUSED_TO_READY: case GST_STATE_CHANGE_PAUSED_TO_READY:
if (GST_STATE (camera->videosink) >= GST_STATE_PAUSED) if (GST_STATE (camera->videosink) >= GST_STATE_PAUSED)
@ -1315,23 +1469,23 @@ gst_camera_bin_change_state (GstElement * element, GstStateChange trans)
gst_element_set_state (camera->audio_src, GST_STATE_READY); gst_element_set_state (camera->audio_src, GST_STATE_READY);
gst_tag_setter_reset_tags (GST_TAG_SETTER (camera)); gst_tag_setter_reset_tags (GST_TAG_SETTER (camera));
GST_CAMERA_BIN_RESET_PROCESSING_COUNTER (camera); GST_CAMERA_BIN2_RESET_PROCESSING_COUNTER (camera);
g_slist_foreach (camera->image_location_list, (GFunc) g_free, NULL);
g_slist_free (camera->image_location_list);
camera->image_location_list = NULL;
/* explicitly set to READY as they might be outside of the bin */ /* explicitly set to READY as they might be outside of the bin */
gst_element_set_state (camera->audio_queue, GST_STATE_READY);
gst_element_set_state (camera->audio_volume, GST_STATE_READY); gst_element_set_state (camera->audio_volume, GST_STATE_READY);
gst_element_set_state (camera->audio_capsfilter, GST_STATE_READY); gst_element_set_state (camera->audio_capsfilter, GST_STATE_READY);
gst_element_set_state (camera->audio_convert, GST_STATE_READY);
break; break;
case GST_STATE_CHANGE_READY_TO_NULL: case GST_STATE_CHANGE_READY_TO_NULL:
if (camera->audio_src) if (camera->audio_src)
gst_element_set_state (camera->audio_src, GST_STATE_NULL); gst_element_set_state (camera->audio_src, GST_STATE_NULL);
/* explicitly set to NULL as they might be outside of the bin */ /* explicitly set to NULL as they might be outside of the bin */
gst_element_set_state (camera->audio_queue, GST_STATE_NULL);
gst_element_set_state (camera->audio_volume, GST_STATE_NULL); gst_element_set_state (camera->audio_volume, GST_STATE_NULL);
gst_element_set_state (camera->audio_capsfilter, GST_STATE_NULL); gst_element_set_state (camera->audio_capsfilter, GST_STATE_NULL);
gst_element_set_state (camera->audio_convert, GST_STATE_NULL);
break; break;
default: default:
@ -1344,7 +1498,7 @@ gst_camera_bin_change_state (GstElement * element, GstStateChange trans)
static gboolean static gboolean
gst_camera_bin_send_event (GstElement * element, GstEvent * event) gst_camera_bin_send_event (GstElement * element, GstEvent * event)
{ {
GstCameraBin *camera = GST_CAMERA_BIN_CAST (element); GstCameraBin2 *camera = GST_CAMERA_BIN2_CAST (element);
gboolean res; gboolean res;
res = GST_ELEMENT_CLASS (parent_class)->send_event (element, event); res = GST_ELEMENT_CLASS (parent_class)->send_event (element, event);
@ -1376,21 +1530,16 @@ gst_camera_bin_send_event (GstElement * element, GstEvent * event)
} }
static void static void
gst_camera_bin_set_location (GstCameraBin * camera, const gchar * location) gst_camera_bin_set_location (GstCameraBin2 * camera, const gchar * location)
{ {
GST_DEBUG_OBJECT (camera, "Setting mode %d location to %s", camera->mode, GST_DEBUG_OBJECT (camera, "Setting mode %d location to %s", camera->mode,
location); location);
if (camera->mode == MODE_IMAGE) { g_free (camera->location);
g_free (camera->image_location); camera->location = g_strdup (location);
camera->image_location = g_strdup (location);
} else {
g_free (camera->video_location);
camera->video_location = g_strdup (location);
}
} }
static void static void
gst_camera_bin_set_audio_src (GstCameraBin * camera, GstElement * src) gst_camera_bin_set_audio_src (GstCameraBin2 * camera, GstElement * src)
{ {
GST_DEBUG_OBJECT (GST_OBJECT (camera), GST_DEBUG_OBJECT (GST_OBJECT (camera),
"Setting audio source %" GST_PTR_FORMAT, src); "Setting audio source %" GST_PTR_FORMAT, src);
@ -1404,7 +1553,7 @@ gst_camera_bin_set_audio_src (GstCameraBin * camera, GstElement * src)
} }
static void static void
gst_camera_bin_set_camera_src (GstCameraBin * camera, GstElement * src) gst_camera_bin_set_camera_src (GstCameraBin2 * camera, GstElement * src)
{ {
GST_DEBUG_OBJECT (GST_OBJECT (camera), GST_DEBUG_OBJECT (GST_OBJECT (camera),
"Setting camera source %" GST_PTR_FORMAT, src); "Setting camera source %" GST_PTR_FORMAT, src);
@ -1421,7 +1570,7 @@ static void
gst_camera_bin_set_property (GObject * object, guint prop_id, gst_camera_bin_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec) const GValue * value, GParamSpec * pspec)
{ {
GstCameraBin *camera = GST_CAMERA_BIN_CAST (object); GstCameraBin2 *camera = GST_CAMERA_BIN2_CAST (object);
switch (prop_id) { switch (prop_id) {
case PROP_MODE: case PROP_MODE:
@ -1592,6 +1741,9 @@ gst_camera_bin_set_property (GObject * object, guint prop_id,
(GstEncodingProfile *) gst_value_dup_mini_object (value); (GstEncodingProfile *) gst_value_dup_mini_object (value);
camera->image_profile_switch = TRUE; camera->image_profile_switch = TRUE;
break; break;
case PROP_FLAGS:
camera->flags = g_value_get_flags (value);
break;
default: default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break; break;
@ -1602,18 +1754,14 @@ static void
gst_camera_bin_get_property (GObject * object, guint prop_id, gst_camera_bin_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec) GValue * value, GParamSpec * pspec)
{ {
GstCameraBin *camera = GST_CAMERA_BIN_CAST (object); GstCameraBin2 *camera = GST_CAMERA_BIN2_CAST (object);
switch (prop_id) { switch (prop_id) {
case PROP_MODE: case PROP_MODE:
g_value_set_enum (value, camera->mode); g_value_set_enum (value, camera->mode);
break; break;
case PROP_LOCATION: case PROP_LOCATION:
if (camera->mode == MODE_VIDEO) { g_value_set_string (value, camera->location);
g_value_set_string (value, camera->video_location);
} else {
g_value_set_string (value, camera->image_location);
}
break; break;
case PROP_CAMERA_SRC: case PROP_CAMERA_SRC:
g_value_set_object (value, camera->user_src); g_value_set_object (value, camera->user_src);
@ -1754,6 +1902,9 @@ gst_camera_bin_get_property (GObject * object, guint prop_id,
g_value_set_boolean (value, g_value_set_boolean (value,
g_atomic_int_get (&camera->processing_counter) == 0); g_atomic_int_get (&camera->processing_counter) == 0);
break; break;
case PROP_FLAGS:
g_value_set_flags (value, camera->flags);
break;
default: default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break; break;
@ -1761,10 +1912,10 @@ gst_camera_bin_get_property (GObject * object, guint prop_id,
} }
gboolean gboolean
gst_camera_bin_plugin_init (GstPlugin * plugin) gst_camera_bin2_plugin_init (GstPlugin * plugin)
{ {
GST_DEBUG_CATEGORY_INIT (gst_camera_bin_debug, "camerabin2", 0, "CameraBin2"); GST_DEBUG_CATEGORY_INIT (gst_camera_bin_debug, "camerabin2", 0, "CameraBin2");
return gst_element_register (plugin, "camerabin2", GST_RANK_NONE, return gst_element_register (plugin, "camerabin2", GST_RANK_NONE,
gst_camera_bin_get_type ()); gst_camera_bin2_get_type ());
} }

View file

@ -16,25 +16,34 @@
* Free Software Foundation, Inc., 59 Temple Place - Suite 330, * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA. * Boston, MA 02111-1307, USA.
*/ */
#ifndef _GST_CAMERA_BIN_H_ #ifndef _GST_CAMERA_BIN2_H_
#define _GST_CAMERA_BIN_H_ #define _GST_CAMERA_BIN2_H_
#include <gst/gst.h> #include <gst/gst.h>
#include <gst/pbutils/encoding-profile.h> #include <gst/pbutils/encoding-profile.h>
G_BEGIN_DECLS G_BEGIN_DECLS
#define GST_TYPE_CAMERA_BIN (gst_camera_bin_get_type()) #define GST_TYPE_CAMERA_BIN2 (gst_camera_bin2_get_type())
#define GST_CAMERA_BIN(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_CAMERA_BIN,GstCameraBin)) #define GST_CAMERA_BIN2(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_CAMERA_BIN2,GstCameraBin2))
#define GST_CAMERA_BIN_CAST(obj) ((GstCameraBin *) obj) #define GST_CAMERA_BIN2_CAST(obj) ((GstCameraBin2 *) obj)
#define GST_CAMERA_BIN_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_CAMERA_BIN,GstCameraBinClass)) #define GST_CAMERA_BIN2_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_CAMERA_BIN2,GstCameraBin2Class))
#define GST_IS_CAMERA_BIN(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_CAMERA_BIN)) #define GST_IS_CAMERA_BIN2(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_CAMERA_BIN2))
#define GST_IS_CAMERA_BIN_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_CAMERA_BIN)) #define GST_IS_CAMERA_BIN2_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_CAMERA_BIN2))
typedef struct _GstCameraBin GstCameraBin; typedef enum
typedef struct _GstCameraBinClass GstCameraBinClass; {
/* matches GstEncFlags GST_ENC_FLAG_NO_AUDIO_CONVERSION in encodebin */
GST_CAM_FLAG_NO_AUDIO_CONVERSION = (1 << 0),
/* matches GstEncFlags GST_ENC_FLAG_NO_VIDEO_CONVERSION in encodebin */
GST_CAM_FLAG_NO_VIDEO_CONVERSION = (1 << 1)
} GstCamFlags;
struct _GstCameraBin
typedef struct _GstCameraBin2 GstCameraBin2;
typedef struct _GstCameraBin2Class GstCameraBin2Class;
struct _GstCameraBin2
{ {
GstPipeline pipeline; GstPipeline pipeline;
@ -45,8 +54,6 @@ struct _GstCameraBin
GstElement *video_encodebin; GstElement *video_encodebin;
gulong video_encodebin_signal_id; gulong video_encodebin_signal_id;
GstElement *videosink; GstElement *videosink;
gulong videosink_probe;
GstElement *videobin_queue;
GstElement *videobin_capsfilter; GstElement *videobin_capsfilter;
GstElement *viewfinderbin; GstElement *viewfinderbin;
@ -68,23 +75,25 @@ struct _GstCameraBin
GstElement *audio_src; GstElement *audio_src;
GstElement *user_audio_src; GstElement *user_audio_src;
GstElement *audio_queue;
GstElement *audio_volume; GstElement *audio_volume;
GstElement *audio_capsfilter; GstElement *audio_capsfilter;
GstElement *audio_convert;
gint processing_counter; /* atomic int */ gint processing_counter; /* atomic int */
/* Index of the auto incrementing file index for video recordings */ /* Index of the auto incrementing file index for captures */
gint video_index; gint capture_index;
/* stores list of image locations to be pushed to the image sink
* as file location change notifications, they are pushed before
* each buffer capture */
GSList *image_location_list;
gboolean video_profile_switch; gboolean video_profile_switch;
gboolean image_profile_switch; gboolean image_profile_switch;
/* properties */ /* properties */
gint mode; gint mode;
gchar *video_location; gchar *location;
gchar *image_location;
gboolean post_previews; gboolean post_previews;
GstCaps *preview_caps; GstCaps *preview_caps;
GstElement *preview_filter; GstElement *preview_filter;
@ -92,21 +101,22 @@ struct _GstCameraBin
GstEncodingProfile *image_profile; GstEncodingProfile *image_profile;
gfloat zoom; gfloat zoom;
gfloat max_zoom; gfloat max_zoom;
GstCamFlags flags;
gboolean elements_created; gboolean elements_created;
}; };
struct _GstCameraBinClass struct _GstCameraBin2Class
{ {
GstPipelineClass pipeline_class; GstPipelineClass pipeline_class;
/* Action signals */ /* Action signals */
void (*start_capture) (GstCameraBin * camera); void (*start_capture) (GstCameraBin2 * camera);
void (*stop_capture) (GstCameraBin * camera); void (*stop_capture) (GstCameraBin2 * camera);
}; };
GType gst_camera_bin_get_type (void); GType gst_camera_bin2_get_type (void);
gboolean gst_camera_bin_plugin_init (GstPlugin * plugin); gboolean gst_camera_bin2_plugin_init (GstPlugin * plugin);
G_END_DECLS G_END_DECLS

View file

@ -34,7 +34,7 @@ plugin_init (GstPlugin * plugin)
return FALSE; return FALSE;
if (!gst_wrapper_camera_bin_src_plugin_init (plugin)) if (!gst_wrapper_camera_bin_src_plugin_init (plugin))
return FALSE; return FALSE;
if (!gst_camera_bin_plugin_init (plugin)) if (!gst_camera_bin2_plugin_init (plugin))
return FALSE; return FALSE;
return TRUE; return TRUE;

View file

@ -395,7 +395,6 @@ gst_wrapper_camera_bin_src_construct_pipeline (GstBaseCameraSrc * bcamsrc)
GstElement *src_csp; GstElement *src_csp;
GstElement *capsfilter; GstElement *capsfilter;
gboolean ret = FALSE; gboolean ret = FALSE;
GstElement *videoscale;
GstPad *vf_pad; GstPad *vf_pad;
GstPad *tee_capture_pad; GstPad *tee_capture_pad;
GstPad *src_caps_src_pad; GstPad *src_caps_src_pad;
@ -473,17 +472,9 @@ gst_wrapper_camera_bin_src_construct_pipeline (GstBaseCameraSrc * bcamsrc)
/* viewfinder pad */ /* viewfinder pad */
vf_pad = gst_element_get_request_pad (tee, "src%d"); vf_pad = gst_element_get_request_pad (tee, "src%d");
g_object_set (tee, "alloc-pad", vf_pad, NULL); g_object_set (tee, "alloc-pad", vf_pad, NULL);
gst_ghost_pad_set_target (GST_GHOST_PAD (self->vfsrc), vf_pad);
gst_object_unref (vf_pad); gst_object_unref (vf_pad);
/* the viewfinder should always work, so we add some converters to it */
if (!gst_camerabin_create_and_add_element (cbin, "ffmpegcolorspace",
"viewfinder-colorspace"))
goto done;
if (!(videoscale =
gst_camerabin_create_and_add_element (cbin, "videoscale",
"viewfinder-scale")))
goto done;
/* image/video pad from tee */ /* image/video pad from tee */
tee_capture_pad = gst_element_get_request_pad (tee, "src%d"); tee_capture_pad = gst_element_get_request_pad (tee, "src%d");
@ -526,10 +517,7 @@ gst_wrapper_camera_bin_src_construct_pipeline (GstBaseCameraSrc * bcamsrc)
NULL); NULL);
} }
/* hook-up the vf ghostpad */
vf_pad = gst_element_get_static_pad (videoscale, "src");
gst_ghost_pad_set_target (GST_GHOST_PAD (self->vfsrc), vf_pad);
gst_object_unref (vf_pad);
gst_pad_set_active (self->vfsrc, TRUE); gst_pad_set_active (self->vfsrc, TRUE);
gst_pad_set_active (self->imgsrc, TRUE); /* XXX ??? */ gst_pad_set_active (self->imgsrc, TRUE); /* XXX ??? */
@ -854,73 +842,6 @@ gst_wrapper_camera_bin_src_set_zoom (GstBaseCameraSrc * bcamsrc, gfloat zoom)
} }
} }
static GstCaps *
gst_wrapper_camera_bin_src_get_allowed_input_caps (GstBaseCameraSrc * bcamsrc)
{
GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (bcamsrc);
GstCaps *caps = NULL;
GstPad *pad = NULL, *peer_pad = NULL;
GstState state;
GstElement *videosrc;
videosrc = self->src_vid_src ? self->src_vid_src : self->app_vid_src;
if (!videosrc) {
GST_WARNING_OBJECT (self, "no videosrc, can't get allowed caps");
goto failed;
}
if (self->allowed_caps) {
GST_DEBUG_OBJECT (self, "returning cached caps");
goto done;
}
pad = gst_element_get_static_pad (videosrc, "src");
if (!pad) {
GST_WARNING_OBJECT (self, "no srcpad in videosrc");
goto failed;
}
state = GST_STATE (videosrc);
/* Make this function work also in NULL state */
if (state == GST_STATE_NULL) {
GST_DEBUG_OBJECT (self, "setting videosrc to ready temporarily");
peer_pad = gst_pad_get_peer (pad);
if (peer_pad) {
gst_pad_unlink (pad, peer_pad);
}
/* Set videosrc to READY to open video device */
gst_element_set_locked_state (videosrc, TRUE);
gst_element_set_state (videosrc, GST_STATE_READY);
}
self->allowed_caps = gst_pad_get_caps (pad);
/* Restore state and re-link if necessary */
if (state == GST_STATE_NULL) {
GST_DEBUG_OBJECT (self, "restoring videosrc state %d", state);
/* Reset videosrc to NULL state, some drivers seem to need this */
gst_element_set_state (videosrc, GST_STATE_NULL);
if (peer_pad) {
gst_pad_link (pad, peer_pad);
gst_object_unref (peer_pad);
}
gst_element_set_locked_state (videosrc, FALSE);
}
gst_object_unref (pad);
done:
if (self->allowed_caps) {
caps = gst_caps_copy (self->allowed_caps);
}
GST_DEBUG_OBJECT (self, "allowed caps:%" GST_PTR_FORMAT, caps);
failed:
return caps;
}
/** /**
* update_aspect_filter: * update_aspect_filter:
* @self: camerasrc object * @self: camerasrc object
@ -1172,8 +1093,6 @@ gst_wrapper_camera_bin_src_class_init (GstWrapperCameraBinSrcClass * klass)
gst_wrapper_camera_bin_src_construct_pipeline; gst_wrapper_camera_bin_src_construct_pipeline;
gstbasecamerasrc_class->set_zoom = gst_wrapper_camera_bin_src_set_zoom; gstbasecamerasrc_class->set_zoom = gst_wrapper_camera_bin_src_set_zoom;
gstbasecamerasrc_class->set_mode = gst_wrapper_camera_bin_src_set_mode; gstbasecamerasrc_class->set_mode = gst_wrapper_camera_bin_src_set_mode;
gstbasecamerasrc_class->get_allowed_input_caps =
gst_wrapper_camera_bin_src_get_allowed_input_caps;
gstbasecamerasrc_class->start_capture = gstbasecamerasrc_class->start_capture =
gst_wrapper_camera_bin_src_start_capture; gst_wrapper_camera_bin_src_start_capture;
gstbasecamerasrc_class->stop_capture = gstbasecamerasrc_class->stop_capture =

View file

@ -22,10 +22,14 @@ libgstdebugutilsbad_la_SOURCES = \
gstchecksumsink.h \ gstchecksumsink.h \
gstchopmydata.c \ gstchopmydata.c \
gstchopmydata.h \ gstchopmydata.h \
gstcompare.c \
gstcompare.h \
gstdebugspy.h gstdebugspy.h
nodist_libgstdebugutilsbad_la_SOURCES = $(BUILT_SOURCES) nodist_libgstdebugutilsbad_la_SOURCES = $(BUILT_SOURCES)
libgstdebugutilsbad_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) libgstdebugutilsbad_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS)
libgstdebugutilsbad_la_LIBADD = $(GST_BASE_LIBS) $(GST_PLUGINS_BASE_LIBS) \ libgstdebugutilsbad_la_LIBADD = $(GST_BASE_LIBS) $(GST_PLUGINS_BASE_LIBS) \
-lgstvideo-$(GST_MAJORMINOR) \
-lgstinterfaces-$(GST_MAJORMINOR) $(GST_LIBS) -lgstinterfaces-$(GST_MAJORMINOR) $(GST_LIBS)
libgstdebugutilsbad_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) libgstdebugutilsbad_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstdebugutilsbad_la_LIBTOOLFLAGS = --tag=disable-static libgstdebugutilsbad_la_LIBTOOLFLAGS = --tag=disable-static

View file

@ -26,6 +26,7 @@
GType gst_checksum_sink_get_type (void); GType gst_checksum_sink_get_type (void);
GType fps_display_sink_get_type (void); GType fps_display_sink_get_type (void);
GType gst_chop_my_data_get_type (void); GType gst_chop_my_data_get_type (void);
GType gst_compare_get_type (void);
GType gst_debug_spy_get_type (void); GType gst_debug_spy_get_type (void);
static gboolean static gboolean
@ -37,8 +38,11 @@ plugin_init (GstPlugin * plugin)
fps_display_sink_get_type ()); fps_display_sink_get_type ());
gst_element_register (plugin, "chopmydata", GST_RANK_NONE, gst_element_register (plugin, "chopmydata", GST_RANK_NONE,
gst_chop_my_data_get_type ()); gst_chop_my_data_get_type ());
gst_element_register (plugin, "compare", GST_RANK_NONE,
gst_compare_get_type ());
gst_element_register (plugin, "debugspy", GST_RANK_NONE, gst_element_register (plugin, "debugspy", GST_RANK_NONE,
gst_debug_spy_get_type ()); gst_debug_spy_get_type ());
return TRUE; return TRUE;
} }

664
gst/debugutils/gstcompare.c Normal file
View file

@ -0,0 +1,664 @@
/* GStreamer Element
*
* Copyright 2011 Collabora Ltd.
* @author: Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
* Copyright 2011 Nokia Corp.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <string.h>
#include <gst/gst.h>
#include <gst/base/gstcollectpads.h>
#include <gst/video/video.h>
#include "gstcompare.h"
GST_DEBUG_CATEGORY_STATIC (compare_debug);
#define GST_CAT_DEFAULT compare_debug
static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS_ANY);
static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS_ANY);
static GstStaticPadTemplate check_sink_factory =
GST_STATIC_PAD_TEMPLATE ("check",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS_ANY);
enum GstCompareMethod
{
GST_COMPARE_METHOD_MEM,
GST_COMPARE_METHOD_MAX,
GST_COMPARE_METHOD_SSIM
};
#define GST_COMPARE_METHOD_TYPE (gst_compare_method_get_type())
static GType
gst_compare_method_get_type (void)
{
static GType method_type = 0;
static const GEnumValue method_types[] = {
{GST_COMPARE_METHOD_MEM, "Memory", "mem"},
{GST_COMPARE_METHOD_MAX, "Maximum metric", "max"},
{GST_COMPARE_METHOD_SSIM, "SSIM (raw video)", "ssim"},
{0, NULL, NULL}
};
if (!method_type) {
method_type = g_enum_register_static ("GstCompareMethod", method_types);
}
return method_type;
}
/* Filter signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
{
PROP_0,
PROP_META,
PROP_OFFSET_TS,
PROP_METHOD,
PROP_THRESHOLD,
PROP_UPPER,
PROP_LAST
};
#define DEFAULT_META GST_BUFFER_COPY_ALL
#define DEFAULT_OFFSET_TS FALSE
#define DEFAULT_METHOD GST_COMPARE_METHOD_MEM
#define DEFAULT_THRESHOLD 0
#define DEFAULT_UPPER TRUE
static void gst_compare_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_compare_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
static void gst_compare_reset (GstCompare * overlay);
static GstCaps *gst_compare_getcaps (GstPad * pad);
static GstFlowReturn gst_compare_collect_pads (GstCollectPads * cpads,
GstCompare * comp);
static GstStateChangeReturn gst_compare_change_state (GstElement * element,
GstStateChange transition);
GST_BOILERPLATE (GstCompare, gst_compare, GstElement, GST_TYPE_ELEMENT);
static void
gst_compare_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_factory));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&check_sink_factory));
gst_element_class_set_details_simple (element_class, "Compare buffers",
"Filter/Debug", "Compares incoming buffers",
"Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>");
}
static void
gst_compare_finalize (GObject * object)
{
GstCompare *comp = GST_COMPARE (object);
gst_object_unref (comp->cpads);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_compare_class_init (GstCompareClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
GST_DEBUG_CATEGORY_INIT (compare_debug, "compare", 0, "Compare buffers");
gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_compare_change_state);
gobject_class->set_property = gst_compare_set_property;
gobject_class->get_property = gst_compare_get_property;
gobject_class->finalize = gst_compare_finalize;
g_object_class_install_property (gobject_class, PROP_META,
g_param_spec_flags ("meta", "Compare Meta",
"Indicates which metadata should be compared",
gst_buffer_copy_flags_get_type (), DEFAULT_META,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_OFFSET_TS,
g_param_spec_boolean ("offset-ts", "Offsets Timestamps",
"Consider OFFSET and OFFSET_END part of timestamp metadata",
DEFAULT_OFFSET_TS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_METHOD,
g_param_spec_enum ("method", "Content Compare Method",
"Method to compare buffer content",
GST_COMPARE_METHOD_TYPE, DEFAULT_METHOD,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_THRESHOLD,
g_param_spec_double ("threshold", "Content Threshold",
"Threshold beyond which to consider content different as determined by content-method",
0, G_MAXDOUBLE, DEFAULT_THRESHOLD,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_UPPER,
g_param_spec_boolean ("upper", "Threshold Upper Bound",
"Whether threshold value is upper bound or lower bound for difference measure",
DEFAULT_UPPER, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
}
static void
gst_compare_init (GstCompare * comp, GstCompareClass * klass)
{
comp->cpads = gst_collect_pads_new ();
gst_collect_pads_set_function (comp->cpads,
(GstCollectPadsFunction) GST_DEBUG_FUNCPTR (gst_compare_collect_pads),
comp);
comp->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
gst_pad_set_getcaps_function (comp->sinkpad, gst_compare_getcaps);
gst_element_add_pad (GST_ELEMENT (comp), comp->sinkpad);
comp->checkpad =
gst_pad_new_from_static_template (&check_sink_factory, "check");
gst_pad_set_getcaps_function (comp->checkpad, gst_compare_getcaps);
gst_element_add_pad (GST_ELEMENT (comp), comp->checkpad);
gst_collect_pads_add_pad_full (comp->cpads, comp->sinkpad,
sizeof (GstCollectData), NULL);
gst_collect_pads_add_pad_full (comp->cpads, comp->checkpad,
sizeof (GstCollectData), NULL);
comp->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
gst_pad_set_getcaps_function (comp->srcpad, gst_compare_getcaps);
gst_element_add_pad (GST_ELEMENT (comp), comp->srcpad);
/* init properties */
comp->meta = DEFAULT_META;
comp->offset_ts = DEFAULT_OFFSET_TS;
comp->method = DEFAULT_METHOD;
comp->threshold = DEFAULT_THRESHOLD;
comp->upper = DEFAULT_UPPER;
gst_compare_reset (comp);
}
static void
gst_compare_reset (GstCompare * comp)
{
}
static GstCaps *
gst_compare_getcaps (GstPad * pad)
{
GstCompare *comp;
GstPad *otherpad;
GstCaps *result;
comp = GST_COMPARE (gst_pad_get_parent (pad));
if (G_UNLIKELY (comp == NULL))
return gst_caps_new_any ();
otherpad = (pad == comp->srcpad ? comp->sinkpad : comp->srcpad);
result = gst_pad_peer_get_caps (otherpad);
if (result == NULL)
result = gst_caps_new_any ();
gst_object_unref (comp);
return result;
}
static void
gst_compare_meta (GstCompare * comp, GstBuffer * buf1, GstBuffer * buf2)
{
gint flags = 0;
if (comp->meta & GST_BUFFER_COPY_FLAGS) {
if (GST_BUFFER_FLAGS (buf1) != GST_BUFFER_FLAGS (buf2)) {
flags |= GST_BUFFER_COPY_FLAGS;
GST_DEBUG_OBJECT (comp, "flags %d != flags %d", GST_BUFFER_FLAGS (buf1),
GST_BUFFER_FLAGS (buf2));
}
}
if (comp->meta & GST_BUFFER_COPY_TIMESTAMPS) {
if (GST_BUFFER_TIMESTAMP (buf1) != GST_BUFFER_TIMESTAMP (buf2)) {
flags |= GST_BUFFER_COPY_TIMESTAMPS;
GST_DEBUG_OBJECT (comp,
"ts %" GST_TIME_FORMAT " != ts %" GST_TIME_FORMAT,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf1)),
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf2)));
}
if (GST_BUFFER_DURATION (buf1) != GST_BUFFER_DURATION (buf2)) {
flags |= GST_BUFFER_COPY_TIMESTAMPS;
GST_DEBUG_OBJECT (comp,
"dur %" GST_TIME_FORMAT " != dur %" GST_TIME_FORMAT,
GST_TIME_ARGS (GST_BUFFER_DURATION (buf1)),
GST_TIME_ARGS (GST_BUFFER_DURATION (buf2)));
}
if (comp->offset_ts) {
if (GST_BUFFER_OFFSET (buf1) != GST_BUFFER_OFFSET (buf2)) {
flags |= GST_BUFFER_COPY_TIMESTAMPS;
GST_DEBUG_OBJECT (comp,
"offset %" G_GINT64_FORMAT " != offset %" G_GINT64_FORMAT,
GST_BUFFER_OFFSET (buf1), GST_BUFFER_OFFSET (buf2));
}
if (GST_BUFFER_OFFSET_END (buf1) != GST_BUFFER_OFFSET_END (buf2)) {
flags |= GST_BUFFER_COPY_TIMESTAMPS;
GST_DEBUG_OBJECT (comp,
"offset_end %" G_GINT64_FORMAT " != offset_end %" G_GINT64_FORMAT,
GST_BUFFER_OFFSET_END (buf1), GST_BUFFER_OFFSET_END (buf2));
}
}
}
if (comp->meta & GST_BUFFER_COPY_CAPS) {
if (!gst_caps_is_equal (GST_BUFFER_CAPS (buf1), GST_BUFFER_CAPS (buf2))) {
flags |= GST_BUFFER_COPY_CAPS;
GST_DEBUG_OBJECT (comp,
"caps %" GST_PTR_FORMAT " != caps %" GST_PTR_FORMAT,
GST_BUFFER_CAPS (buf1), GST_BUFFER_CAPS (buf2));
}
}
/* signal mismatch by debug and message */
if (flags) {
GST_WARNING_OBJECT (comp, "buffers %p and %p failed metadata match %d",
buf1, buf2, flags);
gst_element_post_message (GST_ELEMENT (comp),
gst_message_new_element (GST_OBJECT (comp),
gst_structure_new ("delta", "meta", G_TYPE_INT, flags, NULL)));
}
}
/* when comparing contents, it is already ensured sizes are equal */
static gint
gst_compare_mem (GstCompare * comp, GstBuffer * buf1, GstBuffer * buf2)
{
return memcmp (GST_BUFFER_DATA (buf1), GST_BUFFER_DATA (buf2),
GST_BUFFER_SIZE (buf1)) ? 1 : 0;
}
static gint
gst_compare_max (GstCompare * comp, GstBuffer * buf1, GstBuffer * buf2)
{
gint i, delta = 0;
gint8 *data1, *data2;
data1 = (gint8 *) GST_BUFFER_DATA (buf1);
data2 = (gint8 *) GST_BUFFER_DATA (buf2);
/* primitive loop */
for (i = 0; i < GST_BUFFER_SIZE (buf1); i++) {
gint diff = ABS (*data1 - *data2);
if (diff > 0)
GST_LOG_OBJECT (comp, "diff at %d = %d", i, diff);
delta = MAX (delta, ABS (*data1 - *data2));
data1++;
data2++;
}
return delta;
}
static double
gst_compare_ssim_window (GstCompare * comp, guint8 * data1, guint8 * data2,
gint width, gint height, gint step, gint stride)
{
gint count = 0, i, j;
gint sum1 = 0, sum2 = 0, ssum1 = 0, ssum2 = 0, acov = 0;
gdouble avg1, avg2, var1, var2, cov;
const gdouble k1 = 0.01;
const gdouble k2 = 0.03;
const gdouble L = 255.0;
const gdouble c1 = (k1 * L) * (k1 * L);
const gdouble c2 = (k2 * L) * (k2 * L);
/* plain and simple; no fancy optimizations */
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
sum1 += *data1;
sum2 += *data2;
ssum1 += *data1 * *data1;
ssum2 += *data2 * *data2;
acov += *data1 * *data2;
count++;
data1 += step;
data2 += step;
}
data1 -= j * step;
data2 -= j * step;
data1 += stride;
data2 += stride;
}
avg1 = sum1 / count;
avg2 = sum2 / count;
var1 = ssum1 / count - avg1 * avg1;
var2 = ssum2 / count - avg2 * avg2;
cov = acov / count - avg1 * avg2;
return (2 * avg1 * avg2 + c1) * (2 * cov + c2) /
((avg1 * avg1 + avg2 * avg2 + c1) * (var1 + var2 + c2));
}
/* @width etc are for the particular component */
static gdouble
gst_compare_ssim_component (GstCompare * comp, guint8 * data1, guint8 * data2,
gint width, gint height, gint step, gint stride)
{
const gint window = 16;
gdouble ssim_sum = 0;
gint count = 0, i, j;
for (j = 0; j + (window / 2) < height; j += (window / 2)) {
for (i = 0; i + (window / 2) < width; i += (window / 2)) {
gdouble ssim;
ssim = gst_compare_ssim_window (comp, data1 + step * i + j * stride,
data2 + step * i + j * stride,
MIN (window, width - i), MIN (window, height - j), step, stride);
GST_LOG_OBJECT (comp, "ssim for %dx%d at (%d, %d) = %f", window, window,
i, j, ssim);
ssim_sum += ssim;
count++;
}
}
return (ssim_sum / count);
}
static gdouble
gst_compare_ssim (GstCompare * comp, GstBuffer * buf1, GstBuffer * buf2)
{
GstCaps *caps;
GstVideoFormat format, f;
gint width, height, w, h, i, comps;
gdouble cssim[4], ssim, c[4] = { 1.0, 0.0, 0.0, 0.0 };
guint8 *data1, *data2;
caps = GST_BUFFER_CAPS (buf1);
if (!caps)
goto invalid_input;
if (!gst_video_format_parse_caps (caps, &format, &width, &height))
goto invalid_input;
caps = GST_BUFFER_CAPS (buf2);
if (!caps)
goto invalid_input;
if (!gst_video_format_parse_caps (caps, &f, &w, &h))
goto invalid_input;
if (f != format || w != width || h != height)
return comp->threshold + 1;
comps = gst_video_format_is_gray (format) ? 1 : 3;
if (gst_video_format_has_alpha (format))
comps += 1;
/* note that some are reported both yuv and gray */
for (i = 0; i < comps; ++i)
c[i] = 1.0;
/* increase luma weight if yuv */
if (gst_video_format_is_yuv (format) && (comps > 1))
c[0] = comps - 1;
for (i = 0; i < comps; ++i)
c[i] /= (gst_video_format_is_yuv (format) && (comps > 1)) ?
2 * (comps - 1) : comps;
data1 = GST_BUFFER_DATA (buf1);
data2 = GST_BUFFER_DATA (buf2);
for (i = 0; i < comps; i++) {
gint offset, cw, ch, step, stride;
/* only support most common formats */
if (gst_video_format_get_component_depth (format, i) != 8)
goto unsupported_input;
offset = gst_video_format_get_component_offset (format, i, width, height);
cw = gst_video_format_get_component_width (format, i, width);
ch = gst_video_format_get_component_height (format, i, height);
step = gst_video_format_get_pixel_stride (format, i);
stride = gst_video_format_get_row_stride (format, i, width);
GST_LOG_OBJECT (comp, "component %d", i);
cssim[i] = gst_compare_ssim_component (comp, data1 + offset, data2 + offset,
cw, ch, step, stride);
GST_LOG_OBJECT (comp, "ssim[%d] = %f", i, cssim[i]);
}
#ifndef GST_DISABLE_GST_DEBUG
for (i = 0; i < 4; i++) {
GST_DEBUG_OBJECT (comp, "ssim[%d] = %f, c[%d] = %f", i, cssim[i], i, c[i]);
}
#endif
ssim = cssim[0] * c[0] + cssim[1] * c[1] + cssim[2] * c[2] + cssim[3] * c[3];
return ssim;
/* ERRORS */
invalid_input:
{
GST_ERROR_OBJECT (comp, "ssim method needs raw video input");
return 0;
}
unsupported_input:
{
GST_ERROR_OBJECT (comp, "raw video format not supported %" GST_PTR_FORMAT,
caps);
return 0;
}
}
static void
gst_compare_buffers (GstCompare * comp, GstBuffer * buf1, GstBuffer * buf2)
{
gdouble delta = 0;
/* first check metadata */
gst_compare_meta (comp, buf1, buf2);
/* check content according to method */
/* but at least size should match */
if (GST_BUFFER_SIZE (buf1) != GST_BUFFER_SIZE (buf2)) {
delta = comp->threshold + 1;
} else {
GST_MEMDUMP_OBJECT (comp, "buffer 1", GST_BUFFER_DATA (buf1),
GST_BUFFER_SIZE (buf1));
GST_MEMDUMP_OBJECT (comp, "buffer 2", GST_BUFFER_DATA (buf2),
GST_BUFFER_SIZE (buf2));
switch (comp->method) {
case GST_COMPARE_METHOD_MEM:
delta = gst_compare_mem (comp, buf1, buf2);
break;
case GST_COMPARE_METHOD_MAX:
delta = gst_compare_max (comp, buf1, buf2);
break;
case GST_COMPARE_METHOD_SSIM:
delta = gst_compare_ssim (comp, buf1, buf2);
break;
default:
g_assert_not_reached ();
break;
}
}
if ((comp->upper && delta > comp->threshold) ||
(!comp->upper && delta < comp->threshold)) {
GST_WARNING_OBJECT (comp, "buffers %p and %p failed content match %f",
buf1, buf2, delta);
gst_element_post_message (GST_ELEMENT (comp),
gst_message_new_element (GST_OBJECT (comp),
gst_structure_new ("delta", "content", G_TYPE_DOUBLE, delta,
NULL)));
}
}
static GstFlowReturn
gst_compare_collect_pads (GstCollectPads * cpads, GstCompare * comp)
{
GstBuffer *buf1, *buf2;
buf1 = gst_collect_pads_pop (comp->cpads,
gst_pad_get_element_private (comp->sinkpad));
buf2 = gst_collect_pads_pop (comp->cpads,
gst_pad_get_element_private (comp->checkpad));
if (!buf1 && !buf2) {
gst_pad_push_event (comp->srcpad, gst_event_new_eos ());
return GST_FLOW_UNEXPECTED;
} else if (buf1 && buf2) {
gst_compare_buffers (comp, buf1, buf2);
} else {
GST_WARNING_OBJECT (comp, "buffer %p != NULL", buf1 ? buf1 : buf2);
comp->count++;
gst_element_post_message (GST_ELEMENT (comp),
gst_message_new_element (GST_OBJECT (comp),
gst_structure_new ("delta", "count", G_TYPE_INT, comp->count,
NULL)));
}
if (buf1)
gst_pad_push (comp->srcpad, buf1);
if (buf2)
gst_buffer_unref (buf2);
return GST_FLOW_OK;
}
static void
gst_compare_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstCompare *comp = GST_COMPARE (object);
switch (prop_id) {
case PROP_META:
comp->meta = g_value_get_flags (value);
break;
case PROP_OFFSET_TS:
comp->offset_ts = g_value_get_boolean (value);
break;
case PROP_METHOD:
comp->method = g_value_get_enum (value);
break;
case PROP_THRESHOLD:
comp->threshold = g_value_get_double (value);
break;
case PROP_UPPER:
comp->upper = g_value_get_boolean (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_compare_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstCompare *comp = GST_COMPARE (object);
switch (prop_id) {
case PROP_META:
g_value_set_flags (value, comp->meta);
break;
case PROP_OFFSET_TS:
g_value_set_boolean (value, comp->offset_ts);
break;
case PROP_METHOD:
g_value_set_enum (value, comp->method);
break;
case PROP_THRESHOLD:
g_value_set_double (value, comp->threshold);
break;
case PROP_UPPER:
g_value_set_boolean (value, comp->upper);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static GstStateChangeReturn
gst_compare_change_state (GstElement * element, GstStateChange transition)
{
GstCompare *comp = GST_COMPARE (element);
GstStateChangeReturn ret;
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
case GST_STATE_CHANGE_READY_TO_PAUSED:
gst_collect_pads_start (comp->cpads);
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
gst_collect_pads_stop (comp->cpads);
break;
default:
break;
}
ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state,
(element, transition), GST_STATE_CHANGE_SUCCESS);
if (ret != GST_STATE_CHANGE_SUCCESS)
return ret;
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
gst_compare_reset (comp);
break;
default:
break;
}
return GST_STATE_CHANGE_SUCCESS;
}

View file

@ -0,0 +1,75 @@
/* GStreamer Element
*
* Copyright 2011 Collabora Ltd.
* @author: Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>
* Copyright 2011 Nokia Corp.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_COMPARE_H__
#define __GST_COMPARE_H__
#include <gst/gst.h>
G_BEGIN_DECLS
#define GST_TYPE_COMPARE \
(gst_compare_get_type())
#define GST_COMPARE(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_COMPARE, GstCompare))
#define GST_COMPARE_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_COMPARE, GstCompareClass))
#define GST_COMPARE_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_COMPARE, GstCompareClass))
#define GST_IS_COMPARE(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_COMPARE))
#define GST_IS_COMPARE_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_COMPARE))
typedef struct _GstCompare GstCompare;
typedef struct _GstCompareClass GstCompareClass;
struct _GstCompare {
GstElement element;
GstPad *srcpad;
GstPad *sinkpad;
GstPad *checkpad;
GstCollectPads *cpads;
gint count;
/* properties */
GstBufferCopyFlags meta;
gboolean offset_ts;
gint method;
gdouble threshold;
gboolean upper;
};
struct _GstCompareClass {
GstElementClass parent_class;
};
GType gst_compare_get_type(void);
G_END_DECLS
#endif /* __GST_COMPARE_H__ */

View file

@ -1,7 +1,6 @@
/* -*- Mode: C; indent-tabs-mode: t; c-basic-offset: 4; tab-width: 4 -*- */ /* dvb-sub.c - DVB subtitle decoding
/*
* libdvbsub - DVB subtitle decoding
* Copyright (C) Mart Raudsepp 2009 <mart.raudsepp@artecdesign.ee> * Copyright (C) Mart Raudsepp 2009 <mart.raudsepp@artecdesign.ee>
* Copyright (C) 2010 ONELAN Ltd.
* *
* Heavily uses code algorithms ported from ffmpeg's libavcodec/dvbsubdec.c, * Heavily uses code algorithms ported from ffmpeg's libavcodec/dvbsubdec.c,
* especially the segment parsers. The original license applies to this * especially the segment parsers. The original license applies to this

View file

@ -1,5 +1,6 @@
/* GStreamer DVB subtitles overlay /* GStreamer DVB subtitles overlay
* Copyright (c) 2010 Mart Raudsepp <mart.raudsepp@collabora.co.uk> * Copyright (c) 2010 Mart Raudsepp <mart.raudsepp@collabora.co.uk>
* Copyright (c) 2010 ONELAN Ltd.
* *
* This library is free software; you can redistribute it and/or * This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public * modify it under the terms of the GNU Library General Public

View file

@ -1,5 +1,6 @@
/* GStreamer DVB subtitles overlay /* GStreamer DVB subtitles overlay
* Copyright (c) 2010 Mart Raudsepp <mart.raudsepp@collabora.co.uk> * Copyright (c) 2010 Mart Raudsepp <mart.raudsepp@collabora.co.uk>
* Copyright (c) 2010 ONELAN Ltd.
* *
* This library is free software; you can redistribute it and/or * This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public License * modify it under the terms of the GNU Library General Public License

View file

@ -1134,11 +1134,11 @@ gst_h264_parse_make_codec_data (GstH264Parse * h264parse)
num_sps++; num_sps++;
/* size bytes also count */ /* size bytes also count */
sps_size += GST_BUFFER_SIZE (nal) - 4 + 2; sps_size += GST_BUFFER_SIZE (nal) - 4 + 2;
if (GST_BUFFER_SIZE (nal) >= 7) { if (GST_BUFFER_SIZE (nal) >= 8) {
found = TRUE; found = TRUE;
profile_idc = (GST_BUFFER_DATA (nal))[4]; profile_idc = (GST_BUFFER_DATA (nal))[5];
profile_comp = (GST_BUFFER_DATA (nal))[5]; profile_comp = (GST_BUFFER_DATA (nal))[6];
level_idc = (GST_BUFFER_DATA (nal))[6]; level_idc = (GST_BUFFER_DATA (nal))[7];
} }
} }
} }
@ -1313,17 +1313,19 @@ gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps)
alignment = "au"; alignment = "au";
} else { } else {
if (h264parse->packetized) { if (h264parse->packetized) {
/* if packetized input, take upstream alignment if validly provided, if (h264parse->split_packetized)
alignment = "nal";
else {
/* if packetized input is not split,
* take upstream alignment if validly provided,
* otherwise assume au aligned ... */ * otherwise assume au aligned ... */
alignment = gst_structure_get_string (structure, "alignment"); alignment = gst_structure_get_string (structure, "alignment");
if (!alignment || (alignment && if (!alignment || (alignment &&
strcmp (alignment, "au") != 0 && strcmp (alignment, "au") != 0 &&
strcmp (alignment, "nal") != 0)) { strcmp (alignment, "nal") != 0)) {
if (h264parse->split_packetized)
alignment = "nal";
else
alignment = "au"; alignment = "au";
} }
}
} else { } else {
alignment = "nal"; alignment = "nal";
} }

View file

@ -43,9 +43,9 @@
#include <gst/base/gsttypefindhelper.h> #include <gst/base/gsttypefindhelper.h>
#include "gsthlsdemux.h" #include "gsthlsdemux.h"
static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src", static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src%d",
GST_PAD_SRC, GST_PAD_SRC,
GST_PAD_ALWAYS, GST_PAD_SOMETIMES,
GST_STATIC_CAPS_ANY); GST_STATIC_CAPS_ANY);
static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink", static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink",
@ -214,15 +214,6 @@ gst_hls_demux_init (GstHLSDemux * demux, GstHLSDemuxClass * klass)
GST_DEBUG_FUNCPTR (gst_hls_demux_sink_event)); GST_DEBUG_FUNCPTR (gst_hls_demux_sink_event));
gst_element_add_pad (GST_ELEMENT (demux), demux->sinkpad); gst_element_add_pad (GST_ELEMENT (demux), demux->sinkpad);
/* demux pad */
demux->srcpad = gst_pad_new_from_static_template (&srctemplate, "src");
gst_pad_set_event_function (demux->srcpad,
GST_DEBUG_FUNCPTR (gst_hls_demux_src_event));
gst_pad_set_query_function (demux->srcpad,
GST_DEBUG_FUNCPTR (gst_hls_demux_src_query));
gst_pad_set_element_private (demux->srcpad, demux);
gst_element_add_pad (GST_ELEMENT (demux), demux->srcpad);
/* fetcher pad */ /* fetcher pad */
demux->fetcherpad = demux->fetcherpad =
gst_pad_new_from_static_template (&fetchertemplate, "sink"); gst_pad_new_from_static_template (&fetchertemplate, "sink");
@ -233,6 +224,8 @@ gst_hls_demux_init (GstHLSDemux * demux, GstHLSDemuxClass * klass)
gst_pad_set_element_private (demux->fetcherpad, demux); gst_pad_set_element_private (demux->fetcherpad, demux);
gst_pad_activate_push (demux->fetcherpad, TRUE); gst_pad_activate_push (demux->fetcherpad, TRUE);
demux->do_typefind = TRUE;
/* Properties */ /* Properties */
demux->fragments_cache = DEFAULT_FRAGMENTS_CACHE; demux->fragments_cache = DEFAULT_FRAGMENTS_CACHE;
demux->bitrate_switch_tol = DEFAULT_BITRATE_SWITCH_TOLERANCE; demux->bitrate_switch_tol = DEFAULT_BITRATE_SWITCH_TOLERANCE;
@ -249,6 +242,8 @@ gst_hls_demux_init (GstHLSDemux * demux, GstHLSDemuxClass * klass)
g_static_rec_mutex_init (&demux->task_lock); g_static_rec_mutex_init (&demux->task_lock);
demux->task = gst_task_create ((GstTaskFunction) gst_hls_demux_loop, demux); demux->task = gst_task_create ((GstTaskFunction) gst_hls_demux_loop, demux);
gst_task_set_lock (demux->task, &demux->task_lock); gst_task_set_lock (demux->task, &demux->task_lock);
demux->position = 0;
} }
static void static void
@ -370,14 +365,15 @@ gst_hls_demux_sink_event (GstPad * pad, GstEvent * event)
/* In most cases, this will happen if we set a wrong url in the /* In most cases, this will happen if we set a wrong url in the
* source element and we have received the 404 HTML response instead of * source element and we have received the 404 HTML response instead of
* the playlist */ * the playlist */
GST_ELEMENT_ERROR (demux, STREAM, DECODE, ("Invalid playlist."), NULL); GST_ELEMENT_ERROR (demux, STREAM, DECODE, ("Invalid playlist."),
(NULL));
return FALSE; return FALSE;
} }
if (!ret && gst_m3u8_client_is_live (demux->client)) { if (!ret && gst_m3u8_client_is_live (demux->client)) {
GST_ELEMENT_ERROR (demux, RESOURCE, NOT_FOUND, GST_ELEMENT_ERROR (demux, RESOURCE, NOT_FOUND,
("Failed querying the playlist uri, " ("Failed querying the playlist uri, "
"required for live sources."), NULL); "required for live sources."), (NULL));
return FALSE; return FALSE;
} }
@ -385,6 +381,10 @@ gst_hls_demux_sink_event (GstPad * pad, GstEvent * event)
gst_event_unref (event); gst_event_unref (event);
return TRUE; return TRUE;
} }
case GST_EVENT_NEWSEGMENT:
/* Swallow newsegments, we'll push our own */
gst_event_unref (event);
return TRUE;
default: default:
break; break;
} }
@ -555,6 +555,34 @@ gst_hls_demux_stop (GstHLSDemux * demux)
g_cond_signal (demux->thread_cond); g_cond_signal (demux->thread_cond);
} }
static void
switch_pads (GstHLSDemux * demux, GstCaps * newcaps)
{
GstPad *oldpad = demux->srcpad;
GST_DEBUG ("Switching pads (oldpad:%p)", oldpad);
/* First create and activate new pad */
demux->srcpad = gst_pad_new_from_static_template (&srctemplate, NULL);
gst_pad_set_event_function (demux->srcpad,
GST_DEBUG_FUNCPTR (gst_hls_demux_src_event));
gst_pad_set_query_function (demux->srcpad,
GST_DEBUG_FUNCPTR (gst_hls_demux_src_query));
gst_pad_set_element_private (demux->srcpad, demux);
gst_pad_set_active (demux->srcpad, TRUE);
gst_element_add_pad (GST_ELEMENT (demux), demux->srcpad);
gst_pad_set_caps (demux->srcpad, newcaps);
gst_element_no_more_pads (GST_ELEMENT (demux));
if (oldpad) {
/* Push out EOS */
gst_pad_push_event (oldpad, gst_event_new_eos ());
gst_pad_set_active (oldpad, FALSE);
gst_element_remove_pad (GST_ELEMENT (demux), oldpad);
}
}
static void static void
gst_hls_demux_loop (GstHLSDemux * demux) gst_hls_demux_loop (GstHLSDemux * demux)
{ {
@ -588,6 +616,20 @@ gst_hls_demux_loop (GstHLSDemux * demux)
} }
buf = g_queue_pop_head (demux->queue); buf = g_queue_pop_head (demux->queue);
/* Figure out if we need to create/switch pads */
if (G_UNLIKELY (!demux->srcpad
|| GST_BUFFER_CAPS (buf) != GST_PAD_CAPS (demux->srcpad))) {
switch_pads (demux, GST_BUFFER_CAPS (buf));
/* And send a newsegment */
gst_pad_push_event (demux->srcpad,
gst_event_new_new_segment (0, 1.0, GST_FORMAT_TIME, demux->position,
GST_CLOCK_TIME_NONE, demux->position));
}
if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buf)))
demux->position += GST_BUFFER_DURATION (buf);
ret = gst_pad_push (demux->srcpad, buf); ret = gst_pad_push (demux->srcpad, buf);
if (ret != GST_FLOW_OK) if (ret != GST_FLOW_OK)
goto error; goto error;
@ -605,7 +647,7 @@ end_of_playlist:
cache_error: cache_error:
{ {
GST_ELEMENT_ERROR (demux, RESOURCE, NOT_FOUND, GST_ELEMENT_ERROR (demux, RESOURCE, NOT_FOUND,
("Could not cache the first fragments"), NULL); ("Could not cache the first fragments"), (NULL));
gst_hls_demux_stop (demux); gst_hls_demux_stop (demux);
return; return;
} }
@ -667,6 +709,7 @@ gst_hls_demux_reset (GstHLSDemux * demux, gboolean dispose)
demux->accumulated_delay = 0; demux->accumulated_delay = 0;
demux->end_of_playlist = FALSE; demux->end_of_playlist = FALSE;
demux->cancelled = FALSE; demux->cancelled = FALSE;
demux->do_typefind = TRUE;
if (demux->input_caps) { if (demux->input_caps) {
gst_caps_unref (demux->input_caps); gst_caps_unref (demux->input_caps);
@ -868,7 +911,7 @@ uri_error:
state_change_error: state_change_error:
{ {
GST_ELEMENT_ERROR (demux, CORE, STATE_CHANGE, GST_ELEMENT_ERROR (demux, CORE, STATE_CHANGE,
("Error changing state of the fetcher element."), NULL); ("Error changing state of the fetcher element."), (NULL));
bret = FALSE; bret = FALSE;
goto quit; goto quit;
} }
@ -946,6 +989,9 @@ gst_hls_demux_change_playlist (GstHLSDemux * demux, gboolean is_fast)
gst_element_post_message (GST_ELEMENT_CAST (demux), gst_element_post_message (GST_ELEMENT_CAST (demux),
gst_message_new_element (GST_OBJECT_CAST (demux), s)); gst_message_new_element (GST_OBJECT_CAST (demux), s));
/* Force typefinding since we might have changed media type */
demux->do_typefind = TRUE;
return TRUE; return TRUE;
} }
@ -993,6 +1039,9 @@ gst_hls_demux_switch_playlist (GstHLSDemux * demux)
limit = demux->client->current->targetduration * GST_SECOND * limit = demux->client->current->targetduration * GST_SECOND *
demux->bitrate_switch_tol; demux->bitrate_switch_tol;
GST_DEBUG ("diff:%s%" GST_TIME_FORMAT ", limit:%" GST_TIME_FORMAT,
diff < 0 ? "-" : " ", GST_TIME_ARGS (ABS (diff)), GST_TIME_ARGS (limit));
/* if we are on time switch to a higher bitrate */ /* if we are on time switch to a higher bitrate */
if (diff > limit) { if (diff > limit) {
gst_hls_demux_change_playlist (demux, TRUE); gst_hls_demux_change_playlist (demux, TRUE);
@ -1035,14 +1084,20 @@ gst_hls_demux_get_next_fragment (GstHLSDemux * demux, gboolean retry)
buf = gst_adapter_take_buffer (demux->download, avail); buf = gst_adapter_take_buffer (demux->download, avail);
GST_BUFFER_DURATION (buf) = duration; GST_BUFFER_DURATION (buf) = duration;
if (G_UNLIKELY (demux->input_caps == NULL)) { /* We actually need to do this every time we switch bitrate */
demux->input_caps = gst_type_find_helper_for_buffer (NULL, buf, NULL); if (G_UNLIKELY (demux->do_typefind)) {
if (demux->input_caps) { GstCaps *caps = gst_type_find_helper_for_buffer (NULL, buf, NULL);
gst_pad_set_caps (demux->srcpad, demux->input_caps);
if (!demux->input_caps || !gst_caps_is_equal (caps, demux->input_caps)) {
gst_caps_replace (&demux->input_caps, caps);
/* gst_pad_set_caps (demux->srcpad, demux->input_caps); */
GST_INFO_OBJECT (demux, "Input source caps: %" GST_PTR_FORMAT, GST_INFO_OBJECT (demux, "Input source caps: %" GST_PTR_FORMAT,
demux->input_caps); demux->input_caps);
demux->do_typefind = FALSE;
} else
gst_caps_unref (caps);
} }
} gst_buffer_set_caps (buf, demux->input_caps);
if (discont) { if (discont) {
GST_DEBUG_OBJECT (demux, "Marking fragment as discontinuous"); GST_DEBUG_OBJECT (demux, "Marking fragment as discontinuous");

View file

@ -61,7 +61,7 @@ struct _GstHLSDemux
GQueue *queue; /* Queue storing the fetched fragments */ GQueue *queue; /* Queue storing the fetched fragments */
gboolean need_cache; /* Wheter we need to cache some fragments before starting to push data */ gboolean need_cache; /* Wheter we need to cache some fragments before starting to push data */
gboolean end_of_playlist; gboolean end_of_playlist;
gboolean do_typefind; /* Whether we need to typefind the next buffer */
/* Properties */ /* Properties */
guint fragments_cache; /* number of fragments needed to be cached to start playing */ guint fragments_cache; /* number of fragments needed to be cached to start playing */
@ -87,6 +87,8 @@ struct _GstHLSDemux
gboolean cancelled; gboolean cancelled;
GstAdapter *download; GstAdapter *download;
/* Position in the stream */
GstClockTime position;
}; };
struct _GstHLSDemuxClass struct _GstHLSDemuxClass

View file

@ -2,7 +2,6 @@ plugin_LTLIBRARIES = libgstid3tag.la
libgstid3tag_la_SOURCES = \ libgstid3tag_la_SOURCES = \
gstid3mux.c \ gstid3mux.c \
gsttagmux.c \
id3tag.c id3tag.c
libgstid3tag_la_CFLAGS = \ libgstid3tag_la_CFLAGS = \
@ -16,7 +15,7 @@ libgstid3tag_la_LIBADD = \
libgstid3tag_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) libgstid3tag_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstid3tag_la_LIBTOOLFLAGS = --tag=disable-static libgstid3tag_la_LIBTOOLFLAGS = --tag=disable-static
noinst_HEADERS = gstid3mux.h gsttagmux.h id3tag.h noinst_HEADERS = gstid3mux.h id3tag.h
Android.mk: Makefile.am $(BUILT_SOURCES) Android.mk: Makefile.am $(BUILT_SOURCES)
androgenizer \ androgenizer \

View file

@ -71,6 +71,11 @@ enum
#define DEFAULT_WRITE_V2 TRUE #define DEFAULT_WRITE_V2 TRUE
#define DEFAULT_V2_MAJOR_VERSION 3 #define DEFAULT_V2_MAJOR_VERSION 3
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("ANY"));
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src", static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC, GST_PAD_SRC,
GST_PAD_ALWAYS, GST_PAD_ALWAYS,
@ -79,9 +84,9 @@ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_BOILERPLATE (GstId3Mux, gst_id3_mux, GstTagMux, GST_TYPE_TAG_MUX); GST_BOILERPLATE (GstId3Mux, gst_id3_mux, GstTagMux, GST_TYPE_TAG_MUX);
static GstBuffer *gst_id3_mux_render_v2_tag (GstTagMux * mux, static GstBuffer *gst_id3_mux_render_v2_tag (GstTagMux * mux,
GstTagList * taglist); const GstTagList * taglist);
static GstBuffer *gst_id3_mux_render_v1_tag (GstTagMux * mux, static GstBuffer *gst_id3_mux_render_v1_tag (GstTagMux * mux,
GstTagList * taglist); const GstTagList * taglist);
static void gst_id3_mux_set_property (GObject * object, guint prop_id, static void gst_id3_mux_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec); const GValue * value, GParamSpec * pspec);
@ -93,6 +98,9 @@ gst_id3_mux_base_init (gpointer g_class)
{ {
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_template));
gst_element_class_add_pad_template (element_class, gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_template)); gst_static_pad_template_get (&src_template));
@ -129,8 +137,8 @@ gst_id3_mux_class_init (GstId3MuxClass * klass)
GST_TAG_MUX_CLASS (klass)->render_start_tag = GST_TAG_MUX_CLASS (klass)->render_start_tag =
GST_DEBUG_FUNCPTR (gst_id3_mux_render_v2_tag); GST_DEBUG_FUNCPTR (gst_id3_mux_render_v2_tag);
GST_TAG_MUX_CLASS (klass)->render_end_tag =
GST_TAG_MUX_CLASS (klass)->render_end_tag = gst_id3_mux_render_v1_tag; GST_DEBUG_FUNCPTR (gst_id3_mux_render_v1_tag);
} }
static void static void
@ -187,7 +195,7 @@ gst_id3_mux_get_property (GObject * object, guint prop_id,
} }
static GstBuffer * static GstBuffer *
gst_id3_mux_render_v2_tag (GstTagMux * mux, GstTagList * taglist) gst_id3_mux_render_v2_tag (GstTagMux * mux, const GstTagList * taglist)
{ {
GstId3Mux *id3mux = GST_ID3_MUX (mux); GstId3Mux *id3mux = GST_ID3_MUX (mux);
@ -198,7 +206,7 @@ gst_id3_mux_render_v2_tag (GstTagMux * mux, GstTagList * taglist)
} }
static GstBuffer * static GstBuffer *
gst_id3_mux_render_v1_tag (GstTagMux * mux, GstTagList * taglist) gst_id3_mux_render_v1_tag (GstTagMux * mux, const GstTagList * taglist)
{ {
GstId3Mux *id3mux = GST_ID3_MUX (mux); GstId3Mux *id3mux = GST_ID3_MUX (mux);

View file

@ -23,7 +23,7 @@
#ifndef GST_ID3_MUX_H #ifndef GST_ID3_MUX_H
#define GST_ID3_MUX_H #define GST_ID3_MUX_H
#include "gsttagmux.h" #include <gst/tag/gsttagmux.h>
#include "id3tag.h" #include "id3tag.h"
G_BEGIN_DECLS G_BEGIN_DECLS

View file

@ -1,495 +0,0 @@
/* GStreamer tag muxer base class
*
* Copyright (C) 2006 Christophe Fergeau <teuf@gnome.org>
* Copyright (C) 2006 Tim-Philipp Müller <tim centricular net>
* Copyright (C) 2006 Sebastian Dröge <slomo@circular-chaos.org>
* Copyright (C) 2009 Pioneers of the Inevitable <songbird@songbirdnest.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <string.h>
#include <gst/gsttagsetter.h>
#include <gst/tag/tag.h>
#include "gsttagmux.h"
GST_DEBUG_CATEGORY_STATIC (gst_tag_mux_debug);
#define GST_CAT_DEFAULT gst_tag_mux_debug
/* Subclass provides a src template and pad. We accept anything as input here,
however. */
static GstStaticPadTemplate gst_tag_mux_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("ANY"));
/* Attach the GstTagSetter interface to the type.  The GInterfaceInfo has
 * no init/finalize callbacks because no per-instance setup is needed:
 * the interface merely has to be present so that applications can use
 * the tag-setter API on the muxer. */
static void
gst_tag_mux_iface_init (GType tag_type)
{
  static const GInterfaceInfo tag_setter_info = { NULL, NULL, NULL };

  g_type_add_interface_static (tag_type, GST_TYPE_TAG_SETTER,
      &tag_setter_info);
}
/* make sure to register a less generic type so we can easily move this
* GstTagMux base class into -base without causing GType name conflicts */
typedef GstTagMux GstId3BaseMux;
typedef GstTagMuxClass GstId3BaseMuxClass;
GST_BOILERPLATE_FULL (GstId3BaseMux, gst_tag_mux,
GstElement, GST_TYPE_ELEMENT, gst_tag_mux_iface_init);
static GstStateChangeReturn
gst_tag_mux_change_state (GstElement * element, GstStateChange transition);
static GstFlowReturn gst_tag_mux_chain (GstPad * pad, GstBuffer * buffer);
static gboolean gst_tag_mux_sink_event (GstPad * pad, GstEvent * event);
/* GObject finalize: release everything the muxer may still be holding —
 * the cached upstream newsegment event and both tag lists — then chain
 * up to the parent class. */
static void
gst_tag_mux_finalize (GObject * obj)
{
  GstTagMux *mux = GST_TAG_MUX (obj);

  /* newsegment event cached by the sink event handler until the start
   * tag has been rendered */
  if (mux->newsegment_ev) {
    gst_event_unref (mux->newsegment_ev);
    mux->newsegment_ev = NULL;
  }
  /* tags accumulated from upstream GST_EVENT_TAG events */
  if (mux->event_tags) {
    gst_tag_list_free (mux->event_tags);
    mux->event_tags = NULL;
  }
  /* merged tag list cached by gst_tag_mux_get_tags() */
  if (mux->final_tags) {
    gst_tag_list_free (mux->final_tags);
    mux->final_tags = NULL;
  }
  G_OBJECT_CLASS (parent_class)->finalize (obj);
}
/* base_init: install the generic "ANY"-caps sink pad template (the src
 * template is provided by the subclass) and set up the debug category. */
static void
gst_tag_mux_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_tag_mux_sink_template));
  GST_DEBUG_CATEGORY_INIT (gst_tag_mux_debug, "id3basemux", 0,
      "tag muxer base class for Id3Mux");
}
/* class_init: hook up our finalize and state-change implementations.
 * The render_start_tag/render_end_tag vfuncs are left NULL here and must
 * be filled in by subclasses. */
static void
gst_tag_mux_class_init (GstTagMuxClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;
  gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_tag_mux_finalize);
  gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_tag_mux_change_state);
}
/* instance init: create the sink pad from our static template, create the
 * src pad from the "src" template the subclass registered (if any), and
 * mark both the start and the end tag as still to be rendered. */
static void
gst_tag_mux_init (GstTagMux * mux, GstTagMuxClass * mux_class)
{
  GstElementClass *element_klass = GST_ELEMENT_CLASS (mux_class);
  GstPadTemplate *tmpl;

  /* pad through which data comes in to the element */
  mux->sinkpad =
      gst_pad_new_from_static_template (&gst_tag_mux_sink_template, "sink");
  gst_pad_set_chain_function (mux->sinkpad,
      GST_DEBUG_FUNCPTR (gst_tag_mux_chain));
  gst_pad_set_event_function (mux->sinkpad,
      GST_DEBUG_FUNCPTR (gst_tag_mux_sink_event));
  gst_element_add_pad (GST_ELEMENT (mux), mux->sinkpad);

  /* pad through which data goes out of the element; only created if the
   * subclass registered a "src" pad template, with fixed caps taken from
   * that template */
  tmpl = gst_element_class_get_pad_template (element_klass, "src");
  if (tmpl) {
    mux->srcpad = gst_pad_new_from_template (tmpl, "src");
    gst_pad_use_fixed_caps (mux->srcpad);
    gst_pad_set_caps (mux->srcpad, gst_pad_template_get_caps (tmpl));
    gst_element_add_pad (GST_ELEMENT (mux), mux->srcpad);
  }

  mux->render_start_tag = TRUE;
  mux->render_end_tag = TRUE;
}
/* Merge the tags set on the element through the GstTagSetter interface
 * with the tags received via upstream tag events, honouring the
 * tag-setter's merge mode.  The merged list is computed once and cached
 * in mux->final_tags (freed in finalize); the cached list is returned on
 * subsequent calls. */
static GstTagList *
gst_tag_mux_get_tags (GstTagMux * mux)
{
  GstTagSetter *tagsetter = GST_TAG_SETTER (mux);
  const GstTagList *tagsetter_tags;
  GstTagMergeMode merge_mode;

  /* already merged earlier? */
  if (mux->final_tags)
    return mux->final_tags;

  tagsetter_tags = gst_tag_setter_get_tag_list (tagsetter);
  merge_mode = gst_tag_setter_get_tag_merge_mode (tagsetter);

  GST_LOG_OBJECT (mux, "merging tags, merge mode = %d", merge_mode);
  GST_LOG_OBJECT (mux, "event tags: %" GST_PTR_FORMAT, mux->event_tags);
  GST_LOG_OBJECT (mux, "set tags: %" GST_PTR_FORMAT, tagsetter_tags);

  mux->final_tags =
      gst_tag_list_merge (tagsetter_tags, mux->event_tags, merge_mode);

  GST_LOG_OBJECT (mux, "final tags: %" GST_PTR_FORMAT, mux->final_tags);

  return mux->final_tags;
}
/* Render and push the start tag via the subclass's render_start_tag
 * vfunc.  Pushes a BYTES newsegment starting at offset 0 so the tag
 * lands at the very beginning of the file, then a tag event for
 * downstream, then the tag buffer itself, and records the tag size so
 * later data offsets can be shifted.  A NULL buffer from the subclass
 * means "no start tag" and is not an error; a missing vfunc is. */
static GstFlowReturn
gst_tag_mux_render_start_tag (GstTagMux * mux)
{
  GstTagMuxClass *klass;
  GstBuffer *buffer;
  GstTagList *taglist;
  GstEvent *event;
  GstFlowReturn ret;

  taglist = gst_tag_mux_get_tags (mux);

  klass = GST_TAG_MUX_CLASS (G_OBJECT_GET_CLASS (mux));

  if (klass->render_start_tag == NULL)
    goto no_vfunc;

  buffer = klass->render_start_tag (mux, taglist);

  /* Null buffer is ok, just means we're not outputting anything */
  if (buffer == NULL) {
    GST_INFO_OBJECT (mux, "No start tag generated");
    mux->start_tag_size = 0;
    return GST_FLOW_OK;
  }

  mux->start_tag_size = GST_BUFFER_SIZE (buffer);
  GST_LOG_OBJECT (mux, "tag size = %" G_GSIZE_FORMAT " bytes",
      mux->start_tag_size);

  /* Send newsegment event from byte position 0, so the tag really gets
   * written to the start of the file, independent of the upstream segment */
  gst_pad_push_event (mux->srcpad,
      gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0));

  /* Send an event about the new tags to downstream elements */
  /* gst_event_new_tag takes ownership of the list, so use a copy */
  event = gst_event_new_tag (gst_tag_list_copy (taglist));
  gst_pad_push_event (mux->srcpad, event);

  GST_BUFFER_OFFSET (buffer) = 0;
  ret = gst_pad_push (mux->srcpad, buffer);

  /* everything pushed from now on is offset by the tag we just wrote */
  mux->current_offset = mux->start_tag_size;
  mux->max_offset = MAX (mux->max_offset, mux->current_offset);

  return ret;

no_vfunc:
  {
    GST_ERROR_OBJECT (mux, "Subclass does not implement "
        "render_start_tag vfunc!");
    return GST_FLOW_ERROR;
  }
}
/* Render and push the end tag via the subclass's render_end_tag vfunc.
 * Pushes a BYTES newsegment positioned at the largest byte offset written
 * so far, so the tag is appended at the end of the data regardless of
 * upstream segments.  A NULL buffer from the subclass means "no end tag"
 * and is not an error; a missing vfunc is. */
static GstFlowReturn
gst_tag_mux_render_end_tag (GstTagMux * mux)
{
  GstTagMuxClass *klass;
  GstBuffer *buffer;
  GstTagList *taglist;
  GstFlowReturn ret;

  taglist = gst_tag_mux_get_tags (mux);

  klass = GST_TAG_MUX_CLASS (G_OBJECT_GET_CLASS (mux));

  if (klass->render_end_tag == NULL)
    goto no_vfunc;

  buffer = klass->render_end_tag (mux, taglist);

  if (buffer == NULL) {
    GST_INFO_OBJECT (mux, "No end tag generated");
    mux->end_tag_size = 0;
    return GST_FLOW_OK;
  }

  mux->end_tag_size = GST_BUFFER_SIZE (buffer);
  GST_LOG_OBJECT (mux, "tag size = %" G_GSIZE_FORMAT " bytes",
      mux->end_tag_size);

  /* Send newsegment event from the end of the file, so it gets written there,
     independent of whatever new segment events upstream has sent us */
  gst_pad_push_event (mux->srcpad,
      gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, mux->max_offset,
          -1, 0));

  GST_BUFFER_OFFSET (buffer) = mux->max_offset;
  ret = gst_pad_push (mux->srcpad, buffer);

  return ret;

no_vfunc:
  {
    GST_ERROR_OBJECT (mux, "Subclass does not implement "
        "render_end_tag vfunc!");
    return GST_FLOW_ERROR;
  }
}
/* Return a newly-created BYTES newsegment event equal to
 * @newsegment_event but with start/stop/position shifted forward by the
 * size of the start tag we wrote, so downstream byte offsets account for
 * the prepended tag.  Only valid for GST_FORMAT_BYTES segments
 * (asserted).  Caller owns the returned event. */
static GstEvent *
gst_tag_mux_adjust_event_offsets (GstTagMux * mux,
    const GstEvent * newsegment_event)
{
  GstFormat format;
  gint64 start, stop, cur;

  gst_event_parse_new_segment ((GstEvent *) newsegment_event, NULL, NULL,
      &format, &start, &stop, &cur);

  g_assert (format == GST_FORMAT_BYTES);

  /* -1 means 'unset' and must not be shifted */
  if (start != -1)
    start += mux->start_tag_size;
  if (stop != -1)
    stop += mux->start_tag_size;
  if (cur != -1)
    cur += mux->start_tag_size;

  GST_DEBUG_OBJECT (mux, "adjusting newsegment event offsets to start=%"
      G_GINT64_FORMAT ", stop=%" G_GINT64_FORMAT ", cur=%" G_GINT64_FORMAT
      " (delta = +%" G_GSIZE_FORMAT ")", start, stop, cur, mux->start_tag_size);

  return gst_event_new_new_segment (TRUE, 1.0, format, start, stop, cur);
}
/* Sink pad chain function: on the first buffer, render and push the start
 * tag and replay the cached upstream newsegment event with byte offsets
 * adjusted for the tag; then forward the buffer, shifting its offset by
 * the start tag size and tracking the current/maximum write position. */
static GstFlowReturn
gst_tag_mux_chain (GstPad * pad, GstBuffer * buffer)
{
  GstTagMux *mux = GST_TAG_MUX (GST_OBJECT_PARENT (pad));
  GstFlowReturn ret;
  int length;

  if (mux->render_start_tag) {
    /* first buffer: the tag must be written before any data */
    GST_INFO_OBJECT (mux, "Adding tags to stream");
    ret = gst_tag_mux_render_start_tag (mux);
    if (ret != GST_FLOW_OK) {
      GST_DEBUG_OBJECT (mux, "flow: %s", gst_flow_get_name (ret));
      gst_buffer_unref (buffer);
      return ret;
    }

    /* Now send the cached newsegment event that we got from upstream */
    if (mux->newsegment_ev) {
      gint64 start;
      GstEvent *newseg;

      GST_DEBUG_OBJECT (mux, "sending cached newsegment event");
      newseg = gst_tag_mux_adjust_event_offsets (mux, mux->newsegment_ev);
      gst_event_unref (mux->newsegment_ev);
      mux->newsegment_ev = NULL;

      /* remember the (already adjusted) segment start as our write pos */
      gst_event_parse_new_segment (newseg, NULL, NULL, NULL, &start, NULL,
          NULL);

      gst_pad_push_event (mux->srcpad, newseg);
      mux->current_offset = start;
      mux->max_offset = MAX (mux->max_offset, mux->current_offset);
    } else {
      /* upstream sent no newsegment event or only one in a non-BYTE format */
    }

    mux->render_start_tag = FALSE;
  }

  buffer = gst_buffer_make_metadata_writable (buffer);

  /* shift the buffer's byte offset by the prepended start tag, if set */
  if (GST_BUFFER_OFFSET (buffer) != GST_BUFFER_OFFSET_NONE) {
    GST_LOG_OBJECT (mux, "Adjusting buffer offset from %" G_GINT64_FORMAT
        " to %" G_GINT64_FORMAT, GST_BUFFER_OFFSET (buffer),
        GST_BUFFER_OFFSET (buffer) + mux->start_tag_size);
    GST_BUFFER_OFFSET (buffer) += mux->start_tag_size;
  }

  /* save the size before gst_pad_push() takes ownership of the buffer */
  length = GST_BUFFER_SIZE (buffer);

  gst_buffer_set_caps (buffer, GST_PAD_CAPS (mux->srcpad));
  ret = gst_pad_push (mux->srcpad, buffer);

  mux->current_offset += length;
  mux->max_offset = MAX (mux->max_offset, mux->current_offset);

  return ret;
}
/* Sink pad event handler:
 *  - TAG: accumulate the tags into mux->event_tags and drop the event
 *    (a fresh tag event is pushed downstream in render_start_tag);
 *  - NEWSEGMENT: only BYTES segments are handled; cached until the start
 *    tag has been rendered (its size is unknown before that), otherwise
 *    forwarded with byte offsets adjusted for the tag;
 *  - EOS: render the end tag first, then forward the EOS;
 *  - everything else is forwarded with the default handler. */
static gboolean
gst_tag_mux_sink_event (GstPad * pad, GstEvent * event)
{
  GstTagMux *mux;
  gboolean result;

  mux = GST_TAG_MUX (gst_pad_get_parent (pad));
  result = FALSE;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_TAG:{
      GstTagList *tags;

      gst_event_parse_tag (event, &tags);

      GST_INFO_OBJECT (mux, "Got tag event: %" GST_PTR_FORMAT, tags);

      if (mux->event_tags != NULL) {
        gst_tag_list_insert (mux->event_tags, tags, GST_TAG_MERGE_REPLACE);
      } else {
        mux->event_tags = gst_tag_list_copy (tags);
      }

      GST_INFO_OBJECT (mux, "Event tags are now: %" GST_PTR_FORMAT,
          mux->event_tags);

      /* just drop the event, we'll push a new tag event in render_start_tag */
      gst_event_unref (event);
      result = TRUE;
      break;
    }
    case GST_EVENT_NEWSEGMENT:{
      GstFormat fmt;
      gint64 start;

      gst_event_parse_new_segment (event, NULL, NULL, &fmt, &start, NULL, NULL);

      /* only byte segments can be offset-adjusted for the tag */
      if (fmt != GST_FORMAT_BYTES) {
        GST_WARNING_OBJECT (mux, "dropping newsegment event in %s format",
            gst_format_get_name (fmt));
        gst_event_unref (event);
        break;
      }

      if (mux->render_start_tag) {
        /* we have not rendered the tag yet, which means that we don't know
         * how large it is going to be yet, so we can't adjust the offsets
         * here at this point and need to cache the newsegment event for now
         * (also, there could be tag events coming after this newsegment event
         * and before the first buffer). */
        if (mux->newsegment_ev) {
          GST_WARNING_OBJECT (mux, "discarding old cached newsegment event");
          gst_event_unref (mux->newsegment_ev);
        }
        GST_LOG_OBJECT (mux, "caching newsegment event for later");
        mux->newsegment_ev = event;
      } else {
        GST_DEBUG_OBJECT (mux, "got newsegment event, adjusting offsets");
        gst_pad_push_event (mux->srcpad,
            gst_tag_mux_adjust_event_offsets (mux, event));
        gst_event_unref (event);

        mux->current_offset = start;
        mux->max_offset = MAX (mux->max_offset, mux->current_offset);
      }
      /* event was either cached or consumed above */
      event = NULL;
      result = TRUE;
      break;
    }
    case GST_EVENT_EOS:{
      if (mux->render_end_tag) {
        GstFlowReturn ret;

        GST_INFO_OBJECT (mux, "Adding tags to stream");
        ret = gst_tag_mux_render_end_tag (mux);
        if (ret != GST_FLOW_OK) {
          GST_DEBUG_OBJECT (mux, "flow: %s", gst_flow_get_name (ret));
          /* NOTE(review): this returns a GstFlowReturn from a gboolean
           * handler, leaks the EOS event, and skips gst_object_unref (mux)
           * below on this path — confirm intended */
          return ret;
        }

        mux->render_end_tag = FALSE;
      }

      /* Now forward EOS */
      result = gst_pad_event_default (pad, event);
      break;
    }
    default:
      result = gst_pad_event_default (pad, event);
      break;
  }

  gst_object_unref (mux);

  return result;
}
/* Element state change: chain up to the parent class first and pass any
 * non-SUCCESS result through unchanged; on PAUSED->READY drop the cached
 * newsegment event and event tags and reset all tag/offset bookkeeping
 * so the element can handle a fresh stream. */
static GstStateChangeReturn
gst_tag_mux_change_state (GstElement * element, GstStateChange transition)
{
  GstTagMux *mux;
  GstStateChangeReturn result;

  mux = GST_TAG_MUX (element);

  result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
  if (result != GST_STATE_CHANGE_SUCCESS) {
    return result;
  }

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:{
      if (mux->newsegment_ev) {
        gst_event_unref (mux->newsegment_ev);
        mux->newsegment_ev = NULL;
      }
      if (mux->event_tags) {
        gst_tag_list_free (mux->event_tags);
        mux->event_tags = NULL;
      }
      /* NOTE(review): final_tags is not cleared here, only in finalize —
       * confirm whether the merged list from a previous stream may be
       * reused after a reset */
      mux->start_tag_size = 0;
      mux->end_tag_size = 0;
      mux->render_start_tag = TRUE;
      mux->render_end_tag = TRUE;
      mux->current_offset = 0;
      mux->max_offset = 0;
      break;
    }
    default:
      break;
  }

  return result;
}

View file

@ -1,79 +0,0 @@
/* GStreamer tag muxer base class
*
* Copyright (C) 2006 Christophe Fergeau <teuf@gnome.org>
* Copyright (C) 2006 Tim-Philipp Müller <tim centricular net>
* Copyright (C) 2009 Pioneers of the Inevitable <songbird@songbirdnest.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef GST_TAG_MUX_H
#define GST_TAG_MUX_H
#include <gst/gst.h>
G_BEGIN_DECLS
typedef struct _GstTagMux GstTagMux;
typedef struct _GstTagMuxClass GstTagMuxClass;

/* Definition of structure storing data for this element. */
struct _GstTagMux {
  GstElement element;

  GstPad *srcpad;               /* created from the subclass's "src" template */
  GstPad *sinkpad;              /* accepts ANY caps */

  GstTagList *event_tags;       /* tags received from upstream elements */
  GstTagList *final_tags;       /* Final set of tags used for muxing */

  gsize start_tag_size;         /* size in bytes of the rendered start tag */
  gsize end_tag_size;           /* size in bytes of the rendered end tag */

  gboolean render_start_tag;    /* TRUE until the start tag has been pushed */
  gboolean render_end_tag;      /* TRUE until the end tag has been pushed */

  gint64 current_offset;        /* current byte write position downstream */
  gint64 max_offset;            /* highest byte position written so far */

  GstEvent *newsegment_ev;      /* cached newsegment event from upstream */
};

/* Standard definition defining a class for this element. */
struct _GstTagMuxClass {
  GstElementClass parent_class;

  /* vfuncs: subclasses return a buffer holding the serialized tag, or
   * NULL for "no tag"; the base class handles pushing and offsets */
  GstBuffer * (*render_start_tag) (GstTagMux * mux, GstTagList * tag_list);
  GstBuffer * (*render_end_tag) (GstTagMux * mux, GstTagList * tag_list);
};
/* Standard macros for defining types for this element. */
#define GST_TYPE_TAG_MUX \
(gst_tag_mux_get_type())
#define GST_TAG_MUX(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_TAG_MUX,GstTagMux))
#define GST_TAG_MUX_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_TAG_MUX,GstTagMuxClass))
#define GST_IS_TAG_MUX(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_TAG_MUX))
#define GST_IS_TAG_MUX_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_TAG_MUX))
/* Standard function returning type information. */
GType gst_tag_mux_get_type (void);
G_END_DECLS
#endif

View file

@ -1132,7 +1132,7 @@ foreach_add_tag (const GstTagList * list, const gchar * tag, gpointer userdata)
} }
GstBuffer * GstBuffer *
id3_mux_render_v2_tag (GstTagMux * mux, GstTagList * taglist, int version) id3_mux_render_v2_tag (GstTagMux * mux, const GstTagList * taglist, int version)
{ {
GstId3v2Tag tag; GstId3v2Tag tag;
GstBuffer *buf; GstBuffer *buf;
@ -1159,7 +1159,6 @@ id3_mux_render_v2_tag (GstTagMux * mux, GstTagList * taglist, int version)
/* Create buffer with tag */ /* Create buffer with tag */
buf = id3v2_tag_to_buffer (&tag); buf = id3v2_tag_to_buffer (&tag);
gst_buffer_set_caps (buf, GST_PAD_CAPS (mux->srcpad));
GST_LOG_OBJECT (mux, "tag size = %d bytes", GST_BUFFER_SIZE (buf)); GST_LOG_OBJECT (mux, "tag size = %d bytes", GST_BUFFER_SIZE (buf));
id3v2_tag_unset (&tag); id3v2_tag_unset (&tag);
@ -1285,7 +1284,7 @@ static const struct
}; };
GstBuffer * GstBuffer *
id3_mux_render_v1_tag (GstTagMux * mux, GstTagList * taglist) id3_mux_render_v1_tag (GstTagMux * mux, const GstTagList * taglist)
{ {
GstBuffer *buf = gst_buffer_new_and_alloc (ID3_V1_TAG_SIZE); GstBuffer *buf = gst_buffer_new_and_alloc (ID3_V1_TAG_SIZE);
guint8 *data = GST_BUFFER_DATA (buf); guint8 *data = GST_BUFFER_DATA (buf);
@ -1312,6 +1311,5 @@ id3_mux_render_v1_tag (GstTagMux * mux, GstTagList * taglist)
return NULL; return NULL;
} }
gst_buffer_set_caps (buf, GST_PAD_CAPS (mux->srcpad));
return buf; return buf;
} }

View file

@ -17,16 +17,18 @@
* Boston, MA 02111-1307, USA. * Boston, MA 02111-1307, USA.
*/ */
#include "gsttagmux.h" #include <gst/tag/gsttagmux.h>
G_BEGIN_DECLS G_BEGIN_DECLS
#define ID3_VERSION_2_3 3 #define ID3_VERSION_2_3 3
#define ID3_VERSION_2_4 4 #define ID3_VERSION_2_4 4
GstBuffer * id3_mux_render_v2_tag (GstTagMux * mux, GstTagList * taglist, GstBuffer * id3_mux_render_v2_tag (GstTagMux * mux,
const GstTagList * taglist,
int version); int version);
GstBuffer * id3_mux_render_v1_tag (GstTagMux * mux, GstTagList * taglist);
GstBuffer * id3_mux_render_v1_tag (GstTagMux * mux,
const GstTagList * taglist);
G_END_DECLS G_END_DECLS

2
gst/inter/.gitignore vendored Normal file
View file

@ -0,0 +1,2 @@
gstintertest

56
gst/inter/Makefile.am Normal file
View file

@ -0,0 +1,56 @@
plugin_LTLIBRARIES = libgstinter.la
noinst_PROGRAMS = gstintertest
libgstinter_la_SOURCES = \
gstinteraudiosink.c \
gstinteraudiosrc.c \
gstintervideosink.c \
gstintervideosrc.c \
gstinter.c \
gstintersurface.c
noinst_HEADERS = \
gstinteraudiosink.h \
gstinteraudiosrc.h \
gstintervideosink.h \
gstintervideosrc.h \
gstintersurface.h
libgstinter_la_CFLAGS = \
$(GST_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS)
libgstinter_la_LIBADD = \
$(GST_LIBS) \
$(GST_PLUGINS_BASE_LIBS) -lgstvideo-@GST_MAJORMINOR@ -lgstaudio-@GST_MAJORMINOR@ \
$(LIBM)
libgstinter_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstinter_la_LIBTOOLFLAGS = --tag=disable-static
gstintertest_SOURCES = \
gstintertest.c
gstintertest_CFLAGS = \
$(GST_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS)
gstintertest_LDADD = \
$(GST_LIBS) \
$(GST_PLUGINS_BASE_LIBS) \
$(LIBM)
Android.mk: Makefile.am $(BUILT_SOURCES)
androgenizer \
-:PROJECT libgstinter -:SHARED libgstinter \
-:TAGS eng debug \
-:REL_TOP $(top_srcdir) -:ABS_TOP $(abs_top_srcdir) \
-:SOURCES $(libgstinter_la_SOURCES) \
-:CFLAGS $(DEFS) $(DEFAULT_INCLUDES) $(libgstinter_la_CFLAGS) \
-:LDFLAGS $(libgstinter_la_LDFLAGS) \
$(libgstinter_la_LIBADD) \
-ldl \
-:PASSTHROUGH LOCAL_ARM_MODE:=arm \
LOCAL_MODULE_PATH:='$$(TARGET_OUT)/lib/gstreamer-0.10' \
> $@

51
gst/inter/gstinter.c Normal file
View file

@ -0,0 +1,51 @@
/* GStreamer
* Copyright (C) 2011 FIXME <fixme@example.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
* Boston, MA 02110-1335, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstinteraudiosrc.h"
#include "gstinteraudiosink.h"
#include "gstintervideosrc.h"
#include "gstintervideosink.h"
#include "gstintersurface.h"
/* Plugin entry point: register the four inter-pipeline elements and run
 * the one-time surface-subsystem initialisation (gst_inter_surface_init;
 * implementation not visible here). */
static gboolean
plugin_init (GstPlugin * plugin)
{
  gst_element_register (plugin, "interaudiosrc", GST_RANK_NONE,
      GST_TYPE_INTER_AUDIO_SRC);
  gst_element_register (plugin, "interaudiosink", GST_RANK_NONE,
      GST_TYPE_INTER_AUDIO_SINK);
  gst_element_register (plugin, "intervideosrc", GST_RANK_NONE,
      GST_TYPE_INTER_VIDEO_SRC);
  gst_element_register (plugin, "intervideosink", GST_RANK_NONE,
      GST_TYPE_INTER_VIDEO_SINK);
  gst_inter_surface_init ();

  return TRUE;
}

GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    "inter",
    "plugin for inter-pipeline communication",
    plugin_init, VERSION, "LGPL", PACKAGE_NAME, GST_PACKAGE_ORIGIN)

View file

@ -0,0 +1,342 @@
/* GStreamer
* Copyright (C) 2011 David A. Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
* Boston, MA 02110-1335, USA.
*/
/**
* SECTION:element-gstinteraudiosink
*
* The interaudiosink element does FIXME stuff.
*
* <refsect2>
* <title>Example launch line</title>
* |[
* gst-launch -v fakesrc ! interaudiosink ! FIXME ! fakesink
* ]|
* FIXME Describe what the pipeline does.
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/base/gstbasesink.h>
#include <gst/audio/audio.h>
#include "gstinteraudiosink.h"
#include <string.h>
GST_DEBUG_CATEGORY_STATIC (gst_inter_audio_sink_debug_category);
#define GST_CAT_DEFAULT gst_inter_audio_sink_debug_category
/* prototypes */
static void gst_inter_audio_sink_set_property (GObject * object,
guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_inter_audio_sink_get_property (GObject * object,
guint property_id, GValue * value, GParamSpec * pspec);
static void gst_inter_audio_sink_dispose (GObject * object);
static void gst_inter_audio_sink_finalize (GObject * object);
static GstCaps *gst_inter_audio_sink_get_caps (GstBaseSink * sink);
static gboolean gst_inter_audio_sink_set_caps (GstBaseSink * sink,
GstCaps * caps);
static GstFlowReturn gst_inter_audio_sink_buffer_alloc (GstBaseSink * sink,
guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf);
static void gst_inter_audio_sink_get_times (GstBaseSink * sink,
GstBuffer * buffer, GstClockTime * start, GstClockTime * end);
static gboolean gst_inter_audio_sink_start (GstBaseSink * sink);
static gboolean gst_inter_audio_sink_stop (GstBaseSink * sink);
static gboolean gst_inter_audio_sink_unlock (GstBaseSink * sink);
static gboolean gst_inter_audio_sink_event (GstBaseSink * sink,
GstEvent * event);
static GstFlowReturn gst_inter_audio_sink_preroll (GstBaseSink * sink,
GstBuffer * buffer);
static GstFlowReturn gst_inter_audio_sink_render (GstBaseSink * sink,
GstBuffer * buffer);
static GstStateChangeReturn gst_inter_audio_sink_async_play (GstBaseSink *
sink);
static gboolean gst_inter_audio_sink_activate_pull (GstBaseSink * sink,
gboolean active);
static gboolean gst_inter_audio_sink_unlock_stop (GstBaseSink * sink);
enum
{
PROP_0
};
/* pad templates */
static GstStaticPadTemplate gst_inter_audio_sink_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("audio/x-raw-int, "
"endianness = (int) BYTE_ORDER, "
"signed = (boolean) true, "
"width = (int) 16, "
"depth = (int) 16, "
"rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]")
);
/* class initialization */
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_inter_audio_sink_debug_category, "interaudiosink", 0, \
"debug category for interaudiosink element");
GST_BOILERPLATE_FULL (GstInterAudioSink, gst_inter_audio_sink, GstBaseSink,
GST_TYPE_BASE_SINK, DEBUG_INIT);
/* base_init: install the fixed 16-bit integer audio sink pad template
 * and the element details (still placeholder FIXME text). */
static void
gst_inter_audio_sink_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_inter_audio_sink_sink_template));

  gst_element_class_set_details_simple (element_class, "FIXME Long name",
      "Generic", "FIXME Description", "FIXME <fixme@example.com>");
}
/* class_init: wire up the GObject and GstBaseSink vtables.  Overrides
 * guarded by "if (0)" are deliberately compiled out so the base-class
 * defaults are used; their implementations are kept below so they can
 * be re-enabled easily. */
static void
gst_inter_audio_sink_class_init (GstInterAudioSinkClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstBaseSinkClass *base_sink_class = GST_BASE_SINK_CLASS (klass);

  gobject_class->set_property = gst_inter_audio_sink_set_property;
  gobject_class->get_property = gst_inter_audio_sink_get_property;
  gobject_class->dispose = gst_inter_audio_sink_dispose;
  gobject_class->finalize = gst_inter_audio_sink_finalize;
  base_sink_class->get_caps = GST_DEBUG_FUNCPTR (gst_inter_audio_sink_get_caps);
  base_sink_class->set_caps = GST_DEBUG_FUNCPTR (gst_inter_audio_sink_set_caps);
  if (0)                        /* disabled: base-class buffer_alloc is used */
    base_sink_class->buffer_alloc =
        GST_DEBUG_FUNCPTR (gst_inter_audio_sink_buffer_alloc);
  base_sink_class->get_times =
      GST_DEBUG_FUNCPTR (gst_inter_audio_sink_get_times);
  base_sink_class->start = GST_DEBUG_FUNCPTR (gst_inter_audio_sink_start);
  base_sink_class->stop = GST_DEBUG_FUNCPTR (gst_inter_audio_sink_stop);
  base_sink_class->unlock = GST_DEBUG_FUNCPTR (gst_inter_audio_sink_unlock);
  if (0)                        /* disabled */
    base_sink_class->event = GST_DEBUG_FUNCPTR (gst_inter_audio_sink_event);
  /* preroll and render are always installed */
  base_sink_class->preroll = GST_DEBUG_FUNCPTR (gst_inter_audio_sink_preroll);
  base_sink_class->render = GST_DEBUG_FUNCPTR (gst_inter_audio_sink_render);
  if (0)                        /* disabled */
    base_sink_class->async_play =
        GST_DEBUG_FUNCPTR (gst_inter_audio_sink_async_play);
  if (0)                        /* disabled */
    base_sink_class->activate_pull =
        GST_DEBUG_FUNCPTR (gst_inter_audio_sink_activate_pull);
  base_sink_class->unlock_stop =
      GST_DEBUG_FUNCPTR (gst_inter_audio_sink_unlock_stop);
}
/* instance init: grab the shared "default" inter surface.
 * NOTE(review): GstBaseSink already creates and adds the sink pad; the
 * pad built here from the template is stored in ->sinkpad but never
 * added to the element in this file — confirm whether it is needed. */
static void
gst_inter_audio_sink_init (GstInterAudioSink * interaudiosink,
    GstInterAudioSinkClass * interaudiosink_class)
{
  interaudiosink->sinkpad =
      gst_pad_new_from_static_template (&gst_inter_audio_sink_sink_template,
      "sink");

  interaudiosink->surface = gst_inter_surface_get ("default");
}
/* set_property: this element installs no properties, so every id that
 * reaches us is invalid; warn just like a switch falling through to its
 * default case would. */
void
gst_inter_audio_sink_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
}
/* get_property: no properties are installed, so any requested id is
 * invalid; emit the standard warning unconditionally. */
void
gst_inter_audio_sink_get_property (GObject * object, guint property_id,
    GValue * value, GParamSpec * pspec)
{
  G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
}
/* dispose: nothing of our own to drop yet; chain up to GObject. */
void
gst_inter_audio_sink_dispose (GObject * object)
{
  /* GstInterAudioSink *interaudiosink = GST_INTER_AUDIO_SINK (object); */

  /* clean up as possible; dispose may be called multiple times */

  G_OBJECT_CLASS (parent_class)->dispose (object);
}
/* finalize: nothing of our own to free yet; chain up to GObject. */
void
gst_inter_audio_sink_finalize (GObject * object)
{
  /* GstInterAudioSink *interaudiosink = GST_INTER_AUDIO_SINK (object); */

  /* clean up object here */

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
/* get_caps: returning NULL lets GstBaseSink fall back to the pad
 * template caps. */
static GstCaps *
gst_inter_audio_sink_get_caps (GstBaseSink * sink)
{
  return NULL;
}
/* set_caps: accept anything the template allowed; no per-format state
 * is kept on the sink side. */
static gboolean
gst_inter_audio_sink_set_caps (GstBaseSink * sink, GstCaps * caps)
{
  return TRUE;
}
/* buffer_alloc: stub, compiled out in class_init ("if (0)"); would
 * refuse all upstream allocation requests if it were installed. */
static GstFlowReturn
gst_inter_audio_sink_buffer_alloc (GstBaseSink * sink, guint64 offset,
    guint size, GstCaps * caps, GstBuffer ** buf)
{
  return GST_FLOW_ERROR;
}
/* get_times: report the buffer's timestamp/duration so GstBaseSink can
 * synchronize rendering against the clock.  Outputs are only written
 * when the buffer carries a valid timestamp.
 * NOTE(review): fps_n/fps_d are never assigned anywhere in this file
 * (they look like leftovers from a video-sink template), so the
 * fallback branch can never fire — confirm and remove if truly unused. */
static void
gst_inter_audio_sink_get_times (GstBaseSink * sink, GstBuffer * buffer,
    GstClockTime * start, GstClockTime * end)
{
  GstInterAudioSink *interaudiosink = GST_INTER_AUDIO_SINK (sink);

  if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) {
    *start = GST_BUFFER_TIMESTAMP (buffer);
    if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
      *end = *start + GST_BUFFER_DURATION (buffer);
    } else {
      if (interaudiosink->fps_n > 0) {
        /* derive one "frame" of duration from fps_d/fps_n */
        *end = *start +
            gst_util_uint64_scale_int (GST_SECOND, interaudiosink->fps_d,
            interaudiosink->fps_n);
      }
    }
  }
}
/* start: no resources to acquire; always succeeds. */
static gboolean
gst_inter_audio_sink_start (GstBaseSink * sink)
{
  return TRUE;
}
/* stop: drop any queued audio so a restart begins with an empty
 * adapter.  The surface mutex guards the adapter shared with the
 * interaudiosrc side. */
static gboolean
gst_inter_audio_sink_stop (GstBaseSink * sink)
{
  GstInterAudioSink *interaudiosink = GST_INTER_AUDIO_SINK (sink);

  GST_DEBUG ("stop");

  g_mutex_lock (interaudiosink->surface->mutex);
  gst_adapter_clear (interaudiosink->surface->audio_adapter);
  g_mutex_unlock (interaudiosink->surface->mutex);

  return TRUE;
}
/* unlock: nothing blocks in render, so there is nothing to interrupt. */
static gboolean
gst_inter_audio_sink_unlock (GstBaseSink * sink)
{
  return TRUE;
}
/* event: stub, compiled out in class_init ("if (0)"); would accept all
 * events if it were installed. */
static gboolean
gst_inter_audio_sink_event (GstBaseSink * sink, GstEvent * event)
{
  return TRUE;
}
/* preroll: nothing to do for the preroll buffer; accept it. */
static GstFlowReturn
gst_inter_audio_sink_preroll (GstBaseSink * sink, GstBuffer * buffer)
{
  return GST_FLOW_OK;
}
/* render: append the incoming PCM to the surface's shared adapter,
 * which interaudiosrc drains from the other side.
 *
 * The sample math divides by 4 bytes per frame, i.e. it assumes 16-bit
 * stereo — NOTE(review): the template also allows mono, which would
 * make these counts off by 2x; confirm.  If the reader has fallen more
 * than 3200 samples (800 * 2 * 2) behind, 800 samples are dropped so
 * the queue cannot grow without bound. */
static GstFlowReturn
gst_inter_audio_sink_render (GstBaseSink * sink, GstBuffer * buffer)
{
  GstInterAudioSink *interaudiosink = GST_INTER_AUDIO_SINK (sink);
  int n;

  GST_DEBUG ("render %d", GST_BUFFER_SIZE (buffer));

  g_mutex_lock (interaudiosink->surface->mutex);
  n = gst_adapter_available (interaudiosink->surface->audio_adapter) / 4;
  if (n > (800 * 2 * 2)) {
    GST_INFO ("flushing 800 samples");
    gst_adapter_flush (interaudiosink->surface->audio_adapter, 800 * 4);
    /* (a dead "n -= 800" store was removed here: n is not read again) */
  }
  gst_adapter_push (interaudiosink->surface->audio_adapter,
      gst_buffer_ref (buffer));
  g_mutex_unlock (interaudiosink->surface->mutex);

  return GST_FLOW_OK;
}
/* async_play: stub, compiled out in class_init ("if (0)"). */
static GstStateChangeReturn
gst_inter_audio_sink_async_play (GstBaseSink * sink)
{
  return GST_STATE_CHANGE_SUCCESS;
}
/* activate_pull: stub, compiled out in class_init ("if (0)"). */
static gboolean
gst_inter_audio_sink_activate_pull (GstBaseSink * sink, gboolean active)
{
  return TRUE;
}
/* unlock_stop: counterpart of unlock; nothing to clean up. */
static gboolean
gst_inter_audio_sink_unlock_stop (GstBaseSink * sink)
{
  return TRUE;
}

View file

@ -0,0 +1,58 @@
/* GStreamer
* Copyright (C) 2011 David A. Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_INTER_AUDIO_SINK_H_
#define _GST_INTER_AUDIO_SINK_H_
#include <gst/base/gstbasesink.h>
#include "gstintersurface.h"
G_BEGIN_DECLS
#define GST_TYPE_INTER_AUDIO_SINK   (gst_inter_audio_sink_get_type())
#define GST_INTER_AUDIO_SINK(obj)   (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_INTER_AUDIO_SINK,GstInterAudioSink))
#define GST_INTER_AUDIO_SINK_CLASS(klass)   (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_INTER_AUDIO_SINK,GstInterAudioSinkClass))
#define GST_IS_INTER_AUDIO_SINK(obj)   (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_INTER_AUDIO_SINK))
/* Fixed: the macro parameter was named "obj" while the expansion used
 * "klass", which would fail to compile at any use site. */
#define GST_IS_INTER_AUDIO_SINK_CLASS(klass)   (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_INTER_AUDIO_SINK))
typedef struct _GstInterAudioSink GstInterAudioSink;
typedef struct _GstInterAudioSinkClass GstInterAudioSinkClass;

/* Audio sink that pushes PCM into a GstInterSurface shared with
 * interaudiosrc. */
struct _GstInterAudioSink
{
  GstBaseSink base_interaudiosink;      /* parent instance; must be first */

  GstInterSurface *surface;     /* shared conduit to interaudiosrc */
  GstPad *sinkpad;              /* NOTE(review): never added to the element */
  /* NOTE(review): fps fields appear to be video-template leftovers and
   * are never assigned in the .c file — confirm before relying on them */
  int fps_n;
  int fps_d;
};

struct _GstInterAudioSinkClass
{
  GstBaseSinkClass base_interaudiosink_class;
};

GType gst_inter_audio_sink_get_type (void);
G_END_DECLS
#endif

View file

@ -0,0 +1,481 @@
/* GStreamer
* Copyright (C) 2011 David A. Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
* Boston, MA 02110-1335, USA.
*/
/**
* SECTION:element-gstinteraudiosrc
*
* The interaudiosrc element does FIXME stuff.
*
* <refsect2>
* <title>Example launch line</title>
* |[
* gst-launch -v fakesrc ! interaudiosrc ! FIXME ! fakesink
* ]|
* FIXME Describe what the pipeline does.
* </refsect2>
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/base/gstbasesrc.h>
#include "gstinteraudiosrc.h"
#include <string.h>
GST_DEBUG_CATEGORY_STATIC (gst_inter_audio_src_debug_category);
#define GST_CAT_DEFAULT gst_inter_audio_src_debug_category
/* prototypes */
static void gst_inter_audio_src_set_property (GObject * object,
guint property_id, const GValue * value, GParamSpec * pspec);
static void gst_inter_audio_src_get_property (GObject * object,
guint property_id, GValue * value, GParamSpec * pspec);
static void gst_inter_audio_src_dispose (GObject * object);
static void gst_inter_audio_src_finalize (GObject * object);
static GstCaps *gst_inter_audio_src_get_caps (GstBaseSrc * src);
static gboolean gst_inter_audio_src_set_caps (GstBaseSrc * src, GstCaps * caps);
static gboolean gst_inter_audio_src_negotiate (GstBaseSrc * src);
static gboolean gst_inter_audio_src_newsegment (GstBaseSrc * src);
static gboolean gst_inter_audio_src_start (GstBaseSrc * src);
static gboolean gst_inter_audio_src_stop (GstBaseSrc * src);
static void
gst_inter_audio_src_get_times (GstBaseSrc * src, GstBuffer * buffer,
GstClockTime * start, GstClockTime * end);
static gboolean gst_inter_audio_src_is_seekable (GstBaseSrc * src);
static gboolean gst_inter_audio_src_unlock (GstBaseSrc * src);
static gboolean gst_inter_audio_src_event (GstBaseSrc * src, GstEvent * event);
static GstFlowReturn
gst_inter_audio_src_create (GstBaseSrc * src, guint64 offset, guint size,
GstBuffer ** buf);
static gboolean gst_inter_audio_src_do_seek (GstBaseSrc * src,
GstSegment * segment);
static gboolean gst_inter_audio_src_query (GstBaseSrc * src, GstQuery * query);
static gboolean gst_inter_audio_src_check_get_range (GstBaseSrc * src);
static void gst_inter_audio_src_fixate (GstBaseSrc * src, GstCaps * caps);
static gboolean gst_inter_audio_src_unlock_stop (GstBaseSrc * src);
static gboolean
gst_inter_audio_src_prepare_seek_segment (GstBaseSrc * src, GstEvent * seek,
GstSegment * segment);
/* Element property ids.  No properties are defined yet; PROP_0 is the
 * mandatory GObject placeholder. */
enum
{
  PROP_0
};
/* pad templates */
/* Src pad template: interleaved native-endian signed 16-bit PCM,
 * mono or stereo, any sample rate (mirrors the interaudiosink caps). */
static GstStaticPadTemplate gst_inter_audio_src_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("audio/x-raw-int, "
        "endianness = (int) BYTE_ORDER, "
        "signed = (boolean) true, "
        "width = (int) 16, "
        "depth = (int) 16, "
        "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]")
    );
/* class initialization */

/* Initializes the debug category when the type is registered. */
#define DEBUG_INIT(bla) \
  GST_DEBUG_CATEGORY_INIT (gst_inter_audio_src_debug_category, "interaudiosrc", 0, \
      "debug category for interaudiosrc element");

/* GStreamer 0.10 boilerplate: defines the GType, the parent_class
 * pointer, and hooks up _base_init/_class_init/_init. */
GST_BOILERPLATE_FULL (GstInterAudioSrc, gst_inter_audio_src, GstBaseSrc,
    GST_TYPE_BASE_SRC, DEBUG_INIT);
/* base_init: install the static pad template and the element metadata
 * (still FIXME placeholders) on the element class. */
static void
gst_inter_audio_src_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&gst_inter_audio_src_src_template));

  gst_element_class_set_details_simple (element_class, "FIXME Long name",
      "Generic", "FIXME Description", "FIXME <fixme@example.com>");
}
/* class_init: wire up the GObject and GstBaseSrc vtables.  Overrides
 * guarded by "if (0)" are deliberately compiled out so the base-class
 * defaults are used; their implementations are kept below so they can
 * be re-enabled easily. */
static void
gst_inter_audio_src_class_init (GstInterAudioSrcClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
  GstBaseSrcClass *base_src_class = GST_BASE_SRC_CLASS (klass);

  gobject_class->set_property = gst_inter_audio_src_set_property;
  gobject_class->get_property = gst_inter_audio_src_get_property;
  gobject_class->dispose = gst_inter_audio_src_dispose;
  gobject_class->finalize = gst_inter_audio_src_finalize;
  base_src_class->get_caps = GST_DEBUG_FUNCPTR (gst_inter_audio_src_get_caps);
  base_src_class->set_caps = GST_DEBUG_FUNCPTR (gst_inter_audio_src_set_caps);
  if (0)                        /* disabled: base-class negotiate is used */
    base_src_class->negotiate =
        GST_DEBUG_FUNCPTR (gst_inter_audio_src_negotiate);
  base_src_class->newsegment =
      GST_DEBUG_FUNCPTR (gst_inter_audio_src_newsegment);
  base_src_class->start = GST_DEBUG_FUNCPTR (gst_inter_audio_src_start);
  base_src_class->stop = GST_DEBUG_FUNCPTR (gst_inter_audio_src_stop);
  base_src_class->get_times = GST_DEBUG_FUNCPTR (gst_inter_audio_src_get_times);
  if (0)                        /* disabled */
    base_src_class->is_seekable =
        GST_DEBUG_FUNCPTR (gst_inter_audio_src_is_seekable);
  base_src_class->unlock = GST_DEBUG_FUNCPTR (gst_inter_audio_src_unlock);
  base_src_class->event = GST_DEBUG_FUNCPTR (gst_inter_audio_src_event);
  base_src_class->create = GST_DEBUG_FUNCPTR (gst_inter_audio_src_create);
  if (0)                        /* disabled */
    base_src_class->do_seek = GST_DEBUG_FUNCPTR (gst_inter_audio_src_do_seek);
  base_src_class->query = GST_DEBUG_FUNCPTR (gst_inter_audio_src_query);
  if (0)                        /* disabled */
    base_src_class->check_get_range =
        GST_DEBUG_FUNCPTR (gst_inter_audio_src_check_get_range);
  base_src_class->fixate = GST_DEBUG_FUNCPTR (gst_inter_audio_src_fixate);
  if (0)                        /* disabled */
    base_src_class->unlock_stop =
        GST_DEBUG_FUNCPTR (gst_inter_audio_src_unlock_stop);
  if (0)                        /* disabled */
    base_src_class->prepare_seek_segment =
        GST_DEBUG_FUNCPTR (gst_inter_audio_src_prepare_seek_segment);
}
/* instance init: mark the source live and grab the shared "default"
 * inter surface.  The blocksize of -1 presumably disables the default
 * fixed block size — TODO confirm against GstBaseSrc docs.
 * NOTE(review): like the sink, the pad built here from the template is
 * stored but never added; GstBaseSrc already created the src pad. */
static void
gst_inter_audio_src_init (GstInterAudioSrc * interaudiosrc,
    GstInterAudioSrcClass * interaudiosrc_class)
{
  interaudiosrc->srcpad =
      gst_pad_new_from_static_template (&gst_inter_audio_src_src_template,
      "src");

  gst_base_src_set_live (GST_BASE_SRC (interaudiosrc), TRUE);
  gst_base_src_set_blocksize (GST_BASE_SRC (interaudiosrc), -1);
  interaudiosrc->surface = gst_inter_surface_get ("default");
}
/* set_property: this element installs no properties, so every id that
 * reaches us is invalid; warn just like a switch falling through to its
 * default case would. */
void
gst_inter_audio_src_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
}
/* get_property: no properties are installed, so any requested id is
 * invalid; emit the standard warning unconditionally. */
void
gst_inter_audio_src_get_property (GObject * object, guint property_id,
    GValue * value, GParamSpec * pspec)
{
  G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
}
/* dispose: nothing of our own to drop yet; chain up to GObject. */
void
gst_inter_audio_src_dispose (GObject * object)
{
  /* GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (object); */

  /* clean up as possible; dispose may be called multiple times */

  G_OBJECT_CLASS (parent_class)->dispose (object);
}
/* finalize: nothing of our own to free yet; chain up to GObject. */
void
gst_inter_audio_src_finalize (GObject * object)
{
  /* GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (object); */

  /* clean up object here */

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
/* get_caps: returning NULL lets GstBaseSrc fall back to the pad
 * template caps. */
static GstCaps *
gst_inter_audio_src_get_caps (GstBaseSrc * src)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);

  GST_DEBUG_OBJECT (interaudiosrc, "get_caps");

  return NULL;
}
/* set_caps: remember the negotiated sample rate, which create() uses
 * for timestamping.  Caps without a "rate" field are rejected, exactly
 * as before. */
static gboolean
gst_inter_audio_src_set_caps (GstBaseSrc * src, GstCaps * caps)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);
  int rate;

  GST_DEBUG_OBJECT (interaudiosrc, "set_caps");

  if (!gst_structure_get_int (gst_caps_get_structure (caps, 0), "rate", &rate))
    return FALSE;

  interaudiosrc->sample_rate = rate;
  return TRUE;
}
/* negotiate: stub, compiled out in class_init ("if (0)"). */
static gboolean
gst_inter_audio_src_negotiate (GstBaseSrc * src)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);

  GST_DEBUG_OBJECT (interaudiosrc, "negotiate");

  return TRUE;
}
/* newsegment: nothing segment-specific to do; accept. */
static gboolean
gst_inter_audio_src_newsegment (GstBaseSrc * src)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);

  GST_DEBUG_OBJECT (interaudiosrc, "newsegment");

  return TRUE;
}
/* start: no resources to acquire; always succeeds. */
static gboolean
gst_inter_audio_src_start (GstBaseSrc * src)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);

  GST_DEBUG_OBJECT (interaudiosrc, "start");

  return TRUE;
}
/* stop: nothing to release (the surface is global and shared). */
static gboolean
gst_inter_audio_src_stop (GstBaseSrc * src)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);

  GST_DEBUG_OBJECT (interaudiosrc, "stop");

  return TRUE;
}
/* get_times: for a live source, sync on the buffer's own timestamp
 * (and duration, when valid); for non-live operation report no sync
 * times at all.  When the source is live but the buffer carries no
 * valid timestamp, the outputs are left untouched, as before. */
static void
gst_inter_audio_src_get_times (GstBaseSrc * src, GstBuffer * buffer,
    GstClockTime * start, GstClockTime * end)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);
  GstClockTime ts;

  GST_DEBUG_OBJECT (interaudiosrc, "get_times");

  if (!gst_base_src_is_live (src)) {
    *start = -1;
    *end = -1;
    return;
  }

  ts = GST_BUFFER_TIMESTAMP (buffer);
  if (!GST_CLOCK_TIME_IS_VALID (ts))
    return;

  *start = ts;
  if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buffer)))
    *end = ts + GST_BUFFER_DURATION (buffer);
}
/* is_seekable: stub, compiled out in class_init ("if (0)"); a live
 * inter source cannot seek. */
static gboolean
gst_inter_audio_src_is_seekable (GstBaseSrc * src)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);

  GST_DEBUG_OBJECT (interaudiosrc, "is_seekable");

  return FALSE;
}
/* unlock: create() never blocks, so there is nothing to interrupt. */
static gboolean
gst_inter_audio_src_unlock (GstBaseSrc * src)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);

  GST_DEBUG_OBJECT (interaudiosrc, "unlock");

  return TRUE;
}
/* event: accept every event without special handling. */
static gboolean
gst_inter_audio_src_event (GstBaseSrc * src, GstEvent * event)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);

  GST_DEBUG_OBJECT (interaudiosrc, "event");

  return TRUE;
}
/* create: produce exactly 1600 samples (4 bytes each) per buffer,
 * pulling whatever the sink side queued in the shared adapter and
 * padding the tail with silence when too little is available.  If more
 * than two buffers' worth is queued, 800 samples are dropped to bound
 * latency.
 *
 * NOTE(review): the 4-byte frame math assumes 16-bit stereo although
 * the template also allows mono; and sample_rate must have been set by
 * set_caps before the scale calls — confirm both. */
static GstFlowReturn
gst_inter_audio_src_create (GstBaseSrc * src, guint64 offset, guint size,
    GstBuffer ** buf)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);
  GstBuffer *buffer;
  int n;

  GST_DEBUG_OBJECT (interaudiosrc, "create");

  buffer = NULL;

  g_mutex_lock (interaudiosrc->surface->mutex);
  n = gst_adapter_available (interaudiosrc->surface->audio_adapter) / 4;
  if (n > 1600 * 2) {
    /* reader has fallen behind: drop half a buffer to catch up */
    GST_DEBUG ("flushing %d samples", 800);
    gst_adapter_flush (interaudiosrc->surface->audio_adapter, 800 * 4);
    n -= 800;
  }
  if (n > 1600)
    n = 1600;
  if (n > 0) {
    buffer = gst_adapter_take_buffer (interaudiosrc->surface->audio_adapter,
        n * 4);
  }
  g_mutex_unlock (interaudiosrc->surface->mutex);

  if (n < 1600) {
    /* not enough queued data: copy what we got, silence the rest */
    GstBuffer *newbuf = gst_buffer_new_and_alloc (1600 * 4);

    GST_DEBUG ("creating %d samples of silence", 1600 - n);
    memset (GST_BUFFER_DATA (newbuf) + n * 4, 0, 1600 * 4 - n * 4);
    if (buffer) {
      memcpy (GST_BUFFER_DATA (newbuf), GST_BUFFER_DATA (buffer), n * 4);
      gst_buffer_unref (buffer);
    }
    buffer = newbuf;
  }
  n = 1600;

  /* Timestamp from the running sample count; the duration is computed
   * as the difference of two rounded timestamps so consecutive buffers
   * stay contiguous.  (The duplicate OFFSET/OFFSET_END assignments that
   * used to precede this were dead stores — immediately overwritten
   * below — and were removed.) */
  GST_BUFFER_TIMESTAMP (buffer) =
      gst_util_uint64_scale_int (interaudiosrc->n_samples, GST_SECOND,
      interaudiosrc->sample_rate);
  GST_DEBUG_OBJECT (interaudiosrc, "create ts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
  GST_BUFFER_DURATION (buffer) =
      gst_util_uint64_scale_int (interaudiosrc->n_samples + n, GST_SECOND,
      interaudiosrc->sample_rate) - GST_BUFFER_TIMESTAMP (buffer);
  GST_BUFFER_OFFSET (buffer) = interaudiosrc->n_samples;
  GST_BUFFER_OFFSET_END (buffer) = -1;
  GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
  if (interaudiosrc->n_samples == 0) {
    /* first buffer of the stream marks a discontinuity */
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
  }
  gst_buffer_set_caps (buffer, GST_PAD_CAPS (GST_BASE_SRC_PAD (interaudiosrc)));
  interaudiosrc->n_samples += n;

  *buf = buffer;

  return GST_FLOW_OK;
}
/* do_seek: stub, compiled out in class_init ("if (0)"); seeking is not
 * supported. */
static gboolean
gst_inter_audio_src_do_seek (GstBaseSrc * src, GstSegment * segment)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);

  GST_DEBUG_OBJECT (interaudiosrc, "do_seek");

  return FALSE;
}
/* query: claim every query handled without filling in any values.
 * NOTE(review): returning TRUE for unanswered queries may mislead
 * callers — confirm whether chaining to the base class is intended. */
static gboolean
gst_inter_audio_src_query (GstBaseSrc * src, GstQuery * query)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);

  GST_DEBUG_OBJECT (interaudiosrc, "query");

  return TRUE;
}
/* check_get_range: stub, compiled out in class_init ("if (0)"); pull
 * mode is not supported. */
static gboolean
gst_inter_audio_src_check_get_range (GstBaseSrc * src)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);

  GST_DEBUG_OBJECT (interaudiosrc, "get_range");

  return FALSE;
}
/* fixate: prefer stereo at 48000 Hz when downstream leaves the
 * channel/rate fields open. */
static void
gst_inter_audio_src_fixate (GstBaseSrc * src, GstCaps * caps)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);
  GstStructure *structure;

  structure = gst_caps_get_structure (caps, 0);

  GST_DEBUG_OBJECT (interaudiosrc, "fixate");

  gst_structure_fixate_field_nearest_int (structure, "channels", 2);
  gst_structure_fixate_field_nearest_int (structure, "rate", 48000);
}
/* unlock_stop: stub, compiled out in class_init ("if (0)"). */
static gboolean
gst_inter_audio_src_unlock_stop (GstBaseSrc * src)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);

  GST_DEBUG_OBJECT (interaudiosrc, "stop");

  return TRUE;
}
/* prepare_seek_segment: stub, compiled out in class_init ("if (0)");
 * seeking is not supported. */
static gboolean
gst_inter_audio_src_prepare_seek_segment (GstBaseSrc * src, GstEvent * seek,
    GstSegment * segment)
{
  GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src);

  GST_DEBUG_OBJECT (interaudiosrc, "seek_segment");

  return FALSE;
}

View file

@ -0,0 +1,57 @@
/* GStreamer
* Copyright (C) 2011 David A. Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_INTER_AUDIO_SRC_H_
#define _GST_INTER_AUDIO_SRC_H_
#include <gst/base/gstbasesrc.h>
#include "gstintersurface.h"
G_BEGIN_DECLS
#define GST_TYPE_INTER_AUDIO_SRC   (gst_inter_audio_src_get_type())
#define GST_INTER_AUDIO_SRC(obj)   (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_INTER_AUDIO_SRC,GstInterAudioSrc))
#define GST_INTER_AUDIO_SRC_CLASS(klass)   (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_INTER_AUDIO_SRC,GstInterAudioSrcClass))
#define GST_IS_INTER_AUDIO_SRC(obj)   (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_INTER_AUDIO_SRC))
/* Fixed: the macro parameter was named "obj" while the expansion used
 * "klass", which would fail to compile at any use site. */
#define GST_IS_INTER_AUDIO_SRC_CLASS(klass)   (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_INTER_AUDIO_SRC))
typedef struct _GstInterAudioSrc GstInterAudioSrc;
typedef struct _GstInterAudioSrcClass GstInterAudioSrcClass;

/* Live audio source that drains PCM queued by interaudiosink through a
 * shared GstInterSurface. */
struct _GstInterAudioSrc
{
  GstBaseSrc base_interaudiosrc;        /* parent instance; must be first */

  GstPad *srcpad;               /* NOTE(review): never added to the element */
  GstInterSurface *surface;     /* shared conduit from interaudiosink */
  guint64 n_samples;            /* running count of samples produced */
  int sample_rate;              /* negotiated rate, set in set_caps */
};

struct _GstInterAudioSrcClass
{
  GstBaseSrcClass base_interaudiosrc_class;
};

GType gst_inter_audio_src_get_type (void);
G_END_DECLS
#endif

View file

@ -0,0 +1,42 @@
/* GStreamer
* Copyright (C) 2011 FIXME <fixme@example.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin Street, Suite 500,
* Boston, MA 02110-1335, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstintersurface.h"
/* Single process-global surface shared by all inter elements. */
static GstInterSurface *surface;

/* Returns the shared surface.
 * NOTE(review): the @name argument is ignored — every caller receives
 * the one global surface regardless of the name passed; confirm whether
 * per-name surfaces are intended before relying on distinct channels. */
GstInterSurface *
gst_inter_surface_get (const char *name)
{
  return surface;
}
/* One-time setup of the global surface: allocates the zeroed struct and
 * creates its mutex and audio adapter.  Must run before any element
 * calls gst_inter_surface_get().
 * NOTE(review): not idempotent — a second call would leak the first
 * surface; confirm it is only invoked from plugin init. */
void
gst_inter_surface_init (void)
{
  surface = g_malloc0 (sizeof (GstInterSurface));
  surface->mutex = g_mutex_new ();
  surface->audio_adapter = gst_adapter_new ();
}

Some files were not shown because too many files have changed in this diff Show more