diff --git a/.gitignore b/.gitignore index dd2bbc8aed..899464ac00 100644 --- a/.gitignore +++ b/.gitignore @@ -49,3 +49,4 @@ gst*orc.h /tests/check/orc /tests/examples/shapewipe/shapewipe-example /tests/examples/jack/jack_client +/tests/examples/opencv/gstmotioncells_dynamic_test diff --git a/common b/common index 50b34abb46..605cd9a65e 160000 --- a/common +++ b/common @@ -1 +1 @@ -Subproject commit 50b34abb468b6572a92f6700552f6f541c655be8 +Subproject commit 605cd9a65ed61505f24b840d3fe8e252be72b151 diff --git a/configure.ac b/configure.ac index 82ebfe299a..8213cfd6f3 100644 --- a/configure.ac +++ b/configure.ac @@ -339,6 +339,7 @@ AG_GST_CHECK_PLUGIN(h264parse) AG_GST_CHECK_PLUGIN(hdvparse) AG_GST_CHECK_PLUGIN(hls) AG_GST_CHECK_PLUGIN(id3tag) +AG_GST_CHECK_PLUGIN(inter) AG_GST_CHECK_PLUGIN(interlace) AG_GST_CHECK_PLUGIN(ivfparse) AG_GST_CHECK_PLUGIN(jp2kdecimator) @@ -480,16 +481,15 @@ AG_GST_CHECK_FEATURE(DIRECT3D, [Direct3D plug-in], direct3dsink, [ save_LIBS="$LIBS" CFLAGS="$CFLAGS $DIRECTX_CFLAGS" LDFLAGS="$LDFLAGS $DIRECTX_LDFLAGS" - LIBS="$LIBS -ld3d -lgdi32" + LIBS="$LIBS -ld3d9 -lgdi32" AC_MSG_CHECKING(for Direct3D LDFLAGS) AC_LINK_IFELSE([ #include <windows.h> -#include <d3d.h> +#include <d3d9.h> int main () { - GetStockObject(0); - Direct3DCreate(NULL, NULL, NULL); + Direct3DCreate9(D3D_SDK_VERSION); return 0; } @@ -502,8 +502,7 @@ int main () LIBS=$save_LIBS if test "x$HAVE_DIRECT3D" = "xyes"; then - dnl this is much more than we want - DIRECT3D_LIBS="-ld3d -ldxguid -lgdi32" + DIRECT3D_LIBS="-lgdi32" AC_SUBST(DIRECT3D_LIBS) fi AC_SUBST(HAVE_DIRECT3D) @@ -1403,6 +1402,19 @@ AG_GST_CHECK_FEATURE(OPENCV, [opencv plugins], opencv, [ AC_SUBST(OPENCV_LIBS) ]) +dnl *** Opus *** +translit(dnm, m, l) AM_CONDITIONAL(USE_OPUS, true) +AG_GST_CHECK_FEATURE(OPUS, [opus], opus, [ + PKG_CHECK_MODULES(OPUS, opus >= 0.9.4, [ + AC_DEFINE([HAVE_OPUS], 1, [Define if Opus >= 0.9.4 is installed]) + HAVE_OPUS="yes" + ], [ + HAVE_OPUS="no" + ]) + AC_SUBST(OPUS_CFLAGS) + AC_SUBST(OPUS_LIBS) +]) + dnl *** rsvg *** translit(dnm, m, l) AM_CONDITIONAL(USE_RSVG, true) AG_GST_CHECK_FEATURE(RSVG, [rsvg decoder], rsvg, [ @@ -1605,7 +1617,9 @@ translit(dnm, m, l) AM_CONDITIONAL(USE_WININET, true) AG_GST_CHECK_FEATURE(WININET, [Windows internet library], wininet, [ AC_MSG_CHECKING([Checking for windows internet support]) AC_CHECK_HEADERS([windows.h wininet.h], - [HAVE_WININET="yes"], [HAVE_WININET="no"]) + [HAVE_WININET="yes"], [HAVE_WININET="no"], + [AC_INCLUDES_DEFAULT +#include <windows.h>]) ]) dnl *** acm *** @@ -1766,6 +1780,7 @@ AM_CONDITIONAL(USE_NEON, false) AM_CONDITIONAL(USE_OFA, false) AM_CONDITIONAL(USE_OPENAL, false) AM_CONDITIONAL(USE_OPENCV, false) +AM_CONDITIONAL(USE_OPUS, false) AM_CONDITIONAL(USE_RSVG, false) AM_CONDITIONAL(USE_TIMIDITY, false) AM_CONDITIONAL(USE_WILDMIDI, false) @@ -1890,6 +1905,7 @@ gst/h264parse/Makefile gst/hdvparse/Makefile gst/hls/Makefile gst/id3tag/Makefile +gst/inter/Makefile gst/interlace/Makefile gst/ivfparse/Makefile gst/jp2kdecimator/Makefile @@ -1972,6 +1988,7 @@ tests/examples/camerabin2/Makefile tests/examples/directfb/Makefile tests/examples/mxf/Makefile tests/examples/scaletempo/Makefile +tests/examples/opencv/Makefile tests/icles/Makefile ext/voamrwbenc/Makefile ext/voaacenc/Makefile @@ -2009,6 +2026,7 @@ ext/neon/Makefile ext/ofa/Makefile ext/openal/Makefile ext/opencv/Makefile +ext/opus/Makefile ext/rsvg/Makefile ext/resindvd/Makefile ext/rtmp/Makefile diff --git a/docs/plugins/Makefile.am b/docs/plugins/Makefile.am index e6bbb5c5fa..4f8f0cfcd9 100644 --- a/docs/plugins/Makefile.am +++ 
b/docs/plugins/Makefile.am @@ -139,6 +139,7 @@ EXTRA_HFILES = \ $(top_srcdir)/gst/audiovisualizers/gstsynaescope.h \ $(top_srcdir)/gst/audiovisualizers/gstwavescope.h \ $(top_srcdir)/gst/camerabin/gstcamerabin.h \ + $(top_srcdir)/gst/camerabin2/gstcamerabin2.h \ $(top_srcdir)/gst/coloreffects/gstcoloreffects.h \ $(top_srcdir)/gst/dataurisrc/gstdataurisrc.h \ $(top_srcdir)/gst/dccp/gstdccpclientsink.h \ diff --git a/docs/plugins/gst-plugins-bad-plugins-docs.sgml b/docs/plugins/gst-plugins-bad-plugins-docs.sgml index 17acb5739b..07fcdaa261 100644 --- a/docs/plugins/gst-plugins-bad-plugins-docs.sgml +++ b/docs/plugins/gst-plugins-bad-plugins-docs.sgml @@ -24,6 +24,7 @@ + @@ -89,7 +90,9 @@ - + + + @@ -140,6 +143,7 @@ + @@ -196,6 +200,7 @@ + diff --git a/docs/plugins/gst-plugins-bad-plugins-sections.txt b/docs/plugins/gst-plugins-bad-plugins-sections.txt index 5efe2c1166..29315d4af2 100644 --- a/docs/plugins/gst-plugins-bad-plugins-sections.txt +++ b/docs/plugins/gst-plugins-bad-plugins-sections.txt @@ -27,34 +27,6 @@ GstAiffParseState gst_aiff_parse_get_type -
-element-voaacenc -voaacenc -GstVoAacEnc - -GstVoAacEncClass -GST_VOAACENC -GST_VOAACENC_CLASS -GST_IS_VOAACENC -GST_IS_VOAACENC_CLASS -GST_TYPE_VOAACENC -gst_voaacenc_get_type -
- -
-element-voamrwbenc -voamrwbenc -GstVoAmrwbEnc - -GstVoAmrwbEncClass -GST_VOAMRWBENC -GST_VOAMRWBENC_CLASS -GST_IS_VOAMRWBENC -GST_IS_VOAMRWBENC_CLASS -GST_TYPE_VOAMRWBENC -gst_voamrwbenc_get_type -
-
element-assrender assrender @@ -143,6 +115,20 @@ GST_IS_CAMERABIN_CLASS gst_camerabin_get_type
+
+element-camerabin2 +camerabin2 +GstCameraBin2 + +GstCameraBin2Class +GST_CAMERABIN2 +GST_IS_CAMERABIN2 +GST_TYPE_CAMERABIN2 +GST_CAMERABIN2_CLASS +GST_IS_CAMERABIN2_CLASS +gst_camerabin2_get_type +
+
element-celtdec celtdec @@ -1160,6 +1146,34 @@ GST_TYPE_RSVG_DEC gst_rsvg_dec_get_type
+
+element-rtmpsink +rtmpsink +GstRTMPSink + +GstRTMPSinkClass +GST_RTMP_SINK +GST_IS_RTMP_SINK +GST_TYPE_RTMP_SINK +gst_rtmp_sink_get_type +GST_RTMP_SINK_CLASS +GST_IS_RTMP_SINK_CLASS +
+ +
+element-rtmpsrc +rtmpsrc +GstRTMPSrc + +GstRTMPSrcClass +GST_RTMP_SRC +GST_IS_RTMP_SRC +GST_TYPE_RTMP_SRC +gst_rtmp_src_get_type +GST_RTMP_SRC_CLASS +GST_IS_RTMP_SRC_CLASS +
+
element-rtpdtmfdepay rtpdtmfdepay @@ -1639,6 +1653,34 @@ GST_IS_WILDMIDI_CLASS GST_TYPE_WILDMIDI
+
+element-voaacenc +voaacenc +GstVoAacEnc + +GstVoAacEncClass +GST_VOAACENC +GST_VOAACENC_CLASS +GST_IS_VOAACENC +GST_IS_VOAACENC_CLASS +GST_TYPE_VOAACENC +gst_voaacenc_get_type +
+ +
+element-voamrwbenc +voamrwbenc +GstVoAmrwbEnc + +GstVoAmrwbEncClass +GST_VOAMRWBENC +GST_VOAMRWBENC_CLASS +GST_IS_VOAMRWBENC +GST_IS_VOAMRWBENC_CLASS +GST_TYPE_VOAMRWBENC +gst_voamrwbenc_get_type +
+
element-vp8dec vp8dec diff --git a/docs/plugins/gst-plugins-bad-plugins.args b/docs/plugins/gst-plugins-bad-plugins.args index f0778aac54..2640dd488c 100644 --- a/docs/plugins/gst-plugins-bad-plugins.args +++ b/docs/plugins/gst-plugins-bad-plugins.args @@ -1701,7 +1701,7 @@ GstDvbSrc::diseqc-source gint -[-1,7] +[G_MAXULONG,7] rw diseqc source DISEqC selected source (-1 disabled) (DVB-S). @@ -17155,7 +17155,7 @@ rw Path where to search for RealPlayer codecs Path where to search for RealPlayer codecs. -"/usr/lib/win32:/usr/lib/codecs:/usr/local/RealPlayer/codecs:/usr/local/lib/win32:/usr/local/lib/codecs" +"/usr/lib64/win32:/usr/lib64/codecs:/usr/local/lib64/win32:/usr/local/lib64/codecs" @@ -17195,7 +17195,7 @@ rw Path where to search for RealPlayer codecs Path where to search for RealPlayer codecs. -"/usr/lib/win32:/usr/lib/codecs:/usr/local/RealPlayer/codecs:/usr/local/lib/win32:/usr/local/lib/codecs" +"/usr/lib64/win32:/usr/lib64/codecs:/usr/local/lib64/win32:/usr/local/lib64/codecs" @@ -17851,7 +17851,7 @@ DvbBaseBin::diseqc-source gint -[-1,7] +[G_MAXULONG,7] rw diseqc source DISEqC selected source (-1 disabled) (DVB-S). @@ -22026,7 +22026,7 @@ GstDCCPClientSrc::sockfd gint ->= -1 +>= G_MAXULONG rw Socket fd The socket file descriptor. @@ -22066,7 +22066,7 @@ GstDCCPServerSink::sockfd gint ->= -1 +>= G_MAXULONG rw Socket fd The client socket file descriptor. @@ -22126,7 +22126,7 @@ GstDCCPClientSink::sockfd gint ->= -1 +>= G_MAXULONG rw Socket fd The socket file descriptor. @@ -22186,7 +22186,7 @@ GstDCCPServerSrc::sockfd gint ->= -1 +>= G_MAXULONG rw Socket fd The client socket file descriptor. @@ -22246,7 +22246,7 @@ GstMpegTSDemux::program-number gint ->= -1 +>= G_MAXULONG rw Program Number Program number to demux for (-1 to ignore). @@ -22306,7 +22306,7 @@ GstPcapParse::dst-port gint -[-1,65535] +[G_MAXULONG,65535] rw Destination port Destination port to restrict to. @@ -22326,7 +22326,7 @@ GstPcapParse::src-port gint -[-1,65535] +[G_MAXULONG,65535] rw Source port Source port to restrict to. @@ -23356,7 +23356,7 @@ GstRTPDTMFSrc::seqnum-offset gint ->= -1 +>= G_MAXULONG rw Sequence number Offset Offset to add to all outgoing seqnum (-1 = random). @@ -23386,7 +23386,7 @@ GstRTPDTMFSrc::timestamp-offset gint ->= -1 +>= G_MAXULONG rw Timestamp Offset Offset to add to all outgoing timestamps (-1 = random). @@ -23436,7 +23436,7 @@ GstRTPMux::seqnum-offset gint ->= -1 +>= G_MAXULONG rw Sequence number Offset Offset to add to all outgoing seqnum (-1 = random). @@ -23456,7 +23456,7 @@ GstRTPMux::timestamp-offset gint ->= -1 +>= G_MAXULONG rw Timestamp Offset Offset to add to all outgoing timestamps (-1 = random). @@ -27930,7 +27930,7 @@ rw bitrate bitrate. -0 +13824000 @@ -28030,7 +28030,7 @@ rw enable_multiquant enable_multiquant. -FALSE +TRUE @@ -28116,11 +28116,11 @@ GstSchroEnc::horiz-slices gint ->= 0 +>= 1 rw horiz_slices horiz_slices. -0 +8 @@ -28130,7 +28130,7 @@ rw inter_wavelet inter_wavelet. -desl_dubuc_9_7 +le_gall_5_3 @@ -28220,7 +28220,7 @@ rw magic_chroma_lambda_scale magic_chroma_lambda_scale. -0.1 +0.01 @@ -28320,7 +28320,7 @@ rw magic_scene_change_threshold magic_scene_change_threshold. -3 +0.2 @@ -28490,17 +28490,17 @@ rw transform_depth transform_depth. -3 +4 GstSchroEnc::vert-slices gint ->= 0 +>= 1 rw vert_slices vert_slices. -0 +6 @@ -46096,7 +46096,7 @@ GstVideoMaxRate::average-period guint64 -[1,G_MAXINT64] +[1,G_MAXLONG] rw Period over which to average Period over which to average the framerate (in ns). 
@@ -46946,7 +46946,7 @@ GstJP2kDecimator::max-decomposition-levels gint -[-1,32] +[G_MAXULONG,32] rw Maximum Number of Decomposition Levels Maximum number of decomposition levels to keep (-1 == all). @@ -47336,7 +47336,7 @@ GstTSDemux::program-number gint ->= -1 +>= G_MAXULONG rw Program number Program Number to demux for (-1 to ignore). @@ -47779,8 +47779,8 @@ rw Location -Location to save the captured files. A %d might be used on thefilename as a placeholder for a numeric index of the capture.Default for images is img_%d and vid_%d for videos. -"img_%d" +Location to save the captured files. A %d might be used on thefilename as a placeholder for a numeric index of the capture.Default is cap_%d. +"cap_%d" @@ -57493,3 +57493,163 @@ FALSE + +GstCompare::meta +GstBufferCopyFlags + +rw +Compare Meta +Indicates which metadata should be compared. +GST_BUFFER_COPY_FLAGS|GST_BUFFER_COPY_TIMESTAMPS|GST_BUFFER_COPY_CAPS + + + +GstCompare::method +GstCompareMethod + +rw +Content Compare Method +Method to compare buffer content. +Memory + + + +GstCompare::offset-ts +gboolean + +rw +Offsets Timestamps +Consider OFFSET and OFFSET_END part of timestamp metadata. +FALSE + + + +GstCompare::threshold +gdouble +>= 0 +rw +Content Threshold +Threshold beyond which to consider content different as determined by content-method. +0 + + + +GstCompare::upper +gboolean + +rw +Threshold Upper Bound +Whether threshold value is upper bound or lower bound for difference measure. +TRUE + + + +GstOpenalSrc::device +gchar* + +rw +Device +Specific capture device to open, NULL indicate default device. +NULL + + + +GstOpenalSrc::device-name +gchar* + +r +Device name +Readable name of device. +NULL + + + +GstOpenALSink::context-handle +gpointer + +rw +ALCcontext +Custom playback context. + + + + +GstOpenALSink::device +gchar* + +rw +Device +OpenAL device string. +NULL + + + +GstOpenALSink::device-handle +gpointer + +rw +ALCdevice +Custom playback device. + + + + +GstOpenALSink::device-name +gchar* + +r +Device name +Opened OpenAL device name. +"" + + + +GstOpenALSink::source-id +guint + +rw +Source ID +Custom playback sID. +0 + + + +GstRTMPSink::location +gchar* + +rw +File Location +Location of the file to read. +NULL + + + +GstDecklinkSrc::connection +GstDecklinkConnection + +rwx +Connection +Connection. +sdi + + + +GstDecklinkSrc::mode +GstDecklinkModes + +rwx +Mode +Mode. +ntsc + + + +GstDecklinkSink::mode +GstDecklinkModes + +rwx +Mode +Mode. 
+ntsc + + diff --git a/docs/plugins/gst-plugins-bad-plugins.hierarchy b/docs/plugins/gst-plugins-bad-plugins.hierarchy index b7e35c17ca..ef9c6f37b6 100644 --- a/docs/plugins/gst-plugins-bad-plugins.hierarchy +++ b/docs/plugins/gst-plugins-bad-plugins.hierarchy @@ -1,4 +1,5 @@ GObject + GstAdapter GstColorBalanceChannel GstObject GstBus @@ -34,21 +35,26 @@ GObject GstBaseAudioSink GstAudioSink GstApExSink + GstNasSink GstSDLAudioSink GstChecksumSink - GstCurlSink GstDCCPClientSink GstDCCPServerSink GstFBDEVSink + GstInterAudioSink + GstInterVideoSink GstLinsysSdiSink GstSFSink GstShmSink GstVideoSink + GstDfbVideoSink GstSDLVideoSink - VdpSink GstBaseSrc GstDTMFSrc GstDataURISrc + GstFliteTestSrc + GstInterAudioSrc + GstInterVideoSrc GstLinsysSdiSrc GstPushSrc GstDCCPClientSrc @@ -56,8 +62,6 @@ GObject GstDc1394 GstDvbSrc GstMMS - GstNeonhttpSrc - GstRTMPSrc GstRfbSrc GstShmSrc GstVCDSrc @@ -109,23 +113,11 @@ GObject GstMirror GstRotate GstSquare - GstOpencvVideoFilter - GstCvDilateErode - GstCvDilate - GstCvErode - GstCvEqualizeHist - GstCvLaplace - GstCvSmooth - GstCvSobel - Gstfacedetect GstRsvgOverlay GstSolarize - GstVideo3DConvert - GstVideo3DPresent GstVideoAnalyse GstVideoDetect GstVideoMark - GstZBar GstVideoFilter2 GstSceneChange GstZebraStripe @@ -133,11 +125,9 @@ GObject GstBaseVideoCodec GstBaseVideoDecoder GstSchroDec - GstVP8Dec GstBaseVideoEncoder GstDiracEnc GstSchroEnc - GstVP8Enc GstBin DvbBaseBin GstAutoConvert @@ -146,12 +136,6 @@ GObject GstWrapperCameraBinSrc GstFPSDisplaySink GstFaceOverlay - GstGSettingsSwitchSink - GstGSettingsAudioSink - GstGSettingsVideoSink - GstGSettingsSwitchSrc - GstGSettingsAudioSrc - GstGSettingsVideoSrc GstPipeline GstCameraBin GstCameraBin2 @@ -165,12 +149,12 @@ GObject GstCeltDec GstCeltEnc GstChopMyData + GstCompare GstDVBSubOverlay GstDVDSpu GstDecklinkSink GstDecklinkSrc GstDtsDec - GstFaac GstFaad GstFestival GstFieldAnalysis @@ -182,7 +166,6 @@ GObject GstId3BaseMux GstId3Mux GstInterlace - GstInvtelecine GstIvfParse GstJP2kDecimator GstJifMux @@ -196,16 +179,16 @@ GObject GstMSE GstMXFDemux GstMXFMux + GstMimDec + GstMimEnc GstModPlug GstMpegPSDemux GstMpegTSDemux - GstMplex GstMusepackDec GstMveDemux GstMveMux GstNsfDec GstNuvDemux - GstOpencvTextOverlay GstPcapParse GstPitch GstPnmdec @@ -225,494 +208,27 @@ GObject GstAudioSegmentClip GstVideoSegmentClip GstSignalProcessor - calf-sourceforge-net-plugins-BassEnhancer - calf-sourceforge-net-plugins-Compressor - calf-sourceforge-net-plugins-Deesser - calf-sourceforge-net-plugins-Equalizer12Band - calf-sourceforge-net-plugins-Equalizer5Band - calf-sourceforge-net-plugins-Equalizer8Band - calf-sourceforge-net-plugins-Exciter - calf-sourceforge-net-plugins-Filter - calf-sourceforge-net-plugins-Filterclavier - calf-sourceforge-net-plugins-Flanger - calf-sourceforge-net-plugins-Fluidsynth - calf-sourceforge-net-plugins-Gate - calf-sourceforge-net-plugins-Monosynth - calf-sourceforge-net-plugins-MultiChorus - calf-sourceforge-net-plugins-Multibandcompressor - calf-sourceforge-net-plugins-Organ - calf-sourceforge-net-plugins-Phaser - calf-sourceforge-net-plugins-Pulsator - calf-sourceforge-net-plugins-Reverb - calf-sourceforge-net-plugins-RotarySpeaker - calf-sourceforge-net-plugins-Saturator - calf-sourceforge-net-plugins-Sidechaincompressor - calf-sourceforge-net-plugins-Sidechaingate - calf-sourceforge-net-plugins-VintageDelay - calf-sourceforge-net-plugins-Wavetable - invadarecords-com-plugins-lv2-compressor-mono - invadarecords-com-plugins-lv2-compressor-stereo - 
invadarecords-com-plugins-lv2-delay-mono - invadarecords-com-plugins-lv2-delay-sum - invadarecords-com-plugins-lv2-erreverb-mono - invadarecords-com-plugins-lv2-erreverb-sum - invadarecords-com-plugins-lv2-filter-hpf-mono - invadarecords-com-plugins-lv2-filter-hpf-stereo - invadarecords-com-plugins-lv2-filter-lpf-mono - invadarecords-com-plugins-lv2-filter-lpf-stereo - invadarecords-com-plugins-lv2-input - invadarecords-com-plugins-lv2-meter - invadarecords-com-plugins-lv2-phaser-mono - invadarecords-com-plugins-lv2-phaser-stereo - invadarecords-com-plugins-lv2-phaser-sum - invadarecords-com-plugins-lv2-testtone - invadarecords-com-plugins-lv2-tube-mono - invadarecords-com-plugins-lv2-tube-stereo - ladspa-AWfilt - ladspa-Accumulate - ladspa-Ambisonics-11-cube-decoder - ladspa-Ambisonics-11-hexagon-decoder - ladspa-Ambisonics-11-mono-panner - ladspa-Ambisonics-11-rotator - ladspa-Ambisonics-11-square-decoder - ladspa-Ambisonics-11-stereo-panner - ladspa-Ambisonics-21-panner - ladspa-Ambisonics-21-rotator - ladspa-Ambisonics-22-panner - ladspa-Ambisonics-22-rotator - ladspa-Ambisonics-31-panner - ladspa-Ambisonics-31-rotator - ladspa-Ambisonics-33-panner - ladspa-Ambisonics-33-rotator - ladspa-AmpIII - ladspa-AmpIV - ladspa-AmpV - ladspa-AmpVTS - ladspa-AutoWah - ladspa-BassEnhancer - ladspa-BoosterM - ladspa-BoosterS - ladspa-CEO - ladspa-CVFreq - ladspa-CabinetI - ladspa-CabinetII - ladspa-Chorus1 - ladspa-Chorus1-2x2 - ladspa-Chorus2 - ladspa-ChorusI - ladspa-ChorusII - ladspa-Click - ladspa-Clip - ladspa-Compress - ladspa-Compressor - ladspa-Deesser - ladspa-Dirac - ladspa-Eq - ladspa-Eq2x2 - ladspa-Equalizer12Band - ladspa-Equalizer5Band - ladspa-Equalizer8Band - ladspa-Exaggerate - ladspa-Exciter - ladspa-Filter - ladspa-Filterclavier - ladspa-Flanger - ladspa-G2reverb - ladspa-Gate - ladspa-HRTF - ladspa-JVRev - ladspa-Lorenz - ladspa-MUSIC - ladspa-MUSICDrum - ladspa-MultiChorus - ladspa-Multibandcompressor - ladspa-Mvchpf-1 - ladspa-Mvclpf-1 - ladspa-Mvclpf-2 - ladspa-Mvclpf-3 - ladspa-Mvclpf-4 - ladspa-NoisifierM - ladspa-NoisifierS - ladspa-PSG - ladspa-Pan - ladspa-Parametric1 - ladspa-Phaser - ladspa-Phaser1 - ladspa-Phaser1+LFO - ladspa-PhaserI - ladspa-PhaserII - ladspa-Plate - ladspa-Plate2x2 - ladspa-PreampIII - ladspa-PreampIV - ladspa-Pulsator - ladspa-Pulse-VCO - ladspa-Rec-VCO - ladspa-Reverb - ladspa-Roessler - ladspa-RotarySpeaker - ladspa-SCC - ladspa-SID - ladspa-Saturator - ladspa-Saw-VCO - ladspa-Scape - ladspa-Sidechaincompressor - ladspa-Sidechaingate - ladspa-Sin - ladspa-SooperLooper - ladspa-StereoChorusI - ladspa-StereoChorusII - ladspa-SweepVFI - ladspa-SweepVFII - ladspa-Sync-Rect-VCO - ladspa-Sync-Saw-VCO - ladspa-Sync-Tri-VCO - ladspa-ToneStack - ladspa-ToneStackLT - ladspa-Transpose - ladspa-Tricardioid-to-AMB - ladspa-TripleChorus - ladspa-VCOd - ladspa-VCOs - ladspa-VariNoiseM - ladspa-VariNoiseS - ladspa-VintageDelay - ladspa-Virtualmic - ladspa-White - ladspa-XShaperM - ladspa-XShaperS - ladspa-adenv - ladspa-adenv-lvl - ladspa-adsr - ladspa-adsr-g+t - ladspa-alias - ladspa-alienwah-mono - ladspa-alienwah-stereo - ladspa-allpass-c - ladspa-allpass-l - ladspa-allpass-n - ladspa-am - ladspa-amPitchshift - ladspa-amp - ladspa-amp-gaia-oa - ladspa-amp-gcia-oa ladspa-amp-mono ladspa-amp-stereo - ladspa-analogue - ladspa-analogueOsc - ladspa-artificialLatency - ladspa-autoPhaser - ladspa-bandpass-a-iir - ladspa-bandpass-iir - ladspa-bf-rotate-z - ladspa-bf2cube - ladspa-bf2quad - ladspa-bf2stereo - ladspa-bodeShifter - ladspa-bodeShifterCV - 
ladspa-branch-ia-oaoa - ladspa-branch-ic-ococ - ladspa-butthigh-iir - ladspa-buttlow-iir - ladspa-bwxover-iir - ladspa-canyon-delay - ladspa-chebstortion - ladspa-clipper - ladspa-comb - ladspa-comb-c - ladspa-comb-l - ladspa-comb-n - ladspa-combSplitter - ladspa-comp-aa - ladspa-comp-ac - ladspa-compress-peak - ladspa-compress-rms - ladspa-const - ladspa-crossoverDist - ladspa-dahdsr-cg+t-control - ladspa-dahdsr-fexp - ladspa-dahdsr-g+t-audio - ladspa-dahdsr-g+t-control - ladspa-dahdsr-hexp - ladspa-dcRemove - ladspa-decay - ladspa-decimator - ladspa-declip - ladspa-delay-0-01s - ladspa-delay-0-1s - ladspa-delay-1s ladspa-delay-5s - ladspa-delay-60s - ladspa-delay-c - ladspa-delay-l - ladspa-delay-n - ladspa-delayorama - ladspa-difference-iama-oa - ladspa-difference-iamc-oa - ladspa-difference-icma-oa - ladspa-difference-icmc-oc - ladspa-diode - ladspa-disintegrator - ladspa-divider - ladspa-dj-eq - ladspa-dj-eq-mono - ladspa-djFlanger - ladspa-dysonCompress - ladspa-eir - ladspa-encode-bformat - ladspa-encode-fmh - ladspa-expand-peak - ladspa-expand-rms - ladspa-fadDelay - ladspa-fast-xfade - ladspa-fastLookaheadLimiter - ladspa-fbdelay-0-01s - ladspa-fbdelay-0-1s - ladspa-fbdelay-1s - ladspa-fbdelay-5s - ladspa-fbdelay-60s - ladspa-flanger - ladspa-floatNoise - ladspa-fmOsc - ladspa-fmh-rotate-z - ladspa-fmh2bf - ladspa-fmh2oct - ladspa-fmod-fama-oa - ladspa-fmod-famc-oa - ladspa-fmod-fcma-oa - ladspa-fmod-fcmc-oc - ladspa-foldover - ladspa-foo-chop-liver - ladspa-foo-driver - ladspa-foo-limiter - ladspa-foo-limiter-v2 - ladspa-foo-saturator - ladspa-foo-transients - ladspa-foo-transients-mono - ladspa-formant-vc - ladspa-fourByFourPole - ladspa-foverdrive - ladspa-freeverb3 - ladspa-freqTracker - ladspa-gate - ladspa-giantFlange - ladspa-gong - ladspa-gongBeater - ladspa-grain-scatter - ladspa-gsm - ladspa-gverb - ladspa-hard-gate - ladspa-hardLimiter - ladspa-harmonicGen - ladspa-hermesFilter - ladspa-highpass-iir - ladspa-hilbert ladspa-hpf - ladspa-hz-voct-ar - ladspa-hz-voct-cr - ladspa-identity-audio - ladspa-identity-control - ladspa-imp - ladspa-impulse-fc - ladspa-intNoise - ladspa-interpolator - ladspa-inv - ladspa-karaoke - ladspa-lcrDelay - ladspa-leet-equalizer-bw2x2 - ladspa-leet-equalizer-bw2x2-1 - ladspa-lfoPhaser - ladspa-limit-peak - ladspa-limit-rms - ladspa-lofi - ladspa-logistic - ladspa-lowpass-iir - ladspa-lp4pole-faraia-oa - ladspa-lp4pole-fcrcia-oa ladspa-lpf - ladspa-lsFilter - ladspa-matched - ladspa-matrixMSSt - ladspa-matrixSpatialiser - ladspa-matrixStMS - ladspa-mbeq - ladspa-mixer - ladspa-modDelay - ladspa-multivoiceChorus - ladspa-mux-ar - ladspa-mux-cr - ladspa-noise-source-white ladspa-noise-white - ladspa-notch-iir - ladspa-null-ai - ladspa-null-ao - ladspa-null-ci - ladspa-null-co - ladspa-organ - ladspa-peak - ladspa-phasemod - ladspa-pink-full-frequency - ladspa-pink-interpolated-audio - ladspa-pink-sh - ladspa-pitchScale - ladspa-pitchScaleHQ - ladspa-plate - ladspa-pointerCastDistortion - ladspa-power - ladspa-power-cr - ladspa-preamp - ladspa-prob-switch-ar - ladspa-prob-switch-cr - ladspa-product-iaia-oa - ladspa-product-iaic-oa - ladspa-product-icic-oc - ladspa-pulse-fapa-oa - ladspa-pulse-fapc-oa - ladspa-pulse-fcpa-oa - ladspa-pulse-fcpc-oa - ladspa-quantiser100 - ladspa-quantiser20 - ladspa-quantiser50 - ladspa-random-fasa-oa - ladspa-random-fasc-oa - ladspa-random-fcsa-oa - ladspa-random-fcsc-oa - ladspa-range-trans-ar - ladspa-range-trans-cr - ladspa-rateShifter - ladspa-ratio-nada-oa - ladspa-ratio-nadc-oa - ladspa-ratio-ncda-oa - 
ladspa-ratio-ncdc-oc - ladspa-retroFlange - ladspa-revdelay - ladspa-ringmod-1i1o1l - ladspa-ringmod-2i1o - ladspa-rissetScales - ladspa-rubberband-pitchshifter-mono - ladspa-rubberband-pitchshifter-stereo - ladspa-satanMaximiser - ladspa-sawtooth-fa-oa - ladspa-sawtooth-fc-oa - ladspa-sc1 - ladspa-sc2 - ladspa-sc3 - ladspa-sc4 - ladspa-sc4m - ladspa-se4 - ladspa-sequencer16 - ladspa-sequencer32 - ladspa-sequencer64 - ladspa-sh-ar - ladspa-sh-cr - ladspa-shaper - ladspa-sifter - ladspa-signal-abs-ar - ladspa-signal-abs-cr - ladspa-sinCos ladspa-sine-faaa ladspa-sine-faac ladspa-sine-fcaa ladspa-sine-fcac - ladspa-singlePara - ladspa-sinusWavewrapper - ladspa-sledgehammer - ladspa-slew-limiter-ra - ladspa-slew-limiter-rc - ladspa-slide-ta - ladspa-slide-tc - ladspa-smoothDecimate - ladspa-split - ladspa-square-fa-oa - ladspa-square-fc-oa - ladspa-ssm-masher - ladspa-stepMuxer - ladspa-sum-iaia-oa - ladspa-sum-iaic-oa - ladspa-sum-icic-oc - ladspa-super-60 - ladspa-surroundEncoder - ladspa-svf - ladspa-syncpulse-fapaga-oa - ladspa-syncpulse-fcpcga-oa - ladspa-syncsquare-faga-oa - ladspa-syncsquare-fcga-oa - ladspa-syndrum - ladspa-tap-autopan - ladspa-tap-chorusflanger - ladspa-tap-deesser - ladspa-tap-doubler - ladspa-tap-dynamics-m - ladspa-tap-dynamics-st - ladspa-tap-equalizer - ladspa-tap-equalizer-bw - ladspa-tap-limiter - ladspa-tap-pinknoise - ladspa-tap-pitch - ladspa-tap-reflector - ladspa-tap-reverb - ladspa-tap-rotspeak - ladspa-tap-sigmoid - ladspa-tap-stereo-echo - ladspa-tap-tremolo - ladspa-tap-tubewarmth - ladspa-tap-vibrato - ladspa-tapeDelay - ladspa-track-max-peak - ladspa-track-max-rms - ladspa-track-peak - ladspa-track-rms - ladspa-tracker-gaaadaia-oa - ladspa-tracker-gaacdcia-oa - ladspa-transient - ladspa-triangle-fasa-oa - ladspa-triangle-fasc-oa - ladspa-triangle-fcsa-oa - ladspa-triangle-fcsc-oa - ladspa-trigger - ladspa-triplePara - ladspa-unmatched - ladspa-valve - ladspa-valveRect - ladspa-vcf-bp1 - ladspa-vcf-bp2 - ladspa-vcf-hp - ladspa-vcf-hshelf - ladspa-vcf-lp - ladspa-vcf-lshelf - ladspa-vcf-notch - ladspa-vcf-peakeq - ladspa-vcf-reslp - ladspa-vcf303 - ladspa-vlevel-mono - ladspa-vlevel-stereo - ladspa-vocoder - ladspa-vynil - ladspa-waveTerrain - ladspa-wg-mesh-cr - ladspa-wshape-sine - ladspa-xfade - ladspa-xfade4 - ladspa-zm1 GstSirenDec GstSirenEnc GstSpeed GstSrtEnc GstTRM - GstTemplateMatch - GstTimidity GstTtaDec GstTtaParse GstVMncDec GstVcdParse - GstVdpVideoPostProcess - GstVideo3DMerge - GstXvidDec - GstXvidEnc + GstWildmidi GstY4mDec - Gstedgedetect - Gstfaceblur - Gstpyramidsegment MpegPsMux MpegTSBase GstTSDemux @@ -720,14 +236,7 @@ GObject MpegTSParse MpegTsMux MpegVideoParse - SatBaseVideoDecoder - GstVdpDecoder - GstVdpH264Dec - GstVdpMpeg4Dec - GstVdpMpegDec GstPad - GstVdpOutputSrcPad - GstVdpVideoSrcPad GstPadTemplate GstSignalProcessorPadTemplate GstPlugin @@ -741,7 +250,6 @@ GObject GstTask GstTaskPool GstSignalObject - GstVdpDevice MpegTsPatInfo MpegTsPmtInfo GInterface diff --git a/docs/plugins/gst-plugins-bad-plugins.interfaces b/docs/plugins/gst-plugins-bad-plugins.interfaces index 7a3fcee6e9..c62655fbdb 100644 --- a/docs/plugins/gst-plugins-bad-plugins.interfaces +++ b/docs/plugins/gst-plugins-bad-plugins.interfaces @@ -33,6 +33,7 @@ GstMpeg2enc GstPreset GstMythtvSrc GstURIHandler GstNeonhttpSrc GstURIHandler GstPipeline GstChildProxy +GstRTMPSink GstURIHandler GstRTMPSrc GstURIHandler GstSDLVideoSink GstImplementsInterface GstXOverlay GstNavigation GstSDPDemux GstChildProxy diff --git 
a/docs/plugins/inspect/plugin-debugutilsbad.xml b/docs/plugins/inspect/plugin-debugutilsbad.xml index 1a28a1460d..a9e8270aa5 100644 --- a/docs/plugins/inspect/plugin-debugutilsbad.xml +++ b/docs/plugins/inspect/plugin-debugutilsbad.xml @@ -51,6 +51,33 @@ + + compare + Compare buffers + Filter/Debug + Compares incoming buffers + Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk> + + + check + sink + always +
ANY
+
+ + sink + sink + always +
ANY
+
+ + src + source + always +
ANY
+
+
+
debugspy DebugSpy diff --git a/docs/plugins/inspect/plugin-decklink.xml b/docs/plugins/inspect/plugin-decklink.xml index 8d6937d75f..a72e850795 100644 --- a/docs/plugins/inspect/plugin-decklink.xml +++ b/docs/plugins/inspect/plugin-decklink.xml @@ -26,7 +26,7 @@ videosink sink always -
video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)30000/1001, interlaced=(boolean)true
+
video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)30000/1001, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)24000/1001, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)576, framerate=(fraction)25/1, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)30000/1001, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)576, framerate=(fraction)25/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)24000/1001, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)24/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)25/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30000/1001, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)25/1, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30000/1001, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30/1, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)50/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)60000/1001, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)60/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)50/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)60000/1001, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)60/1, interlaced=(boolean)false
@@ -47,7 +47,7 @@ videosrc source always -
video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)30000/1001, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)24000/1001, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)576, framerate=(fraction)25/1, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)24000/1001, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)24/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)25/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30000/1001, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30/1, interlaced=(boolean)false; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)25/1, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30000/1001, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30/1, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)50/1, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)60000/1001, interlaced=(boolean)true; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)60/1, interlaced=(boolean)true
+
video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)30000/1001, interlaced=(boolean)true, pixel-aspect-ratio=(fraction)10/11, color-matrix=(string)sdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)24000/1001, interlaced=(boolean)true, pixel-aspect-ratio=(fraction)10/11, color-matrix=(string)sdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)576, framerate=(fraction)25/1, interlaced=(boolean)true, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)sdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)486, framerate=(fraction)30000/1001, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)10/11, color-matrix=(string)sdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)720, height=(int)576, framerate=(fraction)25/1, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)sdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)24000/1001, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)24/1, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)25/1, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30000/1001, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30/1, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)25/1, interlaced=(boolean)true, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30000/1001, interlaced=(boolean)true, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30/1, interlaced=(boolean)true, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)50/1, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)30000/1001, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1920, height=(int)1080, framerate=(fraction)60/1, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, 
framerate=(fraction)50/1, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)60000/1001, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2; video/x-raw-yuv, format=(fourcc)UYVY, width=(int)1280, height=(int)720, framerate=(fraction)60/1, interlaced=(boolean)false, pixel-aspect-ratio=(fraction)12/11, color-matrix=(string)hdtv, chroma-site=(string)mpeg2
diff --git a/docs/plugins/inspect/plugin-dtsdec.xml b/docs/plugins/inspect/plugin-dtsdec.xml index f0043c7881..c79cccb3e3 100644 --- a/docs/plugins/inspect/plugin-dtsdec.xml +++ b/docs/plugins/inspect/plugin-dtsdec.xml @@ -3,7 +3,7 @@ Decodes DTS audio streams ../../ext/dts/.libs/libgstdtsdec.so libgstdtsdec.so - 0.10.19.1 + 0.10.22.1 GPL gst-plugins-bad GStreamer Bad Plug-ins git diff --git a/docs/plugins/inspect/plugin-flite.xml b/docs/plugins/inspect/plugin-flite.xml new file mode 100644 index 0000000000..bf1c4d830b --- /dev/null +++ b/docs/plugins/inspect/plugin-flite.xml @@ -0,0 +1,28 @@ + + flite + Flite speech synthesizer plugin + ../../ext/flite/.libs/libgstflite.so + libgstflite.so + 0.10.22.1 + LGPL + gst-plugins-bad + GStreamer Bad Plug-ins git + Unknown package origin + + + flitetestsrc + Flite speech test source + Source/Audio + Creates audio test signals identifying channels + David Schleef <ds@schleef.org> + + + src + source + always +
audio/x-raw-int, endianness=(int)1234, signed=(boolean)true, width=(int)16, depth=(int)16, rate=(int)48000, channels=(int)[ 1, 8 ]
+
+
+
+
+
\ No newline at end of file diff --git a/docs/plugins/inspect/plugin-inter.xml b/docs/plugins/inspect/plugin-inter.xml new file mode 100644 index 0000000000..ce764a22ba --- /dev/null +++ b/docs/plugins/inspect/plugin-inter.xml @@ -0,0 +1,73 @@ + + inter + plugin for inter-pipeline communication + ../../gst/inter/.libs/libgstinter.so + libgstinter.so + 0.10.22.1 + LGPL + gst-plugins-bad + GStreamer Bad Plug-ins + Unknown package origin + + + interaudiosink + FIXME Long name + Generic + FIXME Description + FIXME <fixme@example.com> + + + sink + sink + always +
audio/x-raw-int, endianness=(int)1234, signed=(boolean)true, width=(int)16, depth=(int)16, rate=(int)[ 1, 2147483647 ], channels=(int)[ 1, 2 ]
+
+
+
+ + interaudiosrc + FIXME Long name + Generic + FIXME Description + FIXME <fixme@example.com> + + + src + source + always +
audio/x-raw-int, endianness=(int)1234, signed=(boolean)true, width=(int)16, depth=(int)16, rate=(int)[ 1, 2147483647 ], channels=(int)[ 1, 2 ]
+
+
+
+ + intervideosink + FIXME Long name + Generic + FIXME Description + FIXME <fixme@example.com> + + + sink + sink + always +
video/x-raw-yuv, format=(fourcc)I420, width=(int)[ 1, 2147483647 ], height=(int)[ 1, 2147483647 ], framerate=(fraction)[ 0/1, 2147483647/1 ]
+
+
+
+ + intervideosrc + FIXME Long name + Generic + FIXME Description + FIXME <fixme@example.com> + + + src + source + always +
video/x-raw-yuv, format=(fourcc)I420, width=(int)[ 1, 2147483647 ], height=(int)[ 1, 2147483647 ], framerate=(fraction)[ 0/1, 2147483647/1 ]
+
+
+
+
+
\ No newline at end of file diff --git a/docs/plugins/inspect/plugin-modplug.xml b/docs/plugins/inspect/plugin-modplug.xml index f9cac1f48f..637390beca 100644 --- a/docs/plugins/inspect/plugin-modplug.xml +++ b/docs/plugins/inspect/plugin-modplug.xml @@ -1,12 +1,12 @@ modplug .MOD audio decoding - ../../gst/modplug/.libs/libgstmodplug.so + ../../ext/modplug/.libs/libgstmodplug.so libgstmodplug.so - 0.10.10.1 + 0.10.22.1 LGPL gst-plugins-bad - GStreamer Bad Plug-ins CVS/prerelease + GStreamer Bad Plug-ins git Unknown package origin @@ -26,7 +26,7 @@ src source always -
audio/x-raw-int, endianness=(int)1234, signed=(boolean)true, width=(int)16, depth=(int)16, rate=(int){ 8000, 11025, 22050, 44100 }, channels=(int)2; audio/x-raw-int, endianness=(int)1234, signed=(boolean)false, width=(int)8, depth=(int)8, rate=(int){ 8000, 11025, 22050, 44100 }, channels=(int)[ 1, 2 ]
+
audio/x-raw-int, endianness=(int)1234, signed=(boolean)true, width=(int)32, depth=(int)32, rate=(int){ 8000, 11025, 22050, 44100 }, channels=(int)[ 1, 2 ]; audio/x-raw-int, endianness=(int)1234, signed=(boolean)true, width=(int)16, depth=(int)16, rate=(int){ 8000, 11025, 22050, 44100 }, channels=(int)[ 1, 2 ]; audio/x-raw-int, endianness=(int)1234, signed=(boolean)false, width=(int)8, depth=(int)8, rate=(int){ 8000, 11025, 22050, 44100 }, channels=(int)[ 1, 2 ]
diff --git a/docs/plugins/inspect/plugin-rtmpsrc.xml b/docs/plugins/inspect/plugin-rtmp.xml similarity index 59% rename from docs/plugins/inspect/plugin-rtmpsrc.xml rename to docs/plugins/inspect/plugin-rtmp.xml index c85740938f..7d9ae4fdc9 100644 --- a/docs/plugins/inspect/plugin-rtmpsrc.xml +++ b/docs/plugins/inspect/plugin-rtmp.xml @@ -1,6 +1,6 @@ - rtmpsrc - RTMP source + rtmp + RTMP source and sink ../../ext/rtmp/.libs/libgstrtmp.so libgstrtmp.so 0.10.22.1 @@ -9,6 +9,21 @@ GStreamer Bad Plug-ins git Unknown package origin + + rtmpsink + RTMP output sink + Sink/Network + Sends FLV content to a server via RTMP + Jan Schmidt <thaytan@noraisin.net> + + + sink + sink + always +
video/x-flv
+
+
+
rtmpsrc RTMP Source diff --git a/docs/plugins/inspect/plugin-videoparsersbad.xml b/docs/plugins/inspect/plugin-videoparsersbad.xml index 58d93db214..930c55df78 100644 --- a/docs/plugins/inspect/plugin-videoparsersbad.xml +++ b/docs/plugins/inspect/plugin-videoparsersbad.xml @@ -83,7 +83,7 @@ sink sink always -
video/mpeg, mpegversion=(int)4, parsed=(boolean)false, systemstream=(boolean)false
+
video/mpeg, mpegversion=(int)[ 1, 2 ], parsed=(boolean)false, systemstream=(boolean)false
src diff --git a/ext/Makefile.am b/ext/Makefile.am index 70d4c69c31..2a6f8ec760 100644 --- a/ext/Makefile.am +++ b/ext/Makefile.am @@ -262,6 +262,12 @@ else OPENCV_DIR= endif +if USE_OPUS +OPUS_DIR=opus +else +OPUS_DIR= +endif + if USE_RSVG RSVG_DIR=rsvg else @@ -419,6 +425,7 @@ SUBDIRS=\ $(OFA_DIR) \ $(OPENAL_DIR) \ $(OPENCV_DIR) \ + $(OPUS_DIR) \ $(RSVG_DIR) \ $(SCHRO_DIR) \ $(SDL_DIR) \ @@ -471,6 +478,7 @@ DIST_SUBDIRS = \ ofa \ openal \ opencv \ + opus \ rsvg \ resindvd \ schroedinger \ diff --git a/ext/assrender/gstassrender.c b/ext/assrender/gstassrender.c index c5a8e1bcd2..2727365db9 100644 --- a/ext/assrender/gstassrender.c +++ b/ext/assrender/gstassrender.c @@ -570,10 +570,13 @@ blit_i420 (GstAssRender * render, ASS_Image * ass_image, GstBuffer * buffer) const guint8 *src; guint8 *dst_y, *dst_u, *dst_v; gint x, y, w, h; +/* FIXME ignoring source image stride might be wrong here */ +#if 0 gint w2; + gint src_stride; +#endif gint width = render->width; gint height = render->height; - gint src_stride; gint y_offset, y_stride; gint u_offset, u_stride; gint v_offset, v_stride; @@ -609,9 +612,11 @@ blit_i420 (GstAssRender * render, ASS_Image * ass_image, GstBuffer * buffer) w = MIN (ass_image->w, width - ass_image->dst_x); h = MIN (ass_image->h, height - ass_image->dst_y); +#if 0 w2 = (w + 1) / 2; src_stride = ass_image->stride; +#endif src = ass_image->bitmap; dst_y = diff --git a/ext/cog/cogvirtframe.c b/ext/cog/cogvirtframe.c index 0fc0ad1298..e6d08e551b 100644 --- a/ext/cog/cogvirtframe.c +++ b/ext/cog/cogvirtframe.c @@ -520,12 +520,11 @@ cog_virt_frame_render_resample_vert_1tap (CogFrame * frame, void *_dest, int n_src; int scale = frame->param1; int acc; - int x; int src_i; acc = scale * i; src_i = acc >> 8; - x = acc & 0xff; + /* x = acc & 0xff; */ n_src = frame->virt_frame1->components[component].height; src1 = cog_virt_frame_get_line (frame->virt_frame1, component, @@ -634,10 +633,9 @@ cog_virt_frame_render_resample_horiz_1tap (CogFrame * frame, void *_dest, { uint8_t *dest = _dest; uint8_t *src; - int n_src; int scale = frame->param1; - n_src = frame->virt_frame1->components[component].width; + /* n_src = frame->virt_frame1->components[component].width; */ src = cog_virt_frame_get_line (frame->virt_frame1, component, i); cogorc_resample_horiz_1tap (dest, src, 0, scale, @@ -650,10 +648,9 @@ cog_virt_frame_render_resample_horiz_2tap (CogFrame * frame, void *_dest, { uint8_t *dest = _dest; uint8_t *src; - int n_src; int scale = frame->param1; - n_src = frame->virt_frame1->components[component].width; + /* n_src = frame->virt_frame1->components[component].width; */ src = cog_virt_frame_get_line (frame->virt_frame1, component, i); cogorc_resample_horiz_2tap (dest, src, 0, scale, diff --git a/ext/dirac/gstdiracenc.cc b/ext/dirac/gstdiracenc.cc index 1c499d71f8..c42fc17eb4 100644 --- a/ext/dirac/gstdiracenc.cc +++ b/ext/dirac/gstdiracenc.cc @@ -24,6 +24,7 @@ #include #include #include +#include #include #include #include @@ -149,7 +150,7 @@ static gboolean gst_dirac_enc_set_format (GstBaseVideoEncoder * base_video_encoder, GstVideoState * state); static gboolean gst_dirac_enc_start (GstBaseVideoEncoder * base_video_encoder); static gboolean gst_dirac_enc_stop (GstBaseVideoEncoder * base_video_encoder); -static gboolean gst_dirac_enc_finish (GstBaseVideoEncoder * base_video_encoder); +static GstFlowReturn gst_dirac_enc_finish (GstBaseVideoEncoder * base_video_encoder); static GstFlowReturn gst_dirac_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder, GstVideoFrame * frame); 
static GstFlowReturn gst_dirac_enc_shape_output (GstBaseVideoEncoder * @@ -223,13 +224,11 @@ static void gst_dirac_enc_class_init (GstDiracEncClass * klass) { GObjectClass *gobject_class; - GstElementClass *gstelement_class; GstBaseVideoEncoderClass *basevideoencoder_class; //int i; gobject_class = G_OBJECT_CLASS (klass); - gstelement_class = GST_ELEMENT_CLASS (klass); basevideoencoder_class = GST_BASE_VIDEO_ENCODER_CLASS (klass); gobject_class->set_property = gst_dirac_enc_set_property; @@ -843,7 +842,7 @@ gst_dirac_enc_stop (GstBaseVideoEncoder * base_video_encoder) return TRUE; } -static gboolean +static GstFlowReturn gst_dirac_enc_finish (GstBaseVideoEncoder * base_video_encoder) { GstDiracEnc *dirac_enc = GST_DIRAC_ENC (base_video_encoder); @@ -852,7 +851,7 @@ gst_dirac_enc_finish (GstBaseVideoEncoder * base_video_encoder) gst_dirac_enc_process (dirac_enc, TRUE); - return TRUE; + return GST_FLOW_OK; } static GstFlowReturn @@ -1136,7 +1135,6 @@ gst_dirac_enc_process (GstDiracEnc * dirac_enc, gboolean end_sequence) { GstBuffer *outbuf; GstFlowReturn ret; - int presentation_frame; int parse_code; int state; GstVideoFrame *frame; @@ -1192,8 +1190,6 @@ gst_dirac_enc_process (GstDiracEnc * dirac_enc, gboolean end_sequence) dirac_enc->pull_frame_num++; parse_code = ((guint8 *) GST_BUFFER_DATA (outbuf))[4]; - /* FIXME */ - presentation_frame = 0; if (DIRAC_PARSE_CODE_IS_SEQ_HEADER (parse_code)) { frame->is_sync_point = TRUE; @@ -1230,7 +1226,6 @@ gst_dirac_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder, GstVideoFrame * frame) { GstDiracEnc *dirac_enc; - int dpn; int delay; int dist; int pt; @@ -1241,8 +1236,6 @@ gst_dirac_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder, dirac_enc = GST_DIRAC_ENC (base_video_encoder); - dpn = frame->decode_frame_number; - pt = frame->presentation_frame_number * 2 + dirac_enc->granule_offset; dt = frame->decode_frame_number * 2 + dirac_enc->granule_offset; delay = pt - dt; diff --git a/ext/directfb/dfbvideosink.c b/ext/directfb/dfbvideosink.c index a6db910735..c32d27af06 100644 --- a/ext/directfb/dfbvideosink.c +++ b/ext/directfb/dfbvideosink.c @@ -2223,8 +2223,8 @@ gst_dfbvideosink_init (GstDfbVideoSink * dfbvideosink) { dfbvideosink->pool_lock = g_mutex_new (); dfbvideosink->buffer_pool = NULL; - dfbvideosink->video_height = dfbvideosink->out_width = 0; - dfbvideosink->video_width = dfbvideosink->out_height = 0; + dfbvideosink->video_height = dfbvideosink->out_height = 0; + dfbvideosink->video_width = dfbvideosink->out_width = 0; dfbvideosink->fps_d = 0; dfbvideosink->fps_n = 0; diff --git a/ext/jp2k/gstjasperdec.c b/ext/jp2k/gstjasperdec.c index 159eb95c8f..e896142bdb 100644 --- a/ext/jp2k/gstjasperdec.c +++ b/ext/jp2k/gstjasperdec.c @@ -68,11 +68,6 @@ static GstStaticPadTemplate gst_jasper_dec_src_template = GST_VIDEO_CAPS_YUV ("{ I420, YV12, YUY2, UYVY, Y41B, Y42B, v308 }")) ); -static void gst_jasper_dec_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec); -static void gst_jasper_dec_get_property (GObject * object, guint prop_id, - GValue * value, GParamSpec * pspec); - static void gst_jasper_dec_reset (GstJasperDec * dec); static GstStateChangeReturn gst_jasper_dec_change_state (GstElement * element, GstStateChange transition); @@ -114,18 +109,13 @@ gst_jasper_dec_base_init (gpointer g_class) static void gst_jasper_dec_class_init (GstJasperDecClass * klass) { - GObjectClass *gobject_class; GstElementClass *gstelement_class; - gobject_class = (GObjectClass *) klass; gstelement_class = 
(GstElementClass *) klass; GST_DEBUG_CATEGORY_INIT (gst_jasper_dec_debug, "jp2kdec", 0, "Jasper JPEG2000 decoder"); - gobject_class->set_property = gst_jasper_dec_set_property; - gobject_class->get_property = gst_jasper_dec_get_property; - gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_jasper_dec_change_state); } @@ -819,36 +809,6 @@ invalid_bytes_segment: } } -static void -gst_jasper_dec_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec) -{ - GstJasperDec *filter; - - filter = GST_JASPER_DEC (object); - - switch (prop_id) { - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); - break; - } -} - -static void -gst_jasper_dec_get_property (GObject * object, guint prop_id, - GValue * value, GParamSpec * pspec) -{ - GstJasperDec *filter; - - filter = GST_JASPER_DEC (object); - - switch (prop_id) { - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); - break; - } -} - static GstStateChangeReturn gst_jasper_dec_change_state (GstElement * element, GstStateChange transition) { diff --git a/ext/jp2k/gstjasperenc.c b/ext/jp2k/gstjasperenc.c index 90fca51598..ae7b92567c 100644 --- a/ext/jp2k/gstjasperenc.c +++ b/ext/jp2k/gstjasperenc.c @@ -65,11 +65,6 @@ static GstStaticPadTemplate gst_jasper_enc_src_template = "image/jp2") ); -static void gst_jasper_enc_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec); -static void gst_jasper_enc_get_property (GObject * object, guint prop_id, - GValue * value, GParamSpec * pspec); - static void gst_jasper_enc_reset (GstJasperEnc * enc); static GstStateChangeReturn gst_jasper_enc_change_state (GstElement * element, GstStateChange transition); @@ -118,18 +113,13 @@ gst_jasper_enc_base_init (gpointer g_class) static void gst_jasper_enc_class_init (GstJasperEncClass * klass) { - GObjectClass *gobject_class; GstElementClass *gstelement_class; - gobject_class = (GObjectClass *) klass; gstelement_class = (GstElementClass *) klass; GST_DEBUG_CATEGORY_INIT (gst_jasper_enc_debug, "jp2kenc", 0, "Jasper JPEG2000 encoder"); - gobject_class->set_property = gst_jasper_enc_set_property; - gobject_class->get_property = gst_jasper_enc_get_property; - /* FIXME add some encoder properties */ gstelement_class->change_state = @@ -535,36 +525,6 @@ not_negotiated: } } -static void -gst_jasper_enc_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec) -{ - GstJasperEnc *filter; - - filter = GST_JASPER_ENC (object); - - switch (prop_id) { - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); - break; - } -} - -static void -gst_jasper_enc_get_property (GObject * object, guint prop_id, - GValue * value, GParamSpec * pspec) -{ - GstJasperEnc *filter; - - filter = GST_JASPER_ENC (object); - - switch (prop_id) { - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); - break; - } -} - static GstStateChangeReturn gst_jasper_enc_change_state (GstElement * element, GstStateChange transition) { diff --git a/ext/lv2/gstlv2.c b/ext/lv2/gstlv2.c index dd1df3df10..f31050fc66 100644 --- a/ext/lv2/gstlv2.c +++ b/ext/lv2/gstlv2.c @@ -622,7 +622,6 @@ gst_lv2_setup (GstSignalProcessor * gsp, GstCaps * caps) GstLV2Group *group = NULL; GstAudioChannelPosition *positions = NULL; GstPad *pad; - GstCaps *pad_caps; gsp_class = GST_SIGNAL_PROCESSOR_GET_CLASS (gsp); lv2 = (GstLV2 *) gsp; @@ -655,7 +654,6 @@ gst_lv2_setup (GstSignalProcessor * gsp, GstCaps * caps) slv2_value_as_string (group->symbol)))) { 
GST_INFO_OBJECT (lv2, "set audio channel positions on sink pad %s", slv2_value_as_string (group->symbol)); - pad_caps = GST_PAD_CAPS (pad); s = gst_caps_get_structure (caps, 0); gst_audio_set_channel_positions (s, positions); gst_object_unref (pad); @@ -674,7 +672,6 @@ gst_lv2_setup (GstSignalProcessor * gsp, GstCaps * caps) slv2_value_as_string (group->symbol)))) { GST_INFO_OBJECT (lv2, "set audio channel positions on src pad %s", slv2_value_as_string (group->symbol)); - pad_caps = GST_PAD_CAPS (pad); s = gst_caps_get_structure (caps, 0); gst_audio_set_channel_positions (s, positions); gst_object_unref (pad); diff --git a/ext/modplug/gstmodplug.cc b/ext/modplug/gstmodplug.cc index b6b59eb122..6faaa0c837 100644 --- a/ext/modplug/gstmodplug.cc +++ b/ext/modplug/gstmodplug.cc @@ -370,15 +370,20 @@ gst_modplug_src_event (GstPad * pad, GstEvent * event) GstSeekType cur_type, stop_type; gboolean flush; gint64 cur, stop; +/* FIXME timestamp is set but not used */ +#if 0 guint64 timestamp; +#endif if (modplug->frequency == 0) { GST_DEBUG_OBJECT (modplug, "no song loaded yet"); break; } +#if 0 timestamp = gst_util_uint64_scale_int (modplug->offset, GST_SECOND, modplug->frequency); +#endif gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur, &stop_type, &stop); diff --git a/ext/neon/gstneonhttpsrc.c b/ext/neon/gstneonhttpsrc.c index 2844dcd648..b295f08375 100644 --- a/ext/neon/gstneonhttpsrc.c +++ b/ext/neon/gstneonhttpsrc.c @@ -98,6 +98,7 @@ static gboolean gst_neonhttp_src_get_size (GstBaseSrc * bsrc, guint64 * size); static gboolean gst_neonhttp_src_is_seekable (GstBaseSrc * bsrc); static gboolean gst_neonhttp_src_do_seek (GstBaseSrc * bsrc, GstSegment * segment); +static gboolean gst_neonhttp_src_query (GstBaseSrc * bsrc, GstQuery * query); static gboolean gst_neonhttp_src_set_proxy (GstNeonhttpSrc * src, const gchar * uri); @@ -268,6 +269,7 @@ gst_neonhttp_src_class_init (GstNeonhttpSrcClass * klass) gstbasesrc_class->is_seekable = GST_DEBUG_FUNCPTR (gst_neonhttp_src_is_seekable); gstbasesrc_class->do_seek = GST_DEBUG_FUNCPTR (gst_neonhttp_src_do_seek); + gstbasesrc_class->query = GST_DEBUG_FUNCPTR (gst_neonhttp_src_query); gstpushsrc_class->create = GST_DEBUG_FUNCPTR (gst_neonhttp_src_create); @@ -777,6 +779,28 @@ gst_neonhttp_src_do_seek (GstBaseSrc * bsrc, GstSegment * segment) return FALSE; } +static gboolean +gst_neonhttp_src_query (GstBaseSrc * bsrc, GstQuery * query) +{ + GstNeonhttpSrc *src = GST_NEONHTTP_SRC (bsrc); + gboolean ret; + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_URI: + gst_query_set_uri (query, src->location); + ret = TRUE; + break; + default: + ret = FALSE; + break; + } + + if (!ret) + ret = GST_BASE_SRC_CLASS (parent_class)->query (bsrc, query); + + return ret; +} + static gboolean gst_neonhttp_src_set_location (GstNeonhttpSrc * src, const gchar * uri) { diff --git a/ext/opencv/Makefile.am b/ext/opencv/Makefile.am index d5a70edade..a32e16cfa2 100644 --- a/ext/opencv/Makefile.am +++ b/ext/opencv/Makefile.am @@ -16,7 +16,12 @@ libgstopencv_la_SOURCES = gstopencv.c \ gstfacedetect.c \ gstpyramidsegment.c \ gsttemplatematch.c \ - gsttextoverlay.c + gsttextoverlay.c \ + gstmotioncells.c \ + motioncells_wrapper.cpp \ + MotionCells.cpp + +libgstopencv_la_CXXFLAGS = $(GST_CXXFLAGS) $(OPENCV_CFLAGS) # flags used to compile this facedetect # add other _CFLAGS and _LIBS as needed @@ -46,4 +51,7 @@ noinst_HEADERS = gstopencvvideofilter.h gstopencvutils.h \ gstfacedetect.h \ gstpyramidsegment.h \ gsttemplatematch.h \ - gsttextoverlay.h + gsttextoverlay.h 
\ + gstmotioncells.h \ + motioncells_wrapper.h \ + MotionCells.h diff --git a/ext/opencv/MotionCells.cpp b/ext/opencv/MotionCells.cpp new file mode 100644 index 0000000000..2b81b305dd --- /dev/null +++ b/ext/opencv/MotionCells.cpp @@ -0,0 +1,593 @@ +/* + * GStreamer + * Copyright (C) 2011 Robert Jobbagy + * Copyright (C) 2011 Nicola Murino + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + * + * Alternatively, the contents of this file may be used under the + * GNU Lesser General Public License Version 2.1 (the "LGPL"), in + * which case the following provisions apply instead of the ones + * mentioned above: + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#include +#include +#include +#include +#include +#include "MotionCells.h" + +uint64_t ntohl64 (uint64_t val); +uint64_t htonl64 (uint64_t val); + +uint64_t +ntohl64 (uint64_t val) +{ + uint64_t res64; + uint32_t low = (uint32_t) (val & 0x00000000FFFFFFFFLL); + uint32_t high = (uint32_t) ((val & 0xFFFFFFFF00000000LL) >> 32); + low = ntohl (low); + high = ntohl (high); + res64 = (uint64_t) high + (((uint64_t) low) << 32); + return res64; +} + + +uint64_t +htonl64 (uint64_t val) +{ + uint64_t res64; + uint32_t low = (uint32_t) (val & 0x00000000FFFFFFFFLL); + uint32_t high = (uint32_t) ((val & 0xFFFFFFFF00000000LL) >> 32); + low = htonl (low); + high = htonl (high); + res64 = (uint64_t) high + (((uint64_t) low) << 32); + return res64; +} + +MotionCells::MotionCells () +{ + m_framecnt = 0; + m_motioncells_idx_count = 0; + m_motioncellsidxcstr = NULL; + m_saveInDatafile = false; + mc_savefile = NULL; + m_pcurFrame = NULL; + m_pprevFrame = NULL; + transparencyimg = NULL; + m_pdifferenceImage = NULL; + m_pbwImage = NULL; + m_initdatafilefailed = new char[BUSMSGLEN]; + m_savedatafilefailed = new char[BUSMSGLEN]; + m_initerrorcode = 0; + m_saveerrorcode = 0; + m_alpha = 0.5; + m_beta = 0.5; + +} + +MotionCells::~MotionCells () +{ + if (mc_savefile) { + fclose (mc_savefile); + mc_savefile = NULL; + } + delete[]m_initdatafilefailed; + delete[]m_savedatafilefailed; + if (m_motioncellsidxcstr) + delete[]m_motioncellsidxcstr; + if (m_pcurFrame) + cvReleaseImage (&m_pcurFrame); + if (m_pprevFrame) + cvReleaseImage (&m_pprevFrame); + if (transparencyimg) + cvReleaseImage (&transparencyimg); + if (m_pdifferenceImage) + cvReleaseImage (&m_pdifferenceImage); + if (m_pbwImage) + cvReleaseImage (&m_pbwImage); +} + +int +MotionCells::performDetectionMotionCells (IplImage * p_frame, + double p_sensitivity, double p_framerate, int p_gridx, int p_gridy, + gint64 timestamp_millisec, bool p_isVisible, bool p_useAlpha, + int motionmaskcoord_count, motionmaskcoordrect * motionmaskcoords, + int motionmaskcells_count, motioncellidx * motionmaskcellsidx, + cellscolor motioncellscolor, int motioncells_count, + motioncellidx * motioncellsidx, gint64 starttime, char *p_datafile, + bool p_changed_datafile, int p_thickness) +{ + + int sumframecnt = 0; + int ret = 0; + p_framerate >= 1 ? p_framerate <= 5 ? sumframecnt = 1 + : p_framerate <= 10 ? sumframecnt = 2 + : p_framerate <= 15 ? sumframecnt = 3 + : p_framerate <= 20 ? sumframecnt = 4 + : p_framerate <= 25 ? 
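The htonl64()/ntohl64() helpers above build a 64-bit byte-order conversion out of two 32-bit swaps: the value is split into halves, each half is passed through htonl()/ntohl(), and the halves are exchanged. Below is a standalone sketch of the same construction with a small self-test; the function name swap64 is made up here, and the test assumes a little-endian host, where htonl() actually reorders bytes.

#include <arpa/inet.h>
#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* Same idea as the patch: byte-swap each 32-bit half, then exchange halves. */
static uint64_t
swap64 (uint64_t val)
{
  uint32_t low = (uint32_t) (val & 0xFFFFFFFFULL);
  uint32_t high = (uint32_t) (val >> 32);

  return ((uint64_t) htonl (low) << 32) | (uint64_t) htonl (high);
}

int
main (void)
{
  uint64_t v = 0x0102030405060708ULL;

  /* On a little-endian host all eight bytes end up reversed. */
  assert (swap64 (v) == 0x0807060504030201ULL);
  printf ("0x%016llx -> 0x%016llx\n",
      (unsigned long long) v, (unsigned long long) swap64 (v));
  return 0;
}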
sumframecnt = 5 : sumframecnt = 0 : sumframecnt = 0; + + m_framecnt++; + m_changed_datafile = p_changed_datafile; + if (m_framecnt >= sumframecnt) { + m_useAlpha = p_useAlpha; + m_gridx = p_gridx; + m_gridy = p_gridy; + if (m_changed_datafile) { + ret = initDataFile (p_datafile, starttime); + if (ret != 0) + return ret; + } + + m_frameSize = cvGetSize (p_frame); + m_frameSize.width /= 2; + m_frameSize.height /= 2; + setMotionCells (m_frameSize.width, m_frameSize.height); + m_sensitivity = 1 - p_sensitivity; + m_isVisible = p_isVisible; + m_pcurFrame = cvCloneImage (p_frame); + IplImage *m_pcurgreyImage = cvCreateImage (m_frameSize, IPL_DEPTH_8U, 1); + IplImage *m_pprevgreyImage = cvCreateImage (m_frameSize, IPL_DEPTH_8U, 1); + IplImage *m_pgreyImage = cvCreateImage (m_frameSize, IPL_DEPTH_8U, 1); + IplImage *m_pcurDown = + cvCreateImage (m_frameSize, m_pcurFrame->depth, m_pcurFrame->nChannels); + IplImage *m_pprevDown = cvCreateImage (m_frameSize, m_pprevFrame->depth, + m_pprevFrame->nChannels); + m_pbwImage = cvCreateImage (m_frameSize, IPL_DEPTH_8U, 1); + cvPyrDown (m_pprevFrame, m_pprevDown); + cvCvtColor (m_pprevDown, m_pprevgreyImage, CV_RGB2GRAY); + if (m_pprevFrame) + cvReleaseImage (&m_pprevFrame); + cvPyrDown (m_pcurFrame, m_pcurDown); + cvCvtColor (m_pcurDown, m_pcurgreyImage, CV_RGB2GRAY); + m_pdifferenceImage = cvCloneImage (m_pcurgreyImage); + //cvSmooth(m_pcurgreyImage, m_pcurgreyImage, CV_GAUSSIAN, 3, 0);//TODO camera noise reduce,something smoothing, and rethink runningavg weights + + //Minus the current gray frame from the 8U moving average. + cvAbsDiff (m_pprevgreyImage, m_pcurgreyImage, m_pdifferenceImage); + + //Convert the image to black and white. + cvAdaptiveThreshold (m_pdifferenceImage, m_pbwImage, 255, + CV_ADAPTIVE_THRESH_GAUSSIAN_C, CV_THRESH_BINARY_INV, 7); + + // Dilate and erode to get object blobs + cvDilate (m_pbwImage, m_pbwImage, NULL, 2); + cvErode (m_pbwImage, m_pbwImage, NULL, 2); + + //mask-out the overlay on difference image + if (motionmaskcoord_count > 0) + performMotionMaskCoords (motionmaskcoords, motionmaskcoord_count); + if (motionmaskcells_count > 0) + performMotionMask (motionmaskcellsidx, motionmaskcells_count); + if (getIsNonZero (m_pbwImage)) { //detect Motion + GST_DEBUG ("DETECT MOTION \n"); + if (m_MotionCells.size () > 0) //it contains previous motioncells what we used when frames dropped + m_MotionCells.clear (); + if (transparencyimg) + cvReleaseImage (&transparencyimg); + (motioncells_count > 0) ? 
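The detection front-end above downscales both frames with a Gaussian pyramid, converts them to grey, takes the absolute difference, binarizes it adaptively and cleans the mask up with dilate/erode. The sketch below replays that OpenCV C-API sequence in isolation; it is not taken from the patch, the header path may differ between OpenCV versions, the trailing parameters are written out because plain C has no default arguments, and the input frames are assumed to be equally sized 3-channel RGB images with even dimensions. The caller owns the returned mask.

#include <opencv/cv.h>

/* Illustrative sketch of the frame-differencing stage used above. */
static IplImage *
difference_mask (IplImage * prev, IplImage * cur)
{
  CvSize half = cvSize (cur->width / 2, cur->height / 2);
  IplImage *prev_down = cvCreateImage (half, cur->depth, cur->nChannels);
  IplImage *cur_down = cvCreateImage (half, cur->depth, cur->nChannels);
  IplImage *prev_grey = cvCreateImage (half, IPL_DEPTH_8U, 1);
  IplImage *cur_grey = cvCreateImage (half, IPL_DEPTH_8U, 1);
  IplImage *diff = cvCreateImage (half, IPL_DEPTH_8U, 1);
  IplImage *mask = cvCreateImage (half, IPL_DEPTH_8U, 1);

  /* Downscale by two, convert to grey. */
  cvPyrDown (prev, prev_down, CV_GAUSSIAN_5x5);
  cvPyrDown (cur, cur_down, CV_GAUSSIAN_5x5);
  cvCvtColor (prev_down, prev_grey, CV_RGB2GRAY);
  cvCvtColor (cur_down, cur_grey, CV_RGB2GRAY);

  /* Absolute difference, adaptive binarization, blob cleanup. */
  cvAbsDiff (prev_grey, cur_grey, diff);
  cvAdaptiveThreshold (diff, mask, 255, CV_ADAPTIVE_THRESH_GAUSSIAN_C,
      CV_THRESH_BINARY_INV, 7, 5);
  cvDilate (mask, mask, NULL, 2);
  cvErode (mask, mask, NULL, 2);

  cvReleaseImage (&prev_down);
  cvReleaseImage (&cur_down);
  cvReleaseImage (&prev_grey);
  cvReleaseImage (&cur_grey);
  cvReleaseImage (&diff);
  return mask;
}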
+ calculateMotionPercentInMotionCells (motioncellsidx, + motioncells_count) + : calculateMotionPercentInMotionCells (motionmaskcellsidx, 0); + + transparencyimg = cvCreateImage (cvGetSize (p_frame), p_frame->depth, 3); + cvSetZero (transparencyimg); + if (m_motioncellsidxcstr) + delete[]m_motioncellsidxcstr; + m_motioncells_idx_count = m_MotionCells.size () * MSGLEN; //one motion cell idx: (lin idx : col idx,) it's 4 character except last motion cell idx + m_motioncellsidxcstr = new char[m_motioncells_idx_count]; + char *tmpstr = new char[MSGLEN]; + for (int i = 0; i < MSGLEN; i++) + tmpstr[i] = ' '; + for (unsigned int i = 0; i < m_MotionCells.size (); i++) { + CvPoint pt1, pt2; + pt1.x = m_MotionCells.at (i).cell_pt1.x * 2; + pt1.y = m_MotionCells.at (i).cell_pt1.y * 2; + pt2.x = m_MotionCells.at (i).cell_pt2.x * 2; + pt2.y = m_MotionCells.at (i).cell_pt2.y * 2; + if (m_useAlpha && m_isVisible) { + cvRectangle (transparencyimg, + pt1, + pt2, + CV_RGB (motioncellscolor.B_channel_value, + motioncellscolor.G_channel_value, + motioncellscolor.R_channel_value), CV_FILLED); + } else if (m_isVisible) { + cvRectangle (p_frame, + pt1, + pt2, + CV_RGB (motioncellscolor.B_channel_value, + motioncellscolor.G_channel_value, + motioncellscolor.R_channel_value), p_thickness); + } + + if (i < m_MotionCells.size () - 1) { + snprintf (tmpstr, MSGLEN, "%d:%d,", m_MotionCells.at (i).lineidx, + m_MotionCells.at (i).colidx); + } else { + snprintf (tmpstr, MSGLEN, "%d:%d", m_MotionCells.at (i).lineidx, + m_MotionCells.at (i).colidx); + } + if (i == 0) + strncpy (m_motioncellsidxcstr, tmpstr, m_motioncells_idx_count); + else + strcat (m_motioncellsidxcstr, tmpstr); + } + if (m_MotionCells.size () == 0) + strncpy (m_motioncellsidxcstr, " ", m_motioncells_idx_count); + + if (m_useAlpha && m_isVisible) { + if (m_MotionCells.size () > 0) + blendImages (p_frame, transparencyimg, m_alpha, m_beta); + } + + delete[]tmpstr; + + if (mc_savefile && m_saveInDatafile) { + ret = saveMotionCells (timestamp_millisec); + if (ret != 0) + return ret; + } + } else { + m_motioncells_idx_count = 0; + if (m_MotionCells.size () > 0) + m_MotionCells.clear (); + if (transparencyimg) + cvReleaseImage (&transparencyimg); + } + + m_pprevFrame = cvCloneImage (m_pcurFrame); + m_framecnt = 0; + if (m_pcurFrame) + cvReleaseImage (&m_pcurFrame); + if (m_pdifferenceImage) + cvReleaseImage (&m_pdifferenceImage); + if (m_pcurgreyImage) + cvReleaseImage (&m_pcurgreyImage); + if (m_pprevgreyImage) + cvReleaseImage (&m_pprevgreyImage); + if (m_pgreyImage) + cvReleaseImage (&m_pgreyImage); + if (m_pbwImage) + cvReleaseImage (&m_pbwImage); + if (m_pprevDown) + cvReleaseImage (&m_pprevDown); + if (m_pcurDown) + cvReleaseImage (&m_pcurDown); + if (m_pCells) { + for (int i = 0; i < m_gridy; ++i) { + delete[]m_pCells[i]; + } + delete[]m_pCells; + } + + if (p_framerate <= 5) { + if (m_MotionCells.size () > 0) + m_MotionCells.clear (); + if (transparencyimg) + cvReleaseImage (&transparencyimg); + } + } else { //we do frame drop + m_motioncells_idx_count = 0; + ret = -2; + for (unsigned int i = 0; i < m_MotionCells.size (); i++) { + CvPoint pt1, pt2; + pt1.x = m_MotionCells.at (i).cell_pt1.x * 2; + pt1.y = m_MotionCells.at (i).cell_pt1.y * 2; + pt2.x = m_MotionCells.at (i).cell_pt2.x * 2; + pt2.y = m_MotionCells.at (i).cell_pt2.y * 2; + if (m_useAlpha && m_isVisible) { + cvRectangle (transparencyimg, + pt1, + pt2, + CV_RGB (motioncellscolor.B_channel_value, + motioncellscolor.G_channel_value, + motioncellscolor.R_channel_value), CV_FILLED); + } else if 
(m_isVisible) { + cvRectangle (p_frame, + pt1, + pt2, + CV_RGB (motioncellscolor.B_channel_value, + motioncellscolor.G_channel_value, + motioncellscolor.R_channel_value), p_thickness); + } + + } + if (m_useAlpha && m_isVisible) { + if (m_MotionCells.size () > 0) + blendImages (p_frame, transparencyimg, m_alpha, m_beta); + } + } + return ret; +} + +int +MotionCells::initDataFile (char *p_datafile, gint64 starttime) //p_date is increased with difference between current and previous buffer ts +{ + MotionCellData mcd; + if (strncmp (p_datafile, " ", 1)) { + mc_savefile = fopen (p_datafile, "w"); + if (mc_savefile == NULL) { + //fprintf(stderr, "%s %d:initDataFile:fopen:%d (%s)\n", __FILE__, __LINE__, errno, + //strerror(errno)); + strncpy (m_initdatafilefailed, strerror (errno), BUSMSGLEN - 1); + m_initerrorcode = errno; + return 1; + } else { + m_saveInDatafile = true; + } + } else + mc_savefile = NULL; + bzero (&m_header, sizeof (MotionCellHeader)); + m_header.headersize = htonl (MC_HEADER); + m_header.type = htonl (MC_TYPE); + m_header.version = htonl (MC_VERSION); + //it needs these bytes + m_header.itemsize = + htonl ((int) ceil (ceil (m_gridx * m_gridy / 8.0) / 4.0) * 4 + + sizeof (mcd.timestamp)); + m_header.gridx = htonl (m_gridx); + m_header.gridy = htonl (m_gridy); + m_header.starttime = htonl64 (starttime); + + snprintf (m_header.name, sizeof (m_header.name), "%s %dx%d", MC_VERSIONTEXT, + ntohl (m_header.gridx), ntohl (m_header.gridy)); + m_changed_datafile = false; + return 0; +} + +int +MotionCells::saveMotionCells (gint64 timestamp_millisec) +{ + + MotionCellData mc_data; + mc_data.timestamp = htonl (timestamp_millisec); + mc_data.data = NULL; + //There is no datafile + if (mc_savefile == NULL) + return 0; + + if (ftello (mc_savefile) == 0) { + //cerr << "Writing out file header"<< m_header.headersize <<":" << sizeof(MotionCellHeader) << " itemsize:" + //<< m_header.itemsize << endl; + if (fwrite (&m_header, sizeof (MotionCellHeader), 1, mc_savefile) != 1) { + //fprintf(stderr, "%s %d:saveMotionCells:fwrite:%d (%s)\n", __FILE__, __LINE__, errno, + //strerror(errno)); + strncpy (m_savedatafilefailed, strerror (errno), BUSMSGLEN - 1); + m_saveerrorcode = errno; + return -1; + } + } + + mc_data.data = + (char *) calloc (1, + ntohl (m_header.itemsize) - sizeof (mc_data.timestamp)); + if (mc_data.data == NULL) { + //fprintf(stderr, "%s %d:saveMotionCells:calloc:%d (%s)\n", __FILE__, __LINE__, errno, + //strerror(errno)); + strncpy (m_savedatafilefailed, strerror (errno), BUSMSGLEN - 1); + m_saveerrorcode = errno; + return -1; + } + + for (unsigned int i = 0; i < m_MotionCells.size (); i++) { + int bitnum = + m_MotionCells.at (i).lineidx * ntohl (m_header.gridx) + + m_MotionCells.at (i).colidx; + int bytenum = (int) floor (bitnum / 8.0); + int shift = bitnum - bytenum * 8; + mc_data.data[bytenum] = mc_data.data[bytenum] | (1 << shift); + //cerr << "Motion Detected " << "line:" << m_MotionCells.at(i).lineidx << " col:" << m_MotionCells.at(i).colidx; + //cerr << " bitnum " << bitnum << " bytenum " << bytenum << " shift " << shift << " value " << (int)mc_data.data[bytenum] << endl; + } + + if (fwrite (&mc_data.timestamp, sizeof (mc_data.timestamp), 1, + mc_savefile) != 1) { + //fprintf(stderr, "%s %d:saveMotionCells:fwrite:%d (%s)\n", __FILE__, __LINE__, errno, + //strerror(errno)); + strncpy (m_savedatafilefailed, strerror (errno), BUSMSGLEN - 1); + m_saveerrorcode = errno; + return -1; + } + + if (fwrite (mc_data.data, + ntohl (m_header.itemsize) - sizeof (mc_data.timestamp), 1, + 
mc_savefile) != 1) { + //fprintf(stderr, "%s %d:saveMotionCells:fwrite:%d (%s)\n", __FILE__, __LINE__, errno, + //strerror(errno)); + strncpy (m_savedatafilefailed, strerror (errno), BUSMSGLEN - 1); + m_saveerrorcode = errno; + return -1; + } + + free (mc_data.data); + return 0; +} + +double +MotionCells::calculateMotionPercentInCell (int p_row, int p_col, + double *p_cellarea, double *p_motionarea) +{ + double cntpixelsnum = 0; + double cntmotionpixelnum = 0; + + int ybegin = floor ((double) p_row * m_cellheight); + int yend = floor ((double) (p_row + 1) * m_cellheight); + int xbegin = floor ((double) (p_col) * m_cellwidth); + int xend = floor ((double) (p_col + 1) * m_cellwidth); + int cellw = xend - xbegin; + int cellh = yend - ybegin; + int cellarea = cellw * cellh; + *p_cellarea = cellarea; + int thresholdmotionpixelnum = floor ((double) cellarea * m_sensitivity); + + for (int i = ybegin; i < yend; i++) { + for (int j = xbegin; j < xend; j++) { + cntpixelsnum++; + if ((((uchar *) (m_pbwImage->imageData + m_pbwImage->widthStep * i))[j]) > + 0) { + cntmotionpixelnum++; + if (cntmotionpixelnum >= thresholdmotionpixelnum) { //we dont needs calculate anymore + *p_motionarea = cntmotionpixelnum; + return (cntmotionpixelnum / cntpixelsnum); + } + } + int remainingpixelsnum = cellarea - cntpixelsnum; + if ((cntmotionpixelnum + remainingpixelsnum) < thresholdmotionpixelnum) { //moving pixels number will be less than threshold + *p_motionarea = 0; + return 0; + } + } + } + + return (cntmotionpixelnum / cntpixelsnum); +} + +void +MotionCells::calculateMotionPercentInMotionCells (motioncellidx * + p_motioncellsidx, int p_motioncells_count) +{ + if (p_motioncells_count == 0) { + for (int i = 0; i < m_gridy; i++) { + for (int j = 0; j < m_gridx; j++) { + m_pCells[i][j].MotionPercent = calculateMotionPercentInCell (i, j, + &m_pCells[i][j].CellArea, &m_pCells[i][j].MotionArea); + m_pCells[i][j].hasMotion = + m_sensitivity < m_pCells[i][j].MotionPercent ? true : false; + if (m_pCells[i][j].hasMotion) { + MotionCellsIdx mci; + mci.lineidx = i; + mci.colidx = j; + mci.cell_pt1.x = floor ((double) j * m_cellwidth); + mci.cell_pt1.y = floor ((double) i * m_cellheight); + mci.cell_pt2.x = floor ((double) (j + 1) * m_cellwidth); + mci.cell_pt2.y = floor ((double) (i + 1) * m_cellheight); + int w = mci.cell_pt2.x - mci.cell_pt1.x; + int h = mci.cell_pt2.y - mci.cell_pt1.y; + mci.motioncell = cvRect (mci.cell_pt1.x, mci.cell_pt1.y, w, h); + m_MotionCells.push_back (mci); + } + } + } + } else { + for (int k = 0; k < p_motioncells_count; ++k) { + + int i = p_motioncellsidx[k].lineidx; + int j = p_motioncellsidx[k].columnidx; + m_pCells[i][j].MotionPercent = + calculateMotionPercentInCell (i, j, + &m_pCells[i][j].CellArea, &m_pCells[i][j].MotionArea); + m_pCells[i][j].hasMotion = + m_pCells[i][j].MotionPercent > m_sensitivity ? 
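initDataFile()/saveMotionCells() above write a fixed header followed by one record per analysed frame: a 32-bit timestamp plus a bitmap with one bit per grid cell (bit index = line * gridx + column), padded up to a multiple of four bytes. The standalone sketch below reproduces that size calculation and bit packing; it is not from the patch, uses host byte order for brevity (the patch stores header fields and sizes in network order), and the helper names are made up.

#include <math.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Bytes needed for one cell bitmap, rounded up to a multiple of 4,
 * mirroring the itemsize computation in initDataFile(). */
static int
cell_bitmap_bytes (int gridx, int gridy)
{
  return (int) ceil (ceil (gridx * gridy / 8.0) / 4.0) * 4;
}

/* Set the bit for the cell at (line, col), row-major as in saveMotionCells(). */
static void
mark_cell (char *bitmap, int gridx, int line, int col)
{
  int bit = line * gridx + col;

  bitmap[bit / 8] |= (char) (1 << (bit % 8));
}

int
main (void)
{
  int gridx = 10, gridy = 10;
  char bitmap[64];

  memset (bitmap, 0, sizeof (bitmap));
  /* 100 cells -> 13 bytes of bits, padded to 16; plus 4 bytes of timestamp. */
  printf ("itemsize = %d bytes\n",
      cell_bitmap_bytes (gridx, gridy) + (int) sizeof (int32_t));
  mark_cell (bitmap, gridx, 3, 7);      /* bit 37 -> byte 4, shift 5 */
  printf ("byte 4 = 0x%02x\n", (unsigned char) bitmap[4]);
  return 0;
}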
true : false; + if (m_pCells[i][j].hasMotion) { + MotionCellsIdx mci; + mci.lineidx = p_motioncellsidx[k].lineidx; + mci.colidx = p_motioncellsidx[k].columnidx; + mci.cell_pt1.x = floor ((double) j * m_cellwidth); + mci.cell_pt1.y = floor ((double) i * m_cellheight); + mci.cell_pt2.x = floor ((double) (j + 1) * m_cellwidth); + mci.cell_pt2.y = floor ((double) (i + 1) * m_cellheight); + int w = mci.cell_pt2.x - mci.cell_pt1.x; + int h = mci.cell_pt2.y - mci.cell_pt1.y; + mci.motioncell = cvRect (mci.cell_pt1.x, mci.cell_pt1.y, w, h); + m_MotionCells.push_back (mci); + } + } + } +} + +void +MotionCells::performMotionMaskCoords (motionmaskcoordrect * p_motionmaskcoords, + int p_motionmaskcoords_count) +{ + CvPoint upperleft; + upperleft.x = 0; + upperleft.y = 0; + CvPoint lowerright; + lowerright.x = 0; + lowerright.y = 0; + for (int i = 0; i < p_motionmaskcoords_count; i++) { + upperleft.x = p_motionmaskcoords[i].upper_left_x; + upperleft.y = p_motionmaskcoords[i].upper_left_y; + lowerright.x = p_motionmaskcoords[i].lower_right_x; + lowerright.y = p_motionmaskcoords[i].lower_right_y; + cvRectangle (m_pbwImage, upperleft, lowerright, CV_RGB (0, 0, 0), + CV_FILLED); + } +} + +void +MotionCells::performMotionMask (motioncellidx * p_motionmaskcellsidx, + int p_motionmaskcells_count) +{ + for (int k = 0; k < p_motionmaskcells_count; k++) { + int beginy = p_motionmaskcellsidx[k].lineidx * m_cellheight; + int beginx = p_motionmaskcellsidx[k].columnidx * m_cellwidth; + int endx = + (double) p_motionmaskcellsidx[k].columnidx * m_cellwidth + m_cellwidth; + int endy = + (double) p_motionmaskcellsidx[k].lineidx * m_cellheight + m_cellheight; + for (int i = beginy; i < endy; i++) + for (int j = beginx; j < endx; j++) { + ((uchar *) (m_pbwImage->imageData + m_pbwImage->widthStep * i))[j] = 0; + } + } +} + +///BGR if we use only OpenCV +//RGB if we use gst+OpenCV +void +MotionCells::blendImages (IplImage * p_actFrame, IplImage * p_cellsFrame, + float p_alpha, float p_beta) +{ + + int height = p_actFrame->height; + int width = p_actFrame->width; + int step = p_actFrame->widthStep / sizeof (uchar); + int channels = p_actFrame->nChannels; + int cellstep = p_cellsFrame->widthStep / sizeof (uchar); + uchar *curImageData = (uchar *) p_actFrame->imageData; + uchar *cellImageData = (uchar *) p_cellsFrame->imageData; + + for (int i = 0; i < height; i++) + for (int j = 0; j < width; j++) + for (int k = 0; k < channels; k++) + if (cellImageData[i * cellstep + j * channels + k] > 0) { + curImageData[i * step + j * channels + k] = + round ((double) curImageData[i * step + j * channels + + k] * p_alpha + ((double) cellImageData[i * cellstep + + j * channels + k] * p_beta)); + } +} diff --git a/ext/opencv/MotionCells.h b/ext/opencv/MotionCells.h new file mode 100644 index 0000000000..ee84fd6b5b --- /dev/null +++ b/ext/opencv/MotionCells.h @@ -0,0 +1,259 @@ +/* + * GStreamer + * Copyright (C) 2011 Robert Jobbagy + * Copyright (C) 2011 Nicola Murino + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions 
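blendImages() above combines the frame and the cell overlay per channel: wherever the overlay is non-zero, the output is round(frame * alpha + overlay * beta), and elsewhere the frame pixel is kept; the element uses alpha = beta = 0.5. A tiny worked example of that formula for a single colour channel, not taken from the patch:

#include <math.h>
#include <stdio.h>

int
main (void)
{
  double alpha = 0.5, beta = 0.5;
  unsigned char frame_px = 200, overlay_px = 255;   /* one colour channel */
  unsigned char blended =
      (unsigned char) round (frame_px * alpha + overlay_px * beta);

  /* 200 * 0.5 + 255 * 0.5 = 227.5, rounded to 228 */
  printf ("%d blended with %d -> %d\n", frame_px, overlay_px, blended);
  return 0;
}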
of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + * + * Alternatively, the contents of this file may be used under the + * GNU Lesser General Public License Version 2.1 (the "LGPL"), in + * which case the following provisions apply instead of the ones + * mentioned above: + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef MOTIONCELLS_H_ +#define MOTIONCELLS_H_ + +#include // includes OpenCV definitions +#include // includes highGUI definitions +#include +#include +#include +#include +#include +#include + +//MotionCells defines +#define MC_HEADER 64 +#define MC_TYPE 1 +#define MC_VERSION 1 +#define MC_VERSIONTEXT "MotionCells-1" +#define MSGLEN 6 +#define BUSMSGLEN 20 + +using namespace std; + +struct MotionCellHeader{ + gint32 headersize; + gint32 type; + gint32 version; + gint32 itemsize; + gint32 gridx; + gint32 gridy; + gint64 starttime; + char name[MC_HEADER - 32]; +}; + +struct MotionCellData{ + gint32 timestamp; + char *data; +}; + +typedef struct { + int upper_left_x; + int upper_left_y; + int lower_right_x; + int lower_right_y; +} motionmaskcoordrect; + +typedef struct { + int R_channel_value; + int G_channel_value; + int B_channel_value; +} cellscolor; + +typedef struct { + int lineidx; + int columnidx; +} motioncellidx; + +struct Cell +{ + double MotionArea; + double CellArea; + double MotionPercent; + bool hasMotion; +}; + +struct MotionCellsIdx +{ + CvRect motioncell; + //Points for the edges of the rectangle. 
+ CvPoint cell_pt1; + CvPoint cell_pt2; + int lineidx; + int colidx; +}; + +struct OverlayRegions +{ + CvPoint upperleft; + CvPoint lowerright; +}; + +class MotionCells +{ +public: + + MotionCells (); + virtual ~ MotionCells (); + + int performDetectionMotionCells (IplImage * p_frame, double p_sensitivity, + double p_framerate, int p_gridx, int p_gridy, gint64 timestamp_millisec, + bool p_isVisble, bool p_useAlpha, int motionmaskcoord_count, + motionmaskcoordrect * motionmaskcoords, int motionmaskcells_count, + motioncellidx * motionmaskcellsidx, cellscolor motioncellscolor, + int motioncells_count, motioncellidx * motioncellsidx, gint64 starttime, + char *datafile, bool p_changed_datafile, int p_thickness); + + void setPrevFrame (IplImage * p_prevframe) + { + m_pprevFrame = cvCloneImage (p_prevframe); + } + char *getMotionCellsIdx () + { + return m_motioncellsidxcstr; + } + + int getMotionCellsIdxCount () + { + return m_motioncells_idx_count; + } + + bool getChangedDataFile () + { + return m_changed_datafile; + } + + char *getDatafileInitFailed () + { + return m_initdatafilefailed; + } + + char *getDatafileSaveFailed () + { + return m_savedatafilefailed; + } + + int getInitErrorCode () + { + return m_initerrorcode; + } + + int getSaveErrorCode () + { + return m_saveerrorcode; + } + + void freeDataFile () + { + if (mc_savefile) { + fclose (mc_savefile); + mc_savefile = NULL; + m_saveInDatafile = false; + } + } + +private: + + double calculateMotionPercentInCell (int p_row, int p_col, double *p_cellarea, + double *p_motionarea); + void performMotionMaskCoords (motionmaskcoordrect * p_motionmaskcoords, + int p_motionmaskcoords_count); + void performMotionMask (motioncellidx * p_motionmaskcellsidx, + int p_motionmaskcells_count); + void calculateMotionPercentInMotionCells (motioncellidx * + p_motionmaskcellsidx, int p_motionmaskcells_count = 0); + int saveMotionCells (gint64 timestamp_millisec); + int initDataFile (char *p_datafile, gint64 starttime); + void blendImages (IplImage * p_actFrame, IplImage * p_cellsFrame, + float p_alpha, float p_beta); + + void setData (IplImage * img, int lin, int col, uchar valor) + { + ((uchar *) (img->imageData + img->widthStep * lin))[col] = valor; + } + + uchar getData (IplImage * img, int lin, int col) + { + return ((uchar *) (img->imageData + img->widthStep * lin))[col]; + } + + bool getIsNonZero (IplImage * img) + { + for (int lin = 0; lin < img->height; lin++) + for (int col = 0; col < img->width; col++) { + if ((((uchar *) (img->imageData + img->widthStep * lin))[col]) > 0) + return true; + } + return false; + } + + void setMotionCells (int p_frameWidth, int p_frameHeight) + { + m_cellwidth = (double) p_frameWidth / (double) m_gridx; + m_cellheight = (double) p_frameHeight / (double) m_gridy; + m_pCells = new Cell *[m_gridy]; + for (int i = 0; i < m_gridy; i++) + m_pCells[i] = new Cell[m_gridx]; + + //init cells + for (int i = 0; i < m_gridy; i++) + for (int j = 0; j < m_gridx; j++) { + m_pCells[i][j].MotionArea = 0; + m_pCells[i][j].CellArea = 0; + m_pCells[i][j].MotionPercent = 0; + m_pCells[i][j].hasMotion = false; + } + } + + IplImage *m_pcurFrame, *m_pprevFrame, *m_pdifferenceImage, + *m_pbwImage,*transparencyimg; + CvSize m_frameSize; + bool m_isVisible, m_changed_datafile, m_useAlpha, m_saveInDatafile; + Cell **m_pCells; + vector < MotionCellsIdx > m_MotionCells; + vector < OverlayRegions > m_OverlayRegions; + int m_gridx, m_gridy; + double m_cellwidth, m_cellheight; + double m_alpha, m_beta; + double m_thresholdBoundingboxArea, m_cellArea, 
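setMotionCells() and the cell-rectangle code above operate on a frame that has been pyramid-downscaled by two, so cell sizes are computed on the half-resolution grid and the resulting rectangle corners are doubled when drawn on the full frame. A small worked example of that arithmetic, with made-up frame and grid sizes:

#include <math.h>
#include <stdio.h>

int
main (void)
{
  int frame_w = 320, frame_h = 240;     /* full-resolution frame */
  int gridx = 10, gridy = 10;
  double cell_w = (frame_w / 2) / (double) gridx;   /* 16.0 on the half frame */
  double cell_h = (frame_h / 2) / (double) gridy;   /* 12.0 on the half frame */
  int line = 2, col = 5;                /* cell of interest */

  /* Corners on the half frame, doubled back to full-frame coordinates. */
  int x1 = (int) floor (col * cell_w) * 2;
  int y1 = (int) floor (line * cell_h) * 2;
  int x2 = (int) floor ((col + 1) * cell_w) * 2;
  int y2 = (int) floor ((line + 1) * cell_h) * 2;

  printf ("cell %d:%d -> (%d,%d)-(%d,%d) on the full frame\n",
      line, col, x1, y1, x2, y2);       /* (160,48)-(192,72) */
  return 0;
}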
m_sensitivity; + int m_framecnt, m_motioncells_idx_count, m_initerrorcode, m_saveerrorcode; + char *m_motioncellsidxcstr, *m_initdatafilefailed, *m_savedatafilefailed; + FILE *mc_savefile; + MotionCellHeader m_header; + +}; + +#endif /* MOTIONCELLS_H_ */ diff --git a/ext/opencv/gstmotioncells.c b/ext/opencv/gstmotioncells.c new file mode 100644 index 0000000000..a349bcac1d --- /dev/null +++ b/ext/opencv/gstmotioncells.c @@ -0,0 +1,1109 @@ +/* + * GStreamer MotioCells detect areas of motion + * Copyright (C) 2011 Robert Jobbagy + * Copyright (C) 2011 Nicola Murino + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + * + * Alternatively, the contents of this file may be used under the + * GNU Lesser General Public License Version 2.1 (the "LGPL"), in + * which case the following provisions apply instead of the ones + * mentioned above: + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +/** + * SECTION:element-motioncells + * + * Performs motion detection on videos. + * + * + * Example launch line + * |[ + * gst-launch-0.10 videotestsrc pattern=18 ! videorate ! videoscale ! video/x-raw-yuv,width=320,height=240,framerate=5/1 ! ffmpegcolorspace ! motioncells ! ffmpegcolorspace ! 
xvimagesink + * ]| + * + */ + +#ifdef HAVE_CONFIG_H +# include +#endif + +#include +#include +#include +#include "gstmotioncells.h" +#include "motioncells_wrapper.h" +#include +#include +#include + +GST_DEBUG_CATEGORY_STATIC (gst_motion_cells_debug); +#define GST_CAT_DEFAULT gst_motion_cells_debug + +#define GRID_DEF 10 +#define GRID_MIN 8 +#define GRID_MAX 32 +#define SENSITIVITY_DEFAULT 0.5 +#define SENSITIVITY_MIN 0 +#define SENSITIVITY_MAX 1 +#define THRESHOLD_MIN 0 +#define THRESHOLD_DEFAULT 0.01 +#define THRESHOLD_MAX 1.0 +#define GAP_MIN 1 +#define GAP_DEF 5 +#define GAP_MAX 60 +#define POST_NO_MOTION_MIN 0 +#define POST_NO_MOTION_DEF 0 +#define POST_NO_MOTION_MAX 180 +#define MINIMUM_MOTION_FRAMES_MIN 1 +#define MINIMUM_MOTION_FRAMES_DEF 1 +#define MINIMUM_MOTION_FRAMES_MAX 60 +#define THICKNESS_MIN -1 +#define THICKNESS_DEF 1 +#define THICKNESS_MAX 5 +#define DATE_MIN 0 +#define DATE_DEF 1 +#define DATE_MAX LONG_MAX +#define DEF_DATAFILEEXT "vamc" +#define MSGLEN 6 +#define BUSMSGLEN 20 + +#define GFREE(POINTER)\ + {\ + g_free(POINTER);\ + POINTER = NULL;\ + } + +int instanceCounter = 0; +gboolean element_id_was_max = false; + +/* Filter signals and args */ +enum +{ + /* FILL ME */ + LAST_SIGNAL +}; + +enum +{ + PROP_0, + PROP_GRID_X, + PROP_GRID_Y, + PROP_SENSITIVITY, + PROP_THRESHOLD, + PROP_DISPLAY, + PROP_DATE, + PROP_DATAFILE, + PROP_DATAFILE_EXT, + PROP_MOTIONMASKCOORD, + PROP_MOTIONMASKCELLSPOS, + PROP_CELLSCOLOR, + PROP_MOTIONCELLSIDX, + PROP_GAP, + PROP_POSTNOMOTION, + PROP_MINIMUNMOTIONFRAMES, + PROP_CALCULATEMOTION, + PROP_POSTALLMOTION, + PROP_USEALPHA, + PROP_MOTIONCELLTHICKNESS +}; + +/* the capabilities of the inputs and outputs. + */ +static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS ("video/x-raw-rgb")); + +static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS ("video/x-raw-rgb")); + +GST_BOILERPLATE (GstMotioncells, gst_motion_cells, GstElement, + GST_TYPE_ELEMENT); + +static void gst_motion_cells_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); +static void gst_motion_cells_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); + +static gboolean gst_motion_cells_set_caps (GstPad * pad, GstCaps * caps); +static GstFlowReturn gst_motion_cells_chain (GstPad * pad, GstBuffer * buf); + +static void gst_motioncells_update_motion_cells (GstMotioncells * filter); +static void gst_motioncells_update_motion_masks (GstMotioncells * filter); + +/* Clean up */ +static void +gst_motion_cells_finalize (GObject * obj) +{ + GstMotioncells *filter = gst_motion_cells (obj); + + motion_cells_free (filter->id); + + //freeing previously allocated dynamic array + if (filter->motionmaskcoord_count > 0) { + GFREE (filter->motionmaskcoords); + } + + if (filter->motionmaskcells_count > 0) { + GFREE (filter->motionmaskcellsidx); + } + if (filter->motioncells_count > 0) { + GFREE (filter->motioncellsidx); + } + + if (filter->cvImage) { + cvReleaseImage (&filter->cvImage); + } + + GFREE (filter->motioncellscolor); + GFREE (filter->prev_datafile); + GFREE (filter->cur_datafile); + GFREE (filter->basename_datafile); + GFREE (filter->datafile_extension); + + g_mutex_free (filter->propset_mutex); + + G_OBJECT_CLASS (parent_class)->finalize (obj); +} + +/* GObject vmethod implementations */ +static void +gst_motion_cells_base_init (gpointer gclass) +{ + GstElementClass *element_class 
= GST_ELEMENT_CLASS (gclass); + + gst_element_class_set_details_simple (element_class, + "motioncells", + "Filter/Effect/Video", + "Performs motion detection on videos and images, providing detected motion cells index via bus messages", + "Robert Jobbagy , Nicola Murino "); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&src_factory)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&sink_factory)); +} + +/* initialize the motioncells's class */ +static void +gst_motion_cells_class_init (GstMotioncellsClass * klass) +{ + GObjectClass *gobject_class; + + gobject_class = (GObjectClass *) klass; + parent_class = g_type_class_peek_parent (klass); + + gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_motion_cells_finalize); + gobject_class->set_property = gst_motion_cells_set_property; + gobject_class->get_property = gst_motion_cells_get_property; + + g_object_class_install_property (gobject_class, PROP_GRID_X, + g_param_spec_int ("gridx", "Number of Horizontal Grids", + "You can give number of horizontal grid cells.", GRID_MIN, GRID_MAX, + GRID_DEF, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_GRID_Y, + g_param_spec_int ("gridy", "Number of Vertical Grids", + "You can give number of vertical grid cells.", GRID_MIN, GRID_MAX, + GRID_DEF, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_SENSITIVITY, + g_param_spec_double ("sensitivity", "Motion Sensitivity", + "You can tunning the element motion sensitivity.", SENSITIVITY_MIN, + SENSITIVITY_MAX, SENSITIVITY_DEFAULT, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_THRESHOLD, + g_param_spec_double ("threshold", "Lower bound of motion cells number", + "Threshold value for motion, when motion cells number greater sum cells * threshold, we show motion.", + THRESHOLD_MIN, THRESHOLD_MAX, THRESHOLD_DEFAULT, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_GAP, + g_param_spec_int ("gap", + "Gap is time in second, elapsed time from last motion timestamp. ", + "If elapsed time minus form last motion timestamp is greater or equal than gap then we post motion finished bus message. 
", + GAP_MIN, GAP_MAX, GAP_DEF, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_POSTNOMOTION, + g_param_spec_int ("postnomotion", "POSTNOMOTION", + "If non 0 post a no_motion event is posted on the bus if no motion is detected for N seconds", + POST_NO_MOTION_MIN, POST_NO_MOTION_MAX, POST_NO_MOTION_DEF, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_MINIMUNMOTIONFRAMES, + g_param_spec_int ("minimummotionframes", "MINIMUN MOTION FRAMES", + "Define the minimum number of motion frames that trigger a motion event", + MINIMUM_MOTION_FRAMES_MIN, MINIMUM_MOTION_FRAMES_MAX, + MINIMUM_MOTION_FRAMES_DEF, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_DISPLAY, + g_param_spec_boolean ("display", "Display", + "Motion Cells visible or not on Current Frame", FALSE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_POSTALLMOTION, + g_param_spec_boolean ("postallmotion", "Post All Motion", + "Element post bus msg for every motion frame or just motion start and motion stop", + FALSE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_USEALPHA, + g_param_spec_boolean ("usealpha", "Use alpha", + "Use or not alpha blending on frames with motion cells", TRUE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_DATE, + g_param_spec_long ("date", "Motion Cell Date", + "Current Date in milliseconds", DATE_MIN, DATE_MAX, DATE_DEF, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_DATAFILE, + g_param_spec_string ("datafile", "DataFile", + "Location of motioncells data file (empty string means no saving)", + NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_DATAFILE_EXT, + g_param_spec_string ("datafileextension", "DataFile Extension", + "Extension of datafile", DEF_DATAFILEEXT, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_MOTIONMASKCOORD, + g_param_spec_string ("motionmaskcoords", "Motion Mask with Coordinates", + "The upper left x, y and lower right x, y coordinates separated with \":\", " + "describe a region. Regions separated with \",\"", NULL, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_MOTIONMASKCELLSPOS, + g_param_spec_string ("motionmaskcellspos", + "Motion Mask with Cells Position", + "The line and column idx separated with \":\" what cells want we mask-out, " + "describe a cell. Cells separated with \",\"", NULL, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_CELLSCOLOR, + g_param_spec_string ("cellscolor", "Color of Motion Cells", + "The color of motion cells separated with \",\"", "255,255,0", + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_MOTIONCELLSIDX, + g_param_spec_string ("motioncellsidx", "Motion Cells Of Interest(MOCI)", + "The line and column idx separated with \":\", " + "describe a cell. 
Cells separated with \",\"", NULL, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_CALCULATEMOTION, + g_param_spec_boolean ("calculatemotion", "Calculate Motion", + "If needs calculate motion on frame you need this property setting true otherwise false", + TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_MOTIONCELLTHICKNESS, + g_param_spec_int ("motioncellthickness", "Motion Cell Thickness", + "Motion Cell Border Thickness, if it's -1 then motion cell will be fill", + THICKNESS_MIN, THICKNESS_MAX, THICKNESS_DEF, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); +} + +/* initialize the new element + * instantiate pads and add them to element + * set pad callback functions + * initialize instance structure + */ +static void +gst_motion_cells_init (GstMotioncells * filter, GstMotioncellsClass * gclass) +{ + filter->propset_mutex = g_mutex_new (); + filter->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink"); + gst_pad_set_setcaps_function (filter->sinkpad, + GST_DEBUG_FUNCPTR (gst_motion_cells_set_caps)); + gst_pad_set_getcaps_function (filter->sinkpad, + GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps)); + gst_pad_set_chain_function (filter->sinkpad, + GST_DEBUG_FUNCPTR (gst_motion_cells_chain)); + + filter->srcpad = gst_pad_new_from_static_template (&src_factory, "src"); + gst_pad_set_getcaps_function (filter->srcpad, + GST_DEBUG_FUNCPTR (gst_pad_proxy_getcaps)); + + gst_element_add_pad (GST_ELEMENT (filter), filter->sinkpad); + gst_element_add_pad (GST_ELEMENT (filter), filter->srcpad); + + filter->display = TRUE; + filter->calculate_motion = TRUE; + + filter->prevgridx = 0; + filter->prevgridy = 0; + filter->gridx = GRID_DEF; + filter->gridy = GRID_DEF; + filter->gap = GAP_DEF; + filter->postnomotion = POST_NO_MOTION_DEF; + filter->minimum_motion_frames = MINIMUM_MOTION_FRAMES_DEF; + + filter->prev_datafile = g_strdup (NULL); + filter->cur_datafile = g_strdup (NULL); + filter->basename_datafile = g_strdup (NULL); + filter->datafile_extension = g_strdup (DEF_DATAFILEEXT); + filter->sensitivity = SENSITIVITY_DEFAULT; + filter->threshold = THRESHOLD_DEFAULT; + + filter->motionmaskcoord_count = 0; + filter->motionmaskcoords = NULL; + filter->motionmaskcells_count = 0; + filter->motionmaskcellsidx = NULL; + filter->motioncellscolor = g_new0 (cellscolor, 1); + filter->motioncellscolor->R_channel_value = 255; + filter->motioncellscolor->G_channel_value = 255; + filter->motioncellscolor->B_channel_value = 0; + filter->motioncellsidx = NULL; + filter->motioncells_count = 0; + filter->motion_begin_timestamp = 0; + filter->last_motion_timestamp = 0; + filter->last_nomotion_notified = 0; + filter->consecutive_motion = 0; + filter->motion_timestamp = 0; + filter->prev_buff_timestamp = 0; + filter->cur_buff_timestamp = 0; + filter->diff_timestamp = -1; + gettimeofday (&filter->tv, NULL); + filter->starttime = 1000 * filter->tv.tv_sec; + filter->previous_motion = false; + filter->changed_datafile = false; + filter->postallmotion = false; + filter->usealpha = true; + filter->firstdatafile = false; + filter->firstgridx = true; + filter->firstgridy = true; + filter->changed_gridx = false; + filter->changed_gridy = false; + filter->firstframe = true; + filter->changed_startime = false; + filter->sent_init_error_msg = false; + filter->sent_save_error_msg = false; + filter->thickness = THICKNESS_DEF; + + filter->datafileidx = 0; + g_mutex_lock (filter->propset_mutex); + filter->id = 
instanceCounter; + motion_cells_init (); + g_mutex_unlock (filter->propset_mutex); + +} + +static void +gst_motion_cells_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + GstMotioncells *filter = gst_motion_cells (object); + //variables for overlay regions setup + gchar **strs, **colorstr, **motioncellsstr, **motionmaskcellsstr; + int i, ux, uy, lx, ly; + int r, g, b; + int cellscolorscnt = 0; + int linidx, colidx, masklinidx, maskcolidx; + int tmpux = -1; + int tmpuy = -1; + int tmplx = -1; + int tmply = -1; + GstStateChangeReturn ret; + + g_mutex_lock (filter->propset_mutex); + switch (prop_id) { + case PROP_GRID_X: + ret = gst_element_get_state (GST_ELEMENT (filter), + &filter->state, NULL, 250 * GST_NSECOND); + filter->gridx = g_value_get_int (value); + if (filter->prevgridx != filter->gridx + && ret == GST_STATE_CHANGE_SUCCESS + && filter->state == GST_STATE_PLAYING) { + filter->changed_gridx = true; + } + filter->prevgridx = filter->gridx; + break; + case PROP_GRID_Y: + ret = gst_element_get_state (GST_ELEMENT (filter), + &filter->state, NULL, 250 * GST_NSECOND); + filter->gridy = g_value_get_int (value); + if (filter->prevgridy != filter->gridy + && ret == GST_STATE_CHANGE_SUCCESS + && filter->state == GST_STATE_PLAYING) { + filter->changed_gridy = true; + } + filter->prevgridy = filter->gridy; + break; + case PROP_GAP: + filter->gap = g_value_get_int (value); + break; + case PROP_POSTNOMOTION: + filter->postnomotion = g_value_get_int (value); + break; + case PROP_MINIMUNMOTIONFRAMES: + filter->minimum_motion_frames = g_value_get_int (value); + break; + case PROP_SENSITIVITY: + filter->sensitivity = g_value_get_double (value); + break; + case PROP_THRESHOLD: + filter->threshold = g_value_get_double (value); + break; + case PROP_DISPLAY: + filter->display = g_value_get_boolean (value); + break; + case PROP_POSTALLMOTION: + filter->postallmotion = g_value_get_boolean (value); + break; + case PROP_USEALPHA: + filter->usealpha = g_value_get_boolean (value); + break; + case PROP_CALCULATEMOTION: + filter->calculate_motion = g_value_get_boolean (value); + break; + case PROP_DATE: + ret = gst_element_get_state (GST_ELEMENT (filter), + &filter->state, NULL, 250 * GST_NSECOND); + if (ret == GST_STATE_CHANGE_SUCCESS && filter->state == GST_STATE_PLAYING) { + filter->changed_startime = true; + } + filter->starttime = g_value_get_long (value); + break; + case PROP_DATAFILE: + GFREE (filter->cur_datafile); + GFREE (filter->basename_datafile); + filter->basename_datafile = g_value_dup_string (value); + + if (strlen (filter->basename_datafile) == 0) { + filter->cur_datafile = g_strdup (NULL); + break; + } + filter->cur_datafile = + g_strdup_printf ("%s-0.%s", filter->basename_datafile, + filter->datafile_extension); + if (g_strcmp0 (filter->prev_datafile, filter->basename_datafile) != 0) { + filter->changed_datafile = TRUE; + filter->sent_init_error_msg = FALSE; + filter->sent_save_error_msg = FALSE; + filter->datafileidx = 0; + motion_cells_free_resources (filter->id); + } else { + filter->changed_datafile = FALSE; + } + + GFREE (filter->prev_datafile); + filter->prev_datafile = g_strdup (filter->basename_datafile); + break; + case PROP_DATAFILE_EXT: + GFREE (filter->datafile_extension); + filter->datafile_extension = g_value_dup_string (value); + break; + case PROP_MOTIONMASKCOORD: + strs = g_strsplit (g_value_get_string (value), ",", 255); + GFREE (filter->motionmaskcoords); + //setting number of regions + for (filter->motionmaskcoord_count = 0; + 
strs[filter->motionmaskcoord_count] != NULL; + ++filter->motionmaskcoord_count); + if (filter->motionmaskcoord_count > 0) { + sscanf (strs[0], "%d:%d:%d:%d", &tmpux, &tmpuy, &tmplx, &tmply); + if (tmpux > -1 && tmpuy > -1 && tmplx > -1 && tmply > -1) { + filter->motionmaskcoords = + g_new0 (motionmaskcoordrect, filter->motionmaskcoord_count); + + for (i = 0; i < filter->motionmaskcoord_count; ++i) { + sscanf (strs[i], "%d:%d:%d:%d", &ux, &uy, &lx, &ly); + ux = CLAMP (ux, 0, filter->width - 1); + uy = CLAMP (uy, 0, filter->height - 1); + lx = CLAMP (lx, 0, filter->width - 1); + ly = CLAMP (ly, 0, filter->height - 1); + filter->motionmaskcoords[i].upper_left_x = ux; + filter->motionmaskcoords[i].upper_left_y = uy; + filter->motionmaskcoords[i].lower_right_x = lx; + filter->motionmaskcoords[i].lower_right_y = ly; + } + } else { + filter->motionmaskcoord_count = 0; + } + } + if (strs) + g_strfreev (strs); + tmpux = -1; + tmpuy = -1; + tmplx = -1; + tmply = -1; + break; + case PROP_MOTIONMASKCELLSPOS: + motionmaskcellsstr = g_strsplit (g_value_get_string (value), ",", 255); + GFREE (filter->motionmaskcellsidx); + //setting number of regions + for (filter->motionmaskcells_count = 0; + motionmaskcellsstr[filter->motionmaskcells_count] != NULL; + ++filter->motionmaskcells_count); + if (filter->motionmaskcells_count > 0) { + sscanf (motionmaskcellsstr[0], "%d:%d", &tmpux, &tmpuy); + if (tmpux > -1 && tmpuy > -1) { + filter->motionmaskcellsidx = + g_new0 (motioncellidx, filter->motionmaskcells_count); + for (i = 0; i < filter->motionmaskcells_count; ++i) { + sscanf (motionmaskcellsstr[i], "%d:%d", &masklinidx, &maskcolidx); + filter->motionmaskcellsidx[i].lineidx = masklinidx; + filter->motionmaskcellsidx[i].columnidx = maskcolidx; + } + } else { + filter->motionmaskcells_count = 0; + } + } + if (motionmaskcellsstr) + g_strfreev (motionmaskcellsstr); + tmpux = -1; + tmpuy = -1; + tmplx = -1; + tmply = -1; + break; + case PROP_CELLSCOLOR: + colorstr = g_strsplit (g_value_get_string (value), ",", 255); + for (cellscolorscnt = 0; colorstr[cellscolorscnt] != NULL; + ++cellscolorscnt); + if (cellscolorscnt == 3) { + sscanf (colorstr[0], "%d", &r); + sscanf (colorstr[1], "%d", &g); + sscanf (colorstr[2], "%d", &b); + //check right RGB color format + r = CLAMP (r, 1, 255); + g = CLAMP (g, 1, 255); + b = CLAMP (b, 1, 255); + filter->motioncellscolor->R_channel_value = r; + filter->motioncellscolor->G_channel_value = g; + filter->motioncellscolor->B_channel_value = b; + } + if (colorstr) + g_strfreev (colorstr); + break; + case PROP_MOTIONCELLSIDX: + motioncellsstr = g_strsplit (g_value_get_string (value), ",", 255); + + //setting number of regions + for (filter->motioncells_count = 0; + motioncellsstr[filter->motioncells_count] != NULL; + ++filter->motioncells_count); + if (filter->motioncells_count > 0) { + sscanf (motioncellsstr[0], "%d:%d", &tmpux, &tmpuy); + if (tmpux > -1 && tmpuy > -1) { + GFREE (filter->motioncellsidx); + + filter->motioncellsidx = + g_new0 (motioncellidx, filter->motioncells_count); + + for (i = 0; i < filter->motioncells_count; ++i) { + sscanf (motioncellsstr[i], "%d:%d", &linidx, &colidx); + filter->motioncellsidx[i].lineidx = linidx; + filter->motioncellsidx[i].columnidx = colidx; + } + } else { + filter->motioncells_count = 0; + } + } + if (motioncellsstr) + g_strfreev (motioncellsstr); + tmpux = -1; + tmpuy = -1; + tmplx = -1; + tmply = -1; + break; + case PROP_MOTIONCELLTHICKNESS: + filter->thickness = g_value_get_int (value); + break; + default: + 
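The property strings parsed above follow simple comma-separated formats: motionmaskcoords takes "upper_left_x:upper_left_y:lower_right_x:lower_right_y" regions, motionmaskcellspos and motioncellsidx take "line:column" cell indices, and cellscolor takes "R,G,B". A short sketch (not from the patch) of setting them from code, assuming mcells is an existing motioncells instance and a 320x240 video stream:

#include <gst/gst.h>

static void
configure_motioncells (GstElement * mcells)
{
  g_object_set (mcells,
      "gridx", 12, "gridy", 12,
      "motionmaskcoords", "0:0:80:60,240:180:319:239",  /* two masked regions */
      "motioncellsidx", "2:3,2:4,3:3",                  /* cells of interest */
      "cellscolor", "255,0,0",                          /* draw cells in red */
      NULL);
}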
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } + g_mutex_unlock (filter->propset_mutex); +} + +static void +gst_motion_cells_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec) +{ + GstMotioncells *filter = gst_motion_cells (object); + GString *str; + int i; + + switch (prop_id) { + case PROP_GRID_X: + g_value_set_int (value, filter->gridx); + break; + case PROP_GRID_Y: + g_value_set_int (value, filter->gridy); + break; + case PROP_GAP: + g_value_set_int (value, filter->gap); + break; + case PROP_POSTNOMOTION: + g_value_set_int (value, filter->postnomotion); + break; + case PROP_MINIMUNMOTIONFRAMES: + g_value_set_int (value, filter->minimum_motion_frames); + break; + case PROP_SENSITIVITY: + g_value_set_double (value, filter->sensitivity); + break; + case PROP_THRESHOLD: + g_value_set_double (value, filter->threshold); + break; + case PROP_DISPLAY: + g_value_set_boolean (value, filter->display); + break; + case PROP_POSTALLMOTION: + g_value_set_boolean (value, filter->postallmotion); + break; + case PROP_USEALPHA: + g_value_set_boolean (value, filter->usealpha); + break; + case PROP_CALCULATEMOTION: + g_value_set_boolean (value, filter->calculate_motion); + break; + case PROP_DATE: + g_value_set_long (value, filter->starttime); + break; + case PROP_DATAFILE: + g_value_set_string (value, filter->basename_datafile); + break; + case PROP_DATAFILE_EXT: + g_value_set_string (value, filter->datafile_extension); + break; + case PROP_MOTIONMASKCOORD: + str = g_string_new (""); + for (i = 0; i < filter->motionmaskcoord_count; ++i) { + if (i < filter->motionmaskcoord_count - 1) + g_string_append_printf (str, "%d:%d:%d:%d,", + filter->motionmaskcoords[i].upper_left_x, + filter->motionmaskcoords[i].upper_left_y, + filter->motionmaskcoords[i].lower_right_x, + filter->motionmaskcoords[i].lower_right_y); + else + g_string_append_printf (str, "%d:%d:%d:%d", + filter->motionmaskcoords[i].upper_left_x, + filter->motionmaskcoords[i].upper_left_y, + filter->motionmaskcoords[i].lower_right_x, + filter->motionmaskcoords[i].lower_right_y); + + } + g_value_set_string (value, str->str); + g_string_free (str, TRUE); + break; + case PROP_MOTIONMASKCELLSPOS: + str = g_string_new (""); + for (i = 0; i < filter->motionmaskcells_count; ++i) { + if (i < filter->motionmaskcells_count - 1) + g_string_append_printf (str, "%d:%d,", + filter->motionmaskcellsidx[i].lineidx, + filter->motionmaskcellsidx[i].columnidx); + else + g_string_append_printf (str, "%d:%d", + filter->motionmaskcellsidx[i].lineidx, + filter->motionmaskcellsidx[i].columnidx); + } + g_value_set_string (value, str->str); + g_string_free (str, TRUE); + break; + case PROP_CELLSCOLOR: + str = g_string_new (""); + + g_string_printf (str, "%d,%d,%d", + filter->motioncellscolor->R_channel_value, + filter->motioncellscolor->G_channel_value, + filter->motioncellscolor->B_channel_value); + + g_value_set_string (value, str->str); + g_string_free (str, TRUE); + break; + case PROP_MOTIONCELLSIDX: + str = g_string_new (""); + for (i = 0; i < filter->motioncells_count; ++i) { + if (i < filter->motioncells_count - 1) + g_string_append_printf (str, "%d:%d,", + filter->motioncellsidx[i].lineidx, + filter->motioncellsidx[i].columnidx); + else + g_string_append_printf (str, "%d:%d", + filter->motioncellsidx[i].lineidx, + filter->motioncellsidx[i].columnidx); + } + g_value_set_string (value, str->str); + g_string_free (str, TRUE); + break; + case PROP_MOTIONCELLTHICKNESS: + g_value_set_int (value, filter->thickness); + 
break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_motioncells_update_motion_cells (GstMotioncells * filter) +{ + int i = 0; + int cellscnt = 0; + int j = 0; + int newcellscnt; + motioncellidx *motioncellsidx; + for (i = 0; i < filter->motioncells_count; i++) { + if ((filter->gridx <= filter->motioncellsidx[i].columnidx) || + (filter->gridy <= filter->motioncellsidx[i].lineidx)) { + cellscnt++; + } + } + newcellscnt = filter->motioncells_count - cellscnt; + motioncellsidx = g_new0 (motioncellidx, newcellscnt); + for (i = 0; i < filter->motioncells_count; i++) { + if ((filter->motioncellsidx[i].lineidx < filter->gridy) && + (filter->motioncellsidx[i].columnidx < filter->gridx)) { + motioncellsidx[j].lineidx = filter->motioncellsidx[i].lineidx; + motioncellsidx[j].columnidx = filter->motioncellsidx[i].columnidx; + j++; + } + } + GFREE (filter->motioncellsidx); + filter->motioncells_count = newcellscnt; + filter->motioncellsidx = g_new0 (motioncellidx, filter->motioncells_count); + j = 0; + for (i = 0; i < filter->motioncells_count; i++) { + filter->motioncellsidx[i].lineidx = motioncellsidx[j].lineidx; + filter->motioncellsidx[i].columnidx = motioncellsidx[j].columnidx; + j++; + } + GFREE (motioncellsidx); +} + +static void +gst_motioncells_update_motion_masks (GstMotioncells * filter) +{ + + int i = 0; + int maskcnt = 0; + int j = 0; + int newmaskcnt; + motioncellidx *motionmaskcellsidx; + for (i = 0; i < filter->motionmaskcells_count; i++) { + if ((filter->gridx <= filter->motionmaskcellsidx[i].columnidx) || + (filter->gridy <= filter->motionmaskcellsidx[i].lineidx)) { + maskcnt++; + } + } + newmaskcnt = filter->motionmaskcells_count - maskcnt; + motionmaskcellsidx = g_new0 (motioncellidx, newmaskcnt); + for (i = 0; i < filter->motionmaskcells_count; i++) { + if ((filter->motionmaskcellsidx[i].lineidx < filter->gridy) && + (filter->motionmaskcellsidx[i].columnidx < filter->gridx)) { + motionmaskcellsidx[j].lineidx = filter->motionmaskcellsidx[i].lineidx; + motionmaskcellsidx[j].columnidx = filter->motionmaskcellsidx[i].columnidx; + j++; + } + } + GFREE (filter->motionmaskcellsidx); + filter->motionmaskcells_count = newmaskcnt; + filter->motionmaskcellsidx = + g_new0 (motioncellidx, filter->motionmaskcells_count); + j = 0; + for (i = 0; i < filter->motionmaskcells_count; i++) { + filter->motionmaskcellsidx[i].lineidx = motionmaskcellsidx[j].lineidx; + filter->motionmaskcellsidx[i].columnidx = motionmaskcellsidx[j].columnidx; + j++; + } + GFREE (motionmaskcellsidx); +} + +/* GstElement vmethod implementations */ + +/* this function handles the link with other elements */ +static gboolean +gst_motion_cells_set_caps (GstPad * pad, GstCaps * caps) +{ + GstMotioncells *filter; + GstPad *otherpad; + GstStructure *structure; + int numerator, denominator; + + filter = gst_motion_cells (gst_pad_get_parent (pad)); + structure = gst_caps_get_structure (caps, 0); + gst_structure_get_int (structure, "width", &filter->width); + gst_structure_get_int (structure, "height", &filter->height); + gst_structure_get_fraction (structure, "framerate", &numerator, &denominator); + filter->framerate = (double) numerator / (double) denominator; + if (filter->cvImage) + cvReleaseImage (&filter->cvImage); + filter->cvImage = + cvCreateImage (cvSize (filter->width, filter->height), IPL_DEPTH_8U, 3); + + otherpad = (pad == filter->srcpad) ? 
filter->sinkpad : filter->srcpad; + gst_object_unref (filter); + + return gst_pad_set_caps (otherpad, caps); +} + +/* chain function + * this function does the actual processing + */ +static GstFlowReturn +gst_motion_cells_chain (GstPad * pad, GstBuffer * buf) +{ + + GstMotioncells *filter; + + filter = gst_motion_cells (GST_OBJECT_PARENT (pad)); + if (filter->calculate_motion) { + double sensitivity; + int framerate, gridx, gridy, motionmaskcells_count, motionmaskcoord_count, + motioncells_count, i; + int thickness, success, motioncellsidxcnt, numberOfCells, + motioncellsnumber, cellsOfInterestNumber; + int mincellsOfInterestNumber, motiondetect; + char *datafile; + bool display, changed_datafile, useAlpha; + gint64 starttime; + motionmaskcoordrect *motionmaskcoords; + motioncellidx *motionmaskcellsidx; + cellscolor motioncellscolor; + motioncellidx *motioncellsidx; + g_mutex_lock (filter->propset_mutex); + buf = gst_buffer_make_writable (buf); + filter->cvImage->imageData = (char *) GST_BUFFER_DATA (buf); + if (filter->firstframe) { + setPrevFrame (filter->cvImage, filter->id); + filter->firstframe = FALSE; + } + + sensitivity = filter->sensitivity; + framerate = filter->framerate; + gridx = filter->gridx; + gridy = filter->gridy; + display = filter->display; + motionmaskcoord_count = filter->motionmaskcoord_count; + motionmaskcoords = + g_new0 (motionmaskcoordrect, filter->motionmaskcoord_count); + for (i = 0; i < filter->motionmaskcoord_count; i++) { //we need divide 2 because we use gauss pyramid in C++ side + motionmaskcoords[i].upper_left_x = + filter->motionmaskcoords[i].upper_left_x / 2; + motionmaskcoords[i].upper_left_y = + filter->motionmaskcoords[i].upper_left_y / 2; + motionmaskcoords[i].lower_right_x = + filter->motionmaskcoords[i].lower_right_x / 2; + motionmaskcoords[i].lower_right_y = + filter->motionmaskcoords[i].lower_right_y / 2; + } + + motioncellscolor.R_channel_value = + filter->motioncellscolor->R_channel_value; + motioncellscolor.G_channel_value = + filter->motioncellscolor->G_channel_value; + motioncellscolor.B_channel_value = + filter->motioncellscolor->B_channel_value; + + if ((filter->changed_gridx || filter->changed_gridy + || filter->changed_startime)) { + if ((g_strcmp0 (filter->cur_datafile, NULL) != 0)) { + GFREE (filter->cur_datafile); + filter->datafileidx++; + filter->cur_datafile = + g_strdup_printf ("%s-%d.%s", filter->basename_datafile, + filter->datafileidx, filter->datafile_extension); + filter->changed_datafile = TRUE; + motion_cells_free_resources (filter->id); + } + if (filter->motioncells_count > 0) + gst_motioncells_update_motion_cells (filter); + if (filter->motionmaskcells_count > 0) + gst_motioncells_update_motion_masks (filter); + filter->changed_gridx = FALSE; + filter->changed_gridy = FALSE; + filter->changed_startime = FALSE; + } + datafile = g_strdup (filter->cur_datafile); + filter->cur_buff_timestamp = (GST_BUFFER_TIMESTAMP (buf) / GST_MSECOND); + filter->starttime += + (filter->cur_buff_timestamp - filter->prev_buff_timestamp); + starttime = filter->starttime; + if (filter->changed_datafile || filter->diff_timestamp < 0) + filter->diff_timestamp = + (gint64) (GST_BUFFER_TIMESTAMP (buf) / GST_MSECOND); + changed_datafile = filter->changed_datafile; + motionmaskcells_count = filter->motionmaskcells_count; + motionmaskcellsidx = g_new0 (motioncellidx, filter->motionmaskcells_count); + for (i = 0; i < filter->motionmaskcells_count; i++) { + motionmaskcellsidx[i].lineidx = filter->motionmaskcellsidx[i].lineidx; + 
motionmaskcellsidx[i].columnidx = filter->motionmaskcellsidx[i].columnidx; + } + motioncells_count = filter->motioncells_count; + motioncellsidx = g_new0 (motioncellidx, filter->motioncells_count); + for (i = 0; i < filter->motioncells_count; i++) { + motioncellsidx[i].lineidx = filter->motioncellsidx[i].lineidx; + motioncellsidx[i].columnidx = filter->motioncellsidx[i].columnidx; + } + useAlpha = filter->usealpha; + thickness = filter->thickness; + success = + perform_detection_motion_cells (filter->cvImage, sensitivity, framerate, + gridx, gridy, + (gint64) (GST_BUFFER_TIMESTAMP (buf) / GST_MSECOND) - + filter->diff_timestamp, display, useAlpha, motionmaskcoord_count, + motionmaskcoords, motionmaskcells_count, motionmaskcellsidx, + motioncellscolor, motioncells_count, motioncellsidx, starttime, + datafile, changed_datafile, thickness, filter->id); + if ((success == 1) && (filter->sent_init_error_msg == false)) { + char *initfailedreason; + int initerrorcode; + GstStructure *s; + GstMessage *m; + initfailedreason = getInitDataFileFailed (filter->id); + initerrorcode = getInitErrorCode (filter->id); + s = gst_structure_new ("motion", "init_error_code", G_TYPE_INT, + initerrorcode, "details", G_TYPE_STRING, initfailedreason, NULL); + m = gst_message_new_element (GST_OBJECT (filter), s); + gst_element_post_message (GST_ELEMENT (filter), m); + filter->sent_init_error_msg = TRUE; + } + if ((success == -1) && (filter->sent_save_error_msg == false)) { + char *savefailedreason; + int saveerrorcode; + GstStructure *s; + GstMessage *m; + savefailedreason = getSaveDataFileFailed (filter->id); + saveerrorcode = getSaveErrorCode (filter->id); + s = gst_structure_new ("motion", "save_error_code", G_TYPE_INT, + saveerrorcode, "details", G_TYPE_STRING, savefailedreason, NULL); + m = gst_message_new_element (GST_OBJECT (filter), s); + gst_element_post_message (GST_ELEMENT (filter), m); + filter->sent_save_error_msg = TRUE; + } + if (success == -2) { //frame dropped + filter->prev_buff_timestamp = filter->cur_buff_timestamp; + //free + GFREE (datafile); + GFREE (motionmaskcoords); + GFREE (motionmaskcellsidx); + GFREE (motioncellsidx); + g_mutex_unlock (filter->propset_mutex); + return gst_pad_push (filter->srcpad, buf); + } + filter->changed_datafile = getChangedDataFile (filter->id); + motioncellsidxcnt = getMotionCellsIdxCnt (filter->id); + numberOfCells = filter->gridx * filter->gridy; + motioncellsnumber = motioncellsidxcnt / MSGLEN; + cellsOfInterestNumber = (filter->motioncells_count > 0) ? //how many cells interest for us + (filter->motioncells_count) : (numberOfCells); + mincellsOfInterestNumber = + floor ((double) cellsOfInterestNumber * filter->threshold); + motiondetect = (motioncellsnumber >= mincellsOfInterestNumber) ? 
1 : 0; + if ((motioncellsidxcnt > 0) && (motiondetect == 1)) { + char *detectedmotioncells; + filter->last_motion_timestamp = GST_BUFFER_TIMESTAMP (buf); + detectedmotioncells = getMotionCellsIdx (filter->id); + if (detectedmotioncells) { + filter->consecutive_motion++; + if ((filter->previous_motion == false) + && (filter->consecutive_motion >= filter->minimum_motion_frames)) { + GstStructure *s; + GstMessage *m; + filter->previous_motion = true; + filter->motion_begin_timestamp = GST_BUFFER_TIMESTAMP (buf); + s = gst_structure_new ("motion", "motion_cells_indices", + G_TYPE_STRING, detectedmotioncells, "motion_begin", G_TYPE_UINT64, + filter->motion_begin_timestamp, NULL); + m = gst_message_new_element (GST_OBJECT (filter), s); + gst_element_post_message (GST_ELEMENT (filter), m); + } else if (filter->postallmotion) { + GstStructure *s; + GstMessage *m; + filter->motion_timestamp = GST_BUFFER_TIMESTAMP (buf); + s = gst_structure_new ("motion", "motion_cells_indices", + G_TYPE_STRING, detectedmotioncells, "motion", G_TYPE_UINT64, + filter->motion_timestamp, NULL); + m = gst_message_new_element (GST_OBJECT (filter), s); + gst_element_post_message (GST_ELEMENT (filter), m); + } + } else { + GstStructure *s; + GstMessage *m; + s = gst_structure_new ("motion", "motion_cells_indices", G_TYPE_STRING, + "error", NULL); + m = gst_message_new_element (GST_OBJECT (filter), s); + gst_element_post_message (GST_ELEMENT (filter), m); + } + } else { + filter->consecutive_motion = 0; + if ((((GST_BUFFER_TIMESTAMP (buf) - + filter->last_motion_timestamp) / 1000000000l) >= + filter->gap) + && (filter->last_motion_timestamp > 0)) { + GST_DEBUG ("POST MOTION FINISHED MSG\n"); + if (filter->previous_motion) { + GstStructure *s; + GstMessage *m; + filter->previous_motion = false; + s = gst_structure_new ("motion", "motion_finished", G_TYPE_UINT64, + filter->last_motion_timestamp, NULL); + m = gst_message_new_element (GST_OBJECT (filter), s); + gst_element_post_message (GST_ELEMENT (filter), m); + } + } + } + if (filter->postnomotion > 0) { + guint64 last_buf_timestamp = GST_BUFFER_TIMESTAMP (buf) / 1000000000l; + if ((last_buf_timestamp - + (filter->last_motion_timestamp / 1000000000l)) >= + filter->postnomotion) { + GST_DEBUG ("POST NO MOTION MSG\n"); + if ((last_buf_timestamp - + (filter->last_nomotion_notified / 1000000000l)) >= + filter->postnomotion) { + GstStructure *s; + GstMessage *m; + filter->last_nomotion_notified = GST_BUFFER_TIMESTAMP (buf); + s = gst_structure_new ("motion", "no_motion", G_TYPE_UINT64, + filter->last_motion_timestamp, NULL); + m = gst_message_new_element (GST_OBJECT (filter), s); + gst_element_post_message (GST_ELEMENT (filter), m); + } + } + } + filter->prev_buff_timestamp = filter->cur_buff_timestamp; + //free + GFREE (datafile); + GFREE (motionmaskcoords); + GFREE (motionmaskcellsidx); + GFREE (motioncellsidx); + + g_mutex_unlock (filter->propset_mutex); + } + + return gst_pad_push (filter->srcpad, buf); +} + +/* entry point to initialize the plug-in + * initialize the plug-in itself + * register the element factories and other features + */ +gboolean +gst_motioncells_plugin_init (GstPlugin * plugin) +{ + /* debug category for fltering log messages */ + GST_DEBUG_CATEGORY_INIT (gst_motion_cells_debug, + "motioncells", + 0, + "Performs motion detection on videos, providing detected positions via bus messages"); + + return gst_element_register (plugin, "motioncells", GST_RANK_NONE, + GST_TYPE_MOTIONCELLS); +} diff --git a/ext/opencv/gstmotioncells.h b/ext/opencv/gstmotioncells.h 
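
The motioncells element above reports its results to the application purely through element messages on the bus: gst_motion_cells_chain() posts "motion" structures carrying "motion_cells_indices", "motion_begin", "motion_finished", "no_motion" and the init/save error codes. A minimal application-side sketch of consuming those messages follows; it is not part of the patch, it assumes the GStreamer 0.10 API used throughout this series, and the pipeline description and callback name are illustrative only.

/* Hypothetical application sketch: watch the bus for the "motion" element
 * messages posted by gst_motion_cells_chain().  GStreamer 0.10 API assumed;
 * pipeline string is illustrative. */
#include <gst/gst.h>

static gboolean
on_bus_message (GstBus * bus, GstMessage * msg, gpointer user_data)
{
  if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ELEMENT) {
    const GstStructure *s = gst_message_get_structure (msg);

    if (s && gst_structure_has_name (s, "motion")) {
      if (gst_structure_has_field (s, "motion_begin"))
        g_print ("motion started in cells %s\n",
            gst_structure_get_string (s, "motion_cells_indices"));
      else if (gst_structure_has_field (s, "motion_finished"))
        g_print ("motion finished\n");
      else if (gst_structure_has_field (s, "no_motion"))
        g_print ("no motion for the configured time\n");
    }
  }
  return TRUE;                  /* keep the watch installed */
}

int
main (int argc, char *argv[])
{
  GstElement *pipeline;
  GMainLoop *loop;
  GstBus *bus;

  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

  /* illustrative pipeline; any video source feeding motioncells will do */
  pipeline = gst_parse_launch ("autovideosrc ! ffmpegcolorspace ! "
      "motioncells ! ffmpegcolorspace ! autovideosink", NULL);

  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, on_bus_message, NULL);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
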
new file mode 100644 index 0000000000..d26a2d6dd2 --- /dev/null +++ b/ext/opencv/gstmotioncells.h @@ -0,0 +1,124 @@ +/* + * GStreamer + * Copyright (C) 2011 Robert Jobbagy + * Copyright (C) 2011 Nicola Murino + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + * + * Alternatively, the contents of this file may be used under the + * GNU Lesser General Public License Version 2.1 (the "LGPL"), in + * which case the following provisions apply instead of the ones + * mentioned above: + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifndef __GST_MOTIONCELLS_H__ +#define __GST_MOTIONCELLS_H__ + +#include +#include + +G_BEGIN_DECLS +/* #defines don't like whitespacey bits */ +#define GST_TYPE_MOTIONCELLS \ + (gst_motion_cells_get_type()) +#define gst_motion_cells(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_MOTIONCELLS,GstMotioncells)) +#define gst_motion_cells_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_MOTIONCELLS,GstMotioncellsClass)) +#define GST_IS_MOTIONCELLS(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_MOTIONCELLS)) +#define GST_IS_MOTIONCELLS_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_MOTIONCELLS)) +typedef struct _GstMotioncells GstMotioncells; +typedef struct _GstMotioncellsClass GstMotioncellsClass; + +typedef struct { + int upper_left_x; + int upper_left_y; + int lower_right_x; + int lower_right_y; +} motionmaskcoordrect; + +typedef struct { + int R_channel_value; + int G_channel_value; + int B_channel_value; +} cellscolor; + +typedef struct { + int lineidx; + int columnidx; +} motioncellidx; + +struct _GstMotioncells +{ + GstElement element; + GstPad *sinkpad, *srcpad; + GstState state; + gboolean display, calculate_motion, firstgridx, firstgridy, changed_gridx, + changed_gridy, changed_startime; + gboolean previous_motion, changed_datafile, postallmotion, usealpha, + firstdatafile, firstframe; + gboolean sent_init_error_msg, sent_save_error_msg; + gchar *prev_datafile, *cur_datafile, *basename_datafile, *datafile_extension; + gint prevgridx, gridx, prevgridy, gridy, id; + gdouble sensitivity, threshold; + IplImage *cvImage; + motionmaskcoordrect *motionmaskcoords; + cellscolor *motioncellscolor; + motioncellidx *motioncellsidx, *motionmaskcellsidx; + int motionmaskcoord_count, motioncells_count, motionmaskcells_count; + int gap, thickness, datafileidx, postnomotion, minimum_motion_frames; + guint64 motion_begin_timestamp, last_motion_timestamp, motion_timestamp, + last_nomotion_notified, prev_buff_timestamp, cur_buff_timestamp; + gint64 diff_timestamp, starttime; + guint64 consecutive_motion; + gint width, height; + //time stuff + struct timeval tv; + GMutex *propset_mutex; + double framerate; +}; + +struct _GstMotioncellsClass +{ + GstElementClass parent_class; +}; + +GType gst_motion_cells_get_type (void); + +gboolean gst_motioncells_plugin_init (GstPlugin * plugin); + +G_END_DECLS +#endif /* __GST_MOTION_CELLS_H__ */ diff --git a/ext/opencv/gstopencv.c b/ext/opencv/gstopencv.c index e12ca78028..8d9def2b27 100644 --- a/ext/opencv/gstopencv.c +++ b/ext/opencv/gstopencv.c @@ -32,6 +32,7 @@ #include "gstedgedetect.h" #include "gstfaceblur.h" #include "gstfacedetect.h" +#include "gstmotioncells.h" #include "gstpyramidsegment.h" #include "gsttemplatematch.h" #include "gsttextoverlay.h" @@ -66,6 +67,9 @@ plugin_init (GstPlugin * plugin) if (!gst_facedetect_plugin_init (plugin)) return FALSE; + if (!gst_motioncells_plugin_init (plugin)) + return FALSE; + if (!gst_pyramidsegment_plugin_init (plugin)) return FALSE; diff --git a/ext/opencv/motioncells_wrapper.cpp b/ext/opencv/motioncells_wrapper.cpp new file mode 100644 index 0000000000..b768f9ece9 --- /dev/null +++ b/ext/opencv/motioncells_wrapper.cpp @@ -0,0 +1,213 @@ +/* + * GStreamer + * Copyright (C) 2011 Robert Jobbagy + * Copyright (C) 2011 Nicola Murino + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to 
use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + * + * Alternatively, the contents of this file may be used under the + * GNU Lesser General Public License Version 2.1 (the "LGPL"), in + * which case the following provisions apply instead of the ones + * mentioned above: + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#include +#include +#include "motioncells_wrapper.h" + +extern int instanceCounter; +extern bool element_id_was_max; +MotionCells *mc; +char p_str[] = "idx failed"; + +void +motion_cells_init () +{ + mc = new MotionCells (); + instanceOfMC tmpmc; + tmpmc.id = instanceCounter; + tmpmc.mc = mc; + motioncellsvector.push_back (tmpmc); + if ((instanceCounter < INT_MAX) && !element_id_was_max) { + instanceCounter++; + element_id_was_max = false; + } else { + element_id_was_max = true; + instanceCounter = motioncellsfreeids.back (); + motioncellsfreeids.pop_back (); + } +} + +int +perform_detection_motion_cells (IplImage * p_image, double p_sensitivity, + double p_framerate, int p_gridx, int p_gridy, long int p_timestamp_millisec, + bool p_isVisible, bool p_useAlpha, int motionmaskcoord_count, + motionmaskcoordrect * motionmaskcoords, int motionmaskcells_count, + motioncellidx * motionmaskcellsidx, cellscolor motioncellscolor, + int motioncells_count, motioncellidx * motioncellsidx, gint64 starttime, + char *p_datafile, bool p_changed_datafile, int p_thickness, int p_id) +{ + int idx = 0; + idx = searchIdx (p_id); + return motioncellsvector.at (idx).mc->performDetectionMotionCells (p_image, + p_sensitivity, p_framerate, p_gridx, p_gridy, p_timestamp_millisec, + p_isVisible, p_useAlpha, motionmaskcoord_count, motionmaskcoords, + motionmaskcells_count, motionmaskcellsidx, motioncellscolor, + motioncells_count, motioncellsidx, starttime, p_datafile, + p_changed_datafile, p_thickness); +} + + +void +setPrevFrame (IplImage * p_prevFrame, int p_id) +{ + int idx = 0; + idx = searchIdx (p_id); + motioncellsvector.at (idx).mc->setPrevFrame (p_prevFrame); +} + +char * +getMotionCellsIdx (int p_id) +{ + int idx = 0; + idx = searchIdx (p_id); + if (idx > -1) + return motioncellsvector.at (idx).mc->getMotionCellsIdx (); + else { + return p_str; + } + +} + +int +getMotionCellsIdxCnt (int p_id) +{ + int idx = 0; + idx = searchIdx (p_id); + if (idx > -1) + return motioncellsvector.at (idx).mc->getMotionCellsIdxCount (); + else + return 0; +} + +bool +getChangedDataFile (int p_id) +{ + int idx = 0; + idx = searchIdx (p_id); + if (idx > -1) + return motioncellsvector.at (idx).mc->getChangedDataFile (); + else + return false; +} + +int +searchIdx (int p_id) +{ + for (unsigned int i = 0; i < motioncellsvector.size (); i++) { + instanceOfMC tmpmc; + tmpmc = motioncellsvector.at (i); + if (tmpmc.id == p_id) { + return i; + } + } + return -1; +} + +char * +getInitDataFileFailed (int p_id) +{ + int idx = 0; + idx = searchIdx (p_id); + if (idx > -1) + return motioncellsvector.at (idx).mc->getDatafileInitFailed (); + else { + return p_str; + } +} + +char * +getSaveDataFileFailed (int p_id) +{ + int idx = 0; + idx = searchIdx (p_id); + if (idx > -1) + return motioncellsvector.at (idx).mc->getDatafileSaveFailed (); + else { + return p_str; + } +} + +int +getInitErrorCode (int p_id) +{ + int idx = 0; + idx = searchIdx (p_id); + if (idx > -1) + return motioncellsvector.at (idx).mc->getInitErrorCode (); + else + return -1; +} + +int +getSaveErrorCode (int p_id) +{ + int idx = 0; + idx = searchIdx (p_id); + if (idx > -1) + return motioncellsvector.at (idx).mc->getSaveErrorCode (); + else + return -1; +} + +void +motion_cells_free (int p_id) +{ + int idx = 0; + idx = searchIdx (p_id); + if (idx > -1) { + delete motioncellsvector.at (idx).mc; + motioncellsvector.erase (motioncellsvector.begin () + idx); + motioncellsfreeids.push_back (p_id); + } +} + +void +motion_cells_free_resources (int p_id) +{ + int idx = 0; + idx = 
searchIdx (p_id); + if (idx > -1) + motioncellsvector.at (idx).mc->freeDataFile (); +} diff --git a/ext/opencv/motioncells_wrapper.h b/ext/opencv/motioncells_wrapper.h new file mode 100644 index 0000000000..0feaafa8bd --- /dev/null +++ b/ext/opencv/motioncells_wrapper.h @@ -0,0 +1,89 @@ +/* + * GStreamer + * Copyright (C) 2011 Robert Jobbagy + * Copyright (C) 2011 Nicola Murino + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + * + * Alternatively, the contents of this file may be used under the + * GNU Lesser General Public License Version 2.1 (the "LGPL"), in + * which case the following provisions apply instead of the ones + * mentioned above: + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifndef MOTIONCELLS_WRAPPER_H +#define MOTIONCELLS_WRAPPER_H + +#include + +#ifdef __cplusplus +#include "MotionCells.h" +struct instanceOfMC +{ + int id; + MotionCells *mc; +}; +vector < instanceOfMC > motioncellsvector; +vector < int >motioncellsfreeids; + +int searchIdx (int p_id); +extern "C" +{ +#endif + + void motion_cells_init (); + int perform_detection_motion_cells (IplImage * p_image, double p_sensitivity, + double p_framerate, int p_gridx, int p_gridy, + long int p_timestamp_millisec, bool p_isVisible, bool p_useAlpha, + int motionmaskcoord_count, motionmaskcoordrect * motionmaskcoords, + int motionmaskcells_count, motioncellidx * motionmaskcellsidx, + cellscolor motioncellscolor, int motioncells_count, + motioncellidx * motioncellsidx, gint64 starttime, char *datafile, + bool p_changed_datafile, int p_thickness, int p_id); + void setPrevFrame (IplImage * p_prevFrame, int p_id); + void motion_cells_free (int p_id); + void motion_cells_free_resources (int p_id); + char *getMotionCellsIdx (int p_id); + int getMotionCellsIdxCnt (int p_id); + bool getChangedDataFile (int p_id); + char *getInitDataFileFailed (int p_id); + char *getSaveDataFileFailed (int p_id); + int getInitErrorCode (int p_id); + int getSaveErrorCode (int p_id); + +#ifdef __cplusplus +} +#endif + +#endif /* MOTIONCELLS_WRAPPER_H */ diff --git a/ext/opus/Makefile.am b/ext/opus/Makefile.am new file mode 100644 index 0000000000..aa50ba96ef --- /dev/null +++ b/ext/opus/Makefile.am @@ -0,0 +1,16 @@ +plugin_LTLIBRARIES = libgstopus.la + +libgstopus_la_SOURCES = gstopus.c gstopusdec.c gstopusenc.c +libgstopus_la_CFLAGS = \ + $(GST_PLUGINS_BASE_CFLAGS) \ + $(GST_CFLAGS) \ + $(OPUS_CFLAGS) +libgstopus_la_LIBADD = \ + $(GST_PLUGINS_BASE_LIBS) -lgsttag-$(GST_MAJORMINOR) \ + $(GST_BASE_LIBS) \ + $(GST_LIBS) \ + $(OPUS_LIBS) +libgstopus_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) $(LIBM) +libgstopus_la_LIBTOOLFLAGS = --tag=disable-static + +noinst_HEADERS = gstopusenc.h gstopusdec.h diff --git a/ext/opus/gstopus.c b/ext/opus/gstopus.c new file mode 100644 index 0000000000..65e9dcdc58 --- /dev/null +++ b/ext/opus/gstopus.c @@ -0,0 +1,50 @@ +/* GStreamer + * Copyright (C) <1999> Erik Walthinsen + * Copyright (C) <2008> Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ +#ifdef HAVE_CONFIG_H +#include +#endif + +#include "gstopusdec.h" +#include "gstopusenc.h" + +#include + +static gboolean +plugin_init (GstPlugin * plugin) +{ + + if (!gst_element_register (plugin, "opusenc", GST_RANK_NONE, + GST_TYPE_OPUS_ENC)) + return FALSE; + + if (!gst_element_register (plugin, "opusdec", GST_RANK_PRIMARY, + GST_TYPE_OPUS_DEC)) + return FALSE; + + gst_tag_register_musicbrainz_tags (); + + return TRUE; +} + +GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, + GST_VERSION_MINOR, + "opus", + "OPUS plugin library", + plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN) diff --git a/ext/opus/gstopusdec.c b/ext/opus/gstopusdec.c new file mode 100644 index 0000000000..47c06cec0a --- /dev/null +++ b/ext/opus/gstopusdec.c @@ -0,0 +1,865 @@ +/* GStreamer + * Copyright (C) 2004 Wim Taymans + * Copyright (C) 2006 Tim-Philipp Müller + * Copyright (C) 2008 Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +/* + * Based on the speexdec element. + */ + +/** + * SECTION:element-opusdec + * @see_also: opusenc, oggdemux + * + * This element decodes a OPUS stream to raw integer audio. + * + * + * Example pipelines + * |[ + * gst-launch -v filesrc location=opus.ogg ! oggdemux ! opusdec ! audioconvert ! audioresample ! alsasink + * ]| Decode an Ogg/Opus file. To create an Ogg/Opus file refer to the documentation of opusenc. 
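
The opus plugin registered above exposes opusenc (GST_RANK_NONE) and opusdec (GST_RANK_PRIMARY, so it is autoplugged for audio/x-opus), and the gtk-doc section shows the gst-launch decode pipeline. Below is a hedged sketch of the same Ogg/Opus playback pipeline built programmatically; it is not part of the patch, it assumes the GStreamer 0.10 API used here, and the file name is illustrative.

/* Hypothetical sketch: the gst-launch decode example above, built with
 * gst_parse_launch().  GStreamer 0.10 API assumed; file name illustrative. */
#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  gst_init (&argc, &argv);

  pipeline = gst_parse_launch ("filesrc location=opus.ogg ! oggdemux ! "
      "opusdec ! audioconvert ! audioresample ! alsasink", NULL);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* block until end-of-stream or an error is posted on the bus */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_EOS | GST_MESSAGE_ERROR);
  if (msg)
    gst_message_unref (msg);
  gst_object_unref (bus);

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
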
+ * + */ + +#ifdef HAVE_CONFIG_H +# include "config.h" +#endif + +#include "gstopusdec.h" +#include +#include + +GST_DEBUG_CATEGORY_STATIC (opusdec_debug); +#define GST_CAT_DEFAULT opusdec_debug + +#define DEC_MAX_FRAME_SIZE 2000 + +static GstStaticPadTemplate opus_dec_src_factory = +GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("audio/x-raw-int, " + "rate = (int) [ 32000, 64000 ], " + "channels = (int) [ 1, 2 ], " + "endianness = (int) BYTE_ORDER, " + "signed = (boolean) true, " "width = (int) 16, " "depth = (int) 16") + ); + +static GstStaticPadTemplate opus_dec_sink_factory = +GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("audio/x-opus") + ); + +GST_BOILERPLATE (GstOpusDec, gst_opus_dec, GstElement, GST_TYPE_ELEMENT); + +static gboolean opus_dec_sink_event (GstPad * pad, GstEvent * event); +static GstFlowReturn opus_dec_chain (GstPad * pad, GstBuffer * buf); +static gboolean opus_dec_sink_setcaps (GstPad * pad, GstCaps * caps); +static GstStateChangeReturn opus_dec_change_state (GstElement * element, + GstStateChange transition); + +static gboolean opus_dec_src_event (GstPad * pad, GstEvent * event); +static gboolean opus_dec_src_query (GstPad * pad, GstQuery * query); +static gboolean opus_dec_sink_query (GstPad * pad, GstQuery * query); +static const GstQueryType *opus_get_src_query_types (GstPad * pad); +static const GstQueryType *opus_get_sink_query_types (GstPad * pad); +static gboolean opus_dec_convert (GstPad * pad, + GstFormat src_format, gint64 src_value, + GstFormat * dest_format, gint64 * dest_value); + +static GstFlowReturn opus_dec_chain_parse_data (GstOpusDec * dec, + GstBuffer * buf, GstClockTime timestamp, GstClockTime duration); +static GstFlowReturn opus_dec_chain_parse_header (GstOpusDec * dec, + GstBuffer * buf); +#if 0 +static GstFlowReturn opus_dec_chain_parse_comments (GstOpusDec * dec, + GstBuffer * buf); +#endif + +static void +gst_opus_dec_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&opus_dec_src_factory)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&opus_dec_sink_factory)); + gst_element_class_set_details_simple (element_class, "Opus audio decoder", + "Codec/Decoder/Audio", + "decode opus streams to audio", + "Sebastian Dröge "); +} + +static void +gst_opus_dec_class_init (GstOpusDecClass * klass) +{ + GstElementClass *gstelement_class; + + gstelement_class = (GstElementClass *) klass; + + gstelement_class->change_state = GST_DEBUG_FUNCPTR (opus_dec_change_state); + + GST_DEBUG_CATEGORY_INIT (opusdec_debug, "opusdec", 0, + "opus decoding element"); +} + +static void +gst_opus_dec_reset (GstOpusDec * dec) +{ + gst_segment_init (&dec->segment, GST_FORMAT_UNDEFINED); + dec->granulepos = -1; + dec->packetno = 0; + dec->frame_size = 0; + dec->frame_samples = 960; + dec->frame_duration = 0; + if (dec->state) { + opus_decoder_destroy (dec->state); + dec->state = NULL; + } +#if 0 + if (dec->mode) { + opus_mode_destroy (dec->mode); + dec->mode = NULL; + } +#endif + + gst_buffer_replace (&dec->streamheader, NULL); + gst_buffer_replace (&dec->vorbiscomment, NULL); + g_list_foreach (dec->extra_headers, (GFunc) gst_mini_object_unref, NULL); + g_list_free (dec->extra_headers); + dec->extra_headers = NULL; + +#if 0 + memset (&dec->header, 0, sizeof (dec->header)); +#endif +} + +static void +gst_opus_dec_init (GstOpusDec * 
dec, GstOpusDecClass * g_class) +{ + dec->sinkpad = + gst_pad_new_from_static_template (&opus_dec_sink_factory, "sink"); + gst_pad_set_chain_function (dec->sinkpad, GST_DEBUG_FUNCPTR (opus_dec_chain)); + gst_pad_set_event_function (dec->sinkpad, + GST_DEBUG_FUNCPTR (opus_dec_sink_event)); + gst_pad_set_query_type_function (dec->sinkpad, + GST_DEBUG_FUNCPTR (opus_get_sink_query_types)); + gst_pad_set_query_function (dec->sinkpad, + GST_DEBUG_FUNCPTR (opus_dec_sink_query)); + gst_pad_set_setcaps_function (dec->sinkpad, + GST_DEBUG_FUNCPTR (opus_dec_sink_setcaps)); + gst_element_add_pad (GST_ELEMENT (dec), dec->sinkpad); + + dec->srcpad = gst_pad_new_from_static_template (&opus_dec_src_factory, "src"); + gst_pad_use_fixed_caps (dec->srcpad); + gst_pad_set_event_function (dec->srcpad, + GST_DEBUG_FUNCPTR (opus_dec_src_event)); + gst_pad_set_query_type_function (dec->srcpad, + GST_DEBUG_FUNCPTR (opus_get_src_query_types)); + gst_pad_set_query_function (dec->srcpad, + GST_DEBUG_FUNCPTR (opus_dec_src_query)); + gst_element_add_pad (GST_ELEMENT (dec), dec->srcpad); + + dec->sample_rate = 48000; + dec->n_channels = 2; + + gst_opus_dec_reset (dec); +} + +static gboolean +opus_dec_sink_setcaps (GstPad * pad, GstCaps * caps) +{ + GstOpusDec *dec = GST_OPUS_DEC (gst_pad_get_parent (pad)); + gboolean ret = TRUE; + GstStructure *s; + const GValue *streamheader; + + s = gst_caps_get_structure (caps, 0); + if ((streamheader = gst_structure_get_value (s, "streamheader")) && + G_VALUE_HOLDS (streamheader, GST_TYPE_ARRAY) && + gst_value_array_get_size (streamheader) >= 2) { + const GValue *header; + GstBuffer *buf; + GstFlowReturn res = GST_FLOW_OK; + + header = gst_value_array_get_value (streamheader, 0); + if (header && G_VALUE_HOLDS (header, GST_TYPE_BUFFER)) { + buf = gst_value_get_buffer (header); + res = opus_dec_chain_parse_header (dec, buf); + if (res != GST_FLOW_OK) + goto done; + gst_buffer_replace (&dec->streamheader, buf); + } +#if 0 + vorbiscomment = gst_value_array_get_value (streamheader, 1); + if (vorbiscomment && G_VALUE_HOLDS (vorbiscomment, GST_TYPE_BUFFER)) { + buf = gst_value_get_buffer (vorbiscomment); + res = opus_dec_chain_parse_comments (dec, buf); + if (res != GST_FLOW_OK) + goto done; + gst_buffer_replace (&dec->vorbiscomment, buf); + } +#endif + + g_list_foreach (dec->extra_headers, (GFunc) gst_mini_object_unref, NULL); + g_list_free (dec->extra_headers); + dec->extra_headers = NULL; + + if (gst_value_array_get_size (streamheader) > 2) { + gint i, n; + + n = gst_value_array_get_size (streamheader); + for (i = 2; i < n; i++) { + header = gst_value_array_get_value (streamheader, i); + buf = gst_value_get_buffer (header); + dec->extra_headers = + g_list_prepend (dec->extra_headers, gst_buffer_ref (buf)); + } + } + } + +done: + gst_object_unref (dec); + return ret; +} + +static gboolean +opus_dec_convert (GstPad * pad, + GstFormat src_format, gint64 src_value, + GstFormat * dest_format, gint64 * dest_value) +{ + gboolean res = TRUE; + GstOpusDec *dec; + guint64 scale = 1; + + dec = GST_OPUS_DEC (gst_pad_get_parent (pad)); + + if (dec->packetno < 1) { + res = FALSE; + goto cleanup; + } + + if (src_format == *dest_format) { + *dest_value = src_value; + res = TRUE; + goto cleanup; + } + + if (pad == dec->sinkpad && + (src_format == GST_FORMAT_BYTES || *dest_format == GST_FORMAT_BYTES)) { + res = FALSE; + goto cleanup; + } + + switch (src_format) { + case GST_FORMAT_TIME: + switch (*dest_format) { + case GST_FORMAT_BYTES: + scale = sizeof (gint16) * dec->n_channels; + case 
GST_FORMAT_DEFAULT: + *dest_value = + gst_util_uint64_scale_int (scale * src_value, + dec->sample_rate, GST_SECOND); + break; + default: + res = FALSE; + break; + } + break; + case GST_FORMAT_DEFAULT: + switch (*dest_format) { + case GST_FORMAT_BYTES: + *dest_value = src_value * sizeof (gint16) * dec->n_channels; + break; + case GST_FORMAT_TIME: + *dest_value = + gst_util_uint64_scale_int (src_value, GST_SECOND, + dec->sample_rate); + break; + default: + res = FALSE; + break; + } + break; + case GST_FORMAT_BYTES: + switch (*dest_format) { + case GST_FORMAT_DEFAULT: + *dest_value = src_value / (sizeof (gint16) * dec->n_channels); + break; + case GST_FORMAT_TIME: + *dest_value = gst_util_uint64_scale_int (src_value, GST_SECOND, + dec->sample_rate * sizeof (gint16) * dec->n_channels); + break; + default: + res = FALSE; + break; + } + break; + default: + res = FALSE; + break; + } + +cleanup: + gst_object_unref (dec); + return res; +} + +static const GstQueryType * +opus_get_sink_query_types (GstPad * pad) +{ + static const GstQueryType opus_dec_sink_query_types[] = { + GST_QUERY_CONVERT, + 0 + }; + + return opus_dec_sink_query_types; +} + +static gboolean +opus_dec_sink_query (GstPad * pad, GstQuery * query) +{ + GstOpusDec *dec; + gboolean res; + + dec = GST_OPUS_DEC (gst_pad_get_parent (pad)); + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_CONVERT: + { + GstFormat src_fmt, dest_fmt; + gint64 src_val, dest_val; + + gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val); + res = opus_dec_convert (pad, src_fmt, src_val, &dest_fmt, &dest_val); + if (res) { + gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val); + } + break; + } + default: + res = gst_pad_query_default (pad, query); + break; + } + + gst_object_unref (dec); + return res; +} + +static const GstQueryType * +opus_get_src_query_types (GstPad * pad) +{ + static const GstQueryType opus_dec_src_query_types[] = { + GST_QUERY_POSITION, + GST_QUERY_DURATION, + 0 + }; + + return opus_dec_src_query_types; +} + +static gboolean +opus_dec_src_query (GstPad * pad, GstQuery * query) +{ + GstOpusDec *dec; + gboolean res = FALSE; + + dec = GST_OPUS_DEC (gst_pad_get_parent (pad)); + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_POSITION:{ + GstSegment segment; + GstFormat format; + gint64 cur; + + gst_query_parse_position (query, &format, NULL); + + GST_PAD_STREAM_LOCK (dec->sinkpad); + segment = dec->segment; + GST_PAD_STREAM_UNLOCK (dec->sinkpad); + + if (segment.format != GST_FORMAT_TIME) { + GST_DEBUG_OBJECT (dec, "segment not initialised yet"); + break; + } + + if ((res = opus_dec_convert (dec->srcpad, GST_FORMAT_TIME, + segment.last_stop, &format, &cur))) { + gst_query_set_position (query, format, cur); + } + break; + } + case GST_QUERY_DURATION:{ + GstFormat format = GST_FORMAT_TIME; + gint64 dur; + + /* get duration from demuxer */ + if (!gst_pad_query_peer_duration (dec->sinkpad, &format, &dur)) + break; + + gst_query_parse_duration (query, &format, NULL); + + /* and convert it into the requested format */ + if ((res = opus_dec_convert (dec->srcpad, GST_FORMAT_TIME, + dur, &format, &dur))) { + gst_query_set_duration (query, format, dur); + } + break; + } + default: + res = gst_pad_query_default (pad, query); + break; + } + + gst_object_unref (dec); + return res; +} + +static gboolean +opus_dec_src_event (GstPad * pad, GstEvent * event) +{ + gboolean res = FALSE; + GstOpusDec *dec = GST_OPUS_DEC (gst_pad_get_parent (pad)); + + GST_LOG_OBJECT (dec, "handling %s event", GST_EVENT_TYPE_NAME 
(event)); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_SEEK:{ + GstFormat format, tformat; + gdouble rate; + GstEvent *real_seek; + GstSeekFlags flags; + GstSeekType cur_type, stop_type; + gint64 cur, stop; + gint64 tcur, tstop; + + gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, &cur, + &stop_type, &stop); + + /* we have to ask our peer to seek to time here as we know + * nothing about how to generate a granulepos from the src + * formats or anything. + * + * First bring the requested format to time + */ + tformat = GST_FORMAT_TIME; + if (!(res = opus_dec_convert (pad, format, cur, &tformat, &tcur))) + break; + if (!(res = opus_dec_convert (pad, format, stop, &tformat, &tstop))) + break; + + /* then seek with time on the peer */ + real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME, + flags, cur_type, tcur, stop_type, tstop); + + GST_LOG_OBJECT (dec, "seek to %" GST_TIME_FORMAT, GST_TIME_ARGS (tcur)); + + res = gst_pad_push_event (dec->sinkpad, real_seek); + gst_event_unref (event); + break; + } + default: + res = gst_pad_event_default (pad, event); + break; + } + + gst_object_unref (dec); + return res; +} + +static gboolean +opus_dec_sink_event (GstPad * pad, GstEvent * event) +{ + GstOpusDec *dec; + gboolean ret = FALSE; + + dec = GST_OPUS_DEC (gst_pad_get_parent (pad)); + + GST_LOG_OBJECT (dec, "handling %s event", GST_EVENT_TYPE_NAME (event)); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_NEWSEGMENT:{ + GstFormat format; + gdouble rate, arate; + gint64 start, stop, time; + gboolean update; + + gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format, + &start, &stop, &time); + + if (format != GST_FORMAT_TIME) + goto newseg_wrong_format; + + if (rate <= 0.0) + goto newseg_wrong_rate; + + if (update) { + /* time progressed without data, see if we can fill the gap with + * some concealment data */ + if (dec->segment.last_stop < start) { + GstClockTime duration; + + duration = start - dec->segment.last_stop; + opus_dec_chain_parse_data (dec, NULL, dec->segment.last_stop, + duration); + } + } + + /* now configure the values */ + gst_segment_set_newsegment_full (&dec->segment, update, + rate, arate, GST_FORMAT_TIME, start, stop, time); + + dec->granulepos = -1; + + GST_DEBUG_OBJECT (dec, "segment now: cur = %" GST_TIME_FORMAT " [%" + GST_TIME_FORMAT " - %" GST_TIME_FORMAT "]", + GST_TIME_ARGS (dec->segment.last_stop), + GST_TIME_ARGS (dec->segment.start), + GST_TIME_ARGS (dec->segment.stop)); + + ret = gst_pad_push_event (dec->srcpad, event); + break; + } + default: + ret = gst_pad_event_default (pad, event); + break; + } + + gst_object_unref (dec); + return ret; + + /* ERRORS */ +newseg_wrong_format: + { + GST_DEBUG_OBJECT (dec, "received non TIME newsegment"); + gst_object_unref (dec); + return FALSE; + } +newseg_wrong_rate: + { + GST_DEBUG_OBJECT (dec, "negative rates not supported yet"); + gst_object_unref (dec); + return FALSE; + } +} + +static GstFlowReturn +opus_dec_chain_parse_header (GstOpusDec * dec, GstBuffer * buf) +{ + GstCaps *caps; + //gint error = OPUS_OK; + +#if 0 + dec->samples_per_frame = opus_packet_get_samples_per_frame ( + (const unsigned char *) GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf)); +#endif + +#if 0 + if (memcmp (dec->header.codec_id, "OPUS ", 8) != 0) + goto invalid_header; +#endif + +#if 0 +#ifdef HAVE_OPUS_0_7 + dec->mode = + opus_mode_create (dec->sample_rate, dec->header.frame_size, &error); +#else + dec->mode = + opus_mode_create (dec->sample_rate, dec->header.nb_channels, + dec->header.frame_size, 
&error); +#endif + if (!dec->mode) + goto mode_init_failed; + + /* initialize the decoder */ +#ifdef HAVE_OPUS_0_11 + dec->state = + opus_decoder_create_custom (dec->mode, dec->header.nb_channels, &error); +#else +#ifdef HAVE_OPUS_0_7 + dec->state = opus_decoder_create (dec->mode, dec->header.nb_channels, &error); +#else + dec->state = opus_decoder_create (dec->mode); +#endif +#endif +#endif + dec->state = opus_decoder_create (dec->sample_rate, dec->n_channels); + if (!dec->state) + goto init_failed; + +#if 0 +#ifdef HAVE_OPUS_0_8 + dec->frame_size = dec->header.frame_size; +#else + opus_mode_info (dec->mode, OPUS_GET_FRAME_SIZE, &dec->frame_size); +#endif +#endif + + dec->frame_duration = gst_util_uint64_scale_int (dec->frame_size, + GST_SECOND, dec->sample_rate); + + /* set caps */ + caps = gst_caps_new_simple ("audio/x-raw-int", + "rate", G_TYPE_INT, dec->sample_rate, + "channels", G_TYPE_INT, dec->n_channels, + "signed", G_TYPE_BOOLEAN, TRUE, + "endianness", G_TYPE_INT, G_BYTE_ORDER, + "width", G_TYPE_INT, 16, "depth", G_TYPE_INT, 16, NULL); + + GST_DEBUG_OBJECT (dec, "rate=%d channels=%d frame-size=%d", + dec->sample_rate, dec->n_channels, dec->frame_size); + + if (!gst_pad_set_caps (dec->srcpad, caps)) + goto nego_failed; + + gst_caps_unref (caps); + return GST_FLOW_OK; + + /* ERRORS */ +#if 0 +invalid_header: + { + GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE, + (NULL), ("Invalid header")); + return GST_FLOW_ERROR; + } +mode_init_failed: + { + GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE, + (NULL), ("Mode initialization failed: %d", error)); + return GST_FLOW_ERROR; + } +#endif +init_failed: + { + GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE, + (NULL), ("couldn't initialize decoder")); + return GST_FLOW_ERROR; + } +nego_failed: + { + GST_ELEMENT_ERROR (GST_ELEMENT (dec), STREAM, DECODE, + (NULL), ("couldn't negotiate format")); + gst_caps_unref (caps); + return GST_FLOW_NOT_NEGOTIATED; + } +} + +#if 0 +static GstFlowReturn +opus_dec_chain_parse_comments (GstOpusDec * dec, GstBuffer * buf) +{ + GstTagList *list; + gchar *encoder = NULL; + + list = gst_tag_list_from_vorbiscomment_buffer (buf, NULL, 0, &encoder); + + if (!list) { + GST_WARNING_OBJECT (dec, "couldn't decode comments"); + list = gst_tag_list_new (); + } + + if (encoder) { + gst_tag_list_add (list, GST_TAG_MERGE_REPLACE, + GST_TAG_ENCODER, encoder, NULL); + } + + gst_tag_list_add (list, GST_TAG_MERGE_REPLACE, + GST_TAG_AUDIO_CODEC, "Opus", NULL); + + if (dec->header.bytes_per_packet > 0) { + gst_tag_list_add (list, GST_TAG_MERGE_REPLACE, + GST_TAG_BITRATE, (guint) dec->header.bytes_per_packet * 8, NULL); + } + + GST_INFO_OBJECT (dec, "tags: %" GST_PTR_FORMAT, list); + + gst_element_found_tags_for_pad (GST_ELEMENT (dec), dec->srcpad, list); + + g_free (encoder); + g_free (ver); + + return GST_FLOW_OK; +} +#endif + +static GstFlowReturn +opus_dec_chain_parse_data (GstOpusDec * dec, GstBuffer * buf, + GstClockTime timestamp, GstClockTime duration) +{ + GstFlowReturn res = GST_FLOW_OK; + gint size; + guint8 *data; + GstBuffer *outbuf; + gint16 *out_data; + int n; + + if (timestamp != -1) { + dec->segment.last_stop = timestamp; + dec->granulepos = -1; + } + + if (dec->state == NULL) { + GstCaps *caps; + + dec->state = opus_decoder_create (dec->sample_rate, dec->n_channels); + + /* set caps */ + caps = gst_caps_new_simple ("audio/x-raw-int", + "rate", G_TYPE_INT, dec->sample_rate, + "channels", G_TYPE_INT, dec->n_channels, + "signed", G_TYPE_BOOLEAN, TRUE, + "endianness", G_TYPE_INT, G_BYTE_ORDER, + 
"width", G_TYPE_INT, 16, "depth", G_TYPE_INT, 16, NULL); + + GST_DEBUG_OBJECT (dec, "rate=%d channels=%d frame-size=%d", + dec->sample_rate, dec->n_channels, dec->frame_size); + + if (!gst_pad_set_caps (dec->srcpad, caps)) + GST_ERROR ("nego failure"); + + gst_caps_unref (caps); + } + + if (buf) { + data = GST_BUFFER_DATA (buf); + size = GST_BUFFER_SIZE (buf); + + GST_DEBUG_OBJECT (dec, "received buffer of size %u", size); + + /* copy timestamp */ + } else { + /* concealment data, pass NULL as the bits parameters */ + GST_DEBUG_OBJECT (dec, "creating concealment data"); + data = NULL; + size = 0; + } + + GST_DEBUG ("bandwidth %d", opus_packet_get_bandwidth (data)); + GST_DEBUG ("samples_per_frame %d", opus_packet_get_samples_per_frame (data, + 48000)); + GST_DEBUG ("channels %d", opus_packet_get_nb_channels (data)); + + res = gst_pad_alloc_buffer_and_set_caps (dec->srcpad, + GST_BUFFER_OFFSET_NONE, dec->frame_samples * dec->n_channels * 2, + GST_PAD_CAPS (dec->srcpad), &outbuf); + + if (res != GST_FLOW_OK) { + GST_DEBUG_OBJECT (dec, "buf alloc flow: %s", gst_flow_get_name (res)); + return res; + } + + out_data = (gint16 *) GST_BUFFER_DATA (outbuf); + + GST_LOG_OBJECT (dec, "decoding frame"); + + n = opus_decode (dec->state, data, size, out_data, dec->frame_samples, TRUE); + if (n < 0) { + GST_ELEMENT_ERROR (dec, STREAM, DECODE, ("Decoding error: %d", n), (NULL)); + return GST_FLOW_ERROR; + } + + if (!GST_CLOCK_TIME_IS_VALID (timestamp)) { + timestamp = gst_util_uint64_scale_int (dec->granulepos - dec->frame_size, + GST_SECOND, dec->sample_rate); + } + + GST_DEBUG_OBJECT (dec, "timestamp=%" GST_TIME_FORMAT, + GST_TIME_ARGS (timestamp)); + + GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (buf); + GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (buf); + if (dec->discont) { + GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DISCONT); + dec->discont = 0; + } + + dec->segment.last_stop += dec->frame_duration; + + GST_LOG_OBJECT (dec, "pushing buffer with ts=%" GST_TIME_FORMAT ", dur=%" + GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)), + GST_TIME_ARGS (dec->frame_duration)); + + res = gst_pad_push (dec->srcpad, outbuf); + + if (res != GST_FLOW_OK) + GST_DEBUG_OBJECT (dec, "flow: %s", gst_flow_get_name (res)); + + return res; +} + +static GstFlowReturn +opus_dec_chain (GstPad * pad, GstBuffer * buf) +{ + GstFlowReturn res; + GstOpusDec *dec; + + dec = GST_OPUS_DEC (gst_pad_get_parent (pad)); + + if (GST_BUFFER_IS_DISCONT (buf)) { + dec->discont = TRUE; + } + + res = opus_dec_chain_parse_data (dec, buf, GST_BUFFER_TIMESTAMP (buf), + GST_BUFFER_DURATION (buf)); + +//done: + dec->packetno++; + + gst_buffer_unref (buf); + gst_object_unref (dec); + + return res; +} + +static GstStateChangeReturn +opus_dec_change_state (GstElement * element, GstStateChange transition) +{ + GstStateChangeReturn ret; + GstOpusDec *dec = GST_OPUS_DEC (element); + + switch (transition) { + case GST_STATE_CHANGE_NULL_TO_READY: + case GST_STATE_CHANGE_READY_TO_PAUSED: + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: + default: + break; + } + + ret = parent_class->change_state (element, transition); + if (ret != GST_STATE_CHANGE_SUCCESS) + return ret; + + switch (transition) { + case GST_STATE_CHANGE_PLAYING_TO_PAUSED: + break; + case GST_STATE_CHANGE_PAUSED_TO_READY: + gst_opus_dec_reset (dec); + break; + case GST_STATE_CHANGE_READY_TO_NULL: + break; + default: + break; + } + + return ret; +} diff --git a/ext/opus/gstopusdec.h b/ext/opus/gstopusdec.h new file mode 100644 index 0000000000..886a907532 --- 
/dev/null +++ b/ext/opus/gstopusdec.h @@ -0,0 +1,77 @@ +/* GStreamer + * Copyright (C) <1999> Erik Walthinsen + * Copyright (C) <2008> Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef __GST_OPUS_DEC_H__ +#define __GST_OPUS_DEC_H__ + +#include +#include + +G_BEGIN_DECLS + +#define GST_TYPE_OPUS_DEC \ + (gst_opus_dec_get_type()) +#define GST_OPUS_DEC(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_OPUS_DEC,GstOpusDec)) +#define GST_OPUS_DEC_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_OPUS_DEC,GstOpusDecClass)) +#define GST_IS_OPUS_DEC(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_OPUS_DEC)) +#define GST_IS_OPUS_DEC_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_OPUS_DEC)) + +typedef struct _GstOpusDec GstOpusDec; +typedef struct _GstOpusDecClass GstOpusDecClass; + +struct _GstOpusDec { + GstElement element; + + /* pads */ + GstPad *sinkpad; + GstPad *srcpad; + + OpusDecoder *state; + int frame_samples; + + gint frame_size; + GstClockTime frame_duration; + guint64 packetno; + + GstSegment segment; /* STREAM LOCK */ + gint64 granulepos; /* -1 = needs to be set from current time */ + gboolean discont; + + GstBuffer *streamheader; + GstBuffer *vorbiscomment; + GList *extra_headers; + + int sample_rate; + int n_channels; +}; + +struct _GstOpusDecClass { + GstElementClass parent_class; +}; + +GType gst_opus_dec_get_type (void); + +G_END_DECLS + +#endif /* __GST_OPUS_DEC_H__ */ diff --git a/ext/opus/gstopusenc.c b/ext/opus/gstopusenc.c new file mode 100644 index 0000000000..db57ff75d1 --- /dev/null +++ b/ext/opus/gstopusenc.c @@ -0,0 +1,1198 @@ +/* GStreamer Opus Encoder + * Copyright (C) <1999> Erik Walthinsen + * Copyright (C) <2008> Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +/* + * Based on the speexenc element + */ + +/** + * SECTION:element-opusenc + * @see_also: opusdec, oggmux + * + * This element encodes raw audio to OPUS. + * + * + * Example pipelines + * |[ + * gst-launch -v audiotestsrc wave=sine num-buffers=100 ! audioconvert ! opusenc ! oggmux ! 
filesink location=sine.ogg + * ]| Encode a test sine signal to Ogg/OPUS. + * + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif +#include +#include +#include +#include +#include + +#include +#include +#include +#include "gstopusenc.h" + +GST_DEBUG_CATEGORY_STATIC (opusenc_debug); +#define GST_CAT_DEFAULT opusenc_debug + +#define GST_OPUS_ENC_TYPE_BANDWIDTH (gst_opus_enc_bandwidth_get_type()) +static GType +gst_opus_enc_bandwidth_get_type (void) +{ + static const GEnumValue values[] = { + {OPUS_BANDWIDTH_NARROWBAND, "Narrow band", "narrowband"}, + {OPUS_BANDWIDTH_MEDIUMBAND, "Medium band", "mediumband"}, + {OPUS_BANDWIDTH_WIDEBAND, "Wide band", "wideband"}, + {OPUS_BANDWIDTH_SUPERWIDEBAND, "Super wide band", "superwideband"}, + {OPUS_BANDWIDTH_FULLBAND, "Full band", "fullband"}, + {OPUS_BANDWIDTH_AUTO, "Auto", "auto"}, + {0, NULL, NULL} + }; + static volatile GType id = 0; + + if (g_once_init_enter ((gsize *) & id)) { + GType _id; + + _id = g_enum_register_static ("GstOpusEncBandwidth", values); + + g_once_init_leave ((gsize *) & id, _id); + } + + return id; +} + +static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("audio/x-raw-int, " + "rate = (int) { 8000, 12000, 16000, 24000, 48000 }, " + "channels = (int) [ 1, 2 ], " + "endianness = (int) BYTE_ORDER, " + "signed = (boolean) TRUE, " "width = (int) 16, " "depth = (int) 16") + ); + +static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("audio/x-opus, " + "rate = (int) { 8000, 12000, 16000, 24000, 48000 }, " + "channels = (int) [ 1, 2 ], " "frame-size = (int) [ 2, 60 ]") + ); + +#define DEFAULT_AUDIO TRUE +#define DEFAULT_BITRATE 64000 +#define DEFAULT_BANDWIDTH OPUS_BANDWIDTH_FULLBAND +#define DEFAULT_FRAMESIZE 20 +#define DEFAULT_CBR TRUE +#define DEFAULT_CONSTRAINED_VBR TRUE +#define DEFAULT_COMPLEXITY 10 +#define DEFAULT_INBAND_FEC FALSE +#define DEFAULT_DTX FALSE +#define DEFAULT_PACKET_LOSS_PERCENT 0 + +enum +{ + PROP_0, + PROP_AUDIO, + PROP_BITRATE, + PROP_BANDWIDTH, + PROP_FRAME_SIZE, + PROP_CBR, + PROP_CONSTRAINED_VBR, + PROP_COMPLEXITY, + PROP_INBAND_FEC, + PROP_DTX, + PROP_PACKET_LOSS_PERCENT +}; + +static void gst_opus_enc_finalize (GObject * object); + +static gboolean gst_opus_enc_sinkevent (GstPad * pad, GstEvent * event); +static GstFlowReturn gst_opus_enc_chain (GstPad * pad, GstBuffer * buf); +static gboolean gst_opus_enc_setup (GstOpusEnc * enc); + +static void gst_opus_enc_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); +static void gst_opus_enc_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); +static GstStateChangeReturn gst_opus_enc_change_state (GstElement * element, + GstStateChange transition); + +static GstFlowReturn gst_opus_enc_encode (GstOpusEnc * enc, gboolean flush); + +static void +gst_opus_enc_setup_interfaces (GType opusenc_type) +{ + static const GInterfaceInfo tag_setter_info = { NULL, NULL, NULL }; + const GInterfaceInfo preset_interface_info = { + NULL, /* interface_init */ + NULL, /* interface_finalize */ + NULL /* interface_data */ + }; + + g_type_add_interface_static (opusenc_type, GST_TYPE_TAG_SETTER, + &tag_setter_info); + g_type_add_interface_static (opusenc_type, GST_TYPE_PRESET, + &preset_interface_info); + + GST_DEBUG_CATEGORY_INIT (opusenc_debug, "opusenc", 0, "Opus encoder"); +} + +GST_BOILERPLATE_FULL (GstOpusEnc, gst_opus_enc, GstElement, GST_TYPE_ELEMENT, + 
gst_opus_enc_setup_interfaces); + +static void +gst_opus_enc_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&src_factory)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&sink_factory)); + gst_element_class_set_details_simple (element_class, "Opus audio encoder", + "Codec/Encoder/Audio", + "Encodes audio in Opus format", + "Sebastian Dröge "); +} + +static void +gst_opus_enc_class_init (GstOpusEncClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *gstelement_class; + + gobject_class = (GObjectClass *) klass; + gstelement_class = (GstElementClass *) klass; + + gobject_class->set_property = gst_opus_enc_set_property; + gobject_class->get_property = gst_opus_enc_get_property; + + g_object_class_install_property (gobject_class, PROP_AUDIO, + g_param_spec_boolean ("audio", "Audio or voice", + "Audio or voice", DEFAULT_AUDIO, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_BITRATE, + g_param_spec_int ("bitrate", "Encoding Bit-rate", + "Specify an encoding bit-rate (in bps).", + 1, 320000, DEFAULT_BITRATE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_BANDWIDTH, + g_param_spec_enum ("bandwidth", "Band Width", + "Audio Band Width", GST_OPUS_ENC_TYPE_BANDWIDTH, DEFAULT_BANDWIDTH, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_FRAME_SIZE, + g_param_spec_int ("frame-size", "Frame Size", + "The duration of an audio frame, in ms", 2, 60, DEFAULT_FRAMESIZE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_CBR, + g_param_spec_boolean ("cbr", "Constant bit rate", + "Constant bit rate", DEFAULT_CBR, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_CONSTRAINED_VBR, + g_param_spec_boolean ("constrained-cbr", "Constrained VBR", + "Constrained VBR", DEFAULT_CONSTRAINED_VBR, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_COMPLEXITY, + g_param_spec_int ("complexity", "Complexity", + "Complexity", 0, 10, DEFAULT_COMPLEXITY, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_INBAND_FEC, + g_param_spec_boolean ("inband-fec", "In-band FEC", + "Enable forward error correction", DEFAULT_INBAND_FEC, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_DTX, + g_param_spec_boolean ("dtx", "DTX", + "DTX", DEFAULT_DTX, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (G_OBJECT_CLASS (klass), + PROP_PACKET_LOSS_PERCENT, g_param_spec_int ("packet-loss-percentage", + "Loss percentage", "Packet loss percentage", 0, 100, + DEFAULT_PACKET_LOSS_PERCENT, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_opus_enc_finalize); + + gstelement_class->change_state = + GST_DEBUG_FUNCPTR (gst_opus_enc_change_state); +} + +static void +gst_opus_enc_finalize (GObject * object) +{ + GstOpusEnc *enc; + + enc = GST_OPUS_ENC (object); + + g_object_unref (enc->adapter); + + G_OBJECT_CLASS (parent_class)->finalize (object); +} + +static gboolean +gst_opus_enc_sink_setcaps (GstPad * pad, GstCaps * caps) +{ + GstOpusEnc *enc; + GstStructure 
*structure; + GstCaps *otherpadcaps; + + enc = GST_OPUS_ENC (GST_PAD_PARENT (pad)); + enc->setup = FALSE; + enc->frame_size = DEFAULT_FRAMESIZE; + otherpadcaps = gst_pad_get_allowed_caps (pad); + + structure = gst_caps_get_structure (caps, 0); + gst_structure_get_int (structure, "channels", &enc->n_channels); + gst_structure_get_int (structure, "rate", &enc->sample_rate); + + if (otherpadcaps) { + if (!gst_caps_is_empty (otherpadcaps)) { + GstStructure *ps = gst_caps_get_structure (otherpadcaps, 0); + gst_structure_get_int (ps, "frame-size", &enc->frame_size); + } + gst_caps_unref (otherpadcaps); + } + + GST_ERROR_OBJECT (pad, "channels=%d rate=%d frame-size=%d", + enc->n_channels, enc->sample_rate, enc->frame_size); + switch (enc->frame_size) { + case 2: + enc->frame_samples = enc->sample_rate / 400; + break; + case 5: + enc->frame_samples = enc->sample_rate / 200; + break; + case 10: + enc->frame_samples = enc->sample_rate / 100; + break; + case 20: + enc->frame_samples = enc->sample_rate / 50; + break; + case 40: + enc->frame_samples = enc->sample_rate / 20; + break; + case 60: + enc->frame_samples = 3 * enc->sample_rate / 50; + break; + default: + return FALSE; + break; + } + GST_ERROR ("frame_samples %d", enc->frame_samples); + + gst_opus_enc_setup (enc); + + return TRUE; +} + + +static GstCaps * +gst_opus_enc_sink_getcaps (GstPad * pad) +{ + GstCaps *caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad)); + GstCaps *peercaps = NULL; + GstOpusEnc *enc = GST_OPUS_ENC (gst_pad_get_parent_element (pad)); + + peercaps = gst_pad_peer_get_caps (enc->srcpad); + + if (peercaps) { + if (!gst_caps_is_empty (peercaps) && !gst_caps_is_any (peercaps)) { + GstStructure *ps = gst_caps_get_structure (peercaps, 0); + GstStructure *s = gst_caps_get_structure (caps, 0); + gint rate, channels; + + if (gst_structure_get_int (ps, "rate", &rate)) { + gst_structure_fixate_field_nearest_int (s, "rate", rate); + } + + if (gst_structure_get_int (ps, "channels", &channels)) { + gst_structure_fixate_field_nearest_int (s, "channels", channels); + } + } + gst_caps_unref (peercaps); + } + + gst_object_unref (enc); + + return caps; +} + + +static gboolean +gst_opus_enc_convert_src (GstPad * pad, GstFormat src_format, gint64 src_value, + GstFormat * dest_format, gint64 * dest_value) +{ + gboolean res = TRUE; + GstOpusEnc *enc; + gint64 avg; + + enc = GST_OPUS_ENC (GST_PAD_PARENT (pad)); + + if (enc->samples_in == 0 || enc->bytes_out == 0 || enc->sample_rate == 0) + return FALSE; + + avg = (enc->bytes_out * enc->sample_rate) / (enc->samples_in); + + switch (src_format) { + case GST_FORMAT_BYTES: + switch (*dest_format) { + case GST_FORMAT_TIME: + *dest_value = src_value * GST_SECOND / avg; + break; + default: + res = FALSE; + } + break; + case GST_FORMAT_TIME: + switch (*dest_format) { + case GST_FORMAT_BYTES: + *dest_value = src_value * avg / GST_SECOND; + break; + default: + res = FALSE; + } + break; + default: + res = FALSE; + } + return res; +} + +static gboolean +gst_opus_enc_convert_sink (GstPad * pad, GstFormat src_format, + gint64 src_value, GstFormat * dest_format, gint64 * dest_value) +{ + gboolean res = TRUE; + guint scale = 1; + gint bytes_per_sample; + GstOpusEnc *enc; + + enc = GST_OPUS_ENC (GST_PAD_PARENT (pad)); + + bytes_per_sample = enc->n_channels * 2; + + switch (src_format) { + case GST_FORMAT_BYTES: + switch (*dest_format) { + case GST_FORMAT_DEFAULT: + if (bytes_per_sample == 0) + return FALSE; + *dest_value = src_value / bytes_per_sample; + break; + case GST_FORMAT_TIME: + { + gint byterate 
= bytes_per_sample * enc->sample_rate; + + if (byterate == 0) + return FALSE; + *dest_value = src_value * GST_SECOND / byterate; + break; + } + default: + res = FALSE; + } + break; + case GST_FORMAT_DEFAULT: + switch (*dest_format) { + case GST_FORMAT_BYTES: + *dest_value = src_value * bytes_per_sample; + break; + case GST_FORMAT_TIME: + if (enc->sample_rate == 0) + return FALSE; + *dest_value = src_value * GST_SECOND / enc->sample_rate; + break; + default: + res = FALSE; + } + break; + case GST_FORMAT_TIME: + switch (*dest_format) { + case GST_FORMAT_BYTES: + scale = bytes_per_sample; + /* fallthrough */ + case GST_FORMAT_DEFAULT: + *dest_value = src_value * scale * enc->sample_rate / GST_SECOND; + break; + default: + res = FALSE; + } + break; + default: + res = FALSE; + } + return res; +} + +static gint64 +gst_opus_enc_get_latency (GstOpusEnc * enc) +{ + return gst_util_uint64_scale (enc->frame_samples, GST_SECOND, + enc->sample_rate); +} + +static const GstQueryType * +gst_opus_enc_get_query_types (GstPad * pad) +{ + static const GstQueryType gst_opus_enc_src_query_types[] = { + GST_QUERY_POSITION, + GST_QUERY_DURATION, + GST_QUERY_CONVERT, + GST_QUERY_LATENCY, + 0 + }; + + return gst_opus_enc_src_query_types; +} + +static gboolean +gst_opus_enc_src_query (GstPad * pad, GstQuery * query) +{ + gboolean res = TRUE; + GstOpusEnc *enc; + + enc = GST_OPUS_ENC (gst_pad_get_parent (pad)); + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_POSITION: + { + GstFormat fmt, req_fmt; + gint64 pos, val; + + gst_query_parse_position (query, &req_fmt, NULL); + if ((res = gst_pad_query_peer_position (enc->sinkpad, &req_fmt, &val))) { + gst_query_set_position (query, req_fmt, val); + break; + } + + fmt = GST_FORMAT_TIME; + if (!(res = gst_pad_query_peer_position (enc->sinkpad, &fmt, &pos))) + break; + + if ((res = + gst_pad_query_peer_convert (enc->sinkpad, fmt, pos, &req_fmt, + &val))) + gst_query_set_position (query, req_fmt, val); + + break; + } + case GST_QUERY_DURATION: + { + GstFormat fmt, req_fmt; + gint64 dur, val; + + gst_query_parse_duration (query, &req_fmt, NULL); + if ((res = gst_pad_query_peer_duration (enc->sinkpad, &req_fmt, &val))) { + gst_query_set_duration (query, req_fmt, val); + break; + } + + fmt = GST_FORMAT_TIME; + if (!(res = gst_pad_query_peer_duration (enc->sinkpad, &fmt, &dur))) + break; + + if ((res = + gst_pad_query_peer_convert (enc->sinkpad, fmt, dur, &req_fmt, + &val))) { + gst_query_set_duration (query, req_fmt, val); + } + break; + } + case GST_QUERY_CONVERT: + { + GstFormat src_fmt, dest_fmt; + gint64 src_val, dest_val; + + gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val); + if (!(res = gst_opus_enc_convert_src (pad, src_fmt, src_val, &dest_fmt, + &dest_val))) + goto error; + gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val); + break; + } + case GST_QUERY_LATENCY: + { + gboolean live; + GstClockTime min_latency, max_latency; + gint64 latency; + + if ((res = gst_pad_peer_query (pad, query))) { + gst_query_parse_latency (query, &live, &min_latency, &max_latency); + + latency = gst_opus_enc_get_latency (enc); + + /* add our latency */ + min_latency += latency; + if (max_latency != -1) + max_latency += latency; + + gst_query_set_latency (query, live, min_latency, max_latency); + } + break; + } + default: + res = gst_pad_peer_query (pad, query); + break; + } + +error: + + gst_object_unref (enc); + + return res; +} + +static gboolean +gst_opus_enc_sink_query (GstPad * pad, GstQuery * query) +{ + gboolean res = TRUE; + + switch 
(GST_QUERY_TYPE (query)) { + case GST_QUERY_CONVERT: + { + GstFormat src_fmt, dest_fmt; + gint64 src_val, dest_val; + + gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val); + if (!(res = + gst_opus_enc_convert_sink (pad, src_fmt, src_val, &dest_fmt, + &dest_val))) + goto error; + gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val); + break; + } + default: + res = gst_pad_query_default (pad, query); + break; + } + +error: + return res; +} + +static void +gst_opus_enc_init (GstOpusEnc * enc, GstOpusEncClass * klass) +{ + enc->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink"); + gst_element_add_pad (GST_ELEMENT (enc), enc->sinkpad); + gst_pad_set_event_function (enc->sinkpad, + GST_DEBUG_FUNCPTR (gst_opus_enc_sinkevent)); + gst_pad_set_chain_function (enc->sinkpad, + GST_DEBUG_FUNCPTR (gst_opus_enc_chain)); + gst_pad_set_setcaps_function (enc->sinkpad, + GST_DEBUG_FUNCPTR (gst_opus_enc_sink_setcaps)); + gst_pad_set_getcaps_function (enc->sinkpad, + GST_DEBUG_FUNCPTR (gst_opus_enc_sink_getcaps)); + gst_pad_set_query_function (enc->sinkpad, + GST_DEBUG_FUNCPTR (gst_opus_enc_sink_query)); + + enc->srcpad = gst_pad_new_from_static_template (&src_factory, "src"); + gst_pad_set_query_function (enc->srcpad, + GST_DEBUG_FUNCPTR (gst_opus_enc_src_query)); + gst_pad_set_query_type_function (enc->srcpad, + GST_DEBUG_FUNCPTR (gst_opus_enc_get_query_types)); + gst_element_add_pad (GST_ELEMENT (enc), enc->srcpad); + + enc->n_channels = -1; + enc->sample_rate = -1; + enc->frame_samples = 0; + + enc->bitrate = DEFAULT_BITRATE; + enc->bandwidth = DEFAULT_BANDWIDTH; + enc->frame_size = DEFAULT_FRAMESIZE; + enc->cbr = DEFAULT_CBR; + enc->constrained_vbr = DEFAULT_CONSTRAINED_VBR; + enc->complexity = DEFAULT_COMPLEXITY; + enc->inband_fec = DEFAULT_INBAND_FEC; + enc->dtx = DEFAULT_DTX; + enc->packet_loss_percentage = DEFAULT_PACKET_LOSS_PERCENT; + + enc->setup = FALSE; + enc->header_sent = FALSE; + + enc->adapter = gst_adapter_new (); +} + +#if 0 +static GstBuffer * +gst_opus_enc_create_metadata_buffer (GstOpusEnc * enc) +{ + const GstTagList *tags; + GstTagList *empty_tags = NULL; + GstBuffer *comments = NULL; + + tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (enc)); + + GST_DEBUG_OBJECT (enc, "tags = %" GST_PTR_FORMAT, tags); + + if (tags == NULL) { + /* FIXME: better fix chain of callers to not write metadata at all, + * if there is none */ + empty_tags = gst_tag_list_new (); + tags = empty_tags; + } + comments = gst_tag_list_to_vorbiscomment_buffer (tags, NULL, + 0, "Encoded with GStreamer Opusenc"); + + GST_BUFFER_OFFSET (comments) = enc->bytes_out; + GST_BUFFER_OFFSET_END (comments) = 0; + + if (empty_tags) + gst_tag_list_free (empty_tags); + + return comments; +} +#endif + +static gboolean +gst_opus_enc_setup (GstOpusEnc * enc) +{ + //gint error = OPUS_OK; + + enc->setup = FALSE; + +#if 0 +#ifdef HAVE_OPUS_0_7 + enc->mode = opus_mode_create (enc->rate, enc->frame_size, &error); +#else + enc->mode = + opus_mode_create (enc->rate, enc->n_channels, enc->frame_size, &error); +#endif + if (!enc->mode) + goto mode_initialization_failed; + +#ifdef HAVE_OPUS_0_11 + opus_header_init (&enc->header, enc->mode, enc->frame_size, enc->n_channels); +#else +#ifdef HAVE_OPUS_0_7 + opus_header_init (&enc->header, enc->mode, enc->n_channels); +#else + opus_header_init (&enc->header, enc->mode); +#endif +#endif + enc->header.nb_channels = enc->n_channels; + +#ifdef HAVE_OPUS_0_8 + enc->frame_size = enc->header.frame_size; +#else + opus_mode_info (enc->mode, 
OPUS_GET_FRAME_SIZE, &enc->frame_size); +#endif +#endif + +#if 0 +#ifdef HAVE_OPUS_0_11 + enc->state = opus_encoder_create_custom (enc->mode, enc->n_channels, &error); +#else +#ifdef HAVE_OPUS_0_7 + enc->state = opus_encoder_create (enc->mode, enc->n_channels, &error); +#else + enc->state = opus_encoder_create (enc->mode); +#endif +#endif +#endif + enc->state = opus_encoder_create (enc->sample_rate, enc->n_channels, + enc->audio_or_voip ? OPUS_APPLICATION_AUDIO : OPUS_APPLICATION_VOIP); + if (!enc->state) + goto encoder_creation_failed; + + opus_encoder_ctl (enc->state, OPUS_SET_BITRATE (enc->bitrate), 0); + opus_encoder_ctl (enc->state, OPUS_SET_BANDWIDTH (enc->bandwidth), 0); + opus_encoder_ctl (enc->state, OPUS_SET_VBR_FLAG (!enc->cbr), 0); + opus_encoder_ctl (enc->state, OPUS_SET_VBR_CONSTRAINT (enc->constrained_vbr), + 0); + opus_encoder_ctl (enc->state, OPUS_SET_COMPLEXITY (enc->complexity), 0); + opus_encoder_ctl (enc->state, OPUS_SET_INBAND_FEC_FLAG (enc->inband_fec), 0); + opus_encoder_ctl (enc->state, OPUS_SET_DTX_FLAG (enc->dtx), 0); + opus_encoder_ctl (enc->state, + OPUS_SET_PACKET_LOSS_PERC (enc->packet_loss_percentage), 0); + + GST_LOG_OBJECT (enc, "we have frame size %d", enc->frame_size); + + enc->setup = TRUE; + + return TRUE; + +#if 0 +mode_initialization_failed: + GST_ERROR_OBJECT (enc, "Mode initialization failed: %d", error); + return FALSE; +#endif + +encoder_creation_failed: + GST_ERROR_OBJECT (enc, "Encoder creation failed"); + return FALSE; +} + + +/* push out the buffer and do internal bookkeeping */ +static GstFlowReturn +gst_opus_enc_push_buffer (GstOpusEnc * enc, GstBuffer * buffer) +{ + guint size; + + size = GST_BUFFER_SIZE (buffer); + + enc->bytes_out += size; + + GST_DEBUG_OBJECT (enc, "pushing output buffer of size %u", size); + + return gst_pad_push (enc->srcpad, buffer); +} + +#if 0 +static GstCaps * +gst_opus_enc_set_header_on_caps (GstCaps * caps, GstBuffer * buf1, + GstBuffer * buf2) +{ + GstStructure *structure = NULL; + GstBuffer *buf; + GValue array = { 0 }; + GValue value = { 0 }; + + caps = gst_caps_make_writable (caps); + structure = gst_caps_get_structure (caps, 0); + + g_assert (gst_buffer_is_metadata_writable (buf1)); + g_assert (gst_buffer_is_metadata_writable (buf2)); + + /* mark buffers */ + GST_BUFFER_FLAG_SET (buf1, GST_BUFFER_FLAG_IN_CAPS); + GST_BUFFER_FLAG_SET (buf2, GST_BUFFER_FLAG_IN_CAPS); + + /* put buffers in a fixed list */ + g_value_init (&array, GST_TYPE_ARRAY); + g_value_init (&value, GST_TYPE_BUFFER); + buf = gst_buffer_copy (buf1); + gst_value_set_buffer (&value, buf); + gst_buffer_unref (buf); + gst_value_array_append_value (&array, &value); + g_value_unset (&value); + g_value_init (&value, GST_TYPE_BUFFER); + buf = gst_buffer_copy (buf2); + gst_value_set_buffer (&value, buf); + gst_buffer_unref (buf); + gst_value_array_append_value (&array, &value); + gst_structure_set_value (structure, "streamheader", &array); + g_value_unset (&value); + g_value_unset (&array); + + return caps; +} +#endif + + +static gboolean +gst_opus_enc_sinkevent (GstPad * pad, GstEvent * event) +{ + gboolean res = TRUE; + GstOpusEnc *enc; + + enc = GST_OPUS_ENC (gst_pad_get_parent (pad)); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_EOS: + gst_opus_enc_encode (enc, TRUE); + res = gst_pad_event_default (pad, event); + break; + case GST_EVENT_TAG: + { + GstTagList *list; + GstTagSetter *setter = GST_TAG_SETTER (enc); + const GstTagMergeMode mode = gst_tag_setter_get_tag_merge_mode (setter); + + gst_event_parse_tag (event, &list); + 
gst_tag_setter_merge_tags (setter, list, mode); + res = gst_pad_event_default (pad, event); + break; + } + default: + res = gst_pad_event_default (pad, event); + break; + } + + gst_object_unref (enc); + + return res; +} + +static GstFlowReturn +gst_opus_enc_encode (GstOpusEnc * enc, gboolean flush) +{ + + GstFlowReturn ret = GST_FLOW_OK; + gint bytes = enc->frame_samples * 2 * enc->n_channels; + gint bytes_per_packet; + + bytes_per_packet = + (enc->bitrate * enc->frame_samples / enc->sample_rate + 4) / 8; + + if (flush && gst_adapter_available (enc->adapter) % bytes != 0) { + guint diff = gst_adapter_available (enc->adapter) % bytes; + GstBuffer *buf = gst_buffer_new_and_alloc (diff); + + memset (GST_BUFFER_DATA (buf), 0, diff); + gst_adapter_push (enc->adapter, buf); + } + + + while (gst_adapter_available (enc->adapter) >= bytes) { + gint16 *data; + gint outsize; + GstBuffer *outbuf; + + ret = gst_pad_alloc_buffer_and_set_caps (enc->srcpad, + GST_BUFFER_OFFSET_NONE, bytes_per_packet, GST_PAD_CAPS (enc->srcpad), + &outbuf); + + if (GST_FLOW_OK != ret) + goto done; + + data = (gint16 *) gst_adapter_take (enc->adapter, bytes); + enc->samples_in += enc->frame_samples; + + GST_DEBUG_OBJECT (enc, "encoding %d samples (%d bytes)", + enc->frame_samples, bytes); + + outsize = opus_encode (enc->state, data, enc->frame_samples, + GST_BUFFER_DATA (outbuf), bytes_per_packet); + + g_free (data); + + if (outsize < 0) { + GST_ERROR_OBJECT (enc, "Encoding failed: %d", outsize); + ret = GST_FLOW_ERROR; + goto done; + } + + GST_BUFFER_TIMESTAMP (outbuf) = enc->start_ts + + gst_util_uint64_scale_int (enc->frameno_out * enc->frame_samples, + GST_SECOND, enc->sample_rate); + GST_BUFFER_DURATION (outbuf) = + gst_util_uint64_scale_int (enc->frame_samples, GST_SECOND, + enc->sample_rate); + GST_BUFFER_OFFSET (outbuf) = + gst_util_uint64_scale_int (GST_BUFFER_OFFSET_END (outbuf), GST_SECOND, + enc->sample_rate); + + enc->frameno++; + enc->frameno_out++; + + ret = gst_opus_enc_push_buffer (enc, outbuf); + + if ((GST_FLOW_OK != ret) && (GST_FLOW_NOT_LINKED != ret)) + goto done; + } + +done: + + return ret; +} + +static GstFlowReturn +gst_opus_enc_chain (GstPad * pad, GstBuffer * buf) +{ + GstOpusEnc *enc; + GstFlowReturn ret = GST_FLOW_OK; + + enc = GST_OPUS_ENC (GST_PAD_PARENT (pad)); + + if (!enc->setup) + goto not_setup; + +#if 0 + if (!enc->header_sent) { + /* Opus streams begin with two headers; the initial header (with + most of the codec setup parameters) which is mandated by the Ogg + bitstream spec. The second header holds any comment fields. 
+ We merely need to make the headers, then pass them to libopus + one at a time; libopus handles the additional Ogg bitstream + constraints */ + GstBuffer *buf1, *buf2; + GstCaps *caps; + guchar data[100]; + + /* create header buffer */ + opus_header_to_packet (&enc->header, data, 100); + buf1 = gst_opus_enc_buffer_from_data (enc, data, 100, 0); + + /* create comment buffer */ + buf2 = gst_opus_enc_create_metadata_buffer (enc); + + /* mark and put on caps */ + caps = gst_pad_get_caps (enc->srcpad); + caps = gst_opus_enc_set_header_on_caps (caps, buf1, buf2); + + gst_caps_set_simple (caps, + "rate", G_TYPE_INT, enc->sample_rate, + "channels", G_TYPE_INT, enc->n_channels, + "frame-size", G_TYPE_INT, enc->frame_size, NULL); + + /* negotiate with these caps */ + GST_DEBUG_OBJECT (enc, "here are the caps: %" GST_PTR_FORMAT, caps); + GST_LOG_OBJECT (enc, "rate=%d channels=%d frame-size=%d", + enc->sample_rate, enc->n_channels, enc->frame_size); + gst_pad_set_caps (enc->srcpad, caps); + + gst_buffer_set_caps (buf1, caps); + gst_buffer_set_caps (buf2, caps); + gst_caps_unref (caps); + + /* push out buffers */ + ret = gst_opus_enc_push_buffer (enc, buf1); + + if (ret != GST_FLOW_OK) { + gst_buffer_unref (buf2); + goto done; + } + + ret = gst_opus_enc_push_buffer (enc, buf2); + + if (ret != GST_FLOW_OK) + goto done; + + enc->header_sent = TRUE; + } +#endif + + GST_DEBUG_OBJECT (enc, "received buffer of %u bytes", GST_BUFFER_SIZE (buf)); + + /* Save the timestamp of the first buffer. This will be later + * used as offset for all following buffers */ + if (enc->start_ts == GST_CLOCK_TIME_NONE) { + if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) { + enc->start_ts = GST_BUFFER_TIMESTAMP (buf); + } else { + enc->start_ts = 0; + } + } + + + /* Check if we have a continous stream, if not drop some samples or the buffer or + * insert some silence samples */ + if (enc->next_ts != GST_CLOCK_TIME_NONE && + GST_BUFFER_TIMESTAMP (buf) < enc->next_ts) { + guint64 diff = enc->next_ts - GST_BUFFER_TIMESTAMP (buf); + guint64 diff_bytes; + + GST_WARNING_OBJECT (enc, "Buffer is older than previous " + "timestamp + duration (%" GST_TIME_FORMAT "< %" GST_TIME_FORMAT + "), cannot handle. 
Clipping buffer.", + GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)), + GST_TIME_ARGS (enc->next_ts)); + + diff_bytes = + GST_CLOCK_TIME_TO_FRAMES (diff, enc->sample_rate) * enc->n_channels * 2; + if (diff_bytes >= GST_BUFFER_SIZE (buf)) { + gst_buffer_unref (buf); + return GST_FLOW_OK; + } + buf = gst_buffer_make_metadata_writable (buf); + GST_BUFFER_DATA (buf) += diff_bytes; + GST_BUFFER_SIZE (buf) -= diff_bytes; + + GST_BUFFER_TIMESTAMP (buf) += diff; + if (GST_BUFFER_DURATION_IS_VALID (buf)) + GST_BUFFER_DURATION (buf) -= diff; + } + + if (enc->next_ts != GST_CLOCK_TIME_NONE + && GST_BUFFER_TIMESTAMP_IS_VALID (buf)) { + guint64 max_diff = + gst_util_uint64_scale (enc->frame_size, GST_SECOND, enc->sample_rate); + + if (GST_BUFFER_TIMESTAMP (buf) != enc->next_ts && + GST_BUFFER_TIMESTAMP (buf) - enc->next_ts > max_diff) { + GST_WARNING_OBJECT (enc, + "Discontinuity detected: %" G_GUINT64_FORMAT " > %" G_GUINT64_FORMAT, + GST_BUFFER_TIMESTAMP (buf) - enc->next_ts, max_diff); + + gst_opus_enc_encode (enc, TRUE); + + enc->frameno_out = 0; + enc->start_ts = GST_BUFFER_TIMESTAMP (buf); + } + } + + if (GST_BUFFER_TIMESTAMP_IS_VALID (buf) + && GST_BUFFER_DURATION_IS_VALID (buf)) + enc->next_ts = GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf); + else + enc->next_ts = GST_CLOCK_TIME_NONE; + + /* push buffer to adapter */ + gst_adapter_push (enc->adapter, buf); + buf = NULL; + + ret = gst_opus_enc_encode (enc, FALSE); + +done: + + if (buf) + gst_buffer_unref (buf); + + return ret; + + /* ERRORS */ +not_setup: + { + GST_ELEMENT_ERROR (enc, CORE, NEGOTIATION, (NULL), + ("encoder not initialized (input is not audio?)")); + ret = GST_FLOW_NOT_NEGOTIATED; + goto done; + } + +} + + +static void +gst_opus_enc_get_property (GObject * object, guint prop_id, GValue * value, + GParamSpec * pspec) +{ + GstOpusEnc *enc; + + enc = GST_OPUS_ENC (object); + + switch (prop_id) { + case PROP_AUDIO: + g_value_set_boolean (value, enc->audio_or_voip); + break; + case PROP_BITRATE: + g_value_set_int (value, enc->bitrate); + break; + case PROP_BANDWIDTH: + g_value_set_int (value, enc->bandwidth); + break; + case PROP_FRAME_SIZE: + g_value_set_int (value, enc->frame_size); + break; + case PROP_CBR: + g_value_set_boolean (value, enc->cbr); + break; + case PROP_CONSTRAINED_VBR: + g_value_set_boolean (value, enc->constrained_vbr); + break; + case PROP_COMPLEXITY: + g_value_set_int (value, enc->complexity); + break; + case PROP_INBAND_FEC: + g_value_set_boolean (value, enc->inband_fec); + break; + case PROP_DTX: + g_value_set_boolean (value, enc->dtx); + break; + case PROP_PACKET_LOSS_PERCENT: + g_value_set_int (value, enc->packet_loss_percentage); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_opus_enc_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + GstOpusEnc *enc; + + enc = GST_OPUS_ENC (object); + + switch (prop_id) { + case PROP_AUDIO: + enc->audio_or_voip = g_value_get_boolean (value); + break; + case PROP_BITRATE: + enc->bitrate = g_value_get_int (value); + break; + case PROP_BANDWIDTH: + enc->bandwidth = g_value_get_int (value); + break; + case PROP_FRAME_SIZE: + enc->frame_size = g_value_get_int (value); + break; + case PROP_CBR: + enc->cbr = g_value_get_boolean (value); + break; + case PROP_CONSTRAINED_VBR: + enc->constrained_vbr = g_value_get_boolean (value); + break; + case PROP_COMPLEXITY: + enc->complexity = g_value_get_int (value); + break; + case PROP_INBAND_FEC: + enc->inband_fec = 
g_value_get_boolean (value); + break; + case PROP_DTX: + enc->dtx = g_value_get_boolean (value); + break; + case PROP_PACKET_LOSS_PERCENT: + enc->packet_loss_percentage = g_value_get_int (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static GstStateChangeReturn +gst_opus_enc_change_state (GstElement * element, GstStateChange transition) +{ + GstOpusEnc *enc = GST_OPUS_ENC (element); + GstStateChangeReturn res; + + switch (transition) { + case GST_STATE_CHANGE_NULL_TO_READY: + break; + case GST_STATE_CHANGE_READY_TO_PAUSED: + enc->frameno = 0; + enc->samples_in = 0; + enc->frameno_out = 0; + enc->start_ts = GST_CLOCK_TIME_NONE; + enc->next_ts = GST_CLOCK_TIME_NONE; + break; + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: + /* fall through */ + default: + break; + } + + res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); + if (res == GST_STATE_CHANGE_FAILURE) + return res; + + switch (transition) { + case GST_STATE_CHANGE_PLAYING_TO_PAUSED: + break; + case GST_STATE_CHANGE_PAUSED_TO_READY: + enc->setup = FALSE; + enc->header_sent = FALSE; + if (enc->state) { + opus_encoder_destroy (enc->state); + enc->state = NULL; + } + break; + case GST_STATE_CHANGE_READY_TO_NULL: + gst_tag_setter_reset_tags (GST_TAG_SETTER (enc)); + default: + break; + } + + return res; +} diff --git a/ext/opus/gstopusenc.h b/ext/opus/gstopusenc.h new file mode 100644 index 0000000000..5cb54598af --- /dev/null +++ b/ext/opus/gstopusenc.h @@ -0,0 +1,105 @@ +/* GStreamer Opus Encoder + * Copyright (C) <1999> Erik Walthinsen + * Copyright (C) <2008> Sebastian Dröge + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + + +#ifndef __GST_OPUS_ENC_H__ +#define __GST_OPUS_ENC_H__ + + +#include +#include + +#include + +G_BEGIN_DECLS + +#define GST_TYPE_OPUS_ENC \ + (gst_opus_enc_get_type()) +#define GST_OPUS_ENC(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_OPUS_ENC,GstOpusEnc)) +#define GST_OPUS_ENC_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_OPUS_ENC,GstOpusEncClass)) +#define GST_IS_OPUS_ENC(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_OPUS_ENC)) +#define GST_IS_OPUS_ENC_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_OPUS_ENC)) + +#define MAX_FRAME_SIZE 2000*2 +#define MAX_FRAME_BYTES 2000 + +typedef struct _GstOpusEnc GstOpusEnc; +typedef struct _GstOpusEncClass GstOpusEncClass; + +struct _GstOpusEnc { + GstElement element; + + /* pads */ + GstPad *sinkpad; + GstPad *srcpad; + + //OpusHeader header; + //OpusMode *mode; + OpusEncoder *state; + GstAdapter *adapter; + + /* properties */ + gboolean audio_or_voip; + gint bitrate; + gint bandwidth; + gint frame_size; + gboolean cbr; + gboolean constrained_vbr; + gint complexity; + gboolean inband_fec; + gboolean dtx; + gint packet_loss_percentage; + + int frame_samples; + + gint n_channels; + gint sample_rate; + + gboolean setup; + gboolean header_sent; + gboolean eos; + + guint64 samples_in; + guint64 bytes_out; + + guint64 frameno; + guint64 frameno_out; + + GstClockTime start_ts; + GstClockTime next_ts; + guint64 granulepos_offset; +}; + +struct _GstOpusEncClass { + GstElementClass parent_class; + + /* signals */ + void (*frame_encoded) (GstElement *element); +}; + +GType gst_opus_enc_get_type (void); + +G_END_DECLS + +#endif /* __GST_OPUS_ENC_H__ */ diff --git a/ext/resindvd/gstpesfilter.c b/ext/resindvd/gstpesfilter.c index f0a8cb5492..f2ccc7849d 100644 --- a/ext/resindvd/gstpesfilter.c +++ b/ext/resindvd/gstpesfilter.c @@ -101,7 +101,6 @@ gst_pes_filter_parse (GstPESFilter * filter) GstFlowReturn ret; guint32 start_code; - gboolean STD_buffer_bound_scale; guint16 STD_buffer_size_bound; const guint8 *data; gint avail, datalen; @@ -213,7 +212,7 @@ gst_pes_filter_parse (GstPESFilter * filter) if (datalen < 3) goto need_more_data; - STD_buffer_bound_scale = *data & 0x20; + /* STD_buffer_bound_scale = *data & 0x20; */ STD_buffer_size_bound = ((guint16) (*data++ & 0x1F)) << 8; STD_buffer_size_bound |= *data++; diff --git a/ext/resindvd/rsnstreamselector.c b/ext/resindvd/rsnstreamselector.c index 3bb6071577..eaae4f61c4 100644 --- a/ext/resindvd/rsnstreamselector.c +++ b/ext/resindvd/rsnstreamselector.c @@ -424,7 +424,6 @@ ignore: } static void rsn_stream_selector_dispose (GObject * object); -static void rsn_stream_selector_finalize (GObject * object); static void rsn_stream_selector_init (RsnStreamSelector * sel); static void rsn_stream_selector_base_init (RsnStreamSelectorClass * klass); @@ -497,7 +496,6 @@ rsn_stream_selector_class_init (RsnStreamSelectorClass * klass) parent_class = g_type_class_peek_parent (klass); gobject_class->dispose = rsn_stream_selector_dispose; - gobject_class->finalize = rsn_stream_selector_finalize; gobject_class->set_property = GST_DEBUG_FUNCPTR (rsn_stream_selector_set_property); @@ -545,16 +543,6 @@ rsn_stream_selector_dispose (GObject * object) G_OBJECT_CLASS (parent_class)->dispose (object); } -static void -rsn_stream_selector_finalize (GObject * object) -{ - RsnStreamSelector *sel; - - sel = RSN_STREAM_SELECTOR (object); - - G_OBJECT_CLASS (parent_class)->finalize (object); -} - static void rsn_stream_selector_set_property (GObject * object, guint prop_id, const GValue * value, 
GParamSpec * pspec) @@ -653,11 +641,8 @@ rsn_stream_selector_getcaps (GstPad * pad) static gboolean rsn_stream_selector_is_active_sinkpad (RsnStreamSelector * sel, GstPad * pad) { - RsnSelectorPad *selpad; gboolean res; - selpad = GST_SELECTOR_PAD_CAST (pad); - GST_OBJECT_LOCK (sel); res = (pad == sel->active_sinkpad); GST_OBJECT_UNLOCK (sel); diff --git a/ext/rtmp/Makefile.am b/ext/rtmp/Makefile.am index e97c7a7589..bd2398cab0 100644 --- a/ext/rtmp/Makefile.am +++ b/ext/rtmp/Makefile.am @@ -1,8 +1,8 @@ plugin_LTLIBRARIES = libgstrtmp.la -libgstrtmp_la_SOURCES = gstrtmpsrc.c +libgstrtmp_la_SOURCES = gstrtmpsrc.c gstrtmpsink.c gstrtmp.c -noinst_HEADERS = gstrtmpsrc.h +noinst_HEADERS = gstrtmpsrc.h gstrtmpsink.h libgstrtmp_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(RTMP_CFLAGS) libgstrtmp_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_LIBS) $(RTMP_LIBS) libgstrtmp_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) diff --git a/ext/rtmp/gstrtmp.c b/ext/rtmp/gstrtmp.c new file mode 100644 index 0000000000..7acbea4a90 --- /dev/null +++ b/ext/rtmp/gstrtmp.c @@ -0,0 +1,54 @@ +/* GStreamer + * Copyright (C) 1999,2000 Erik Walthinsen + * 2000 Wim Taymans + * 2002 Kristian Rietveld + * 2002,2003 Colin Walters + * 2001,2010 Bastien Nocera + * 2010 Sebastian Dröge + * 2010 Jan Schmidt + * + * rtmpsrc.c: + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include + +#include "gstrtmpsrc.h" +#include "gstrtmpsink.h" + +static gboolean +plugin_init (GstPlugin * plugin) +{ + gboolean ret; + + ret = gst_element_register (plugin, "rtmpsrc", GST_RANK_PRIMARY, + GST_TYPE_RTMP_SRC); + ret &= gst_element_register (plugin, "rtmpsink", GST_RANK_PRIMARY, + GST_TYPE_RTMP_SINK); + + return ret; +} + +GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, + GST_VERSION_MINOR, + "rtmp", + "RTMP source and sink", + plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN); diff --git a/ext/rtmp/gstrtmpsink.c b/ext/rtmp/gstrtmpsink.c new file mode 100644 index 0000000000..e3933b1503 --- /dev/null +++ b/ext/rtmp/gstrtmpsink.c @@ -0,0 +1,347 @@ +/* + * GStreamer + * Copyright (C) 2010 Jan Schmidt + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. 
+ * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +/** + * SECTION:element-rtmpsink + * + * This element delivers data to a streaming server via RTMP. It uses + * librtmp, and supports any protocols/urls that librtmp supports. + * The URL/location can contain extra connection or session parameters + * for librtmp, such as 'flashver=version'. See the librtmp documentation + * for more detail + * + * + * Example launch line + * |[ + * gst-launch -v videotestsrc ! ffenc_flv ! flvmux ! rtmpsink location='rtmp://localhost/path/to/stream live=1' + * ]| Encode a test video stream to FLV video format and stream it via RTMP. + * + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include + +#include "gstrtmpsink.h" + +GST_DEBUG_CATEGORY_STATIC (gst_rtmp_sink_debug); +#define GST_CAT_DEFAULT gst_rtmp_sink_debug + +/* Filter signals and args */ +enum +{ + /* FILL ME */ + LAST_SIGNAL +}; + +enum +{ + PROP_0, + PROP_LOCATION +}; + +static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("video/x-flv") + ); + +static void gst_rtmp_sink_uri_handler_init (gpointer g_iface, + gpointer iface_data); +static void gst_rtmp_sink_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); +static void gst_rtmp_sink_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); +static gboolean gst_rtmp_sink_stop (GstBaseSink * sink); +static gboolean gst_rtmp_sink_start (GstBaseSink * sink); +static GstFlowReturn gst_rtmp_sink_render (GstBaseSink * sink, GstBuffer * buf); + +static void +_do_init (GType gtype) +{ + static const GInterfaceInfo urihandler_info = { + gst_rtmp_sink_uri_handler_init, + NULL, + NULL + }; + + g_type_add_interface_static (gtype, GST_TYPE_URI_HANDLER, &urihandler_info); + + GST_DEBUG_CATEGORY_INIT (gst_rtmp_sink_debug, "rtmpsink", 0, + "RTMP server element"); +} + +GST_BOILERPLATE_FULL (GstRTMPSink, gst_rtmp_sink, GstBaseSink, + GST_TYPE_BASE_SINK, _do_init); + + +static void +gst_rtmp_sink_base_init (gpointer klass) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); + + gst_element_class_set_details_simple (element_class, + "RTMP output sink", + "Sink/Network", "Sends FLV content to a server via RTMP", + "Jan Schmidt "); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&sink_template)); +} + +/* initialize the plugin's class */ +static void +gst_rtmp_sink_class_init (GstRTMPSinkClass * klass) +{ + GObjectClass *gobject_class; + GstBaseSinkClass *gstbasesink_class = (GstBaseSinkClass *) klass; + + gobject_class = (GObjectClass *) klass; + gobject_class->set_property = gst_rtmp_sink_set_property; + gobject_class->get_property = gst_rtmp_sink_get_property; + + gstbasesink_class->start = GST_DEBUG_FUNCPTR (gst_rtmp_sink_start); + gstbasesink_class->stop = GST_DEBUG_FUNCPTR (gst_rtmp_sink_stop); + gstbasesink_class->render = GST_DEBUG_FUNCPTR (gst_rtmp_sink_render); + + gst_element_class_install_std_props (GST_ELEMENT_CLASS (klass), + "location", PROP_LOCATION, G_PARAM_READWRITE, NULL); +} + +/* initialize the new element + * initialize instance structure + */ +static void +gst_rtmp_sink_init (GstRTMPSink * sink, GstRTMPSinkClass * klass) +{ +} + +static gboolean +gst_rtmp_sink_start (GstBaseSink * basesink) +{ + 
GstRTMPSink *sink = GST_RTMP_SINK (basesink); + + if (!sink->uri) { + GST_ELEMENT_ERROR (sink, RESOURCE, OPEN_WRITE, + ("Please set URI for RTMP output"), ("No URI set before starting")); + return FALSE; + } + + sink->rtmp_uri = g_strdup (sink->uri); + sink->rtmp = RTMP_Alloc (); + RTMP_Init (sink->rtmp); + if (!RTMP_SetupURL (sink->rtmp, sink->rtmp_uri)) { + GST_ELEMENT_ERROR (sink, RESOURCE, OPEN_WRITE, (NULL), + ("Failed to setup URL '%s'", sink->uri)); + RTMP_Free (sink->rtmp); + sink->rtmp = NULL; + g_free (sink->rtmp_uri); + sink->rtmp_uri = NULL; + return FALSE; + } + + GST_DEBUG_OBJECT (sink, "Created RTMP object"); + + /* Mark this as an output connection */ + RTMP_EnableWrite (sink->rtmp); + + /* open the connection */ + if (!RTMP_IsConnected (sink->rtmp)) { + if (!RTMP_Connect (sink->rtmp, NULL) || !RTMP_ConnectStream (sink->rtmp, 0)) { + GST_ELEMENT_ERROR (sink, RESOURCE, OPEN_WRITE, (NULL), + ("Could not connect to RTMP stream \"%s\" for writing", sink->uri)); + RTMP_Free (sink->rtmp); + sink->rtmp = NULL; + g_free (sink->rtmp_uri); + sink->rtmp_uri = NULL; + return FALSE; + } + GST_DEBUG_OBJECT (sink, "Opened connection to %s", sink->rtmp_uri); + } + + sink->first = TRUE; + + return TRUE; +} + +static gboolean +gst_rtmp_sink_stop (GstBaseSink * basesink) +{ + GstRTMPSink *sink = GST_RTMP_SINK (basesink); + + gst_buffer_replace (&sink->cache, NULL); + + if (sink->rtmp) { + RTMP_Close (sink->rtmp); + RTMP_Free (sink->rtmp); + sink->rtmp = NULL; + } + if (sink->rtmp_uri) { + g_free (sink->rtmp_uri); + sink->rtmp_uri = NULL; + } + + return TRUE; +} + +static GstFlowReturn +gst_rtmp_sink_render (GstBaseSink * bsink, GstBuffer * buf) +{ + GstRTMPSink *sink = GST_RTMP_SINK (bsink); + GstBuffer *reffed_buf = NULL; + + if (sink->first) { + /* FIXME: Parse the first buffer and see if it contains a header plus a packet instead + * of just assuming it's only the header */ + GST_LOG_OBJECT (sink, "Caching first buffer of size %d for concatenation", + GST_BUFFER_SIZE (buf)); + gst_buffer_replace (&sink->cache, buf); + sink->first = FALSE; + return GST_FLOW_OK; + } + + if (sink->cache) { + GST_LOG_OBJECT (sink, "Joining 2nd buffer of size %d to cached buf", + GST_BUFFER_SIZE (buf)); + gst_buffer_ref (buf); + reffed_buf = buf = gst_buffer_join (sink->cache, buf); + sink->cache = NULL; + } + + GST_LOG_OBJECT (sink, "Sending %d bytes to RTMP server", + GST_BUFFER_SIZE (buf)); + + if (!RTMP_Write (sink->rtmp, + (char *) GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf))) { + GST_ELEMENT_ERROR (sink, RESOURCE, WRITE, (NULL), ("Failed to write data")); + if (reffed_buf) + gst_buffer_unref (reffed_buf); + return GST_FLOW_ERROR; + } + + if (reffed_buf) + gst_buffer_unref (reffed_buf); + + return GST_FLOW_OK; +} + +/* + * URI interface support. 
+ */ +static GstURIType +gst_rtmp_sink_uri_get_type (void) +{ + return GST_URI_SINK; +} + +static gchar ** +gst_rtmp_sink_uri_get_protocols (void) +{ + static gchar *protocols[] = + { (char *) "rtmp", (char *) "rtmpt", (char *) "rtmps", (char *) "rtmpe", + (char *) "rtmfp", (char *) "rtmpte", (char *) "rtmpts", NULL + }; + return protocols; +} + +static const gchar * +gst_rtmp_sink_uri_get_uri (GstURIHandler * handler) +{ + GstRTMPSink *sink = GST_RTMP_SINK (handler); + + return sink->uri; +} + +static gboolean +gst_rtmp_sink_uri_set_uri (GstURIHandler * handler, const gchar * uri) +{ + GstRTMPSink *sink = GST_RTMP_SINK (handler); + + if (GST_STATE (sink) >= GST_STATE_PAUSED) + return FALSE; + + g_free (sink->uri); + sink->uri = NULL; + + if (uri != NULL) { + int protocol; + AVal host; + unsigned int port; + AVal playpath, app; + + if (!RTMP_ParseURL (uri, &protocol, &host, &port, &playpath, &app) || + !host.av_len || !playpath.av_len) { + GST_ELEMENT_ERROR (sink, RESOURCE, OPEN_WRITE, + ("Failed to parse URI %s", uri), (NULL)); + return FALSE; + } + sink->uri = g_strdup (uri); + } + + GST_DEBUG_OBJECT (sink, "Changed URI to %s", GST_STR_NULL (uri)); + + return TRUE; +} + +static void +gst_rtmp_sink_uri_handler_init (gpointer g_iface, gpointer iface_data) +{ + GstURIHandlerInterface *iface = (GstURIHandlerInterface *) g_iface; + + iface->get_type = gst_rtmp_sink_uri_get_type; + iface->get_protocols = gst_rtmp_sink_uri_get_protocols; + iface->get_uri = gst_rtmp_sink_uri_get_uri; + iface->set_uri = gst_rtmp_sink_uri_set_uri; +} + +static void +gst_rtmp_sink_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + GstRTMPSink *sink = GST_RTMP_SINK (object); + + switch (prop_id) { + case PROP_LOCATION: + gst_rtmp_sink_uri_set_uri (GST_URI_HANDLER (sink), + g_value_get_string (value)); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_rtmp_sink_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec) +{ + GstRTMPSink *sink = GST_RTMP_SINK (object); + + switch (prop_id) { + case PROP_LOCATION: + g_value_set_string (value, sink->uri); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} diff --git a/ext/rtmp/gstrtmpsink.h b/ext/rtmp/gstrtmpsink.h new file mode 100644 index 0000000000..cb9315ebb2 --- /dev/null +++ b/ext/rtmp/gstrtmpsink.h @@ -0,0 +1,68 @@ +/* + * GStreamer + * Copyright (C) 2010 Jan Schmidt + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifndef __GST_RTMP_SINK_H__ +#define __GST_RTMP_SINK_H__ + +#include +#include + +#include +#include +#include + +G_BEGIN_DECLS + +#define GST_TYPE_RTMP_SINK \ + (gst_rtmp_sink_get_type()) +#define GST_RTMP_SINK(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTMP_SINK,GstRTMPSink)) +#define GST_RTMP_SINK_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTMP_SINK,GstRTMPSinkClass)) +#define GST_IS_RTMP_SINK(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTMP_SINK)) +#define GST_IS_RTMP_SINK_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTMP_SINK)) + +typedef struct _GstRTMPSink GstRTMPSink; +typedef struct _GstRTMPSinkClass GstRTMPSinkClass; + +struct _GstRTMPSink { + GstBaseSink parent; + + /* < private > */ + gchar *uri; + + RTMP *rtmp; + gchar *rtmp_uri; /* copy of url for librtmp */ + + GstBuffer *cache; /* Cached buffer */ + gboolean first; +}; + +struct _GstRTMPSinkClass { + GstBaseSinkClass parent_class; +}; + +GType gst_rtmp_sink_get_type (void); + +G_END_DECLS + +#endif /* __GST_RTMP_SINK_H__ */ diff --git a/ext/rtmp/gstrtmpsrc.c b/ext/rtmp/gstrtmpsrc.c index 2376ccef14..e37ac06b73 100644 --- a/ext/rtmp/gstrtmpsrc.c +++ b/ext/rtmp/gstrtmpsrc.c @@ -98,6 +98,8 @@ _do_init (GType gtype) }; g_type_add_interface_static (gtype, GST_TYPE_URI_HANDLER, &urihandler_info); + + GST_DEBUG_CATEGORY_INIT (rtmpsrc_debug, "rtmpsrc", 0, "RTMP Source"); } GST_BOILERPLATE_FULL (GstRTMPSrc, gst_rtmp_src, GstPushSrc, GST_TYPE_PUSH_SRC, @@ -581,18 +583,3 @@ gst_rtmp_src_stop (GstBaseSrc * basesrc) return TRUE; } - -static gboolean -plugin_init (GstPlugin * plugin) -{ - GST_DEBUG_CATEGORY_INIT (rtmpsrc_debug, "rtmpsrc", 0, "RTMP Source"); - - return gst_element_register (plugin, "rtmpsrc", GST_RANK_PRIMARY, - GST_TYPE_RTMP_SRC); -} - -GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, - GST_VERSION_MINOR, - "rtmpsrc", - "RTMP source", - plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN); diff --git a/ext/schroedinger/gstschrodec.c b/ext/schroedinger/gstschrodec.c index ab2c45a2c3..126ef1fed4 100644 --- a/ext/schroedinger/gstschrodec.c +++ b/ext/schroedinger/gstschrodec.c @@ -78,10 +78,6 @@ enum }; static void gst_schro_dec_finalize (GObject * object); -static void gst_schro_dec_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec); -static void gst_schro_dec_get_property (GObject * object, guint prop_id, - GValue * value, GParamSpec * pspec); static gboolean gst_schro_dec_sink_query (GstPad * pad, GstQuery * query); @@ -137,8 +133,6 @@ gst_schro_dec_class_init (GstSchroDecClass * klass) gobject_class = G_OBJECT_CLASS (klass); base_video_decoder_class = GST_BASE_VIDEO_DECODER_CLASS (klass); - gobject_class->set_property = gst_schro_dec_set_property; - gobject_class->get_property = gst_schro_dec_get_property; gobject_class->finalize = gst_schro_dec_finalize; base_video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_schro_dec_start); @@ -172,21 +166,16 @@ static gint64 granulepos_to_frame (gint64 granulepos) { guint64 pt; - int dist_h; - int dist_l; - int dist; - int delay; - guint64 dt; if (granulepos == -1) return -1; pt = ((granulepos >> 22) + (granulepos & OGG_DIRAC_GRANULE_LOW_MASK)) >> 9; - dist_h = (granulepos >> 22) & 0xff; - dist_l = granulepos & 0xff; - dist = (dist_h << 8) | dist_l; - delay = (granulepos >> 9) & 0x1fff; - dt = pt - delay; + /* dist_h = (granulepos >> 22) & 0xff; + * dist_l = granulepos & 0xff; + * dist = (dist_h << 8) | dist_l; + * delay = (granulepos >> 9) & 0x1fff; + * dt = pt - delay; */ 
return pt >> 1; } @@ -308,38 +297,6 @@ gst_schro_dec_finalize (GObject * object) G_OBJECT_CLASS (parent_class)->finalize (object); } -static void -gst_schro_dec_set_property (GObject * object, guint prop_id, - const GValue * value, GParamSpec * pspec) -{ - GstSchroDec *src; - - g_return_if_fail (GST_IS_SCHRO_DEC (object)); - src = GST_SCHRO_DEC (object); - - GST_DEBUG ("gst_schro_dec_set_property"); - switch (prop_id) { - default: - break; - } -} - -static void -gst_schro_dec_get_property (GObject * object, guint prop_id, GValue * value, - GParamSpec * pspec) -{ - GstSchroDec *src; - - g_return_if_fail (GST_IS_SCHRO_DEC (object)); - src = GST_SCHRO_DEC (object); - - switch (prop_id) { - default: - G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); - break; - } -} - static void parse_sequence_header (GstSchroDec * schro_dec, guint8 * data, int size) { @@ -642,7 +599,6 @@ gst_schro_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder, GstVideoFrame * frame) { GstSchroDec *schro_dec; - int schro_ret; SchroBuffer *input_buffer; schro_dec = GST_SCHRO_DEC (base_video_decoder); @@ -654,7 +610,7 @@ gst_schro_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder, input_buffer->tag = schro_tag_new (frame, NULL); - schro_ret = schro_decoder_autoparse_push (schro_dec->decoder, input_buffer); + schro_decoder_autoparse_push (schro_dec->decoder, input_buffer); return gst_schro_dec_process (schro_dec, FALSE); } diff --git a/ext/schroedinger/gstschroenc.c b/ext/schroedinger/gstschroenc.c index 16a3af95a2..669a612648 100644 --- a/ext/schroedinger/gstschroenc.c +++ b/ext/schroedinger/gstschroenc.c @@ -24,6 +24,7 @@ #include #include #include +#include #include #include @@ -107,7 +108,8 @@ static gboolean gst_schro_enc_set_format (GstBaseVideoEncoder * base_video_encoder, GstVideoState * state); static gboolean gst_schro_enc_start (GstBaseVideoEncoder * base_video_encoder); static gboolean gst_schro_enc_stop (GstBaseVideoEncoder * base_video_encoder); -static gboolean gst_schro_enc_finish (GstBaseVideoEncoder * base_video_encoder); +static GstFlowReturn gst_schro_enc_finish (GstBaseVideoEncoder * + base_video_encoder); static GstFlowReturn gst_schro_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder, GstVideoFrame * frame); static GstFlowReturn gst_schro_enc_shape_output (GstBaseVideoEncoder * @@ -439,7 +441,7 @@ gst_schro_enc_stop (GstBaseVideoEncoder * base_video_encoder) return TRUE; } -static gboolean +static GstFlowReturn gst_schro_enc_finish (GstBaseVideoEncoder * base_video_encoder) { GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder); @@ -449,7 +451,7 @@ gst_schro_enc_finish (GstBaseVideoEncoder * base_video_encoder) schro_encoder_end_of_stream (schro_enc->encoder); gst_schro_enc_process (schro_enc); - return TRUE; + return GST_FLOW_OK; } static GstFlowReturn @@ -612,7 +614,6 @@ gst_schro_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder, GstVideoFrame * frame) { GstSchroEnc *schro_enc; - int dpn; int delay; int dist; int pt; @@ -623,8 +624,6 @@ gst_schro_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder, schro_enc = GST_SCHRO_ENC (base_video_encoder); - dpn = frame->decode_frame_number; - pt = frame->presentation_frame_number * 2 + schro_enc->granule_offset; dt = frame->decode_frame_number * 2 + schro_enc->granule_offset; delay = pt - dt; diff --git a/ext/sndfile/gstsfsrc.c b/ext/sndfile/gstsfsrc.c index f725d3f5b4..226f54085b 100644 --- a/ext/sndfile/gstsfsrc.c +++ b/ext/sndfile/gstsfsrc.c @@ -200,7 +200,10 @@ gst_sf_src_create 
(GstBaseSrc * bsrc, guint64 offset, guint length, { GstSFSrc *this; GstBuffer *buf; +/* FIXME discont is set but not used */ +#if 0 gboolean discont = FALSE; +#endif sf_count_t bytes_read; this = GST_SF_SRC (bsrc); @@ -221,7 +224,9 @@ gst_sf_src_create (GstBaseSrc * bsrc, guint64 offset, guint length, goto seek_failed; this->offset = offset; +#if 0 discont = TRUE; +#endif } buf = gst_buffer_new_and_alloc (length); diff --git a/ext/timidity/gsttimidity.c b/ext/timidity/gsttimidity.c index 997b0b20b4..cbbcc0da16 100644 --- a/ext/timidity/gsttimidity.c +++ b/ext/timidity/gsttimidity.c @@ -108,12 +108,9 @@ gst_timidity_base_init (gpointer gclass) static void gst_timidity_class_init (GstTimidityClass * klass) { - GObjectClass *gobject_class; GstElementClass *gstelement_class; - gobject_class = (GObjectClass *) klass; gstelement_class = (GstElementClass *) klass; - gstelement_class->change_state = gst_timidity_change_state; } diff --git a/ext/timidity/gstwildmidi.c b/ext/timidity/gstwildmidi.c index 4d5c0e36db..6def9f9460 100644 --- a/ext/timidity/gstwildmidi.c +++ b/ext/timidity/gstwildmidi.c @@ -443,7 +443,10 @@ gst_wildmidi_do_seek (GstWildmidi * wildmidi, GstEvent * event) GstSeekFlags flags; GstSeekType start_type, stop_type; gint64 start, stop; - gboolean flush, update, accurate; + gboolean flush, update; +#ifdef HAVE_WILDMIDI_0_2_2 + gboolean accurate; +#endif gboolean res; unsigned long int sample; GstSegment *segment; @@ -472,7 +475,9 @@ gst_wildmidi_do_seek (GstWildmidi * wildmidi, GstEvent * event) return res; flush = ((flags & GST_SEEK_FLAG_FLUSH) == GST_SEEK_FLAG_FLUSH); +#ifdef HAVE_WILDMIDI_0_2_2 accurate = ((flags & GST_SEEK_FLAG_ACCURATE) == GST_SEEK_FLAG_ACCURATE); +#endif if (flush) { GST_DEBUG ("performing flush"); diff --git a/ext/vp8/gstvp8dec.c b/ext/vp8/gstvp8dec.c index a945717be4..4376f4be70 100644 --- a/ext/vp8/gstvp8dec.c +++ b/ext/vp8/gstvp8dec.c @@ -405,8 +405,10 @@ gst_vp8_dec_handle_frame (GstBaseVideoDecoder * decoder, GstVideoFrame * frame) state->width = stream_info.w; state->height = stream_info.h; state->format = GST_VIDEO_FORMAT_I420; - state->par_n = 1; - state->par_d = 1; + if (state->par_n == 0 || state->par_d == 0) { + state->par_n = 1; + state->par_d = 1; + } gst_vp8_dec_send_tags (dec); gst_base_video_decoder_set_src_caps (decoder); diff --git a/ext/vp8/gstvp8enc.c b/ext/vp8/gstvp8enc.c index ad6a282be2..e3903d97fc 100644 --- a/ext/vp8/gstvp8enc.c +++ b/ext/vp8/gstvp8enc.c @@ -65,6 +65,24 @@ typedef struct GList *invisible; } GstVP8EncCoderHook; +static void +_gst_mini_object_unref0 (GstMiniObject * obj) +{ + if (obj) + gst_mini_object_unref (obj); +} + +static void +gst_vp8_enc_coder_hook_free (GstVP8EncCoderHook * hook) +{ + if (hook->image) + g_slice_free (vpx_image_t, hook->image); + + g_list_foreach (hook->invisible, (GFunc) _gst_mini_object_unref0, NULL); + g_list_free (hook->invisible); + g_slice_free (GstVP8EncCoderHook, hook); +} + #define DEFAULT_BITRATE 0 #define DEFAULT_MODE VPX_VBR #define DEFAULT_MIN_QUANTIZER 0 @@ -283,7 +301,7 @@ gst_vp8_enc_class_init (GstVP8EncClass * klass) g_object_class_install_property (gobject_class, PROP_SPEED, g_param_spec_int ("speed", "Speed", "Speed", - 0, 2, DEFAULT_SPEED, + 0, 7, DEFAULT_SPEED, (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS))); g_object_class_install_property (gobject_class, PROP_THREADS, @@ -586,7 +604,9 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder, return FALSE; } - status = vpx_codec_control (&encoder->encoder, VP8E_SET_CPUUSED, 0); + /* FIXME move this 
to a set_speed() function */ + status = vpx_codec_control (&encoder->encoder, VP8E_SET_CPUUSED, + (encoder->speed == 0) ? 0 : (encoder->speed - 1)); if (status != VPX_CODEC_OK) { GST_WARNING_OBJECT (encoder, "Failed to set VP8E_SET_CPUUSED to 0: %s", gst_vpx_error_name (status)); @@ -779,7 +799,7 @@ gst_vp8_enc_process (GstVP8Enc * encoder) return ret; } -static gboolean +static GstFlowReturn gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder) { GstVP8Enc *encoder; @@ -796,7 +816,7 @@ gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder) if (status != 0) { GST_ERROR_OBJECT (encoder, "encode returned %d %s", status, gst_vpx_error_name (status)); - return FALSE; + return GST_FLOW_ERROR; } /* dispatch remaining frames */ @@ -815,7 +835,7 @@ gst_vp8_enc_finish (GstBaseVideoEncoder * base_video_encoder) } } - return TRUE; + return GST_FLOW_OK; } static vpx_image_t * @@ -823,9 +843,6 @@ gst_vp8_enc_buffer_to_image (GstVP8Enc * enc, GstBuffer * buffer) { vpx_image_t *image = g_slice_new (vpx_image_t); guint8 *data = GST_BUFFER_DATA (buffer); - const GstVideoState *state; - - state = gst_base_video_encoder_get_state (GST_BASE_VIDEO_ENCODER (enc)); memcpy (image, &enc->image, sizeof (*image)); @@ -837,12 +854,6 @@ gst_vp8_enc_buffer_to_image (GstVP8Enc * enc, GstBuffer * buffer) return image; } -static const int speed_table[] = { - VPX_DL_BEST_QUALITY, - VPX_DL_GOOD_QUALITY, - VPX_DL_REALTIME, -}; - static GstFlowReturn gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder, GstVideoFrame * frame) @@ -853,6 +864,7 @@ gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder, int flags = 0; vpx_image_t *image; GstVP8EncCoderHook *hook; + int quality; GST_DEBUG_OBJECT (base_video_encoder, "handle_frame"); @@ -869,13 +881,17 @@ gst_vp8_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder, hook = g_slice_new0 (GstVP8EncCoderHook); hook->image = image; frame->coder_hook = hook; + frame->coder_hook_destroy_notify = + (GDestroyNotify) gst_vp8_enc_coder_hook_free; if (frame->force_keyframe) { flags |= VPX_EFLAG_FORCE_KF; } + quality = (encoder->speed == 0) ? 
VPX_DL_BEST_QUALITY : VPX_DL_GOOD_QUALITY; + status = vpx_codec_encode (&encoder->encoder, image, - encoder->n_frames, 1, flags, speed_table[encoder->speed]); + encoder->n_frames, 1, flags, quality); if (status != 0) { GST_ELEMENT_ERROR (encoder, LIBRARY, ENCODE, ("Failed to encode frame"), ("%s", gst_vpx_error_name (status))); @@ -900,13 +916,6 @@ _to_granulepos (guint64 frame_end_number, guint inv_count, guint keyframe_dist) return granulepos; } -static void -_gst_mini_object_unref0 (GstMiniObject * obj) -{ - if (obj) - gst_mini_object_unref (obj); -} - static GstFlowReturn gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder, GstVideoFrame * frame) @@ -939,6 +948,8 @@ gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder, encoder->keyframe_distance++; } + GST_BUFFER_TIMESTAMP (buf) = GST_BUFFER_TIMESTAMP (frame->src_buffer); + GST_BUFFER_DURATION (buf) = 0; GST_BUFFER_OFFSET_END (buf) = _to_granulepos (frame->presentation_frame_number + 1, inv_count, encoder->keyframe_distance); @@ -980,13 +991,6 @@ gst_vp8_enc_shape_output (GstBaseVideoEncoder * base_video_encoder, } done: - if (hook) { - g_list_foreach (hook->invisible, (GFunc) _gst_mini_object_unref0, NULL); - g_list_free (hook->invisible); - g_slice_free (GstVP8EncCoderHook, hook); - frame->coder_hook = NULL; - } - return ret; } diff --git a/ext/zbar/gstzbar.c b/ext/zbar/gstzbar.c index 9ffb8a0529..cdeb898890 100644 --- a/ext/zbar/gstzbar.c +++ b/ext/zbar/gstzbar.c @@ -277,7 +277,7 @@ gst_zbar_transform_ip (GstBaseTransform * base, GstBuffer * outbuf) { GstZBar *zbar = GST_ZBAR (base); guint8 *data; - guint size, rowstride; + guint rowstride; zbar_image_t *image; const zbar_symbol_t *symbol; int n; @@ -286,7 +286,6 @@ gst_zbar_transform_ip (GstBaseTransform * base, GstBuffer * outbuf) goto done; data = GST_BUFFER_DATA (outbuf); - size = GST_BUFFER_SIZE (outbuf); image = zbar_image_create (); diff --git a/gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.c b/gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.c index b2ae99298d..56bdc6cf68 100644 --- a/gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.c +++ b/gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.c @@ -222,24 +222,6 @@ gst_base_camera_src_setup_preview (GstBaseCameraSrc * self, bclass->set_preview (self, preview_caps); } -/** - * gst_base_camera_src_get_allowed_input_caps: - * @self: the camerasrc bin - * - * Retrieve caps from videosrc describing formats it supports - * - * Returns: caps object from videosrc - */ -GstCaps * -gst_base_camera_src_get_allowed_input_caps (GstBaseCameraSrc * self) -{ - GstBaseCameraSrcClass *bclass = GST_BASE_CAMERA_SRC_GET_CLASS (self); - - g_return_val_if_fail (bclass->get_allowed_input_caps, NULL); - - return bclass->get_allowed_input_caps (self); -} - static void gst_base_camera_src_start_capture (GstBaseCameraSrc * src) { @@ -476,6 +458,8 @@ gst_base_camera_src_change_state (GstElement * element, case GST_STATE_CHANGE_READY_TO_PAUSED: if (!setup_pipeline (self)) return GST_STATE_CHANGE_FAILURE; + /* without this the preview pipeline will not post buffer + * messages on the pipeline */ gst_element_set_state (self->preview_pipeline->pipeline, GST_STATE_PLAYING); break; @@ -589,26 +573,19 @@ gst_base_camera_src_class_init (GstBaseCameraSrcClass * klass) /* Signals */ basecamerasrc_signals[START_CAPTURE_SIGNAL] = - g_signal_new ("start-capture", + g_signal_new_class_handler ("start-capture", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, - G_STRUCT_OFFSET (GstBaseCameraSrcClass, private_start_capture), + 
G_CALLBACK (gst_base_camera_src_start_capture), NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0); basecamerasrc_signals[STOP_CAPTURE_SIGNAL] = - g_signal_new ("stop-capture", + g_signal_new_class_handler ("stop-capture", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, - G_STRUCT_OFFSET (GstBaseCameraSrcClass, private_stop_capture), + G_CALLBACK (gst_base_camera_src_stop_capture), NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0); - /* TODO these should be moved to a private struct - * that is allocated sequentially to the main struct as said at: - * http://library.gnome.org/devel/gobject/unstable/gobject-Type-Information.html#g-type-add-class-private - */ - klass->private_start_capture = gst_base_camera_src_start_capture; - klass->private_stop_capture = gst_base_camera_src_stop_capture; - gstelement_class->change_state = gst_base_camera_src_change_state; } diff --git a/gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.h b/gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.h index 1c412e493f..e5e19535ab 100644 --- a/gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.h +++ b/gst-libs/gst/basecamerabinsrc/gstbasecamerasrc.h @@ -92,7 +92,7 @@ struct _GstBaseCameraSrc /** * GstBaseCameraSrcClass: - * @construct_pipeline: construct pipeline must be implemented by derived class + * @construct_pipeline: construct pipeline * @setup_pipeline: configure pipeline for the chosen settings * @set_zoom: set the zoom * @set_mode: set the mode @@ -101,29 +101,27 @@ struct _GstBaseCameraSrcClass { GstBinClass parent; - /* construct pipeline must be implemented by derived class */ + /* Construct pipeline. (called in GST_STATE_CHANGE_NULL_TO_READY) Optional. */ gboolean (*construct_pipeline) (GstBaseCameraSrc *self); - /* optional */ + /* (called in GST_STATE_CHANGE_READY_TO_PAUSED). Optional. */ gboolean (*setup_pipeline) (GstBaseCameraSrc *self); - /* set the zoom */ + /* Set the zoom. If set, called when changing 'zoom' property. Optional. */ void (*set_zoom) (GstBaseCameraSrc *self, gfloat zoom); - /* set the mode */ + /* Set the mode. If set, called when changing 'mode' property. Optional. */ gboolean (*set_mode) (GstBaseCameraSrc *self, GstCameraBinMode mode); - /* set preview caps */ + /* Set preview caps. If set, called called when setting new 'preview-caps'. Optional. */ gboolean (*set_preview) (GstBaseCameraSrc *self, GstCaps *preview_caps); - /* */ - GstCaps * (*get_allowed_input_caps) (GstBaseCameraSrc * self); - - void (*private_start_capture) (GstBaseCameraSrc * src); - void (*private_stop_capture) (GstBaseCameraSrc * src); + /* Called by the handler for 'start-capture'. Mandatory. */ gboolean (*start_capture) (GstBaseCameraSrc * src); + + /* Called by the handler for 'stop-capture'. Mandatory. 
*/ void (*stop_capture) (GstBaseCameraSrc * src); gpointer _gst_reserved[GST_PADDING_LARGE]; @@ -140,7 +138,6 @@ GstColorBalance * gst_base_camera_src_get_color_balance (GstBaseCameraSrc *self) gboolean gst_base_camera_src_set_mode (GstBaseCameraSrc *self, GstCameraBinMode mode); void gst_base_camera_src_setup_zoom (GstBaseCameraSrc * self); void gst_base_camera_src_setup_preview (GstBaseCameraSrc * self, GstCaps * preview_caps); -GstCaps * gst_base_camera_src_get_allowed_input_caps (GstBaseCameraSrc * self); void gst_base_camera_src_finish_capture (GstBaseCameraSrc *self); diff --git a/gst-libs/gst/basecamerabinsrc/gstcamerabinpreview.c b/gst-libs/gst/basecamerabinsrc/gstcamerabinpreview.c index 2c3bee582f..6b854197dd 100644 --- a/gst-libs/gst/basecamerabinsrc/gstcamerabinpreview.c +++ b/gst-libs/gst/basecamerabinsrc/gstcamerabinpreview.c @@ -75,18 +75,6 @@ bus_callback (GstBus * bus, GstMessage * message, gpointer user_data) return TRUE; } -static GstFlowReturn -gst_camerabin_preview_pipeline_new_preroll (GstAppSink * appsink, - gpointer user_data) -{ - GstBuffer *buffer; - - buffer = gst_app_sink_pull_preroll (appsink); - gst_buffer_unref (buffer); - - return GST_FLOW_OK; -} - static GstFlowReturn gst_camerabin_preview_pipeline_new_buffer (GstAppSink * appsink, gpointer user_data) @@ -175,7 +163,6 @@ gst_camerabin_create_preview_pipeline (GstElement * element, goto error; } - callbacks.new_preroll = gst_camerabin_preview_pipeline_new_preroll; callbacks.new_buffer = gst_camerabin_preview_pipeline_new_buffer; gst_app_sink_set_callbacks ((GstAppSink *) data->appsink, &callbacks, data, NULL); diff --git a/gst-libs/gst/video/Makefile.am b/gst-libs/gst/video/Makefile.am index a31276b1bd..cca3fefcf9 100644 --- a/gst-libs/gst/video/Makefile.am +++ b/gst-libs/gst/video/Makefile.am @@ -4,14 +4,15 @@ lib_LTLIBRARIES = libgstbasevideo-@GST_MAJORMINOR@.la CLEANFILES = $(BUILT_SOURCES) libgstbasevideo_@GST_MAJORMINOR@_la_SOURCES = \ - gstbasevideoutils.c \ gstbasevideocodec.c \ + gstbasevideoutils.c \ gstbasevideodecoder.c \ gstbasevideoencoder.c libgstbasevideo_@GST_MAJORMINOR@includedir = $(includedir)/gstreamer-@GST_MAJORMINOR@/gst/video libgstbasevideo_@GST_MAJORMINOR@include_HEADERS = \ gstbasevideocodec.h \ + gstbasevideoutils.h \ gstbasevideodecoder.h \ gstbasevideoencoder.h diff --git a/gst-libs/gst/video/gstbasevideocodec.c b/gst-libs/gst/video/gstbasevideocodec.c index 07085f7304..31fa5e5df6 100644 --- a/gst-libs/gst/video/gstbasevideocodec.c +++ b/gst-libs/gst/video/gstbasevideocodec.c @@ -106,14 +106,19 @@ gst_base_video_codec_reset (GstBaseVideoCodec * base_video_codec) GST_DEBUG_OBJECT (base_video_codec, "reset"); + GST_OBJECT_LOCK (base_video_codec); for (g = base_video_codec->frames; g; g = g_list_next (g)) { gst_base_video_codec_free_frame ((GstVideoFrame *) g->data); } g_list_free (base_video_codec->frames); base_video_codec->frames = NULL; + GST_OBJECT_UNLOCK (base_video_codec); base_video_codec->bytes = 0; base_video_codec->time = 0; + + gst_buffer_replace (&base_video_codec->state.codec_data, NULL); + gst_caps_replace (&base_video_codec->state.caps, NULL); } static void @@ -163,7 +168,7 @@ gst_base_video_codec_new_frame (GstBaseVideoCodec * base_video_codec) { GstVideoFrame *frame; - frame = g_malloc0 (sizeof (GstVideoFrame)); + frame = g_slice_new0 (GstVideoFrame); frame->system_frame_number = base_video_codec->system_frame_number; base_video_codec->system_frame_number++; @@ -174,6 +179,8 @@ gst_base_video_codec_new_frame (GstBaseVideoCodec * base_video_codec) void 
gst_base_video_codec_free_frame (GstVideoFrame * frame) { + g_return_if_fail (frame != NULL); + if (frame->sink_buffer) { gst_buffer_unref (frame->sink_buffer); } @@ -182,5 +189,11 @@ gst_base_video_codec_free_frame (GstVideoFrame * frame) gst_buffer_unref (frame->src_buffer); } - g_free (frame); + g_list_foreach (frame->events, (GFunc) gst_event_unref, NULL); + g_list_free (frame->events); + + if (frame->coder_hook_destroy_notify && frame->coder_hook) + frame->coder_hook_destroy_notify (frame->coder_hook); + + g_slice_free (GstVideoFrame, frame); } diff --git a/gst-libs/gst/video/gstbasevideocodec.h b/gst-libs/gst/video/gstbasevideocodec.h index 9bada934b3..c1e37120d9 100644 --- a/gst-libs/gst/video/gstbasevideocodec.h +++ b/gst-libs/gst/video/gstbasevideocodec.h @@ -86,6 +86,7 @@ typedef struct _GstBaseVideoCodecClass GstBaseVideoCodecClass; struct _GstVideoState { + GstCaps *caps; GstVideoFormat format; int width, height; int fps_n, fps_d; @@ -125,9 +126,15 @@ struct _GstVideoFrame int n_fields; void *coder_hook; + GDestroyNotify coder_hook_destroy_notify; + GstClockTime deadline; gboolean force_keyframe; + + /* Events that should be pushed downstream *before* + * the next src_buffer */ + GList *events; }; struct _GstBaseVideoCodec @@ -140,7 +147,7 @@ struct _GstBaseVideoCodec guint64 system_frame_number; - GList *frames; + GList *frames; /* Protected with OBJECT_LOCK */ GstVideoState state; GstSegment segment; @@ -168,17 +175,6 @@ GType gst_base_video_codec_get_type (void); GstVideoFrame * gst_base_video_codec_new_frame (GstBaseVideoCodec *base_video_codec); void gst_base_video_codec_free_frame (GstVideoFrame *frame); - -gboolean gst_base_video_rawvideo_convert (GstVideoState *state, - GstFormat src_format, gint64 src_value, - GstFormat * dest_format, gint64 *dest_value); -gboolean gst_base_video_encoded_video_convert (GstVideoState * state, - gint64 bytes, gint64 time, GstFormat src_format, - gint64 src_value, GstFormat * dest_format, gint64 * dest_value); - -GstClockTime gst_video_state_get_timestamp (const GstVideoState *state, - GstSegment *segment, int frame_number); - G_END_DECLS #endif diff --git a/gst-libs/gst/video/gstbasevideodecoder.c b/gst-libs/gst/video/gstbasevideodecoder.c index aca8d5cdfa..0278abc83e 100644 --- a/gst-libs/gst/video/gstbasevideodecoder.c +++ b/gst-libs/gst/video/gstbasevideodecoder.c @@ -128,6 +128,7 @@ #endif #include "gstbasevideodecoder.h" +#include "gstbasevideoutils.h" #include @@ -168,7 +169,6 @@ static guint64 gst_base_video_decoder_get_field_duration (GstBaseVideoDecoder * base_video_decoder, int n_fields); static GstVideoFrame *gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder); -static void gst_base_video_decoder_free_frame (GstVideoFrame * frame); static void gst_base_video_decoder_clear_queues (GstBaseVideoDecoder * dec); @@ -237,6 +237,32 @@ gst_base_video_decoder_init (GstBaseVideoDecoder * base_video_decoder, base_video_decoder->sink_clipping = TRUE; } +static gboolean +gst_base_video_decoder_push_src_event (GstBaseVideoDecoder * decoder, + GstEvent * event) +{ + /* Forward non-serialized events and EOS/FLUSH_STOP immediately. + * For EOS this is required because no buffer or serialized event + * will come after EOS and nothing could trigger another + * _finish_frame() call. 
* + * If the subclass handles sending of EOS manually it can return + * _DROPPED from ::finish() and all other subclasses should have + * decoded/flushed all remaining data before this + * + * For FLUSH_STOP this is required because it is expected + * to be forwarded immediately and no buffers are queued anyway. + */ + if (!GST_EVENT_IS_SERIALIZED (event) + || GST_EVENT_TYPE (event) == GST_EVENT_EOS + || GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) + return gst_pad_push_event (decoder->base_video_codec.srcpad, event); + + decoder->current_frame_events = + g_list_prepend (decoder->current_frame_events, event); + + return TRUE; +} + static gboolean gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps) { @@ -244,7 +270,7 @@ gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps) GstBaseVideoDecoderClass *base_video_decoder_class; GstStructure *structure; const GValue *codec_data; - GstVideoState *state; + GstVideoState state; gboolean ret = TRUE; base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); @@ -253,37 +279,47 @@ gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps) GST_DEBUG_OBJECT (base_video_decoder, "setcaps %" GST_PTR_FORMAT, caps); - state = &GST_BASE_VIDEO_CODEC (base_video_decoder)->state; + memset (&state, 0, sizeof (state)); - memset (state, 0, sizeof (GstVideoState)); + state.caps = gst_caps_ref (caps); structure = gst_caps_get_structure (caps, 0); - gst_video_format_parse_caps (caps, NULL, &state->width, &state->height); + gst_video_format_parse_caps (caps, NULL, &state.width, &state.height); /* this one fails if no framerate in caps */ - if (!gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d)) { - state->fps_n = 0; - state->fps_d = 1; + if (!gst_video_parse_caps_framerate (caps, &state.fps_n, &state.fps_d)) { + state.fps_n = 0; + state.fps_d = 1; } /* but the p-a-r sets 1/1 instead, which is not quite informative ... */ if (!gst_structure_has_field (structure, "pixel-aspect-ratio") || !gst_video_parse_caps_pixel_aspect_ratio (caps, - &state->par_n, &state->par_d)) { - state->par_n = 0; - state->par_d = 1; + &state.par_n, &state.par_d)) { + state.par_n = 0; + state.par_d = 1; } - state->have_interlaced = - gst_video_format_parse_caps_interlaced (caps, &state->interlaced); + state.have_interlaced = + gst_video_format_parse_caps_interlaced (caps, &state.interlaced); codec_data = gst_structure_get_value (structure, "codec_data"); if (codec_data && G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) { - state->codec_data = gst_value_get_buffer (codec_data); + state.codec_data = GST_BUFFER (gst_value_dup_mini_object (codec_data)); } if (base_video_decoder_class->set_format) { - ret = base_video_decoder_class->set_format (base_video_decoder, - &GST_BASE_VIDEO_CODEC (base_video_decoder)->state); + ret = base_video_decoder_class->set_format (base_video_decoder, &state); + } + + if (ret) { + gst_buffer_replace (&GST_BASE_VIDEO_CODEC (base_video_decoder)-> + state.codec_data, NULL); + gst_caps_replace (&GST_BASE_VIDEO_CODEC (base_video_decoder)->state.caps, + NULL); + GST_BASE_VIDEO_CODEC (base_video_decoder)->state = state; + } else { + gst_buffer_replace (&state.codec_data, NULL); + gst_caps_replace (&state.caps, NULL); } g_object_unref (base_video_decoder); @@ -323,6 +359,11 @@ gst_base_video_decoder_flush (GstBaseVideoDecoder * dec, gboolean hard) GST_LOG_OBJECT (dec, "flush hard %d", hard); + /* Inform subclass */ + /* FIXME ? only if hard, or tell it if hard ? 
*/ + if (klass->reset) + klass->reset (dec); + /* FIXME make some more distinction between hard and soft, * but subclass may not be prepared for that */ /* FIXME perhaps also clear pending frames ?, @@ -334,15 +375,13 @@ gst_base_video_decoder_flush (GstBaseVideoDecoder * dec, gboolean hard) GST_FORMAT_UNDEFINED); gst_base_video_decoder_clear_queues (dec); dec->error_count = 0; + g_list_foreach (dec->current_frame_events, (GFunc) gst_event_unref, NULL); + g_list_free (dec->current_frame_events); + dec->current_frame_events = NULL; } /* and get (re)set for the sequel */ gst_base_video_decoder_reset (dec, FALSE); - /* also inform subclass */ - /* FIXME ? only if hard, or tell it if hard ? */ - if (klass->reset) - klass->reset (dec); - return ret; } @@ -364,9 +403,9 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event) switch (GST_EVENT_TYPE (event)) { case GST_EVENT_EOS: { + GstFlowReturn flow_ret; + ; if (!base_video_decoder->packetized) { - GstFlowReturn flow_ret; - do { flow_ret = base_video_decoder_class->parse_data (base_video_decoder, TRUE); @@ -374,12 +413,13 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event) } if (base_video_decoder_class->finish) { - base_video_decoder_class->finish (base_video_decoder); + flow_ret = base_video_decoder_class->finish (base_video_decoder); + } else { + flow_ret = GST_FLOW_OK; } - ret = - gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder), - event); + if (flow_ret == GST_FLOW_OK) + ret = gst_base_video_decoder_push_src_event (base_video_decoder, event); } break; case GST_EVENT_NEWSEGMENT: @@ -441,9 +481,7 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event) gst_segment_set_newsegment_full (segment, update, rate, arate, format, start, stop, pos); - ret = - gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder), - event); + ret = gst_base_video_decoder_push_src_event (base_video_decoder, event); break; } case GST_EVENT_FLUSH_STOP: @@ -453,9 +491,7 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event) } default: /* FIXME this changes the order of events */ - ret = - gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder), - event); + ret = gst_base_video_decoder_push_src_event (base_video_decoder, event); break; } @@ -876,16 +912,16 @@ gst_base_video_decoder_clear_queues (GstBaseVideoDecoder * dec) g_list_foreach (dec->gather, (GFunc) gst_mini_object_unref, NULL); g_list_free (dec->gather); dec->gather = NULL; - g_list_foreach (dec->decode, (GFunc) gst_base_video_decoder_free_frame, NULL); + g_list_foreach (dec->decode, (GFunc) gst_base_video_codec_free_frame, NULL); g_list_free (dec->decode); dec->decode = NULL; g_list_foreach (dec->parse, (GFunc) gst_mini_object_unref, NULL); g_list_free (dec->parse); - dec->decode = NULL; - g_list_foreach (dec->parse_gather, (GFunc) gst_base_video_decoder_free_frame, + dec->parse = NULL; + g_list_foreach (dec->parse_gather, (GFunc) gst_base_video_codec_free_frame, NULL); g_list_free (dec->parse_gather); - dec->decode = NULL; + dec->parse_gather = NULL; } static void @@ -917,7 +953,7 @@ gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder, base_video_decoder->timestamps = NULL; if (base_video_decoder->current_frame) { - gst_base_video_decoder_free_frame (base_video_decoder->current_frame); + gst_base_video_codec_free_frame (base_video_decoder->current_frame); base_video_decoder->current_frame = NULL; } @@ -1042,7 +1078,7 @@ gst_base_video_decoder_flush_decode (GstBaseVideoDecoder * dec) 
next = g_list_next (walk); if (dec->current_frame) - gst_base_video_decoder_free_frame (dec->current_frame); + gst_base_video_codec_free_frame (dec->current_frame); dec->current_frame = frame; /* decode buffer, resulting data prepended to queue */ res = gst_base_video_decoder_have_frame_2 (dec); @@ -1203,9 +1239,7 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf) event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0); - ret = - gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder), - event); + ret = gst_base_video_decoder_push_src_event (base_video_decoder, event); if (!ret) { GST_ERROR_OBJECT (base_video_decoder, "new segment event ret=%d", ret); return GST_FLOW_ERROR; @@ -1216,12 +1250,13 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf) gint64 ts, index; GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer"); - gst_base_video_decoder_flush (base_video_decoder, FALSE); /* track present position */ ts = base_video_decoder->timestamp_offset; index = base_video_decoder->field_index; + gst_base_video_decoder_flush (base_video_decoder, FALSE); + /* buffer may claim DISCONT loudly, if it can't tell us where we are now, * we'll stick to where we were ... * Particularly useful/needed for upstream BYTE based */ @@ -1268,6 +1303,10 @@ gst_base_video_decoder_change_state (GstElement * element, base_video_decoder_class->stop (base_video_decoder); } gst_base_video_decoder_reset (base_video_decoder, TRUE); + g_list_foreach (base_video_decoder->current_frame_events, + (GFunc) gst_event_unref, NULL); + g_list_free (base_video_decoder->current_frame_events); + base_video_decoder->current_frame_events = NULL; break; default: break; @@ -1276,31 +1315,14 @@ gst_base_video_decoder_change_state (GstElement * element, return ret; } -static void -gst_base_video_decoder_free_frame (GstVideoFrame * frame) -{ - g_return_if_fail (frame != NULL); - - if (frame->sink_buffer) { - gst_buffer_unref (frame->sink_buffer); - } - if (frame->src_buffer) { - gst_buffer_unref (frame->src_buffer); - } - - g_free (frame); -} - static GstVideoFrame * gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder) { GstVideoFrame *frame; - frame = g_malloc0 (sizeof (GstVideoFrame)); - - frame->system_frame_number = - GST_BASE_VIDEO_CODEC (base_video_decoder)->system_frame_number; - GST_BASE_VIDEO_CODEC (base_video_decoder)->system_frame_number++; + frame = + gst_base_video_codec_new_frame (GST_BASE_VIDEO_CODEC + (base_video_decoder)); frame->decode_frame_number = frame->system_frame_number - base_video_decoder->reorder_depth; @@ -1310,6 +1332,9 @@ gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder) frame->presentation_duration = GST_CLOCK_TIME_NONE; frame->n_fields = 2; + frame->events = base_video_decoder->current_frame_events; + base_video_decoder->current_frame_events = NULL; + return frame; } @@ -1332,17 +1357,46 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder, GstVideoState *state = &GST_BASE_VIDEO_CODEC (base_video_decoder)->state; GstBuffer *src_buffer; GstFlowReturn ret = GST_FLOW_OK; + GList *l, *events = NULL; GST_LOG_OBJECT (base_video_decoder, "finish frame"); +#ifndef GST_DISABLE_GST_DEBUG + GST_OBJECT_LOCK (base_video_decoder); GST_LOG_OBJECT (base_video_decoder, "n %d in %d out %d", g_list_length (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames), gst_adapter_available (base_video_decoder->input_adapter), gst_adapter_available 
(base_video_decoder->output_adapter)); + GST_OBJECT_UNLOCK (base_video_decoder); +#endif GST_LOG_OBJECT (base_video_decoder, "finish frame sync=%d pts=%" GST_TIME_FORMAT, frame->is_sync_point, GST_TIME_ARGS (frame->presentation_timestamp)); + /* Push all pending events that arrived before this frame */ + GST_OBJECT_LOCK (base_video_decoder); + for (l = base_video_decoder->base_video_codec.frames; l; l = l->next) { + GstVideoFrame *tmp = l->data; + + if (tmp->events) { + GList *k; + + for (k = g_list_last (tmp->events); k; k = k->prev) + events = g_list_prepend (events, k->data); + g_list_free (tmp->events); + tmp->events = NULL; + } + + if (tmp == frame) + break; + } + GST_OBJECT_UNLOCK (base_video_decoder); + + for (l = g_list_last (events); l; l = l->next) + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder), + l->data); + g_list_free (events); + if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) { if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) { GST_DEBUG_OBJECT (base_video_decoder, @@ -1508,9 +1562,11 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder, } done: + GST_OBJECT_LOCK (base_video_decoder); GST_BASE_VIDEO_CODEC (base_video_decoder)->frames = g_list_remove (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames, frame); - gst_base_video_decoder_free_frame (frame); + GST_OBJECT_UNLOCK (base_video_decoder); + gst_base_video_codec_free_frame (frame); return ret; } @@ -1673,8 +1729,10 @@ gst_base_video_decoder_have_frame_2 (GstBaseVideoDecoder * base_video_decoder) GST_TIME_ARGS (frame->decode_timestamp)); GST_LOG_OBJECT (base_video_decoder, "dist %d", frame->distance_from_sync); + GST_OBJECT_LOCK (base_video_decoder); GST_BASE_VIDEO_CODEC (base_video_decoder)->frames = g_list_append (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames, frame); + GST_OBJECT_UNLOCK (base_video_decoder); frame->deadline = gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC @@ -1756,7 +1814,9 @@ gst_base_video_decoder_get_oldest_frame (GstBaseVideoDecoder * { GList *g; + GST_OBJECT_LOCK (base_video_decoder); g = g_list_first (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames); + GST_OBJECT_UNLOCK (base_video_decoder); if (g == NULL) return NULL; @@ -1775,17 +1835,21 @@ gst_base_video_decoder_get_frame (GstBaseVideoDecoder * base_video_decoder, int frame_number) { GList *g; + GstVideoFrame *frame = NULL; + GST_OBJECT_LOCK (base_video_decoder); for (g = g_list_first (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames); g; g = g_list_next (g)) { - GstVideoFrame *frame = g->data; + GstVideoFrame *tmp = g->data; if (frame->system_frame_number == frame_number) { - return frame; + frame = tmp; + break; } } + GST_OBJECT_UNLOCK (base_video_decoder); - return NULL; + return frame; } /** diff --git a/gst-libs/gst/video/gstbasevideodecoder.h b/gst-libs/gst/video/gstbasevideodecoder.h index 235dcb11e1..98c29e5ebd 100644 --- a/gst-libs/gst/video/gstbasevideodecoder.h +++ b/gst-libs/gst/video/gstbasevideodecoder.h @@ -65,6 +65,13 @@ G_BEGIN_DECLS **/ #define GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS +/** + * GST_BASE_VIDEO_DECODER_FLOW_DROPPED: + * + * Returned when the event/buffer should be dropped. 
+ */ +#define GST_BASE_VIDEO_DECODER_FLOW_DROPPED GST_FLOW_CUSTOM_SUCCESS_1 + typedef struct _GstBaseVideoDecoder GstBaseVideoDecoder; typedef struct _GstBaseVideoDecoderClass GstBaseVideoDecoderClass; @@ -136,6 +143,8 @@ struct _GstBaseVideoDecoder * only available during parsing */ /* FIXME remove and add parameter to method */ GstVideoFrame *current_frame; + /* events that should apply to the current frame */ + GList *current_frame_events; /* relative offset of input data */ guint64 input_offset; /* relative offset of frame */ diff --git a/gst-libs/gst/video/gstbasevideoencoder.c b/gst-libs/gst/video/gstbasevideoencoder.c index 1c27a72ef1..e4efe6d39f 100644 --- a/gst-libs/gst/video/gstbasevideoencoder.c +++ b/gst-libs/gst/video/gstbasevideoencoder.c @@ -106,6 +106,9 @@ #endif #include "gstbasevideoencoder.h" +#include "gstbasevideoutils.h" + +#include GST_DEBUG_CATEGORY (basevideoencoder_debug); #define GST_CAT_DEFAULT basevideoencoder_debug @@ -185,6 +188,11 @@ gst_base_video_encoder_reset (GstBaseVideoEncoder * base_video_encoder) gst_event_unref (base_video_encoder->force_keyunit_event); base_video_encoder->force_keyunit_event = NULL; } + + g_list_foreach (base_video_encoder->current_frame_events, + (GFunc) gst_event_unref, NULL); + g_list_free (base_video_encoder->current_frame_events); + base_video_encoder->current_frame_events = NULL; } static void @@ -236,9 +244,9 @@ gst_base_video_encoder_drain (GstBaseVideoEncoder * enc) return TRUE; } - if (enc_class->finish) { + if (enc_class->reset) { GST_DEBUG_OBJECT (enc, "requesting subclass to finish"); - ret = enc_class->finish (enc); + ret = enc_class->reset (enc); } /* everything should be away now */ if (codec->frames) { @@ -262,11 +270,9 @@ gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps) GstBaseVideoEncoder *base_video_encoder; GstBaseVideoEncoderClass *base_video_encoder_class; GstStructure *structure; - GstVideoState *state; + GstVideoState *state, tmp_state; gboolean ret; - gboolean changed = FALSE, u, v; - GstVideoFormat fmt; - gint w, h, num, den; + gboolean changed = FALSE; base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad)); base_video_encoder_class = @@ -278,58 +284,49 @@ gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps) GST_DEBUG_OBJECT (base_video_encoder, "setcaps %" GST_PTR_FORMAT, caps); state = &GST_BASE_VIDEO_CODEC (base_video_encoder)->state; + memset (&tmp_state, 0, sizeof (tmp_state)); + + tmp_state.caps = gst_caps_ref (caps); structure = gst_caps_get_structure (caps, 0); - ret = gst_video_format_parse_caps (caps, &fmt, &w, &h); + ret = + gst_video_format_parse_caps (caps, &tmp_state.format, &tmp_state.width, + &tmp_state.height); if (!ret) goto exit; - if (fmt != state->format || w != state->width || h != state->height) { - changed = TRUE; - state->format = fmt; - state->width = w; - state->height = h; - } + changed = (tmp_state.format != state->format + || tmp_state.width != state->width || tmp_state.height != state->height); - num = 0; - den = 1; - gst_video_parse_caps_framerate (caps, &num, &den); - if (den == 0) { - num = 0; - den = 1; - } - if (num != state->fps_n || den != state->fps_d) { - changed = TRUE; - state->fps_n = num; - state->fps_d = den; + if (!gst_video_parse_caps_framerate (caps, &tmp_state.fps_n, + &tmp_state.fps_d)) { + tmp_state.fps_n = 0; + tmp_state.fps_d = 1; } + changed = changed || (tmp_state.fps_n != state->fps_n + || tmp_state.fps_d != state->fps_d); - num = 0; - den = 1; - gst_video_parse_caps_pixel_aspect_ratio (caps, &num, 
&den); - if (den == 0) { - num = 0; - den = 1; - } - if (num != state->par_n || den != state->par_d) { - changed = TRUE; - state->par_n = num; - state->par_d = den; + if (!gst_video_parse_caps_pixel_aspect_ratio (caps, &tmp_state.par_n, + &tmp_state.par_d)) { + tmp_state.par_n = 1; + tmp_state.par_d = 1; } + changed = changed || (tmp_state.par_n != state->par_n + || tmp_state.par_d != state->par_d); - u = gst_structure_get_boolean (structure, "interlaced", &v); - if (u != state->have_interlaced || v != state->interlaced) { - changed = TRUE; - state->have_interlaced = u; - state->interlaced = v; - } + tmp_state.have_interlaced = + gst_structure_get_boolean (structure, "interlaced", + &tmp_state.interlaced); + changed = changed || (tmp_state.have_interlaced != state->have_interlaced + || tmp_state.interlaced != state->interlaced); - state->bytes_per_picture = - gst_video_format_get_size (state->format, state->width, state->height); - state->clean_width = state->width; - state->clean_height = state->height; - state->clean_offset_left = 0; - state->clean_offset_top = 0; + tmp_state.bytes_per_picture = + gst_video_format_get_size (tmp_state.format, tmp_state.width, + tmp_state.height); + tmp_state.clean_width = tmp_state.width; + tmp_state.clean_height = tmp_state.height; + tmp_state.clean_offset_left = 0; + tmp_state.clean_offset_top = 0; if (changed) { /* arrange draining pending frames */ @@ -337,11 +334,17 @@ gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps) /* and subclass should be ready to configure format at any time around */ if (base_video_encoder_class->set_format) - ret = base_video_encoder_class->set_format (base_video_encoder, state); + ret = + base_video_encoder_class->set_format (base_video_encoder, &tmp_state); + if (ret) { + gst_caps_replace (&state->caps, NULL); + *state = tmp_state; + } } else { /* no need to stir things up */ GST_DEBUG_OBJECT (base_video_encoder, "new video format identical to configured format"); + gst_caps_unref (tmp_state.caps); ret = TRUE; } @@ -368,13 +371,26 @@ static gboolean gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder, GstEvent * event) { + GstBaseVideoEncoderClass *base_video_encoder_class; gboolean ret = FALSE; + base_video_encoder_class = + GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder); + switch (GST_EVENT_TYPE (event)) { case GST_EVENT_EOS: { + GstFlowReturn flow_ret; + base_video_encoder->a.at_eos = TRUE; - gst_base_video_encoder_drain (base_video_encoder); + + if (base_video_encoder_class->finish) { + flow_ret = base_video_encoder_class->finish (base_video_encoder); + } else { + flow_ret = GST_FLOW_OK; + } + + ret = (flow_ret == GST_BASE_VIDEO_ENCODER_FLOW_DROPPED); break; } case GST_EVENT_NEWSEGMENT: @@ -453,8 +469,27 @@ gst_base_video_encoder_sink_event (GstPad * pad, GstEvent * event) if (!handled) handled = gst_base_video_encoder_sink_eventfunc (enc, event); - if (!handled) - ret = gst_pad_event_default (pad, event); + if (!handled) { + /* Forward non-serialized events and EOS/FLUSH_STOP immediately. + * For EOS this is required because no buffer or serialized event + * will come after EOS and nothing could trigger another + * _finish_frame() call. * + * If the subclass handles sending of EOS manually it can return + * _DROPPED from ::finish() and all other subclasses should have + * decoded/flushed all remaining data before this + * + * For FLUSH_STOP this is required because it is expected + * to be forwarded immediately and no buffers are queued anyway. 
+ */ + if (!GST_EVENT_IS_SERIALIZED (event) + || GST_EVENT_TYPE (event) == GST_EVENT_EOS + || GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) { + ret = gst_pad_push_event (enc->base_video_codec.srcpad, event); + } else { + enc->current_frame_events = + g_list_prepend (enc->current_frame_events, event); + } + } GST_DEBUG_OBJECT (enc, "event handled"); @@ -630,6 +665,8 @@ gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf) frame = gst_base_video_codec_new_frame (GST_BASE_VIDEO_CODEC (base_video_encoder)); + frame->events = base_video_encoder->current_frame_events; + base_video_encoder->current_frame_events = NULL; frame->sink_buffer = buf; frame->presentation_timestamp = GST_BUFFER_TIMESTAMP (buf); frame->presentation_duration = GST_BUFFER_DURATION (buf); @@ -639,8 +676,10 @@ gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf) frame->force_keyframe = base_video_encoder->force_keyframe; base_video_encoder->force_keyframe = FALSE; + GST_OBJECT_LOCK (base_video_encoder); GST_BASE_VIDEO_CODEC (base_video_encoder)->frames = g_list_append (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame); + GST_OBJECT_UNLOCK (base_video_encoder); /* new data, more finish needed */ base_video_encoder->drained = FALSE; @@ -701,7 +740,8 @@ gst_base_video_encoder_change_state (GstElement * element, * @frame: an encoded #GstVideoFrame * * @frame must have a valid encoded data buffer, whose metadata fields - * are then appropriately set according to frame data. + * are then appropriately set according to frame data or no buffer at + * all if the frame should be dropped. * It is subsequently pushed downstream or provided to @shape_output. * In any case, the frame is considered finished and released. * @@ -711,10 +751,9 @@ GstFlowReturn gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder, GstVideoFrame * frame) { - GstFlowReturn ret; + GstFlowReturn ret = GST_FLOW_OK; GstBaseVideoEncoderClass *base_video_encoder_class; - - g_return_val_if_fail (frame->src_buffer != NULL, GST_FLOW_ERROR); + GList *l; base_video_encoder_class = GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder); @@ -722,6 +761,86 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder, GST_LOG_OBJECT (base_video_encoder, "finish frame fpn %d", frame->presentation_frame_number); + /* FIXME get rid of this ? + * seems a roundabout way that adds little benefit to simply get + * and subsequently set. subclass is adult enough to set_caps itself ... 
+ * so simply check/ensure/assert that src pad caps are set by now */ + if (!base_video_encoder->set_output_caps) { + if (!GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder))) { + GstCaps *caps; + + if (base_video_encoder_class->get_caps) { + caps = base_video_encoder_class->get_caps (base_video_encoder); + } else { + caps = gst_caps_new_simple ("video/unknown", NULL); + } + GST_DEBUG_OBJECT (base_video_encoder, "src caps %" GST_PTR_FORMAT, caps); + gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), + caps); + gst_caps_unref (caps); + } + base_video_encoder->set_output_caps = TRUE; + } + + /* Push all pending events that arrived before this frame */ + for (l = base_video_encoder->base_video_codec.frames; l; l = l->next) { + GstVideoFrame *tmp = l->data; + + if (tmp->events) { + GList *k; + + for (k = g_list_last (tmp->events); k; k = k->prev) + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), + k->data); + g_list_free (tmp->events); + tmp->events = NULL; + } + + if (tmp == frame) + break; + } + + if (frame->force_keyframe) { + GstClockTime stream_time; + GstClockTime running_time; + GstEvent *ev; + + running_time = + gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC + (base_video_encoder)->segment, GST_FORMAT_TIME, + frame->presentation_timestamp); + stream_time = + gst_segment_to_stream_time (&GST_BASE_VIDEO_CODEC + (base_video_encoder)->segment, GST_FORMAT_TIME, + frame->presentation_timestamp); + + /* re-use upstream event if any so it also conveys any additional + * info upstream arranged in there */ + GST_OBJECT_LOCK (base_video_encoder); + if (base_video_encoder->force_keyunit_event) { + ev = base_video_encoder->force_keyunit_event; + base_video_encoder->force_keyunit_event = NULL; + } else { + ev = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, + gst_structure_new ("GstForceKeyUnit", NULL)); + } + GST_OBJECT_UNLOCK (base_video_encoder); + + gst_structure_set (ev->structure, + "timestamp", G_TYPE_UINT64, frame->presentation_timestamp, + "stream-time", G_TYPE_UINT64, stream_time, + "running-time", G_TYPE_UINT64, running_time, NULL); + + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), ev); + } + + /* no buffer data means this frame is skipped/dropped */ + if (!frame->src_buffer) { + GST_DEBUG_OBJECT (base_video_encoder, "skipping frame %" GST_TIME_FORMAT, + GST_TIME_ARGS (frame->presentation_timestamp)); + goto done; + } + if (frame->is_sync_point) { GST_LOG_OBJECT (base_video_encoder, "key frame"); base_video_encoder->distance_from_sync = 0; @@ -763,64 +882,9 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder, GST_BASE_VIDEO_CODEC (base_video_encoder)->discont = FALSE; } - GST_BASE_VIDEO_CODEC (base_video_encoder)->frames = - g_list_remove (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame); - - /* FIXME get rid of this ? - * seems a roundabout way that adds little benefit to simply get - * and subsequently set. subclass is adult enough to set_caps itself ... 
- * so simply check/ensure/assert that src pad caps are set by now */ - if (!base_video_encoder->set_output_caps) { - GstCaps *caps; - - if (base_video_encoder_class->get_caps) { - caps = base_video_encoder_class->get_caps (base_video_encoder); - } else { - caps = gst_caps_new_simple ("video/unknown", NULL); - } - GST_DEBUG_OBJECT (base_video_encoder, "src caps %" GST_PTR_FORMAT, caps); - gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), caps); - gst_caps_unref (caps); - base_video_encoder->set_output_caps = TRUE; - } - gst_buffer_set_caps (GST_BUFFER (frame->src_buffer), GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder))); - if (frame->force_keyframe) { - GstClockTime stream_time; - GstClockTime running_time; - GstEvent *ev; - - running_time = - gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC - (base_video_encoder)->segment, GST_FORMAT_TIME, - frame->presentation_timestamp); - stream_time = - gst_segment_to_stream_time (&GST_BASE_VIDEO_CODEC - (base_video_encoder)->segment, GST_FORMAT_TIME, - frame->presentation_timestamp); - - /* re-use upstream event if any so it also conveys any additional - * info upstream arranged in there */ - GST_OBJECT_LOCK (base_video_encoder); - if (base_video_encoder->force_keyunit_event) { - ev = base_video_encoder->force_keyunit_event; - base_video_encoder->force_keyunit_event = NULL; - } else { - ev = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, - gst_structure_new ("GstForceKeyUnit", NULL)); - } - GST_OBJECT_UNLOCK (base_video_encoder); - - gst_structure_set (ev->structure, - "timestamp", G_TYPE_UINT64, frame->presentation_timestamp, - "stream-time", G_TYPE_UINT64, stream_time, - "running-time", G_TYPE_UINT64, running_time, NULL); - - gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), ev); - } - if (base_video_encoder_class->shape_output) { ret = base_video_encoder_class->shape_output (base_video_encoder, frame); } else { @@ -828,9 +892,15 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder, gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), frame->src_buffer); } - - /* handed out */ frame->src_buffer = NULL; + +done: + /* handed out */ + GST_OBJECT_LOCK (base_video_encoder); + GST_BASE_VIDEO_CODEC (base_video_encoder)->frames = + g_list_remove (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame); + GST_OBJECT_UNLOCK (base_video_encoder); + gst_base_video_codec_free_frame (frame); return ret; @@ -906,7 +976,9 @@ gst_base_video_encoder_get_oldest_frame (GstBaseVideoEncoder * { GList *g; + GST_OBJECT_LOCK (base_video_encoder); g = g_list_first (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames); + GST_OBJECT_UNLOCK (base_video_encoder); if (g == NULL) return NULL; diff --git a/gst-libs/gst/video/gstbasevideoencoder.h b/gst-libs/gst/video/gstbasevideoencoder.h index 169c82fd0f..e1dd03be13 100644 --- a/gst-libs/gst/video/gstbasevideoencoder.h +++ b/gst-libs/gst/video/gstbasevideoencoder.h @@ -58,6 +58,12 @@ G_BEGIN_DECLS */ #define GST_BASE_VIDEO_ENCODER_SRC_NAME "src" +/** + * GST_BASE_VIDEO_ENCODER_FLOW_DROPPED: + * + * Returned when the event/buffer should be dropped. 
+ */ +#define GST_BASE_VIDEO_ENCODER_FLOW_DROPPED GST_FLOW_CUSTOM_SUCCESS_1 typedef struct _GstBaseVideoEncoder GstBaseVideoEncoder; typedef struct _GstBaseVideoEncoderClass GstBaseVideoEncoderClass; @@ -90,6 +96,7 @@ struct _GstBaseVideoEncoder gint64 max_latency; GstEvent *force_keyunit_event; + GList *current_frame_events; union { void *padding; @@ -148,7 +155,8 @@ struct _GstBaseVideoEncoderClass GstFlowReturn (*handle_frame) (GstBaseVideoEncoder *coder, GstVideoFrame *frame); - gboolean (*finish) (GstBaseVideoEncoder *coder); + gboolean (*reset) (GstBaseVideoEncoder *coder); + GstFlowReturn (*finish) (GstBaseVideoEncoder *coder); GstFlowReturn (*shape_output) (GstBaseVideoEncoder *coder, GstVideoFrame *frame); diff --git a/gst-libs/gst/video/gstbasevideoutils.c b/gst-libs/gst/video/gstbasevideoutils.c index 2d83213478..507ad07dae 100644 --- a/gst-libs/gst/video/gstbasevideoutils.c +++ b/gst-libs/gst/video/gstbasevideoutils.c @@ -21,7 +21,7 @@ #include "config.h" #endif -#include "gstbasevideocodec.h" +#include "gstbasevideoutils.h" #include diff --git a/gst-libs/gst/video/gstbasevideoutils.h b/gst-libs/gst/video/gstbasevideoutils.h new file mode 100644 index 0000000000..aeca2d1b7e --- /dev/null +++ b/gst-libs/gst/video/gstbasevideoutils.h @@ -0,0 +1,46 @@ +/* GStreamer + * Copyright (C) 2008 David Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef _GST_BASE_VIDEO_UTILS_H_ +#define _GST_BASE_VIDEO_UTILS_H_ + +#ifndef GST_USE_UNSTABLE_API +#warning "GstBaseVideoCodec is unstable API and may change in future." +#warning "You can define GST_USE_UNSTABLE_API to avoid this warning." +#endif + +#include +#include +#include "gstbasevideocodec.h" + +G_BEGIN_DECLS + +gboolean gst_base_video_rawvideo_convert (GstVideoState *state, + GstFormat src_format, gint64 src_value, + GstFormat * dest_format, gint64 *dest_value); +gboolean gst_base_video_encoded_video_convert (GstVideoState * state, + gint64 bytes, gint64 time, GstFormat src_format, + gint64 src_value, GstFormat * dest_format, gint64 * dest_value); + +GstClockTime gst_video_state_get_timestamp (const GstVideoState *state, + GstSegment *segment, int frame_number); + +G_END_DECLS + +#endif diff --git a/gst/bayer/Makefile.am b/gst/bayer/Makefile.am index 69666ea2f6..c980678fb8 100644 --- a/gst/bayer/Makefile.am +++ b/gst/bayer/Makefile.am @@ -3,6 +3,9 @@ plugin_LTLIBRARIES = libgstbayer.la ORC_SOURCE=gstbayerorc include $(top_srcdir)/common/orc.mak +# orc-generated code creates warnings +ERROR_CFLAGS= + libgstbayer_la_SOURCES = \ gstbayer.c \ gstbayer2rgb.c \ diff --git a/gst/camerabin2/PORTING b/gst/camerabin2/PORTING index be99ffce5d..322a7d8b79 100644 --- a/gst/camerabin2/PORTING +++ b/gst/camerabin2/PORTING @@ -26,6 +26,8 @@ property and encodebin manages to instantiate the elements for the format. 
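The PORTING notes here describe handing the capture format to encodebin as a GstEncodingProfile. As a rough illustration only (assuming the gst-plugins-base 0.10.32+ pbutils API; the WebM/VP8 caps and the my_make_webm_profile name are merely examples, error handling omitted), such a profile can be built as below and then handed to an encodebin's "profile" property:

#include <gst/gst.h>
#include <gst/pbutils/encoding-profile.h>

static GstEncodingContainerProfile *
my_make_webm_profile (void)
{
  GstEncodingContainerProfile *container;
  GstCaps *container_caps, *video_caps;

  /* caps describing the container format encodebin should produce */
  container_caps = gst_caps_new_simple ("video/webm", NULL);
  container = gst_encoding_container_profile_new ("webm", "WebM video",
      container_caps, NULL);
  gst_caps_unref (container_caps);

  /* one VP8 video stream inside the container */
  video_caps = gst_caps_new_simple ("video/x-vp8", NULL);
  gst_encoding_container_profile_add_profile (container,
      (GstEncodingProfile *) gst_encoding_video_profile_new (video_caps,
          NULL, NULL, 0));
  gst_caps_unref (video_caps);

  return container;
}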
* Previews new "post-previews" property for enabling/disabling preview image posting +set location=NULL to skip writing image to file but to receive the preview, +useful for scenarios that want the image in memory. * Configuring resolution and framerate Camerabin2 has new GstCaps type properties for configuring capture and diff --git a/gst/camerabin2/camerabingeneral.c b/gst/camerabin2/camerabingeneral.c index 8758935a45..35148f0278 100644 --- a/gst/camerabin2/camerabingeneral.c +++ b/gst/camerabin2/camerabingeneral.c @@ -20,10 +20,10 @@ /** * SECTION:camerabingeneral - * @short_description: helper functions for #GstCameraBin and it's modules + * @short_description: helper functions for #GstCameraBin2 and its modules * - * Common helper functions for #GstCameraBin, #GstCameraBinImage and - * #GstCameraBinVideo. + * Common helper functions for #GstCameraBin2, #GstCameraBin2Image and + * #GstCameraBin2Video. * */ #ifdef HAVE_CONFIG_H diff --git a/gst/camerabin2/gstcamerabin2.c b/gst/camerabin2/gstcamerabin2.c index 9df16a1ddc..ba6a926c62 100644 --- a/gst/camerabin2/gstcamerabin2.c +++ b/gst/camerabin2/gstcamerabin2.c @@ -17,12 +17,23 @@ * Boston, MA 02111-1307, USA. */ /** - * SECTION:element-gstcamerabin2 + * SECTION:element-camerabin2 * - * The gstcamerabin2 element does FIXME stuff. + * GstCameraBin2 is a high-level camera object that encapsulates the gstreamer + * internals and provides a task based API for the application. * + * + * Note that camerabin2 is still UNSTABLE, EXPERIMENTAL and under heavy * development. + * + * + * + * Example launch line + * |[ + * gst-launch -v -m camerabin2 + * ]| + * + */ /* @@ -59,23 +70,23 @@ #include #include -#define GST_CAMERA_BIN_PROCESSING_INC(c) \ +#define GST_CAMERA_BIN2_PROCESSING_INC(c) \ { \ gint bef = g_atomic_int_exchange_and_add (&c->processing_counter, 1); \ if (bef == 0) \ g_object_notify (G_OBJECT (c), "idle"); \ - GST_DEBUG_OBJECT ((c), "Processing counter incremented to: %d", \ bef + 1); \ } -#define GST_CAMERA_BIN_PROCESSING_DEC(c) \ +#define GST_CAMERA_BIN2_PROCESSING_DEC(c) \ { \ if (g_atomic_int_dec_and_test (&c->processing_counter)) \ g_object_notify (G_OBJECT (c), "idle"); \ GST_DEBUG_OBJECT ((c), "Processing counter decremented"); \ } -#define GST_CAMERA_BIN_RESET_PROCESSING_COUNTER(c) \ +#define GST_CAMERA_BIN2_RESET_PROCESSING_COUNTER(c) \ { \ g_atomic_int_set (&c->processing_counter, 0); \ GST_DEBUG_OBJECT ((c), "Processing counter reset"); \ } @@ -113,7 +124,8 @@ enum PROP_ZOOM, PROP_MAX_ZOOM, PROP_IMAGE_ENCODING_PROFILE, - PROP_IDLE + PROP_IDLE, + PROP_FLAGS }; enum @@ -127,11 +139,11 @@ enum static guint camerabin_signals[LAST_SIGNAL]; #define DEFAULT_MODE MODE_IMAGE -#define DEFAULT_VID_LOCATION "vid_%d" -#define DEFAULT_IMG_LOCATION "img_%d" +#define DEFAULT_LOCATION "cap_%d" #define DEFAULT_POST_PREVIEWS TRUE #define DEFAULT_MUTE_AUDIO FALSE #define DEFAULT_IDLE TRUE +#define DEFAULT_FLAGS 0 #define DEFAULT_AUDIO_SRC "autoaudiosrc" @@ -141,9 +153,9 @@ static guint camerabin_signals[LAST_SIGNAL]; ********************************/ static GstPipelineClass *parent_class; -static void gst_camera_bin_class_init (GstCameraBinClass * klass); +static void gst_camera_bin_class_init (GstCameraBin2Class * klass); static void gst_camera_bin_base_init (gpointer klass); -static void gst_camera_bin_init (GstCameraBin * camera); +static void gst_camera_bin_init (GstCameraBin2 * camera); static void gst_camera_bin_dispose (GObject * object); static void
gst_camera_bin_finalize (GObject * object); @@ -151,8 +163,33 @@ static void gst_camera_bin_handle_message (GstBin * bin, GstMessage * message); static gboolean gst_camera_bin_send_event (GstElement * element, GstEvent * event); +#define C_FLAGS(v) ((guint) v) +#define GST_TYPE_CAM_FLAGS (gst_cam_flags_get_type()) +static GType +gst_cam_flags_get_type (void) +{ + static const GFlagsValue values[] = { + {C_FLAGS (GST_CAM_FLAG_NO_AUDIO_CONVERSION), "Do not use audio conversion " + "elements", "no-audio-conversion"}, + {C_FLAGS (GST_CAM_FLAG_NO_VIDEO_CONVERSION), "Do not use video conversion " + "elements", "no-video-conversion"}, + {0, NULL, NULL} + }; + static volatile GType id = 0; + + if (g_once_init_enter ((gsize *) & id)) { + GType _id; + + _id = g_flags_register_static ("GstCamFlags", values); + + g_once_init_leave ((gsize *) & id, _id); + } + + return id; +} + GType -gst_camera_bin_get_type (void) +gst_camera_bin2_get_type (void) { static GType gst_camera_bin_type = 0; static const GInterfaceInfo camerabin_tagsetter_info = { @@ -163,13 +200,13 @@ gst_camera_bin_get_type (void) if (!gst_camera_bin_type) { static const GTypeInfo gst_camera_bin_info = { - sizeof (GstCameraBinClass), + sizeof (GstCameraBin2Class), (GBaseInitFunc) gst_camera_bin_base_init, NULL, (GClassInitFunc) gst_camera_bin_class_init, NULL, NULL, - sizeof (GstCameraBin), + sizeof (GstCameraBin2), 0, (GInstanceInitFunc) gst_camera_bin_init, NULL @@ -206,36 +243,64 @@ gst_camera_bin_new_event_renegotiate (void) gst_structure_new ("renegotiate", NULL)); } +static GstEvent * +gst_camera_bin_new_event_file_location (const gchar * location) +{ + return gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM, + gst_structure_new ("new-location", "location", G_TYPE_STRING, location, + NULL)); +} + static void -gst_camera_bin_start_capture (GstCameraBin * camerabin) +gst_camera_bin_start_capture (GstCameraBin2 * camerabin) { const GstTagList *taglist; GST_DEBUG_OBJECT (camerabin, "Received start-capture"); /* check that we have a valid location */ - if ((camerabin->mode == MODE_VIDEO && camerabin->video_location == NULL) - || (camerabin->mode == MODE_IMAGE && camerabin->image_location == NULL)) { + if (camerabin->mode == MODE_VIDEO && camerabin->location == NULL) { GST_ELEMENT_ERROR (camerabin, RESOURCE, OPEN_WRITE, (_("File location is set to NULL, please set it to a valid filename")), (NULL)); return; } - GST_CAMERA_BIN_PROCESSING_INC (camerabin); + GST_CAMERA_BIN2_PROCESSING_INC (camerabin); - if (camerabin->mode == MODE_VIDEO && camerabin->audio_src) { - gst_element_set_state (camerabin->audio_src, GST_STATE_READY); - /* need to reset eos status (pads could be flushing) */ - gst_element_set_state (camerabin->audio_queue, GST_STATE_READY); - gst_element_set_state (camerabin->audio_convert, GST_STATE_READY); - gst_element_set_state (camerabin->audio_capsfilter, GST_STATE_READY); - gst_element_set_state (camerabin->audio_volume, GST_STATE_READY); + if (camerabin->mode == MODE_VIDEO) { + if (camerabin->audio_src) { + GstClock *clock = gst_pipeline_get_clock (GST_PIPELINE_CAST (camerabin)); - gst_element_sync_state_with_parent (camerabin->audio_queue); - gst_element_sync_state_with_parent (camerabin->audio_convert); - gst_element_sync_state_with_parent (camerabin->audio_capsfilter); - gst_element_sync_state_with_parent (camerabin->audio_volume); + /* FIXME We need to set audiosrc to null to make it resync the ringbuffer + * while bug https://bugzilla.gnome.org/show_bug.cgi?id=648359 isn't + * fixed */ + gst_element_set_state 
(camerabin->audio_src, GST_STATE_NULL); + + /* need to reset eos status (pads could be flushing) */ + gst_element_set_state (camerabin->audio_capsfilter, GST_STATE_READY); + gst_element_set_state (camerabin->audio_volume, GST_STATE_READY); + + gst_element_sync_state_with_parent (camerabin->audio_capsfilter); + gst_element_sync_state_with_parent (camerabin->audio_volume); + gst_element_set_state (camerabin->audio_src, GST_STATE_PAUSED); + + gst_element_set_base_time (camerabin->audio_src, + gst_element_get_base_time (GST_ELEMENT_CAST (camerabin))); + if (clock) { + gst_element_set_clock (camerabin->audio_src, clock); + gst_object_unref (clock); + } + } + } else { + gchar *location = NULL; + + /* store the next capture buffer filename */ + if (camerabin->location) + location = + g_strdup_printf (camerabin->location, camerabin->capture_index++); + camerabin->image_location_list = + g_slist_append (camerabin->image_location_list, location); } g_signal_emit_by_name (camerabin->src, "start-capture", NULL); @@ -270,7 +335,7 @@ gst_camera_bin_start_capture (GstCameraBin * camerabin) } static void -gst_camera_bin_stop_capture (GstCameraBin * camerabin) +gst_camera_bin_stop_capture (GstCameraBin2 * camerabin) { GST_DEBUG_OBJECT (camerabin, "Received stop-capture"); if (camerabin->src) @@ -282,7 +347,7 @@ gst_camera_bin_stop_capture (GstCameraBin * camerabin) } static void -gst_camera_bin_change_mode (GstCameraBin * camerabin, gint mode) +gst_camera_bin_change_mode (GstCameraBin2 * camerabin, gint mode) { if (mode == camerabin->mode) return; @@ -300,7 +365,7 @@ static void gst_camera_bin_src_notify_readyforcapture (GObject * obj, GParamSpec * pspec, gpointer user_data) { - GstCameraBin *camera = GST_CAMERA_BIN_CAST (user_data); + GstCameraBin2 *camera = GST_CAMERA_BIN2_CAST (user_data); gboolean ready; g_object_get (camera->src, "ready-for-capture", &ready, NULL); @@ -313,28 +378,13 @@ gst_camera_bin_src_notify_readyforcapture (GObject * obj, GParamSpec * pspec, gst_element_set_state (camera->videosink, GST_STATE_NULL); gst_element_set_state (camera->video_encodebin, GST_STATE_NULL); gst_element_set_state (camera->videobin_capsfilter, GST_STATE_NULL); - gst_element_set_state (camera->videobin_queue, GST_STATE_NULL); - location = - g_strdup_printf (camera->video_location, camera->video_index++); + location = g_strdup_printf (camera->location, camera->capture_index++); GST_DEBUG_OBJECT (camera, "Switching videobin location to %s", location); g_object_set (camera->videosink, "location", location, NULL); g_free (location); gst_element_set_state (camera->videosink, GST_STATE_PLAYING); gst_element_set_state (camera->video_encodebin, GST_STATE_PLAYING); gst_element_set_state (camera->videobin_capsfilter, GST_STATE_PLAYING); - gst_element_set_state (camera->videobin_queue, GST_STATE_PLAYING); - } else if (camera->mode == MODE_IMAGE) { - gst_element_set_state (camera->imagesink, GST_STATE_NULL); - gst_element_set_state (camera->image_encodebin, GST_STATE_NULL); - gst_element_set_state (camera->imagebin_queue, GST_STATE_NULL); - gst_element_set_state (camera->imagebin_capsfilter, GST_STATE_NULL); - GST_DEBUG_OBJECT (camera, "Switching imagebin location to %s", location); - g_object_set (camera->imagesink, "location", camera->image_location, - NULL); - gst_element_set_state (camera->imagesink, GST_STATE_PLAYING); - gst_element_set_state (camera->image_encodebin, GST_STATE_PLAYING); - gst_element_set_state (camera->imagebin_capsfilter, GST_STATE_PLAYING); - gst_element_set_state (camera->imagebin_queue, 
GST_STATE_PLAYING); } } @@ -343,10 +393,9 @@ gst_camera_bin_src_notify_readyforcapture (GObject * obj, GParamSpec * pspec, static void gst_camera_bin_dispose (GObject * object) { - GstCameraBin *camerabin = GST_CAMERA_BIN_CAST (object); + GstCameraBin2 *camerabin = GST_CAMERA_BIN2_CAST (object); - g_free (camerabin->image_location); - g_free (camerabin->video_location); + g_free (camerabin->location); if (camerabin->src_capture_notify_id) g_signal_handler_disconnect (camerabin->src, @@ -363,10 +412,6 @@ gst_camera_bin_dispose (GObject * object) if (camerabin->audio_capsfilter) gst_object_unref (camerabin->audio_capsfilter); - if (camerabin->audio_queue) - gst_object_unref (camerabin->audio_queue); - if (camerabin->audio_convert) - gst_object_unref (camerabin->audio_convert); if (camerabin->audio_volume) gst_object_unref (camerabin->audio_volume); @@ -381,18 +426,10 @@ gst_camera_bin_dispose (GObject * object) g_signal_handler_disconnect (camerabin->video_encodebin, camerabin->video_encodebin_signal_id); - if (camerabin->videosink_probe) { - GstPad *pad = gst_element_get_static_pad (camerabin->videosink, "sink"); - gst_pad_remove_data_probe (pad, camerabin->videosink_probe); - gst_object_unref (pad); - } - if (camerabin->videosink) gst_object_unref (camerabin->videosink); if (camerabin->video_encodebin) gst_object_unref (camerabin->video_encodebin); - if (camerabin->videobin_queue) - gst_object_unref (camerabin->videobin_queue); if (camerabin->videobin_capsfilter) gst_object_unref (camerabin->videobin_capsfilter); @@ -454,7 +491,7 @@ gst_camera_bin_base_init (gpointer g_class) } static void -gst_camera_bin_class_init (GstCameraBinClass * klass) +gst_camera_bin_class_init (GstCameraBin2Class * klass) { GObjectClass *object_class; GstElementClass *element_class; @@ -479,7 +516,7 @@ gst_camera_bin_class_init (GstCameraBinClass * klass) klass->stop_capture = gst_camera_bin_stop_capture; /** - * GstCameraBin:mode: + * GstCameraBin2:mode: * * Set the mode of operation: still image capturing or video recording. */ @@ -493,8 +530,8 @@ gst_camera_bin_class_init (GstCameraBinClass * klass) g_param_spec_string ("location", "Location", "Location to save the captured files. A %d might be used on the" "filename as a placeholder for a numeric index of the capture." - "Default for images is img_%d and vid_%d for videos", - DEFAULT_IMG_LOCATION, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + "Default is cap_%d", + DEFAULT_LOCATION, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); g_object_class_install_property (object_class, PROP_CAMERA_SRC, g_param_spec_object ("camera-source", "Camera source", @@ -661,8 +698,18 @@ gst_camera_bin_class_init (GstCameraBinClass * klass) "The caps that the camera source can produce on the viewfinder pad", GST_TYPE_CAPS, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS)); + /** + * GstCameraBin:flags + * + * Control the behaviour of encodebin. + */ + g_object_class_install_property (object_class, PROP_FLAGS, + g_param_spec_flags ("flags", "Flags", "Flags to control behaviour", + GST_TYPE_CAM_FLAGS, DEFAULT_FLAGS, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + /** - * GstCameraBin::capture-start: + * GstCameraBin2::capture-start: * @camera: the camera bin element * * Starts image capture or video recording depending on the Mode. 
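With the hunks above, camerabin2 exposes one "location" filename template (default "cap_%d") covering both images and videos, a "flags" property to tune encodebin, capture driven through the "start-capture"/"stop-capture" action signals, and an "idle" property reporting pending work. A minimal, hypothetical application-side sketch against the 0.10 API (assumes the camerabin2 element is installed; the main loop and error handling are omitted):

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *camera;

  gst_init (&argc, &argv);

  camera = gst_element_factory_make ("camerabin2", NULL);

  /* a single template now covers image and video captures; %d is the index */
  g_object_set (camera, "location", "shot_%d", NULL);

  gst_element_set_state (camera, GST_STATE_PLAYING);

  /* image mode is the default; this requests one capture */
  g_signal_emit_by_name (camera, "start-capture", NULL);

  /* a real application would run a main loop here and wait for
   * "notify::idle" before tearing the pipeline down */

  gst_element_set_state (camera, GST_STATE_NULL);
  gst_object_unref (camera);
  return 0;
}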
@@ -671,31 +718,31 @@ gst_camera_bin_class_init (GstCameraBinClass * klass) g_signal_new ("start-capture", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, - G_STRUCT_OFFSET (GstCameraBinClass, start_capture), + G_STRUCT_OFFSET (GstCameraBin2Class, start_capture), NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0); /** - * GstCameraBin::capture-stop: + * GstCameraBin2::capture-stop: * @camera: the camera bin element */ camerabin_signals[STOP_CAPTURE_SIGNAL] = g_signal_new ("stop-capture", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST | G_SIGNAL_ACTION, - G_STRUCT_OFFSET (GstCameraBinClass, stop_capture), + G_STRUCT_OFFSET (GstCameraBin2Class, stop_capture), NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0); } static void -gst_camera_bin_init (GstCameraBin * camera) +gst_camera_bin_init (GstCameraBin2 * camera) { camera->post_previews = DEFAULT_POST_PREVIEWS; camera->mode = DEFAULT_MODE; - camera->video_location = g_strdup (DEFAULT_VID_LOCATION); - camera->image_location = g_strdup (DEFAULT_IMG_LOCATION); + camera->location = g_strdup (DEFAULT_LOCATION); camera->viewfinderbin = gst_element_factory_make ("viewfinderbin", "vf-bin"); camera->zoom = DEFAULT_ZOOM; camera->max_zoom = MAX_ZOOM; + camera->flags = DEFAULT_FLAGS; /* capsfilters are created here as we proxy their caps properties and * this way we avoid having to store the caps while on NULL state to @@ -720,7 +767,7 @@ gst_camera_bin_init (GstCameraBin * camera) } static void -gst_image_capture_bin_post_image_done (GstCameraBin * camera, +gst_image_capture_bin_post_image_done (GstCameraBin2 * camera, const gchar * filename) { GstMessage *msg; @@ -744,10 +791,10 @@ gst_camera_bin_handle_message (GstBin * bin, GstMessage * message) const gchar *filename; if (gst_structure_has_name (structure, "GstMultiFileSink")) { - GST_CAMERA_BIN_PROCESSING_DEC (GST_CAMERA_BIN_CAST (bin)); + GST_CAMERA_BIN2_PROCESSING_DEC (GST_CAMERA_BIN2_CAST (bin)); filename = gst_structure_get_string (structure, "filename"); if (filename) { - gst_image_capture_bin_post_image_done (GST_CAMERA_BIN_CAST (bin), + gst_image_capture_bin_post_image_done (GST_CAMERA_BIN2_CAST (bin), filename); } } @@ -760,15 +807,15 @@ gst_camera_bin_handle_message (GstBin * bin, GstMessage * message) gst_message_parse_warning (message, &err, &debug); if (err->domain == GST_RESOURCE_ERROR) { /* some capturing failed */ - GST_CAMERA_BIN_PROCESSING_DEC (GST_CAMERA_BIN_CAST (bin)); + GST_CAMERA_BIN2_PROCESSING_DEC (GST_CAMERA_BIN2_CAST (bin)); } } break; case GST_MESSAGE_EOS:{ GstElement *src = GST_ELEMENT (GST_MESSAGE_SRC (message)); - if (src == GST_CAMERA_BIN_CAST (bin)->videosink) { + if (src == GST_CAMERA_BIN2_CAST (bin)->videosink) { GST_DEBUG_OBJECT (bin, "EOS from video branch"); - GST_CAMERA_BIN_PROCESSING_DEC (GST_CAMERA_BIN_CAST (bin)); + GST_CAMERA_BIN2_PROCESSING_DEC (GST_CAMERA_BIN2_CAST (bin)); } } break; @@ -789,9 +836,10 @@ gst_camera_bin_handle_message (GstBin * bin, GstMessage * message) * Where current_filter and new_filter might or might not be NULL */ static void -gst_camera_bin_check_and_replace_filter (GstCameraBin * camera, +gst_camera_bin_check_and_replace_filter (GstCameraBin2 * camera, GstElement ** current_filter, GstElement * new_filter, - GstElement * previous_element, GstElement * next_element) + GstElement * previous_element, GstElement * next_element, + const gchar * prev_elem_pad) { if (*current_filter == new_filter) { GST_DEBUG_OBJECT (camera, "Current filter is the same as the previous, " @@ -815,15 +863,27 @@ 
gst_camera_bin_check_and_replace_filter (GstCameraBin * camera, if (new_filter) { *current_filter = gst_object_ref (new_filter); gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (new_filter)); - gst_element_link_many (previous_element, new_filter, next_element, NULL); + } + + if (prev_elem_pad) { + if (new_filter) { + gst_element_link_pads (previous_element, prev_elem_pad, new_filter, NULL); + gst_element_link (new_filter, next_element); + } else { + gst_element_link_pads (previous_element, prev_elem_pad, next_element, + NULL); + } } else { - gst_element_link (previous_element, next_element); + if (new_filter) + gst_element_link_many (previous_element, new_filter, next_element, NULL); + else + gst_element_link (previous_element, next_element); } } static void encodebin_element_added (GstElement * encodebin, GstElement * new_element, - GstCameraBin * camera) + GstCameraBin2 * camera) { GstElementFactory *factory = gst_element_get_factory (new_element); @@ -833,12 +893,18 @@ encodebin_element_added (GstElement * encodebin, GstElement * new_element, g_object_set (new_element, "skip-to-first", TRUE, NULL); } } + + if (gst_element_implements_interface (new_element, GST_TYPE_TAG_SETTER)) { + GstTagSetter *tagsetter = GST_TAG_SETTER (new_element); + + gst_tag_setter_set_tag_merge_mode (tagsetter, GST_TAG_MERGE_REPLACE); + } } #define VIDEO_PAD 1 #define AUDIO_PAD 2 static GstPad * -encodebin_find_pad (GstCameraBin * camera, GstElement * encodebin, +encodebin_find_pad (GstCameraBin2 * camera, GstElement * encodebin, gint pad_type) { GstPad *pad = NULL; @@ -903,7 +969,7 @@ encodebin_find_pad (GstCameraBin * camera, GstElement * encodebin, } static gboolean -gst_camera_bin_video_profile_has_audio (GstCameraBin * camera) +gst_camera_bin_video_profile_has_audio (GstCameraBin2 * camera) { const GList *list; @@ -925,7 +991,7 @@ gst_camera_bin_video_profile_has_audio (GstCameraBin * camera) } static GstPadLinkReturn -gst_camera_bin_link_encodebin (GstCameraBin * camera, GstElement * encodebin, +gst_camera_bin_link_encodebin (GstCameraBin2 * camera, GstElement * encodebin, GstElement * element, gint padtype) { GstPadLinkReturn ret; @@ -955,18 +1021,85 @@ static void gst_camera_bin_src_notify_max_zoom_cb (GObject * self, GParamSpec * pspec, gpointer user_data) { - GstCameraBin *camera = (GstCameraBin *) user_data; + GstCameraBin2 *camera = (GstCameraBin2 *) user_data; g_object_get (self, "max-zoom", &camera->max_zoom, NULL); GST_DEBUG_OBJECT (camera, "Max zoom updated to %f", camera->max_zoom); g_object_notify (G_OBJECT (camera), "max-zoom"); } +static gboolean +gst_camera_bin_image_src_buffer_probe (GstPad * pad, GstBuffer * buf, + gpointer data) +{ + gboolean ret = TRUE; + GstCameraBin2 *camerabin = data; + GstEvent *evt; + gchar *location = NULL; + GstPad *peer; + + if (camerabin->image_location_list) { + location = camerabin->image_location_list->data; + camerabin->image_location_list = + g_slist_delete_link (camerabin->image_location_list, + camerabin->image_location_list); + GST_DEBUG_OBJECT (camerabin, "Sending image location change to '%s'", + location); + } else { + GST_DEBUG_OBJECT (camerabin, "No filename location change to send"); + return ret; + } + + if (location) { + evt = gst_camera_bin_new_event_file_location (location); + peer = gst_pad_get_peer (pad); + gst_pad_send_event (peer, evt); + gst_object_unref (peer); + g_free (location); + } else { + /* This means we don't have to encode the capture, it is used for + * signaling the application just wants the preview */ + ret = FALSE; + 
GST_CAMERA_BIN2_PROCESSING_DEC (camerabin); + } + + return ret; +} + + +static gboolean +gst_camera_bin_image_sink_event_probe (GstPad * pad, GstEvent * event, + gpointer data) +{ + GstCameraBin2 *camerabin = data; + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_CUSTOM_DOWNSTREAM:{ + if (gst_event_has_name (event, "new-location")) { + const GstStructure *structure = gst_event_get_structure (event); + const gchar *filename = gst_structure_get_string (structure, + "location"); + + gst_element_set_state (camerabin->imagesink, GST_STATE_NULL); + GST_DEBUG_OBJECT (camerabin, "Setting filename to imagesink: %s", + filename); + g_object_set (camerabin->imagesink, "location", filename, NULL); + gst_element_set_state (camerabin->imagesink, GST_STATE_PLAYING); + } + } + break; + default: + break; + } + + return TRUE; +} + /** * gst_camera_bin_create_elements: - * @param camera: the #GstCameraBin + * @param camera: the #GstCameraBin2 * - * Creates all elements inside #GstCameraBin + * Creates all elements inside #GstCameraBin2 * * Each of the pads on the camera source is linked as follows: * .pad ! queue ! capsfilter ! correspondingbin @@ -975,18 +1108,20 @@ gst_camera_bin_src_notify_max_zoom_cb (GObject * self, GParamSpec * pspec, * the camera source pad. */ static gboolean -gst_camera_bin_create_elements (GstCameraBin * camera) +gst_camera_bin_create_elements (GstCameraBin2 * camera) { gboolean new_src = FALSE; gboolean new_audio_src = FALSE; gboolean has_audio; gboolean profile_switched = FALSE; const gchar *missing_element_name; + gint encbin_flags = 0; if (!camera->elements_created) { /* TODO check that elements created in _init were really created */ - camera->video_encodebin = gst_element_factory_make ("encodebin", NULL); + camera->video_encodebin = + gst_element_factory_make ("encodebin", "video-encodebin"); if (!camera->video_encodebin) { missing_element_name = "encodebin"; goto missing_element; @@ -995,18 +1130,19 @@ gst_camera_bin_create_elements (GstCameraBin * camera) g_signal_connect (camera->video_encodebin, "element-added", (GCallback) encodebin_element_added, camera); + /* propagate the flags property by translating appropriate values + * to GstEncFlags values */ + if (camera->flags & GST_CAM_FLAG_NO_AUDIO_CONVERSION) + encbin_flags |= (1 << 0); + if (camera->flags & GST_CAM_FLAG_NO_VIDEO_CONVERSION) + encbin_flags |= (1 << 1); + g_object_set (camera->video_encodebin, "flags", encbin_flags, NULL); + camera->videosink = gst_element_factory_make ("filesink", "videobin-filesink"); g_object_set (camera->videosink, "async", FALSE, NULL); /* audio elements */ - camera->audio_queue = gst_element_factory_make ("queue", "audio-queue"); - camera->audio_convert = gst_element_factory_make ("audioconvert", - "audio-convert"); - if (!camera->audio_convert) { - missing_element_name = "audioconvert"; - goto missing_element; - } if (!camera->audio_volume) { missing_element_name = "volume"; goto missing_element; @@ -1041,7 +1177,8 @@ gst_camera_bin_create_elements (GstCameraBin * camera) camera->video_profile_switch = TRUE; } - camera->image_encodebin = gst_element_factory_make ("encodebin", NULL); + camera->image_encodebin = + gst_element_factory_make ("encodebin", "image-encodebin"); if (!camera->image_encodebin) { missing_element_name = "encodebin"; goto missing_element; @@ -1078,8 +1215,6 @@ gst_camera_bin_create_elements (GstCameraBin * camera) camera->image_profile_switch = TRUE; } - camera->videobin_queue = - gst_element_factory_make ("queue", "videobin-queue"); camera->imagebin_queue = 
gst_element_factory_make ("queue", "imagebin-queue"); camera->viewfinderbin_queue = @@ -1089,20 +1224,16 @@ gst_camera_bin_create_elements (GstCameraBin * camera) NULL); g_object_set (camera->imagebin_queue, "max-size-time", (guint64) 0, "silent", TRUE, NULL); - g_object_set (camera->videobin_queue, "silent", TRUE, NULL); gst_bin_add_many (GST_BIN_CAST (camera), gst_object_ref (camera->video_encodebin), gst_object_ref (camera->videosink), gst_object_ref (camera->image_encodebin), gst_object_ref (camera->imagesink), - gst_object_ref (camera->videobin_queue), gst_object_ref (camera->imagebin_queue), gst_object_ref (camera->viewfinderbin_queue), NULL); /* Linking can be optimized TODO */ - gst_element_link_many (camera->videobin_queue, camera->videobin_capsfilter, - NULL); gst_element_link (camera->video_encodebin, camera->videosink); gst_element_link_many (camera->imagebin_queue, camera->imagebin_capsfilter, @@ -1110,6 +1241,19 @@ gst_camera_bin_create_elements (GstCameraBin * camera) gst_element_link (camera->image_encodebin, camera->imagesink); gst_element_link_many (camera->viewfinderbin_queue, camera->viewfinderbin_capsfilter, camera->viewfinderbin, NULL); + + { + /* set an event probe to watch for custom location changes */ + GstPad *srcpad; + + srcpad = gst_element_get_static_pad (camera->image_encodebin, "src"); + + gst_pad_add_event_probe (srcpad, + (GCallback) gst_camera_bin_image_sink_event_probe, camera); + + gst_object_unref (srcpad); + } + /* * Video can't get into playing as its internal filesink will open * a file for writing and leave it empty if unused. @@ -1122,8 +1266,8 @@ gst_camera_bin_create_elements (GstCameraBin * camera) gst_element_set_locked_state (camera->videosink, TRUE); gst_element_set_locked_state (camera->imagesink, TRUE); - g_object_set (camera->videosink, "location", camera->video_location, NULL); - g_object_set (camera->imagesink, "location", camera->image_location, NULL); + g_object_set (camera->videosink, "location", camera->location, NULL); + g_object_set (camera->imagesink, "location", camera->location, NULL); } if (camera->video_profile_switch) { @@ -1192,6 +1336,8 @@ gst_camera_bin_create_elements (GstCameraBin * camera) (GCallback) gst_camera_bin_src_notify_max_zoom_cb, camera); } if (new_src) { + GstPad *imgsrc = gst_element_get_static_pad (camera->src, "imgsrc"); + gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (camera->src)); camera->src_capture_notify_id = g_signal_connect (G_OBJECT (camera->src), "notify::ready-for-capture", @@ -1200,19 +1346,27 @@ gst_camera_bin_create_elements (GstCameraBin * camera) "sink"); gst_element_link_pads (camera->src, "imgsrc", camera->imagebin_queue, "sink"); - gst_element_link_pads (camera->src, "vidsrc", camera->videobin_queue, - "sink"); + if (!gst_element_link_pads (camera->src, "vidsrc", + camera->videobin_capsfilter, "sink")) { + GST_ERROR_OBJECT (camera, + "Failed to link camera source's vidsrc pad to video bin capsfilter"); + goto fail; + } + + gst_pad_add_buffer_probe (imgsrc, + (GCallback) gst_camera_bin_image_src_buffer_probe, camera); + gst_object_unref (imgsrc); } gst_camera_bin_check_and_replace_filter (camera, &camera->image_filter, camera->user_image_filter, camera->imagebin_queue, - camera->imagebin_capsfilter); + camera->imagebin_capsfilter, NULL); gst_camera_bin_check_and_replace_filter (camera, &camera->video_filter, - camera->user_video_filter, camera->videobin_queue, - camera->videobin_capsfilter); + camera->user_video_filter, camera->src, camera->videobin_capsfilter, + "vidsrc"); 
gst_camera_bin_check_and_replace_filter (camera, &camera->viewfinder_filter, camera->user_viewfinder_filter, camera->viewfinderbin_queue, - camera->viewfinderbin_capsfilter); + camera->viewfinderbin_capsfilter, NULL); /* check if we need to replace the camera audio src */ has_audio = gst_camera_bin_video_profile_has_audio (camera); @@ -1220,10 +1374,8 @@ gst_camera_bin_create_elements (GstCameraBin * camera) if ((camera->user_audio_src && camera->user_audio_src != camera->audio_src) || !has_audio) { gst_bin_remove (GST_BIN_CAST (camera), camera->audio_src); - gst_bin_remove (GST_BIN_CAST (camera), camera->audio_queue); gst_bin_remove (GST_BIN_CAST (camera), camera->audio_volume); gst_bin_remove (GST_BIN_CAST (camera), camera->audio_capsfilter); - gst_bin_remove (GST_BIN_CAST (camera), camera->audio_convert); gst_object_unref (camera->audio_src); camera->audio_src = NULL; } @@ -1242,21 +1394,23 @@ gst_camera_bin_create_elements (GstCameraBin * camera) } if (new_audio_src) { + if (g_object_class_find_property (G_OBJECT_GET_CLASS (camera->audio_src), + "provide-clock")) { + g_object_set (camera->audio_src, "provide-clock", FALSE, NULL); + } gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (camera->audio_src)); - gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (camera->audio_queue)); gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (camera->audio_volume)); gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (camera->audio_capsfilter)); - gst_bin_add (GST_BIN_CAST (camera), gst_object_ref (camera->audio_convert)); - gst_element_link_many (camera->audio_src, camera->audio_queue, - camera->audio_volume, - camera->audio_capsfilter, camera->audio_convert, NULL); + gst_element_link_many (camera->audio_src, camera->audio_volume, + camera->audio_capsfilter, NULL); } if ((profile_switched && has_audio) || new_audio_src) { if (GST_PAD_LINK_FAILED (gst_camera_bin_link_encodebin (camera, - camera->video_encodebin, camera->audio_convert, AUDIO_PAD))) { + camera->video_encodebin, camera->audio_capsfilter, + AUDIO_PAD))) { goto fail; } } @@ -1282,7 +1436,7 @@ static GstStateChangeReturn gst_camera_bin_change_state (GstElement * element, GstStateChange trans) { GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS; - GstCameraBin *camera = GST_CAMERA_BIN_CAST (element); + GstCameraBin2 *camera = GST_CAMERA_BIN2_CAST (element); switch (trans) { case GST_STATE_CHANGE_NULL_TO_READY: @@ -1291,7 +1445,7 @@ gst_camera_bin_change_state (GstElement * element, GstStateChange trans) } break; case GST_STATE_CHANGE_READY_TO_PAUSED: - GST_CAMERA_BIN_RESET_PROCESSING_COUNTER (camera); + GST_CAMERA_BIN2_RESET_PROCESSING_COUNTER (camera); break; case GST_STATE_CHANGE_PAUSED_TO_READY: if (GST_STATE (camera->videosink) >= GST_STATE_PAUSED) @@ -1315,23 +1469,23 @@ gst_camera_bin_change_state (GstElement * element, GstStateChange trans) gst_element_set_state (camera->audio_src, GST_STATE_READY); gst_tag_setter_reset_tags (GST_TAG_SETTER (camera)); - GST_CAMERA_BIN_RESET_PROCESSING_COUNTER (camera); + GST_CAMERA_BIN2_RESET_PROCESSING_COUNTER (camera); + + g_slist_foreach (camera->image_location_list, (GFunc) g_free, NULL); + g_slist_free (camera->image_location_list); + camera->image_location_list = NULL; /* explicitly set to READY as they might be outside of the bin */ - gst_element_set_state (camera->audio_queue, GST_STATE_READY); gst_element_set_state (camera->audio_volume, GST_STATE_READY); gst_element_set_state (camera->audio_capsfilter, GST_STATE_READY); - gst_element_set_state (camera->audio_convert, GST_STATE_READY); 
break; case GST_STATE_CHANGE_READY_TO_NULL: if (camera->audio_src) gst_element_set_state (camera->audio_src, GST_STATE_NULL); /* explicitly set to NULL as they might be outside of the bin */ - gst_element_set_state (camera->audio_queue, GST_STATE_NULL); gst_element_set_state (camera->audio_volume, GST_STATE_NULL); gst_element_set_state (camera->audio_capsfilter, GST_STATE_NULL); - gst_element_set_state (camera->audio_convert, GST_STATE_NULL); break; default: @@ -1344,7 +1498,7 @@ gst_camera_bin_change_state (GstElement * element, GstStateChange trans) static gboolean gst_camera_bin_send_event (GstElement * element, GstEvent * event) { - GstCameraBin *camera = GST_CAMERA_BIN_CAST (element); + GstCameraBin2 *camera = GST_CAMERA_BIN2_CAST (element); gboolean res; res = GST_ELEMENT_CLASS (parent_class)->send_event (element, event); @@ -1376,21 +1530,16 @@ gst_camera_bin_send_event (GstElement * element, GstEvent * event) } static void -gst_camera_bin_set_location (GstCameraBin * camera, const gchar * location) +gst_camera_bin_set_location (GstCameraBin2 * camera, const gchar * location) { GST_DEBUG_OBJECT (camera, "Setting mode %d location to %s", camera->mode, location); - if (camera->mode == MODE_IMAGE) { - g_free (camera->image_location); - camera->image_location = g_strdup (location); - } else { - g_free (camera->video_location); - camera->video_location = g_strdup (location); - } + g_free (camera->location); + camera->location = g_strdup (location); } static void -gst_camera_bin_set_audio_src (GstCameraBin * camera, GstElement * src) +gst_camera_bin_set_audio_src (GstCameraBin2 * camera, GstElement * src) { GST_DEBUG_OBJECT (GST_OBJECT (camera), "Setting audio source %" GST_PTR_FORMAT, src); @@ -1404,7 +1553,7 @@ gst_camera_bin_set_audio_src (GstCameraBin * camera, GstElement * src) } static void -gst_camera_bin_set_camera_src (GstCameraBin * camera, GstElement * src) +gst_camera_bin_set_camera_src (GstCameraBin2 * camera, GstElement * src) { GST_DEBUG_OBJECT (GST_OBJECT (camera), "Setting camera source %" GST_PTR_FORMAT, src); @@ -1421,7 +1570,7 @@ static void gst_camera_bin_set_property (GObject * object, guint prop_id, const GValue * value, GParamSpec * pspec) { - GstCameraBin *camera = GST_CAMERA_BIN_CAST (object); + GstCameraBin2 *camera = GST_CAMERA_BIN2_CAST (object); switch (prop_id) { case PROP_MODE: @@ -1592,6 +1741,9 @@ gst_camera_bin_set_property (GObject * object, guint prop_id, (GstEncodingProfile *) gst_value_dup_mini_object (value); camera->image_profile_switch = TRUE; break; + case PROP_FLAGS: + camera->flags = g_value_get_flags (value); + break; default: G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); break; @@ -1602,18 +1754,14 @@ static void gst_camera_bin_get_property (GObject * object, guint prop_id, GValue * value, GParamSpec * pspec) { - GstCameraBin *camera = GST_CAMERA_BIN_CAST (object); + GstCameraBin2 *camera = GST_CAMERA_BIN2_CAST (object); switch (prop_id) { case PROP_MODE: g_value_set_enum (value, camera->mode); break; case PROP_LOCATION: - if (camera->mode == MODE_VIDEO) { - g_value_set_string (value, camera->video_location); - } else { - g_value_set_string (value, camera->image_location); - } + g_value_set_string (value, camera->location); break; case PROP_CAMERA_SRC: g_value_set_object (value, camera->user_src); @@ -1754,6 +1902,9 @@ gst_camera_bin_get_property (GObject * object, guint prop_id, g_value_set_boolean (value, g_atomic_int_get (&camera->processing_counter) == 0); break; + case PROP_FLAGS: + g_value_set_flags (value, camera->flags); 
+ break; default: G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); break; @@ -1761,10 +1912,10 @@ gst_camera_bin_get_property (GObject * object, guint prop_id, } gboolean -gst_camera_bin_plugin_init (GstPlugin * plugin) +gst_camera_bin2_plugin_init (GstPlugin * plugin) { GST_DEBUG_CATEGORY_INIT (gst_camera_bin_debug, "camerabin2", 0, "CameraBin2"); return gst_element_register (plugin, "camerabin2", GST_RANK_NONE, - gst_camera_bin_get_type ()); + gst_camera_bin2_get_type ()); } diff --git a/gst/camerabin2/gstcamerabin2.h b/gst/camerabin2/gstcamerabin2.h index 59e3723dff..d5a37f034e 100644 --- a/gst/camerabin2/gstcamerabin2.h +++ b/gst/camerabin2/gstcamerabin2.h @@ -16,25 +16,34 @@ * Free Software Foundation, Inc., 59 Temple Place - Suite 330, * Boston, MA 02111-1307, USA. */ -#ifndef _GST_CAMERA_BIN_H_ -#define _GST_CAMERA_BIN_H_ +#ifndef _GST_CAMERA_BIN2_H_ +#define _GST_CAMERA_BIN2_H_ #include #include G_BEGIN_DECLS -#define GST_TYPE_CAMERA_BIN (gst_camera_bin_get_type()) -#define GST_CAMERA_BIN(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_CAMERA_BIN,GstCameraBin)) -#define GST_CAMERA_BIN_CAST(obj) ((GstCameraBin *) obj) -#define GST_CAMERA_BIN_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_CAMERA_BIN,GstCameraBinClass)) -#define GST_IS_CAMERA_BIN(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_CAMERA_BIN)) -#define GST_IS_CAMERA_BIN_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_CAMERA_BIN)) +#define GST_TYPE_CAMERA_BIN2 (gst_camera_bin2_get_type()) +#define GST_CAMERA_BIN2(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_CAMERA_BIN2,GstCameraBin2)) +#define GST_CAMERA_BIN2_CAST(obj) ((GstCameraBin2 *) obj) +#define GST_CAMERA_BIN2_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_CAMERA_BIN2,GstCameraBin2Class)) +#define GST_IS_CAMERA_BIN2(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_CAMERA_BIN2)) +#define GST_IS_CAMERA_BIN2_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_CAMERA_BIN2)) -typedef struct _GstCameraBin GstCameraBin; -typedef struct _GstCameraBinClass GstCameraBinClass; +typedef enum +{ + /* matches GstEncFlags GST_ENC_FLAG_NO_AUDIO_CONVERSION in encodebin */ + GST_CAM_FLAG_NO_AUDIO_CONVERSION = (1 << 0), + /* matches GstEncFlags GST_ENC_FLAG_NO_VIDEO_CONVERSION in encodebin */ + GST_CAM_FLAG_NO_VIDEO_CONVERSION = (1 << 1) +} GstCamFlags; -struct _GstCameraBin + +typedef struct _GstCameraBin2 GstCameraBin2; +typedef struct _GstCameraBin2Class GstCameraBin2Class; + +struct _GstCameraBin2 { GstPipeline pipeline; @@ -45,8 +54,6 @@ struct _GstCameraBin GstElement *video_encodebin; gulong video_encodebin_signal_id; GstElement *videosink; - gulong videosink_probe; - GstElement *videobin_queue; GstElement *videobin_capsfilter; GstElement *viewfinderbin; @@ -68,23 +75,25 @@ struct _GstCameraBin GstElement *audio_src; GstElement *user_audio_src; - GstElement *audio_queue; GstElement *audio_volume; GstElement *audio_capsfilter; - GstElement *audio_convert; gint processing_counter; /* atomic int */ - /* Index of the auto incrementing file index for video recordings */ - gint video_index; + /* Index of the auto incrementing file index for captures */ + gint capture_index; + + /* stores list of image locations to be pushed to the image sink + * as file location change notifications, they are pushed before + * each buffer capture */ + GSList *image_location_list; gboolean video_profile_switch; gboolean image_profile_switch; /* properties */ gint mode; - gchar *video_location; - gchar *image_location; + gchar *location; gboolean post_previews; 
GstCaps *preview_caps; GstElement *preview_filter; @@ -92,21 +101,22 @@ struct _GstCameraBin GstEncodingProfile *image_profile; gfloat zoom; gfloat max_zoom; + GstCamFlags flags; gboolean elements_created; }; -struct _GstCameraBinClass +struct _GstCameraBin2Class { GstPipelineClass pipeline_class; /* Action signals */ - void (*start_capture) (GstCameraBin * camera); - void (*stop_capture) (GstCameraBin * camera); + void (*start_capture) (GstCameraBin2 * camera); + void (*stop_capture) (GstCameraBin2 * camera); }; -GType gst_camera_bin_get_type (void); -gboolean gst_camera_bin_plugin_init (GstPlugin * plugin); +GType gst_camera_bin2_get_type (void); +gboolean gst_camera_bin2_plugin_init (GstPlugin * plugin); G_END_DECLS diff --git a/gst/camerabin2/gstplugin.c b/gst/camerabin2/gstplugin.c index 6053521468..c4eaf2fd44 100644 --- a/gst/camerabin2/gstplugin.c +++ b/gst/camerabin2/gstplugin.c @@ -34,7 +34,7 @@ plugin_init (GstPlugin * plugin) return FALSE; if (!gst_wrapper_camera_bin_src_plugin_init (plugin)) return FALSE; - if (!gst_camera_bin_plugin_init (plugin)) + if (!gst_camera_bin2_plugin_init (plugin)) return FALSE; return TRUE; diff --git a/gst/camerabin2/gstwrappercamerabinsrc.c b/gst/camerabin2/gstwrappercamerabinsrc.c index 6a003a0b4e..16aa34b945 100644 --- a/gst/camerabin2/gstwrappercamerabinsrc.c +++ b/gst/camerabin2/gstwrappercamerabinsrc.c @@ -395,7 +395,6 @@ gst_wrapper_camera_bin_src_construct_pipeline (GstBaseCameraSrc * bcamsrc) GstElement *src_csp; GstElement *capsfilter; gboolean ret = FALSE; - GstElement *videoscale; GstPad *vf_pad; GstPad *tee_capture_pad; GstPad *src_caps_src_pad; @@ -473,17 +472,9 @@ gst_wrapper_camera_bin_src_construct_pipeline (GstBaseCameraSrc * bcamsrc) /* viewfinder pad */ vf_pad = gst_element_get_request_pad (tee, "src%d"); g_object_set (tee, "alloc-pad", vf_pad, NULL); + gst_ghost_pad_set_target (GST_GHOST_PAD (self->vfsrc), vf_pad); gst_object_unref (vf_pad); - /* the viewfinder should always work, so we add some converters to it */ - if (!gst_camerabin_create_and_add_element (cbin, "ffmpegcolorspace", - "viewfinder-colorspace")) - goto done; - if (!(videoscale = - gst_camerabin_create_and_add_element (cbin, "videoscale", - "viewfinder-scale"))) - goto done; - /* image/video pad from tee */ tee_capture_pad = gst_element_get_request_pad (tee, "src%d"); @@ -526,10 +517,7 @@ gst_wrapper_camera_bin_src_construct_pipeline (GstBaseCameraSrc * bcamsrc) NULL); } - /* hook-up the vf ghostpad */ - vf_pad = gst_element_get_static_pad (videoscale, "src"); - gst_ghost_pad_set_target (GST_GHOST_PAD (self->vfsrc), vf_pad); - gst_object_unref (vf_pad); + gst_pad_set_active (self->vfsrc, TRUE); gst_pad_set_active (self->imgsrc, TRUE); /* XXX ??? */ @@ -854,73 +842,6 @@ gst_wrapper_camera_bin_src_set_zoom (GstBaseCameraSrc * bcamsrc, gfloat zoom) } } -static GstCaps * -gst_wrapper_camera_bin_src_get_allowed_input_caps (GstBaseCameraSrc * bcamsrc) -{ - GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (bcamsrc); - GstCaps *caps = NULL; - GstPad *pad = NULL, *peer_pad = NULL; - GstState state; - GstElement *videosrc; - - videosrc = self->src_vid_src ? 
self->src_vid_src : self->app_vid_src; - - if (!videosrc) { - GST_WARNING_OBJECT (self, "no videosrc, can't get allowed caps"); - goto failed; - } - - if (self->allowed_caps) { - GST_DEBUG_OBJECT (self, "returning cached caps"); - goto done; - } - - pad = gst_element_get_static_pad (videosrc, "src"); - - if (!pad) { - GST_WARNING_OBJECT (self, "no srcpad in videosrc"); - goto failed; - } - - state = GST_STATE (videosrc); - - /* Make this function work also in NULL state */ - if (state == GST_STATE_NULL) { - GST_DEBUG_OBJECT (self, "setting videosrc to ready temporarily"); - peer_pad = gst_pad_get_peer (pad); - if (peer_pad) { - gst_pad_unlink (pad, peer_pad); - } - /* Set videosrc to READY to open video device */ - gst_element_set_locked_state (videosrc, TRUE); - gst_element_set_state (videosrc, GST_STATE_READY); - } - - self->allowed_caps = gst_pad_get_caps (pad); - - /* Restore state and re-link if necessary */ - if (state == GST_STATE_NULL) { - GST_DEBUG_OBJECT (self, "restoring videosrc state %d", state); - /* Reset videosrc to NULL state, some drivers seem to need this */ - gst_element_set_state (videosrc, GST_STATE_NULL); - if (peer_pad) { - gst_pad_link (pad, peer_pad); - gst_object_unref (peer_pad); - } - gst_element_set_locked_state (videosrc, FALSE); - } - - gst_object_unref (pad); - -done: - if (self->allowed_caps) { - caps = gst_caps_copy (self->allowed_caps); - } - GST_DEBUG_OBJECT (self, "allowed caps:%" GST_PTR_FORMAT, caps); -failed: - return caps; -} - /** * update_aspect_filter: * @self: camerasrc object @@ -1172,8 +1093,6 @@ gst_wrapper_camera_bin_src_class_init (GstWrapperCameraBinSrcClass * klass) gst_wrapper_camera_bin_src_construct_pipeline; gstbasecamerasrc_class->set_zoom = gst_wrapper_camera_bin_src_set_zoom; gstbasecamerasrc_class->set_mode = gst_wrapper_camera_bin_src_set_mode; - gstbasecamerasrc_class->get_allowed_input_caps = - gst_wrapper_camera_bin_src_get_allowed_input_caps; gstbasecamerasrc_class->start_capture = gst_wrapper_camera_bin_src_start_capture; gstbasecamerasrc_class->stop_capture = diff --git a/gst/debugutils/Makefile.am b/gst/debugutils/Makefile.am index 9d237a3251..eadb82e9b9 100644 --- a/gst/debugutils/Makefile.am +++ b/gst/debugutils/Makefile.am @@ -22,10 +22,14 @@ libgstdebugutilsbad_la_SOURCES = \ gstchecksumsink.h \ gstchopmydata.c \ gstchopmydata.h \ + gstcompare.c \ + gstcompare.h \ gstdebugspy.h + nodist_libgstdebugutilsbad_la_SOURCES = $(BUILT_SOURCES) libgstdebugutilsbad_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) libgstdebugutilsbad_la_LIBADD = $(GST_BASE_LIBS) $(GST_PLUGINS_BASE_LIBS) \ + -lgstvideo-$(GST_MAJORMINOR) \ -lgstinterfaces-$(GST_MAJORMINOR) $(GST_LIBS) libgstdebugutilsbad_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) libgstdebugutilsbad_la_LIBTOOLFLAGS = --tag=disable-static diff --git a/gst/debugutils/debugutilsbad.c b/gst/debugutils/debugutilsbad.c index 09082a72db..2ee7f93c06 100644 --- a/gst/debugutils/debugutilsbad.c +++ b/gst/debugutils/debugutilsbad.c @@ -26,6 +26,7 @@ GType gst_checksum_sink_get_type (void); GType fps_display_sink_get_type (void); GType gst_chop_my_data_get_type (void); +GType gst_compare_get_type (void); GType gst_debug_spy_get_type (void); static gboolean @@ -37,8 +38,11 @@ plugin_init (GstPlugin * plugin) fps_display_sink_get_type ()); gst_element_register (plugin, "chopmydata", GST_RANK_NONE, gst_chop_my_data_get_type ()); + gst_element_register (plugin, "compare", GST_RANK_NONE, + gst_compare_get_type ()); gst_element_register (plugin, "debugspy", GST_RANK_NONE, 
gst_debug_spy_get_type ()); + return TRUE; } diff --git a/gst/debugutils/gstcompare.c b/gst/debugutils/gstcompare.c new file mode 100644 index 0000000000..abf563c6c7 --- /dev/null +++ b/gst/debugutils/gstcompare.c @@ -0,0 +1,664 @@ +/* GStreamer Element + * + * Copyright 2011 Collabora Ltd. + * @author: Mark Nauwelaerts + * Copyright 2011 Nokia Corp. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif +#include + +#include +#include +#include + +#include "gstcompare.h" + +GST_DEBUG_CATEGORY_STATIC (compare_debug); +#define GST_CAT_DEFAULT compare_debug + + +static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS_ANY); + +static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS_ANY); + +static GstStaticPadTemplate check_sink_factory = +GST_STATIC_PAD_TEMPLATE ("check", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS_ANY); + +enum GstCompareMethod +{ + GST_COMPARE_METHOD_MEM, + GST_COMPARE_METHOD_MAX, + GST_COMPARE_METHOD_SSIM +}; + +#define GST_COMPARE_METHOD_TYPE (gst_compare_method_get_type()) +static GType +gst_compare_method_get_type (void) +{ + static GType method_type = 0; + + static const GEnumValue method_types[] = { + {GST_COMPARE_METHOD_MEM, "Memory", "mem"}, + {GST_COMPARE_METHOD_MAX, "Maximum metric", "max"}, + {GST_COMPARE_METHOD_SSIM, "SSIM (raw video)", "ssim"}, + {0, NULL, NULL} + }; + + if (!method_type) { + method_type = g_enum_register_static ("GstCompareMethod", method_types); + } + return method_type; +} + +/* Filter signals and args */ +enum +{ + /* FILL ME */ + LAST_SIGNAL +}; + +enum +{ + PROP_0, + PROP_META, + PROP_OFFSET_TS, + PROP_METHOD, + PROP_THRESHOLD, + PROP_UPPER, + PROP_LAST +}; + +#define DEFAULT_META GST_BUFFER_COPY_ALL +#define DEFAULT_OFFSET_TS FALSE +#define DEFAULT_METHOD GST_COMPARE_METHOD_MEM +#define DEFAULT_THRESHOLD 0 +#define DEFAULT_UPPER TRUE + +static void gst_compare_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec); +static void gst_compare_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec); + +static void gst_compare_reset (GstCompare * overlay); + +static GstCaps *gst_compare_getcaps (GstPad * pad); +static GstFlowReturn gst_compare_collect_pads (GstCollectPads * cpads, + GstCompare * comp); + +static GstStateChangeReturn gst_compare_change_state (GstElement * element, + GstStateChange transition); + +GST_BOILERPLATE (GstCompare, gst_compare, GstElement, GST_TYPE_ELEMENT); + + +static void +gst_compare_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_add_pad_template (element_class, + 
gst_static_pad_template_get (&src_factory)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&sink_factory)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&check_sink_factory)); + gst_element_class_set_details_simple (element_class, "Compare buffers", + "Filter/Debug", "Compares incoming buffers", + "Mark Nauwelaerts "); +} + +static void +gst_compare_finalize (GObject * object) +{ + GstCompare *comp = GST_COMPARE (object); + + gst_object_unref (comp->cpads); + + G_OBJECT_CLASS (parent_class)->finalize (object); +} + +static void +gst_compare_class_init (GstCompareClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *gstelement_class; + + gobject_class = (GObjectClass *) klass; + gstelement_class = (GstElementClass *) klass; + + GST_DEBUG_CATEGORY_INIT (compare_debug, "compare", 0, "Compare buffers"); + + gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_compare_change_state); + + gobject_class->set_property = gst_compare_set_property; + gobject_class->get_property = gst_compare_get_property; + gobject_class->finalize = gst_compare_finalize; + + g_object_class_install_property (gobject_class, PROP_META, + g_param_spec_flags ("meta", "Compare Meta", + "Indicates which metadata should be compared", + gst_buffer_copy_flags_get_type (), DEFAULT_META, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_OFFSET_TS, + g_param_spec_boolean ("offset-ts", "Offsets Timestamps", + "Consider OFFSET and OFFSET_END part of timestamp metadata", + DEFAULT_OFFSET_TS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_METHOD, + g_param_spec_enum ("method", "Content Compare Method", + "Method to compare buffer content", + GST_COMPARE_METHOD_TYPE, DEFAULT_METHOD, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_THRESHOLD, + g_param_spec_double ("threshold", "Content Threshold", + "Threshold beyond which to consider content different as determined by content-method", + 0, G_MAXDOUBLE, DEFAULT_THRESHOLD, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_UPPER, + g_param_spec_boolean ("upper", "Threshold Upper Bound", + "Whether threshold value is upper bound or lower bound for difference measure", + DEFAULT_UPPER, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); +} + +static void +gst_compare_init (GstCompare * comp, GstCompareClass * klass) +{ + comp->cpads = gst_collect_pads_new (); + gst_collect_pads_set_function (comp->cpads, + (GstCollectPadsFunction) GST_DEBUG_FUNCPTR (gst_compare_collect_pads), + comp); + + comp->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink"); + gst_pad_set_getcaps_function (comp->sinkpad, gst_compare_getcaps); + gst_element_add_pad (GST_ELEMENT (comp), comp->sinkpad); + + comp->checkpad = + gst_pad_new_from_static_template (&check_sink_factory, "check"); + gst_pad_set_getcaps_function (comp->checkpad, gst_compare_getcaps); + gst_element_add_pad (GST_ELEMENT (comp), comp->checkpad); + + gst_collect_pads_add_pad_full (comp->cpads, comp->sinkpad, + sizeof (GstCollectData), NULL); + gst_collect_pads_add_pad_full (comp->cpads, comp->checkpad, + sizeof (GstCollectData), NULL); + + comp->srcpad = gst_pad_new_from_static_template (&src_factory, "src"); + gst_pad_set_getcaps_function (comp->srcpad, gst_compare_getcaps); + gst_element_add_pad (GST_ELEMENT (comp), comp->srcpad); + + /* 
init properties */ + comp->meta = DEFAULT_META; + comp->offset_ts = DEFAULT_OFFSET_TS; + comp->method = DEFAULT_METHOD; + comp->threshold = DEFAULT_THRESHOLD; + comp->upper = DEFAULT_UPPER; + + gst_compare_reset (comp); +} + +static void +gst_compare_reset (GstCompare * comp) +{ +} + +static GstCaps * +gst_compare_getcaps (GstPad * pad) +{ + GstCompare *comp; + GstPad *otherpad; + GstCaps *result; + + comp = GST_COMPARE (gst_pad_get_parent (pad)); + if (G_UNLIKELY (comp == NULL)) + return gst_caps_new_any (); + + otherpad = (pad == comp->srcpad ? comp->sinkpad : comp->srcpad); + result = gst_pad_peer_get_caps (otherpad); + if (result == NULL) + result = gst_caps_new_any (); + + gst_object_unref (comp); + + return result; +} + +static void +gst_compare_meta (GstCompare * comp, GstBuffer * buf1, GstBuffer * buf2) +{ + gint flags = 0; + + if (comp->meta & GST_BUFFER_COPY_FLAGS) { + if (GST_BUFFER_FLAGS (buf1) != GST_BUFFER_FLAGS (buf2)) { + flags |= GST_BUFFER_COPY_FLAGS; + GST_DEBUG_OBJECT (comp, "flags %d != flags %d", GST_BUFFER_FLAGS (buf1), + GST_BUFFER_FLAGS (buf2)); + } + } + if (comp->meta & GST_BUFFER_COPY_TIMESTAMPS) { + if (GST_BUFFER_TIMESTAMP (buf1) != GST_BUFFER_TIMESTAMP (buf2)) { + flags |= GST_BUFFER_COPY_TIMESTAMPS; + GST_DEBUG_OBJECT (comp, + "ts %" GST_TIME_FORMAT " != ts %" GST_TIME_FORMAT, + GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf1)), + GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf2))); + } + if (GST_BUFFER_DURATION (buf1) != GST_BUFFER_DURATION (buf2)) { + flags |= GST_BUFFER_COPY_TIMESTAMPS; + GST_DEBUG_OBJECT (comp, + "dur %" GST_TIME_FORMAT " != dur %" GST_TIME_FORMAT, + GST_TIME_ARGS (GST_BUFFER_DURATION (buf1)), + GST_TIME_ARGS (GST_BUFFER_DURATION (buf2))); + } + if (comp->offset_ts) { + if (GST_BUFFER_OFFSET (buf1) != GST_BUFFER_OFFSET (buf2)) { + flags |= GST_BUFFER_COPY_TIMESTAMPS; + GST_DEBUG_OBJECT (comp, + "offset %" G_GINT64_FORMAT " != offset %" G_GINT64_FORMAT, + GST_BUFFER_OFFSET (buf1), GST_BUFFER_OFFSET (buf2)); + } + if (GST_BUFFER_OFFSET_END (buf1) != GST_BUFFER_OFFSET_END (buf2)) { + flags |= GST_BUFFER_COPY_TIMESTAMPS; + GST_DEBUG_OBJECT (comp, + "offset_end %" G_GINT64_FORMAT " != offset_end %" G_GINT64_FORMAT, + GST_BUFFER_OFFSET_END (buf1), GST_BUFFER_OFFSET_END (buf2)); + } + } + } + if (comp->meta & GST_BUFFER_COPY_CAPS) { + if (!gst_caps_is_equal (GST_BUFFER_CAPS (buf1), GST_BUFFER_CAPS (buf2))) { + flags |= GST_BUFFER_COPY_CAPS; + GST_DEBUG_OBJECT (comp, + "caps %" GST_PTR_FORMAT " != caps %" GST_PTR_FORMAT, + GST_BUFFER_CAPS (buf1), GST_BUFFER_CAPS (buf2)); + } + } + + /* signal mismatch by debug and message */ + if (flags) { + GST_WARNING_OBJECT (comp, "buffers %p and %p failed metadata match %d", + buf1, buf2, flags); + + gst_element_post_message (GST_ELEMENT (comp), + gst_message_new_element (GST_OBJECT (comp), + gst_structure_new ("delta", "meta", G_TYPE_INT, flags, NULL))); + } +} + +/* when comparing contents, it is already ensured sizes are equal */ + +static gint +gst_compare_mem (GstCompare * comp, GstBuffer * buf1, GstBuffer * buf2) +{ + return memcmp (GST_BUFFER_DATA (buf1), GST_BUFFER_DATA (buf2), + GST_BUFFER_SIZE (buf1)) ? 
1 : 0; +} + +static gint +gst_compare_max (GstCompare * comp, GstBuffer * buf1, GstBuffer * buf2) +{ + gint i, delta = 0; + gint8 *data1, *data2; + + data1 = (gint8 *) GST_BUFFER_DATA (buf1); + data2 = (gint8 *) GST_BUFFER_DATA (buf2); + + /* primitive loop */ + for (i = 0; i < GST_BUFFER_SIZE (buf1); i++) { + gint diff = ABS (*data1 - *data2); + if (diff > 0) + GST_LOG_OBJECT (comp, "diff at %d = %d", i, diff); + delta = MAX (delta, ABS (*data1 - *data2)); + data1++; + data2++; + } + + return delta; +} + +static double +gst_compare_ssim_window (GstCompare * comp, guint8 * data1, guint8 * data2, + gint width, gint height, gint step, gint stride) +{ + gint count = 0, i, j; + gint sum1 = 0, sum2 = 0, ssum1 = 0, ssum2 = 0, acov = 0; + gdouble avg1, avg2, var1, var2, cov; + + const gdouble k1 = 0.01; + const gdouble k2 = 0.03; + const gdouble L = 255.0; + const gdouble c1 = (k1 * L) * (k1 * L); + const gdouble c2 = (k2 * L) * (k2 * L); + + /* plain and simple; no fancy optimizations */ + for (i = 0; i < height; i++) { + for (j = 0; j < width; j++) { + sum1 += *data1; + sum2 += *data2; + ssum1 += *data1 * *data1; + ssum2 += *data2 * *data2; + acov += *data1 * *data2; + count++; + data1 += step; + data2 += step; + } + data1 -= j * step; + data2 -= j * step; + data1 += stride; + data2 += stride; + } + + avg1 = sum1 / count; + avg2 = sum2 / count; + var1 = ssum1 / count - avg1 * avg1; + var2 = ssum2 / count - avg2 * avg2; + cov = acov / count - avg1 * avg2; + + return (2 * avg1 * avg2 + c1) * (2 * cov + c2) / + ((avg1 * avg1 + avg2 * avg2 + c1) * (var1 + var2 + c2)); +} + +/* @width etc are for the particular component */ +static gdouble +gst_compare_ssim_component (GstCompare * comp, guint8 * data1, guint8 * data2, + gint width, gint height, gint step, gint stride) +{ + const gint window = 16; + gdouble ssim_sum = 0; + gint count = 0, i, j; + + for (j = 0; j + (window / 2) < height; j += (window / 2)) { + for (i = 0; i + (window / 2) < width; i += (window / 2)) { + gdouble ssim; + + ssim = gst_compare_ssim_window (comp, data1 + step * i + j * stride, + data2 + step * i + j * stride, + MIN (window, width - i), MIN (window, height - j), step, stride); + GST_LOG_OBJECT (comp, "ssim for %dx%d at (%d, %d) = %f", window, window, + i, j, ssim); + ssim_sum += ssim; + count++; + } + } + + return (ssim_sum / count); +} + +static gdouble +gst_compare_ssim (GstCompare * comp, GstBuffer * buf1, GstBuffer * buf2) +{ + GstCaps *caps; + GstVideoFormat format, f; + gint width, height, w, h, i, comps; + gdouble cssim[4], ssim, c[4] = { 1.0, 0.0, 0.0, 0.0 }; + guint8 *data1, *data2; + + caps = GST_BUFFER_CAPS (buf1); + if (!caps) + goto invalid_input; + + if (!gst_video_format_parse_caps (caps, &format, &width, &height)) + goto invalid_input; + + caps = GST_BUFFER_CAPS (buf2); + if (!caps) + goto invalid_input; + + if (!gst_video_format_parse_caps (caps, &f, &w, &h)) + goto invalid_input; + + if (f != format || w != width || h != height) + return comp->threshold + 1; + + comps = gst_video_format_is_gray (format) ? 1 : 3; + if (gst_video_format_has_alpha (format)) + comps += 1; + + /* note that some are reported both yuv and gray */ + for (i = 0; i < comps; ++i) + c[i] = 1.0; + /* increase luma weight if yuv */ + if (gst_video_format_is_yuv (format) && (comps > 1)) + c[0] = comps - 1; + for (i = 0; i < comps; ++i) + c[i] /= (gst_video_format_is_yuv (format) && (comps > 1)) ? 
+ 2 * (comps - 1) : comps; + + data1 = GST_BUFFER_DATA (buf1); + data2 = GST_BUFFER_DATA (buf2); + for (i = 0; i < comps; i++) { + gint offset, cw, ch, step, stride; + + /* only support most common formats */ + if (gst_video_format_get_component_depth (format, i) != 8) + goto unsupported_input; + offset = gst_video_format_get_component_offset (format, i, width, height); + cw = gst_video_format_get_component_width (format, i, width); + ch = gst_video_format_get_component_height (format, i, height); + step = gst_video_format_get_pixel_stride (format, i); + stride = gst_video_format_get_row_stride (format, i, width); + + GST_LOG_OBJECT (comp, "component %d", i); + cssim[i] = gst_compare_ssim_component (comp, data1 + offset, data2 + offset, + cw, ch, step, stride); + GST_LOG_OBJECT (comp, "ssim[%d] = %f", i, cssim[i]); + } + +#ifndef GST_DISABLE_GST_DEBUG + for (i = 0; i < 4; i++) { + GST_DEBUG_OBJECT (comp, "ssim[%d] = %f, c[%d] = %f", i, cssim[i], i, c[i]); + } +#endif + + ssim = cssim[0] * c[0] + cssim[1] * c[1] + cssim[2] * c[2] + cssim[3] * c[3]; + + return ssim; + + /* ERRORS */ +invalid_input: + { + GST_ERROR_OBJECT (comp, "ssim method needs raw video input"); + return 0; + } +unsupported_input: + { + GST_ERROR_OBJECT (comp, "raw video format not supported %" GST_PTR_FORMAT, + caps); + return 0; + } +} + +static void +gst_compare_buffers (GstCompare * comp, GstBuffer * buf1, GstBuffer * buf2) +{ + gdouble delta = 0; + + /* first check metadata */ + gst_compare_meta (comp, buf1, buf2); + + /* check content according to method */ + /* but at least size should match */ + if (GST_BUFFER_SIZE (buf1) != GST_BUFFER_SIZE (buf2)) { + delta = comp->threshold + 1; + } else { + GST_MEMDUMP_OBJECT (comp, "buffer 1", GST_BUFFER_DATA (buf1), + GST_BUFFER_SIZE (buf1)); + GST_MEMDUMP_OBJECT (comp, "buffer 2", GST_BUFFER_DATA (buf2), + GST_BUFFER_SIZE (buf2)); + switch (comp->method) { + case GST_COMPARE_METHOD_MEM: + delta = gst_compare_mem (comp, buf1, buf2); + break; + case GST_COMPARE_METHOD_MAX: + delta = gst_compare_max (comp, buf1, buf2); + break; + case GST_COMPARE_METHOD_SSIM: + delta = gst_compare_ssim (comp, buf1, buf2); + break; + default: + g_assert_not_reached (); + break; + } + } + + if ((comp->upper && delta > comp->threshold) || + (!comp->upper && delta < comp->threshold)) { + GST_WARNING_OBJECT (comp, "buffers %p and %p failed content match %f", + buf1, buf2, delta); + + gst_element_post_message (GST_ELEMENT (comp), + gst_message_new_element (GST_OBJECT (comp), + gst_structure_new ("delta", "content", G_TYPE_DOUBLE, delta, + NULL))); + } +} + +static GstFlowReturn +gst_compare_collect_pads (GstCollectPads * cpads, GstCompare * comp) +{ + GstBuffer *buf1, *buf2; + + buf1 = gst_collect_pads_pop (comp->cpads, + gst_pad_get_element_private (comp->sinkpad)); + + buf2 = gst_collect_pads_pop (comp->cpads, + gst_pad_get_element_private (comp->checkpad)); + + if (!buf1 && !buf2) { + gst_pad_push_event (comp->srcpad, gst_event_new_eos ()); + return GST_FLOW_UNEXPECTED; + } else if (buf1 && buf2) { + gst_compare_buffers (comp, buf1, buf2); + } else { + GST_WARNING_OBJECT (comp, "buffer %p != NULL", buf1 ? 
buf1 : buf2); + + comp->count++; + gst_element_post_message (GST_ELEMENT (comp), + gst_message_new_element (GST_OBJECT (comp), + gst_structure_new ("delta", "count", G_TYPE_INT, comp->count, + NULL))); + } + + if (buf1) + gst_pad_push (comp->srcpad, buf1); + + if (buf2) + gst_buffer_unref (buf2); + + return GST_FLOW_OK; +} + +static void +gst_compare_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + GstCompare *comp = GST_COMPARE (object); + + switch (prop_id) { + case PROP_META: + comp->meta = g_value_get_flags (value); + break; + case PROP_OFFSET_TS: + comp->offset_ts = g_value_get_boolean (value); + break; + case PROP_METHOD: + comp->method = g_value_get_enum (value); + break; + case PROP_THRESHOLD: + comp->threshold = g_value_get_double (value); + break; + case PROP_UPPER: + comp->upper = g_value_get_boolean (value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_compare_get_property (GObject * object, guint prop_id, GValue * value, + GParamSpec * pspec) +{ + GstCompare *comp = GST_COMPARE (object); + + switch (prop_id) { + case PROP_META: + g_value_set_flags (value, comp->meta); + break; + case PROP_OFFSET_TS: + g_value_set_boolean (value, comp->offset_ts); + break; + case PROP_METHOD: + g_value_set_enum (value, comp->method); + break; + case PROP_THRESHOLD: + g_value_set_double (value, comp->threshold); + break; + case PROP_UPPER: + g_value_set_boolean (value, comp->upper); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static GstStateChangeReturn +gst_compare_change_state (GstElement * element, GstStateChange transition) +{ + GstCompare *comp = GST_COMPARE (element); + GstStateChangeReturn ret; + + switch (transition) { + case GST_STATE_CHANGE_NULL_TO_READY: + case GST_STATE_CHANGE_READY_TO_PAUSED: + gst_collect_pads_start (comp->cpads); + break; + case GST_STATE_CHANGE_PAUSED_TO_READY: + gst_collect_pads_stop (comp->cpads); + break; + default: + break; + } + + ret = GST_CALL_PARENT_WITH_DEFAULT (GST_ELEMENT_CLASS, change_state, + (element, transition), GST_STATE_CHANGE_SUCCESS); + if (ret != GST_STATE_CHANGE_SUCCESS) + return ret; + + switch (transition) { + case GST_STATE_CHANGE_PAUSED_TO_READY: + gst_compare_reset (comp); + break; + default: + break; + } + + return GST_STATE_CHANGE_SUCCESS; +} diff --git a/gst/debugutils/gstcompare.h b/gst/debugutils/gstcompare.h new file mode 100644 index 0000000000..bee5108e3e --- /dev/null +++ b/gst/debugutils/gstcompare.h @@ -0,0 +1,75 @@ +/* GStreamer Element + * + * Copyright 2011 Collabora Ltd. + * @author: Mark Nauwelaerts + * Copyright 2011 Nokia Corp. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + + +#ifndef __GST_COMPARE_H__ +#define __GST_COMPARE_H__ + + +#include + +G_BEGIN_DECLS + +#define GST_TYPE_COMPARE \ + (gst_compare_get_type()) +#define GST_COMPARE(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_COMPARE, GstCompare)) +#define GST_COMPARE_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_COMPARE, GstCompareClass)) +#define GST_COMPARE_GET_CLASS(obj) \ + (G_TYPE_INSTANCE_GET_CLASS((obj), GST_TYPE_COMPARE, GstCompareClass)) +#define GST_IS_COMPARE(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_COMPARE)) +#define GST_IS_COMPARE_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_COMPARE)) + +typedef struct _GstCompare GstCompare; +typedef struct _GstCompareClass GstCompareClass; + +struct _GstCompare { + GstElement element; + + GstPad *srcpad; + GstPad *sinkpad; + GstPad *checkpad; + + GstCollectPads *cpads; + + gint count; + + /* properties */ + GstBufferCopyFlags meta; + gboolean offset_ts; + gint method; + gdouble threshold; + gboolean upper; +}; + +struct _GstCompareClass { + GstElementClass parent_class; +}; + +GType gst_compare_get_type(void); + +G_END_DECLS + +#endif /* __GST_COMPARE_H__ */ diff --git a/gst/dvbsuboverlay/dvb-sub.c b/gst/dvbsuboverlay/dvb-sub.c index a0ef3e1519..3431a8cfe6 100644 --- a/gst/dvbsuboverlay/dvb-sub.c +++ b/gst/dvbsuboverlay/dvb-sub.c @@ -1,7 +1,6 @@ -/* -*- Mode: C; indent-tabs-mode: t; c-basic-offset: 4; tab-width: 4 -*- */ -/* - * libdvbsub - DVB subtitle decoding +/* dvb-sub.c - DVB subtitle decoding * Copyright (C) Mart Raudsepp 2009 + * Copyright (C) 2010 ONELAN Ltd. * * Heavily uses code algorithms ported from ffmpeg's libavcodec/dvbsubdec.c, * especially the segment parsers. The original license applies to this diff --git a/gst/dvbsuboverlay/gstdvbsuboverlay.c b/gst/dvbsuboverlay/gstdvbsuboverlay.c index 5dc04cb95b..add39c47b1 100644 --- a/gst/dvbsuboverlay/gstdvbsuboverlay.c +++ b/gst/dvbsuboverlay/gstdvbsuboverlay.c @@ -1,5 +1,6 @@ /* GStreamer DVB subtitles overlay * Copyright (c) 2010 Mart Raudsepp + * Copyright (c) 2010 ONELAN Ltd. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public diff --git a/gst/dvbsuboverlay/gstdvbsuboverlay.h b/gst/dvbsuboverlay/gstdvbsuboverlay.h index 19835c6c66..012c728ebe 100644 --- a/gst/dvbsuboverlay/gstdvbsuboverlay.h +++ b/gst/dvbsuboverlay/gstdvbsuboverlay.h @@ -1,5 +1,6 @@ /* GStreamer DVB subtitles overlay * Copyright (c) 2010 Mart Raudsepp + * Copyright (c) 2010 ONELAN Ltd. 
* * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public License diff --git a/gst/h264parse/gsth264parse.c b/gst/h264parse/gsth264parse.c index 514a739450..90a537bb7e 100644 --- a/gst/h264parse/gsth264parse.c +++ b/gst/h264parse/gsth264parse.c @@ -1134,11 +1134,11 @@ gst_h264_parse_make_codec_data (GstH264Parse * h264parse) num_sps++; /* size bytes also count */ sps_size += GST_BUFFER_SIZE (nal) - 4 + 2; - if (GST_BUFFER_SIZE (nal) >= 7) { + if (GST_BUFFER_SIZE (nal) >= 8) { found = TRUE; - profile_idc = (GST_BUFFER_DATA (nal))[4]; - profile_comp = (GST_BUFFER_DATA (nal))[5]; - level_idc = (GST_BUFFER_DATA (nal))[6]; + profile_idc = (GST_BUFFER_DATA (nal))[5]; + profile_comp = (GST_BUFFER_DATA (nal))[6]; + level_idc = (GST_BUFFER_DATA (nal))[7]; } } } @@ -1313,16 +1313,18 @@ gst_h264_parse_update_src_caps (GstH264Parse * h264parse, GstCaps * caps) alignment = "au"; } else { if (h264parse->packetized) { - /* if packetized input, take upstream alignment if validly provided, - * otherwise assume au aligned ... */ - alignment = gst_structure_get_string (structure, "alignment"); - if (!alignment || (alignment && - strcmp (alignment, "au") != 0 && - strcmp (alignment, "nal") != 0)) { - if (h264parse->split_packetized) - alignment = "nal"; - else + if (h264parse->split_packetized) + alignment = "nal"; + else { + /* if packetized input is not split, + * take upstream alignment if validly provided, + * otherwise assume au aligned ... */ + alignment = gst_structure_get_string (structure, "alignment"); + if (!alignment || (alignment && + strcmp (alignment, "au") != 0 && + strcmp (alignment, "nal") != 0)) { alignment = "au"; + } } } else { alignment = "nal"; diff --git a/gst/hls/gsthlsdemux.c b/gst/hls/gsthlsdemux.c index ce1f409c31..773d24986a 100644 --- a/gst/hls/gsthlsdemux.c +++ b/gst/hls/gsthlsdemux.c @@ -43,9 +43,9 @@ #include #include "gsthlsdemux.h" -static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src", +static GstStaticPadTemplate srctemplate = GST_STATIC_PAD_TEMPLATE ("src%d", GST_PAD_SRC, - GST_PAD_ALWAYS, + GST_PAD_SOMETIMES, GST_STATIC_CAPS_ANY); static GstStaticPadTemplate sinktemplate = GST_STATIC_PAD_TEMPLATE ("sink", @@ -214,15 +214,6 @@ gst_hls_demux_init (GstHLSDemux * demux, GstHLSDemuxClass * klass) GST_DEBUG_FUNCPTR (gst_hls_demux_sink_event)); gst_element_add_pad (GST_ELEMENT (demux), demux->sinkpad); - /* demux pad */ - demux->srcpad = gst_pad_new_from_static_template (&srctemplate, "src"); - gst_pad_set_event_function (demux->srcpad, - GST_DEBUG_FUNCPTR (gst_hls_demux_src_event)); - gst_pad_set_query_function (demux->srcpad, - GST_DEBUG_FUNCPTR (gst_hls_demux_src_query)); - gst_pad_set_element_private (demux->srcpad, demux); - gst_element_add_pad (GST_ELEMENT (demux), demux->srcpad); - /* fetcher pad */ demux->fetcherpad = gst_pad_new_from_static_template (&fetchertemplate, "sink"); @@ -233,6 +224,8 @@ gst_hls_demux_init (GstHLSDemux * demux, GstHLSDemuxClass * klass) gst_pad_set_element_private (demux->fetcherpad, demux); gst_pad_activate_push (demux->fetcherpad, TRUE); + demux->do_typefind = TRUE; + /* Properties */ demux->fragments_cache = DEFAULT_FRAGMENTS_CACHE; demux->bitrate_switch_tol = DEFAULT_BITRATE_SWITCH_TOLERANCE; @@ -249,6 +242,8 @@ gst_hls_demux_init (GstHLSDemux * demux, GstHLSDemuxClass * klass) g_static_rec_mutex_init (&demux->task_lock); demux->task = gst_task_create ((GstTaskFunction) gst_hls_demux_loop, demux); gst_task_set_lock (demux->task, &demux->task_lock); + + 
demux->position = 0; } static void @@ -370,14 +365,15 @@ gst_hls_demux_sink_event (GstPad * pad, GstEvent * event) /* In most cases, this will happen if we set a wrong url in the * source element and we have received the 404 HTML response instead of * the playlist */ - GST_ELEMENT_ERROR (demux, STREAM, DECODE, ("Invalid playlist."), NULL); + GST_ELEMENT_ERROR (demux, STREAM, DECODE, ("Invalid playlist."), + (NULL)); return FALSE; } if (!ret && gst_m3u8_client_is_live (demux->client)) { GST_ELEMENT_ERROR (demux, RESOURCE, NOT_FOUND, ("Failed querying the playlist uri, " - "required for live sources."), NULL); + "required for live sources."), (NULL)); return FALSE; } @@ -385,6 +381,10 @@ gst_hls_demux_sink_event (GstPad * pad, GstEvent * event) gst_event_unref (event); return TRUE; } + case GST_EVENT_NEWSEGMENT: + /* Swallow newsegments, we'll push our own */ + gst_event_unref (event); + return TRUE; default: break; } @@ -555,6 +555,34 @@ gst_hls_demux_stop (GstHLSDemux * demux) g_cond_signal (demux->thread_cond); } +static void +switch_pads (GstHLSDemux * demux, GstCaps * newcaps) +{ + GstPad *oldpad = demux->srcpad; + + GST_DEBUG ("Switching pads (oldpad:%p)", oldpad); + + /* First create and activate new pad */ + demux->srcpad = gst_pad_new_from_static_template (&srctemplate, NULL); + gst_pad_set_event_function (demux->srcpad, + GST_DEBUG_FUNCPTR (gst_hls_demux_src_event)); + gst_pad_set_query_function (demux->srcpad, + GST_DEBUG_FUNCPTR (gst_hls_demux_src_query)); + gst_pad_set_element_private (demux->srcpad, demux); + gst_pad_set_active (demux->srcpad, TRUE); + gst_element_add_pad (GST_ELEMENT (demux), demux->srcpad); + gst_pad_set_caps (demux->srcpad, newcaps); + + gst_element_no_more_pads (GST_ELEMENT (demux)); + + if (oldpad) { + /* Push out EOS */ + gst_pad_push_event (oldpad, gst_event_new_eos ()); + gst_pad_set_active (oldpad, FALSE); + gst_element_remove_pad (GST_ELEMENT (demux), oldpad); + } +} + static void gst_hls_demux_loop (GstHLSDemux * demux) { @@ -588,6 +616,20 @@ gst_hls_demux_loop (GstHLSDemux * demux) } buf = g_queue_pop_head (demux->queue); + + /* Figure out if we need to create/switch pads */ + if (G_UNLIKELY (!demux->srcpad + || GST_BUFFER_CAPS (buf) != GST_PAD_CAPS (demux->srcpad))) { + switch_pads (demux, GST_BUFFER_CAPS (buf)); + /* And send a newsegment */ + gst_pad_push_event (demux->srcpad, + gst_event_new_new_segment (0, 1.0, GST_FORMAT_TIME, demux->position, + GST_CLOCK_TIME_NONE, demux->position)); + } + + if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buf))) + demux->position += GST_BUFFER_DURATION (buf); + ret = gst_pad_push (demux->srcpad, buf); if (ret != GST_FLOW_OK) goto error; @@ -605,7 +647,7 @@ end_of_playlist: cache_error: { GST_ELEMENT_ERROR (demux, RESOURCE, NOT_FOUND, - ("Could not cache the first fragments"), NULL); + ("Could not cache the first fragments"), (NULL)); gst_hls_demux_stop (demux); return; } @@ -667,6 +709,7 @@ gst_hls_demux_reset (GstHLSDemux * demux, gboolean dispose) demux->accumulated_delay = 0; demux->end_of_playlist = FALSE; demux->cancelled = FALSE; + demux->do_typefind = TRUE; if (demux->input_caps) { gst_caps_unref (demux->input_caps); @@ -868,7 +911,7 @@ uri_error: state_change_error: { GST_ELEMENT_ERROR (demux, CORE, STATE_CHANGE, - ("Error changing state of the fetcher element."), NULL); + ("Error changing state of the fetcher element."), (NULL)); bret = FALSE; goto quit; } @@ -946,6 +989,9 @@ gst_hls_demux_change_playlist (GstHLSDemux * demux, gboolean is_fast) gst_element_post_message (GST_ELEMENT_CAST (demux), 
gst_message_new_element (GST_OBJECT_CAST (demux), s)); + /* Force typefinding since we might have changed media type */ + demux->do_typefind = TRUE; + return TRUE; } @@ -993,6 +1039,9 @@ gst_hls_demux_switch_playlist (GstHLSDemux * demux) limit = demux->client->current->targetduration * GST_SECOND * demux->bitrate_switch_tol; + GST_DEBUG ("diff:%s%" GST_TIME_FORMAT ", limit:%" GST_TIME_FORMAT, + diff < 0 ? "-" : " ", GST_TIME_ARGS (ABS (diff)), GST_TIME_ARGS (limit)); + /* if we are on time switch to a higher bitrate */ if (diff > limit) { gst_hls_demux_change_playlist (demux, TRUE); @@ -1035,14 +1084,20 @@ gst_hls_demux_get_next_fragment (GstHLSDemux * demux, gboolean retry) buf = gst_adapter_take_buffer (demux->download, avail); GST_BUFFER_DURATION (buf) = duration; - if (G_UNLIKELY (demux->input_caps == NULL)) { - demux->input_caps = gst_type_find_helper_for_buffer (NULL, buf, NULL); - if (demux->input_caps) { - gst_pad_set_caps (demux->srcpad, demux->input_caps); + /* We actually need to do this every time we switch bitrate */ + if (G_UNLIKELY (demux->do_typefind)) { + GstCaps *caps = gst_type_find_helper_for_buffer (NULL, buf, NULL); + + if (!demux->input_caps || !gst_caps_is_equal (caps, demux->input_caps)) { + gst_caps_replace (&demux->input_caps, caps); + /* gst_pad_set_caps (demux->srcpad, demux->input_caps); */ GST_INFO_OBJECT (demux, "Input source caps: %" GST_PTR_FORMAT, demux->input_caps); - } + demux->do_typefind = FALSE; + } else + gst_caps_unref (caps); } + gst_buffer_set_caps (buf, demux->input_caps); if (discont) { GST_DEBUG_OBJECT (demux, "Marking fragment as discontinuous"); diff --git a/gst/hls/gsthlsdemux.h b/gst/hls/gsthlsdemux.h index caf6437207..67574675d3 100644 --- a/gst/hls/gsthlsdemux.h +++ b/gst/hls/gsthlsdemux.h @@ -61,7 +61,7 @@ struct _GstHLSDemux GQueue *queue; /* Queue storing the fetched fragments */ gboolean need_cache; /* Wheter we need to cache some fragments before starting to push data */ gboolean end_of_playlist; - + gboolean do_typefind; /* Whether we need to typefind the next buffer */ /* Properties */ guint fragments_cache; /* number of fragments needed to be cached to start playing */ @@ -87,6 +87,8 @@ struct _GstHLSDemux gboolean cancelled; GstAdapter *download; + /* Position in the stream */ + GstClockTime position; }; struct _GstHLSDemuxClass diff --git a/gst/id3tag/Makefile.am b/gst/id3tag/Makefile.am index bb9c150ce9..f2ed73df73 100644 --- a/gst/id3tag/Makefile.am +++ b/gst/id3tag/Makefile.am @@ -2,7 +2,6 @@ plugin_LTLIBRARIES = libgstid3tag.la libgstid3tag_la_SOURCES = \ gstid3mux.c \ - gsttagmux.c \ id3tag.c libgstid3tag_la_CFLAGS = \ @@ -16,7 +15,7 @@ libgstid3tag_la_LIBADD = \ libgstid3tag_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) libgstid3tag_la_LIBTOOLFLAGS = --tag=disable-static -noinst_HEADERS = gstid3mux.h gsttagmux.h id3tag.h +noinst_HEADERS = gstid3mux.h id3tag.h Android.mk: Makefile.am $(BUILT_SOURCES) androgenizer \ diff --git a/gst/id3tag/gstid3mux.c b/gst/id3tag/gstid3mux.c index 43ab1a08b4..f2330d2b10 100644 --- a/gst/id3tag/gstid3mux.c +++ b/gst/id3tag/gstid3mux.c @@ -71,6 +71,11 @@ enum #define DEFAULT_WRITE_V2 TRUE #define DEFAULT_V2_MAJOR_VERSION 3 +static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("ANY")); + static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS, @@ -79,9 +84,9 @@ static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src", GST_BOILERPLATE (GstId3Mux, gst_id3_mux, 
GstTagMux, GST_TYPE_TAG_MUX); static GstBuffer *gst_id3_mux_render_v2_tag (GstTagMux * mux, - GstTagList * taglist); + const GstTagList * taglist); static GstBuffer *gst_id3_mux_render_v1_tag (GstTagMux * mux, - GstTagList * taglist); + const GstTagList * taglist); static void gst_id3_mux_set_property (GObject * object, guint prop_id, const GValue * value, GParamSpec * pspec); @@ -93,6 +98,9 @@ gst_id3_mux_base_init (gpointer g_class) { GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&sink_template)); + gst_element_class_add_pad_template (element_class, gst_static_pad_template_get (&src_template)); @@ -129,8 +137,8 @@ gst_id3_mux_class_init (GstId3MuxClass * klass) GST_TAG_MUX_CLASS (klass)->render_start_tag = GST_DEBUG_FUNCPTR (gst_id3_mux_render_v2_tag); - - GST_TAG_MUX_CLASS (klass)->render_end_tag = gst_id3_mux_render_v1_tag; + GST_TAG_MUX_CLASS (klass)->render_end_tag = + GST_DEBUG_FUNCPTR (gst_id3_mux_render_v1_tag); } static void @@ -187,7 +195,7 @@ gst_id3_mux_get_property (GObject * object, guint prop_id, } static GstBuffer * -gst_id3_mux_render_v2_tag (GstTagMux * mux, GstTagList * taglist) +gst_id3_mux_render_v2_tag (GstTagMux * mux, const GstTagList * taglist) { GstId3Mux *id3mux = GST_ID3_MUX (mux); @@ -198,7 +206,7 @@ gst_id3_mux_render_v2_tag (GstTagMux * mux, GstTagList * taglist) } static GstBuffer * -gst_id3_mux_render_v1_tag (GstTagMux * mux, GstTagList * taglist) +gst_id3_mux_render_v1_tag (GstTagMux * mux, const GstTagList * taglist) { GstId3Mux *id3mux = GST_ID3_MUX (mux); diff --git a/gst/id3tag/gstid3mux.h b/gst/id3tag/gstid3mux.h index eb5aa050a9..ba4f930c28 100644 --- a/gst/id3tag/gstid3mux.h +++ b/gst/id3tag/gstid3mux.h @@ -23,7 +23,7 @@ #ifndef GST_ID3_MUX_H #define GST_ID3_MUX_H -#include "gsttagmux.h" +#include #include "id3tag.h" G_BEGIN_DECLS diff --git a/gst/id3tag/gsttagmux.c b/gst/id3tag/gsttagmux.c deleted file mode 100644 index 4aafb96d85..0000000000 --- a/gst/id3tag/gsttagmux.c +++ /dev/null @@ -1,495 +0,0 @@ -/* GStreamer tag muxer base class - * - * Copyright (C) 2006 Christophe Fergeau - * Copyright (C) 2006 Tim-Philipp Müller - * Copyright (C) 2006 Sebastian Dröge - * Copyright (C) 2009 Pioneers of the Inevitable - * - * This library is free software; you can redistribute it and/or - * modify it under the terms of the GNU Library General Public - * License as published by the Free Software Foundation; either - * version 2 of the License, or (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Library General Public License for more details. - * - * You should have received a copy of the GNU Library General Public - * License along with this library; if not, write to the - * Free Software Foundation, Inc., 59 Temple Place - Suite 330, - * Boston, MA 02111-1307, USA. - */ - -#ifdef HAVE_CONFIG_H -#include -#endif - -#include -#include -#include - -#include "gsttagmux.h" - -GST_DEBUG_CATEGORY_STATIC (gst_tag_mux_debug); -#define GST_CAT_DEFAULT gst_tag_mux_debug - -/* Subclass provides a src template and pad. We accept anything as input here, - however. 
*/ - -static GstStaticPadTemplate gst_tag_mux_sink_template = -GST_STATIC_PAD_TEMPLATE ("sink", - GST_PAD_SINK, - GST_PAD_ALWAYS, - GST_STATIC_CAPS ("ANY")); - -static void -gst_tag_mux_iface_init (GType tag_type) -{ - static const GInterfaceInfo tag_setter_info = { - NULL, - NULL, - NULL - }; - - g_type_add_interface_static (tag_type, GST_TYPE_TAG_SETTER, &tag_setter_info); -} - -/* make sure to register a less generic type so we can easily move this - * GstTagMux base class into -base without causing GType name conflicts */ -typedef GstTagMux GstId3BaseMux; -typedef GstTagMuxClass GstId3BaseMuxClass; - -GST_BOILERPLATE_FULL (GstId3BaseMux, gst_tag_mux, - GstElement, GST_TYPE_ELEMENT, gst_tag_mux_iface_init); - - -static GstStateChangeReturn -gst_tag_mux_change_state (GstElement * element, GstStateChange transition); -static GstFlowReturn gst_tag_mux_chain (GstPad * pad, GstBuffer * buffer); -static gboolean gst_tag_mux_sink_event (GstPad * pad, GstEvent * event); - -static void -gst_tag_mux_finalize (GObject * obj) -{ - GstTagMux *mux = GST_TAG_MUX (obj); - - if (mux->newsegment_ev) { - gst_event_unref (mux->newsegment_ev); - mux->newsegment_ev = NULL; - } - - if (mux->event_tags) { - gst_tag_list_free (mux->event_tags); - mux->event_tags = NULL; - } - - if (mux->final_tags) { - gst_tag_list_free (mux->final_tags); - mux->final_tags = NULL; - } - - G_OBJECT_CLASS (parent_class)->finalize (obj); -} - -static void -gst_tag_mux_base_init (gpointer g_class) -{ - GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); - - gst_element_class_add_pad_template (element_class, - gst_static_pad_template_get (&gst_tag_mux_sink_template)); - - GST_DEBUG_CATEGORY_INIT (gst_tag_mux_debug, "id3basemux", 0, - "tag muxer base class for Id3Mux"); -} - -static void -gst_tag_mux_class_init (GstTagMuxClass * klass) -{ - GObjectClass *gobject_class; - GstElementClass *gstelement_class; - - gobject_class = (GObjectClass *) klass; - gstelement_class = (GstElementClass *) klass; - - gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_tag_mux_finalize); - gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_tag_mux_change_state); -} - -static void -gst_tag_mux_init (GstTagMux * mux, GstTagMuxClass * mux_class) -{ - GstElementClass *element_klass = GST_ELEMENT_CLASS (mux_class); - GstPadTemplate *tmpl; - - /* pad through which data comes in to the element */ - mux->sinkpad = - gst_pad_new_from_static_template (&gst_tag_mux_sink_template, "sink"); - gst_pad_set_chain_function (mux->sinkpad, - GST_DEBUG_FUNCPTR (gst_tag_mux_chain)); - gst_pad_set_event_function (mux->sinkpad, - GST_DEBUG_FUNCPTR (gst_tag_mux_sink_event)); - gst_element_add_pad (GST_ELEMENT (mux), mux->sinkpad); - - /* pad through which data goes out of the element */ - tmpl = gst_element_class_get_pad_template (element_klass, "src"); - if (tmpl) { - mux->srcpad = gst_pad_new_from_template (tmpl, "src"); - gst_pad_use_fixed_caps (mux->srcpad); - gst_pad_set_caps (mux->srcpad, gst_pad_template_get_caps (tmpl)); - gst_element_add_pad (GST_ELEMENT (mux), mux->srcpad); - } - - mux->render_start_tag = TRUE; - mux->render_end_tag = TRUE; -} - -static GstTagList * -gst_tag_mux_get_tags (GstTagMux * mux) -{ - GstTagSetter *tagsetter = GST_TAG_SETTER (mux); - const GstTagList *tagsetter_tags; - GstTagMergeMode merge_mode; - - if (mux->final_tags) - return mux->final_tags; - - tagsetter_tags = gst_tag_setter_get_tag_list (tagsetter); - merge_mode = gst_tag_setter_get_tag_merge_mode (tagsetter); - - GST_LOG_OBJECT (mux, "merging tags, merge mode = %d", 
merge_mode); - GST_LOG_OBJECT (mux, "event tags: %" GST_PTR_FORMAT, mux->event_tags); - GST_LOG_OBJECT (mux, "set tags: %" GST_PTR_FORMAT, tagsetter_tags); - - mux->final_tags = - gst_tag_list_merge (tagsetter_tags, mux->event_tags, merge_mode); - - GST_LOG_OBJECT (mux, "final tags: %" GST_PTR_FORMAT, mux->final_tags); - - return mux->final_tags; -} - -static GstFlowReturn -gst_tag_mux_render_start_tag (GstTagMux * mux) -{ - GstTagMuxClass *klass; - GstBuffer *buffer; - GstTagList *taglist; - GstEvent *event; - GstFlowReturn ret; - - taglist = gst_tag_mux_get_tags (mux); - - klass = GST_TAG_MUX_CLASS (G_OBJECT_GET_CLASS (mux)); - - if (klass->render_start_tag == NULL) - goto no_vfunc; - - buffer = klass->render_start_tag (mux, taglist); - - /* Null buffer is ok, just means we're not outputting anything */ - if (buffer == NULL) { - GST_INFO_OBJECT (mux, "No start tag generated"); - mux->start_tag_size = 0; - return GST_FLOW_OK; - } - - mux->start_tag_size = GST_BUFFER_SIZE (buffer); - GST_LOG_OBJECT (mux, "tag size = %" G_GSIZE_FORMAT " bytes", - mux->start_tag_size); - - /* Send newsegment event from byte position 0, so the tag really gets - * written to the start of the file, independent of the upstream segment */ - gst_pad_push_event (mux->srcpad, - gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0)); - - /* Send an event about the new tags to downstream elements */ - /* gst_event_new_tag takes ownership of the list, so use a copy */ - event = gst_event_new_tag (gst_tag_list_copy (taglist)); - gst_pad_push_event (mux->srcpad, event); - - GST_BUFFER_OFFSET (buffer) = 0; - ret = gst_pad_push (mux->srcpad, buffer); - - mux->current_offset = mux->start_tag_size; - mux->max_offset = MAX (mux->max_offset, mux->current_offset); - - return ret; - -no_vfunc: - { - GST_ERROR_OBJECT (mux, "Subclass does not implement " - "render_start_tag vfunc!"); - return GST_FLOW_ERROR; - } -} - -static GstFlowReturn -gst_tag_mux_render_end_tag (GstTagMux * mux) -{ - GstTagMuxClass *klass; - GstBuffer *buffer; - GstTagList *taglist; - GstFlowReturn ret; - - taglist = gst_tag_mux_get_tags (mux); - - klass = GST_TAG_MUX_CLASS (G_OBJECT_GET_CLASS (mux)); - - if (klass->render_end_tag == NULL) - goto no_vfunc; - - buffer = klass->render_end_tag (mux, taglist); - - if (buffer == NULL) { - GST_INFO_OBJECT (mux, "No end tag generated"); - mux->end_tag_size = 0; - return GST_FLOW_OK; - } - - mux->end_tag_size = GST_BUFFER_SIZE (buffer); - GST_LOG_OBJECT (mux, "tag size = %" G_GSIZE_FORMAT " bytes", - mux->end_tag_size); - - /* Send newsegment event from the end of the file, so it gets written there, - independent of whatever new segment events upstream has sent us */ - gst_pad_push_event (mux->srcpad, - gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, mux->max_offset, - -1, 0)); - - GST_BUFFER_OFFSET (buffer) = mux->max_offset; - ret = gst_pad_push (mux->srcpad, buffer); - - return ret; - -no_vfunc: - { - GST_ERROR_OBJECT (mux, "Subclass does not implement " - "render_end_tag vfunc!"); - return GST_FLOW_ERROR; - } -} - -static GstEvent * -gst_tag_mux_adjust_event_offsets (GstTagMux * mux, - const GstEvent * newsegment_event) -{ - GstFormat format; - gint64 start, stop, cur; - - gst_event_parse_new_segment ((GstEvent *) newsegment_event, NULL, NULL, - &format, &start, &stop, &cur); - - g_assert (format == GST_FORMAT_BYTES); - - if (start != -1) - start += mux->start_tag_size; - if (stop != -1) - stop += mux->start_tag_size; - if (cur != -1) - cur += mux->start_tag_size; - - GST_DEBUG_OBJECT (mux, 
"adjusting newsegment event offsets to start=%" - G_GINT64_FORMAT ", stop=%" G_GINT64_FORMAT ", cur=%" G_GINT64_FORMAT - " (delta = +%" G_GSIZE_FORMAT ")", start, stop, cur, mux->start_tag_size); - - return gst_event_new_new_segment (TRUE, 1.0, format, start, stop, cur); -} - -static GstFlowReturn -gst_tag_mux_chain (GstPad * pad, GstBuffer * buffer) -{ - GstTagMux *mux = GST_TAG_MUX (GST_OBJECT_PARENT (pad)); - GstFlowReturn ret; - int length; - - if (mux->render_start_tag) { - - GST_INFO_OBJECT (mux, "Adding tags to stream"); - ret = gst_tag_mux_render_start_tag (mux); - if (ret != GST_FLOW_OK) { - GST_DEBUG_OBJECT (mux, "flow: %s", gst_flow_get_name (ret)); - gst_buffer_unref (buffer); - return ret; - } - - /* Now send the cached newsegment event that we got from upstream */ - if (mux->newsegment_ev) { - gint64 start; - GstEvent *newseg; - - GST_DEBUG_OBJECT (mux, "sending cached newsegment event"); - newseg = gst_tag_mux_adjust_event_offsets (mux, mux->newsegment_ev); - gst_event_unref (mux->newsegment_ev); - mux->newsegment_ev = NULL; - - gst_event_parse_new_segment (newseg, NULL, NULL, NULL, &start, NULL, - NULL); - - gst_pad_push_event (mux->srcpad, newseg); - mux->current_offset = start; - mux->max_offset = MAX (mux->max_offset, mux->current_offset); - } else { - /* upstream sent no newsegment event or only one in a non-BYTE format */ - } - - mux->render_start_tag = FALSE; - } - - buffer = gst_buffer_make_metadata_writable (buffer); - - if (GST_BUFFER_OFFSET (buffer) != GST_BUFFER_OFFSET_NONE) { - GST_LOG_OBJECT (mux, "Adjusting buffer offset from %" G_GINT64_FORMAT - " to %" G_GINT64_FORMAT, GST_BUFFER_OFFSET (buffer), - GST_BUFFER_OFFSET (buffer) + mux->start_tag_size); - GST_BUFFER_OFFSET (buffer) += mux->start_tag_size; - } - - length = GST_BUFFER_SIZE (buffer); - - gst_buffer_set_caps (buffer, GST_PAD_CAPS (mux->srcpad)); - ret = gst_pad_push (mux->srcpad, buffer); - - mux->current_offset += length; - mux->max_offset = MAX (mux->max_offset, mux->current_offset); - - return ret; -} - -static gboolean -gst_tag_mux_sink_event (GstPad * pad, GstEvent * event) -{ - GstTagMux *mux; - gboolean result; - - mux = GST_TAG_MUX (gst_pad_get_parent (pad)); - result = FALSE; - - switch (GST_EVENT_TYPE (event)) { - case GST_EVENT_TAG:{ - GstTagList *tags; - - gst_event_parse_tag (event, &tags); - - GST_INFO_OBJECT (mux, "Got tag event: %" GST_PTR_FORMAT, tags); - - if (mux->event_tags != NULL) { - gst_tag_list_insert (mux->event_tags, tags, GST_TAG_MERGE_REPLACE); - } else { - mux->event_tags = gst_tag_list_copy (tags); - } - - GST_INFO_OBJECT (mux, "Event tags are now: %" GST_PTR_FORMAT, - mux->event_tags); - - /* just drop the event, we'll push a new tag event in render_start_tag */ - gst_event_unref (event); - result = TRUE; - break; - } - case GST_EVENT_NEWSEGMENT:{ - GstFormat fmt; - gint64 start; - - gst_event_parse_new_segment (event, NULL, NULL, &fmt, &start, NULL, NULL); - - if (fmt != GST_FORMAT_BYTES) { - GST_WARNING_OBJECT (mux, "dropping newsegment event in %s format", - gst_format_get_name (fmt)); - gst_event_unref (event); - break; - } - - if (mux->render_start_tag) { - /* we have not rendered the tag yet, which means that we don't know - * how large it is going to be yet, so we can't adjust the offsets - * here at this point and need to cache the newsegment event for now - * (also, there could be tag events coming after this newsegment event - * and before the first buffer). 
*/ - if (mux->newsegment_ev) { - GST_WARNING_OBJECT (mux, "discarding old cached newsegment event"); - gst_event_unref (mux->newsegment_ev); - } - - GST_LOG_OBJECT (mux, "caching newsegment event for later"); - mux->newsegment_ev = event; - } else { - GST_DEBUG_OBJECT (mux, "got newsegment event, adjusting offsets"); - gst_pad_push_event (mux->srcpad, - gst_tag_mux_adjust_event_offsets (mux, event)); - gst_event_unref (event); - - mux->current_offset = start; - mux->max_offset = MAX (mux->max_offset, mux->current_offset); - } - event = NULL; - result = TRUE; - break; - } - case GST_EVENT_EOS:{ - if (mux->render_end_tag) { - GstFlowReturn ret; - - GST_INFO_OBJECT (mux, "Adding tags to stream"); - ret = gst_tag_mux_render_end_tag (mux); - if (ret != GST_FLOW_OK) { - GST_DEBUG_OBJECT (mux, "flow: %s", gst_flow_get_name (ret)); - return ret; - } - - mux->render_end_tag = FALSE; - } - - /* Now forward EOS */ - result = gst_pad_event_default (pad, event); - break; - } - default: - result = gst_pad_event_default (pad, event); - break; - } - - gst_object_unref (mux); - - return result; -} - - -static GstStateChangeReturn -gst_tag_mux_change_state (GstElement * element, GstStateChange transition) -{ - GstTagMux *mux; - GstStateChangeReturn result; - - mux = GST_TAG_MUX (element); - - result = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); - if (result != GST_STATE_CHANGE_SUCCESS) { - return result; - } - - switch (transition) { - case GST_STATE_CHANGE_PAUSED_TO_READY:{ - if (mux->newsegment_ev) { - gst_event_unref (mux->newsegment_ev); - mux->newsegment_ev = NULL; - } - if (mux->event_tags) { - gst_tag_list_free (mux->event_tags); - mux->event_tags = NULL; - } - mux->start_tag_size = 0; - mux->end_tag_size = 0; - mux->render_start_tag = TRUE; - mux->render_end_tag = TRUE; - mux->current_offset = 0; - mux->max_offset = 0; - break; - } - default: - break; - } - - return result; -} diff --git a/gst/id3tag/gsttagmux.h b/gst/id3tag/gsttagmux.h deleted file mode 100644 index c13a7326d2..0000000000 --- a/gst/id3tag/gsttagmux.h +++ /dev/null @@ -1,79 +0,0 @@ -/* GStreamer tag muxer base class - * - * Copyright (C) 2006 Christophe Fergeau - * Copyright (C) 2006 Tim-Philipp Müller - * Copyright (C) 2009 Pioneers of the Inevitable - * - * This library is free software; you can redistribute it and/or - * modify it under the terms of the GNU Library General Public - * License as published by the Free Software Foundation; either - * version 2 of the License, or (at your option) any later version. - * - * This library is distributed in the hope that it will be useful, - * but WITHOUT ANY WARRANTY; without even the implied warranty of - * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU - * Library General Public License for more details. - * - * You should have received a copy of the GNU Library General Public - * License along with this library; if not, write to the - * Free Software Foundation, Inc., 59 Temple Place - Suite 330, - * Boston, MA 02111-1307, USA. - */ - -#ifndef GST_TAG_MUX_H -#define GST_TAG_MUX_H - -#include - -G_BEGIN_DECLS - -typedef struct _GstTagMux GstTagMux; -typedef struct _GstTagMuxClass GstTagMuxClass; - -/* Definition of structure storing data for this element. 
*/ -struct _GstTagMux { - GstElement element; - - GstPad *srcpad; - GstPad *sinkpad; - GstTagList *event_tags; /* tags received from upstream elements */ - GstTagList *final_tags; /* Final set of tags used for muxing */ - gsize start_tag_size; - gsize end_tag_size; - gboolean render_start_tag; - gboolean render_end_tag; - - gint64 current_offset; - gint64 max_offset; - - GstEvent *newsegment_ev; /* cached newsegment event from upstream */ -}; - -/* Standard definition defining a class for this element. */ -struct _GstTagMuxClass { - GstElementClass parent_class; - - /* vfuncs */ - GstBuffer * (*render_start_tag) (GstTagMux * mux, GstTagList * tag_list); - GstBuffer * (*render_end_tag) (GstTagMux * mux, GstTagList * tag_list); -}; - -/* Standard macros for defining types for this element. */ -#define GST_TYPE_TAG_MUX \ - (gst_tag_mux_get_type()) -#define GST_TAG_MUX(obj) \ - (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_TAG_MUX,GstTagMux)) -#define GST_TAG_MUX_CLASS(klass) \ - (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_TAG_MUX,GstTagMuxClass)) -#define GST_IS_TAG_MUX(obj) \ - (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_TAG_MUX)) -#define GST_IS_TAG_MUX_CLASS(klass) \ - (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_TAG_MUX)) - -/* Standard function returning type information. */ -GType gst_tag_mux_get_type (void); - -G_END_DECLS - -#endif - diff --git a/gst/id3tag/id3tag.c b/gst/id3tag/id3tag.c index 9645dd3290..fc69a49c48 100644 --- a/gst/id3tag/id3tag.c +++ b/gst/id3tag/id3tag.c @@ -1132,7 +1132,7 @@ foreach_add_tag (const GstTagList * list, const gchar * tag, gpointer userdata) } GstBuffer * -id3_mux_render_v2_tag (GstTagMux * mux, GstTagList * taglist, int version) +id3_mux_render_v2_tag (GstTagMux * mux, const GstTagList * taglist, int version) { GstId3v2Tag tag; GstBuffer *buf; @@ -1159,7 +1159,6 @@ id3_mux_render_v2_tag (GstTagMux * mux, GstTagList * taglist, int version) /* Create buffer with tag */ buf = id3v2_tag_to_buffer (&tag); - gst_buffer_set_caps (buf, GST_PAD_CAPS (mux->srcpad)); GST_LOG_OBJECT (mux, "tag size = %d bytes", GST_BUFFER_SIZE (buf)); id3v2_tag_unset (&tag); @@ -1285,7 +1284,7 @@ static const struct }; GstBuffer * -id3_mux_render_v1_tag (GstTagMux * mux, GstTagList * taglist) +id3_mux_render_v1_tag (GstTagMux * mux, const GstTagList * taglist) { GstBuffer *buf = gst_buffer_new_and_alloc (ID3_V1_TAG_SIZE); guint8 *data = GST_BUFFER_DATA (buf); @@ -1312,6 +1311,5 @@ id3_mux_render_v1_tag (GstTagMux * mux, GstTagList * taglist) return NULL; } - gst_buffer_set_caps (buf, GST_PAD_CAPS (mux->srcpad)); return buf; } diff --git a/gst/id3tag/id3tag.h b/gst/id3tag/id3tag.h index d55049691f..321c7b8fbe 100644 --- a/gst/id3tag/id3tag.h +++ b/gst/id3tag/id3tag.h @@ -17,16 +17,18 @@ * Boston, MA 02111-1307, USA. 
*/ -#include "gsttagmux.h" +#include G_BEGIN_DECLS #define ID3_VERSION_2_3 3 #define ID3_VERSION_2_4 4 -GstBuffer * id3_mux_render_v2_tag (GstTagMux * mux, GstTagList * taglist, - int version); -GstBuffer * id3_mux_render_v1_tag (GstTagMux * mux, GstTagList * taglist); +GstBuffer * id3_mux_render_v2_tag (GstTagMux * mux, + const GstTagList * taglist, + int version); + +GstBuffer * id3_mux_render_v1_tag (GstTagMux * mux, + const GstTagList * taglist); G_END_DECLS - diff --git a/gst/inter/.gitignore b/gst/inter/.gitignore new file mode 100644 index 0000000000..0644f4986f --- /dev/null +++ b/gst/inter/.gitignore @@ -0,0 +1,2 @@ +gstintertest + diff --git a/gst/inter/Makefile.am b/gst/inter/Makefile.am new file mode 100644 index 0000000000..ce39e50fc8 --- /dev/null +++ b/gst/inter/Makefile.am @@ -0,0 +1,56 @@ +plugin_LTLIBRARIES = libgstinter.la + +noinst_PROGRAMS = gstintertest + +libgstinter_la_SOURCES = \ + gstinteraudiosink.c \ + gstinteraudiosrc.c \ + gstintervideosink.c \ + gstintervideosrc.c \ + gstinter.c \ + gstintersurface.c + +noinst_HEADERS = \ + gstinteraudiosink.h \ + gstinteraudiosrc.h \ + gstintervideosink.h \ + gstintervideosrc.h \ + gstintersurface.h + +libgstinter_la_CFLAGS = \ + $(GST_CFLAGS) \ + $(GST_PLUGINS_BASE_CFLAGS) + +libgstinter_la_LIBADD = \ + $(GST_LIBS) \ + $(GST_PLUGINS_BASE_LIBS) -lgstvideo-@GST_MAJORMINOR@ -lgstaudio-@GST_MAJORMINOR@ \ + $(LIBM) + +libgstinter_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) +libgstinter_la_LIBTOOLFLAGS = --tag=disable-static + +gstintertest_SOURCES = \ + gstintertest.c + +gstintertest_CFLAGS = \ + $(GST_CFLAGS) \ + $(GST_PLUGINS_BASE_CFLAGS) + +gstintertest_LDADD = \ + $(GST_LIBS) \ + $(GST_PLUGINS_BASE_LIBS) \ + $(LIBM) + +Android.mk: Makefile.am $(BUILT_SOURCES) + androgenizer \ + -:PROJECT libgstinter -:SHARED libgstinter \ + -:TAGS eng debug \ + -:REL_TOP $(top_srcdir) -:ABS_TOP $(abs_top_srcdir) \ + -:SOURCES $(libgstinter_la_SOURCES) \ + -:CFLAGS $(DEFS) $(DEFAULT_INCLUDES) $(libgstinter_la_CFLAGS) \ + -:LDFLAGS $(libgstinter_la_LDFLAGS) \ + $(libgstinter_la_LIBADD) \ + -ldl \ + -:PASSTHROUGH LOCAL_ARM_MODE:=arm \ + LOCAL_MODULE_PATH:='$$(TARGET_OUT)/lib/gstreamer-0.10' \ + > $@ diff --git a/gst/inter/gstinter.c b/gst/inter/gstinter.c new file mode 100644 index 0000000000..2ecc71df6b --- /dev/null +++ b/gst/inter/gstinter.c @@ -0,0 +1,51 @@ +/* GStreamer + * Copyright (C) 2011 FIXME + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstinteraudiosrc.h" +#include "gstinteraudiosink.h" +#include "gstintervideosrc.h" +#include "gstintervideosink.h" +#include "gstintersurface.h" + +static gboolean +plugin_init (GstPlugin * plugin) +{ + gst_element_register (plugin, "interaudiosrc", GST_RANK_NONE, + GST_TYPE_INTER_AUDIO_SRC); + gst_element_register (plugin, "interaudiosink", GST_RANK_NONE, + GST_TYPE_INTER_AUDIO_SINK); + gst_element_register (plugin, "intervideosrc", GST_RANK_NONE, + GST_TYPE_INTER_VIDEO_SRC); + gst_element_register (plugin, "intervideosink", GST_RANK_NONE, + GST_TYPE_INTER_VIDEO_SINK); + + gst_inter_surface_init (); + + return TRUE; +} + +GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, + GST_VERSION_MINOR, + "inter", + "plugin for inter-pipeline communication", + plugin_init, VERSION, "LGPL", PACKAGE_NAME, GST_PACKAGE_ORIGIN) diff --git a/gst/inter/gstinteraudiosink.c b/gst/inter/gstinteraudiosink.c new file mode 100644 index 0000000000..d5eb98b0f5 --- /dev/null +++ b/gst/inter/gstinteraudiosink.c @@ -0,0 +1,342 @@ +/* GStreamer + * Copyright (C) 2011 David A. Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. + */ +/** + * SECTION:element-gstinteraudiosink + * + * The interaudiosink element does FIXME stuff. + * + * + * Example launch line + * |[ + * gst-launch -v fakesrc ! interaudiosink ! FIXME ! fakesink + * ]| + * FIXME Describe what the pipeline does. 
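+ *
+ * An illustrative (untested) sender pipeline, assuming the single built-in
+ * "default" surface is shared with an interaudiosrc running in another
+ * pipeline:
+ * |[
+ * gst-launch -v audiotestsrc ! audioconvert ! interaudiosink
+ * ]|
+ * This pushes raw 16-bit audio into the shared surface's adapter, from
+ * which an interaudiosrc elsewhere can read it.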
+ * + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include +#include +#include "gstinteraudiosink.h" +#include + +GST_DEBUG_CATEGORY_STATIC (gst_inter_audio_sink_debug_category); +#define GST_CAT_DEFAULT gst_inter_audio_sink_debug_category + +/* prototypes */ + + +static void gst_inter_audio_sink_set_property (GObject * object, + guint property_id, const GValue * value, GParamSpec * pspec); +static void gst_inter_audio_sink_get_property (GObject * object, + guint property_id, GValue * value, GParamSpec * pspec); +static void gst_inter_audio_sink_dispose (GObject * object); +static void gst_inter_audio_sink_finalize (GObject * object); + +static GstCaps *gst_inter_audio_sink_get_caps (GstBaseSink * sink); +static gboolean gst_inter_audio_sink_set_caps (GstBaseSink * sink, + GstCaps * caps); +static GstFlowReturn gst_inter_audio_sink_buffer_alloc (GstBaseSink * sink, + guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf); +static void gst_inter_audio_sink_get_times (GstBaseSink * sink, + GstBuffer * buffer, GstClockTime * start, GstClockTime * end); +static gboolean gst_inter_audio_sink_start (GstBaseSink * sink); +static gboolean gst_inter_audio_sink_stop (GstBaseSink * sink); +static gboolean gst_inter_audio_sink_unlock (GstBaseSink * sink); +static gboolean gst_inter_audio_sink_event (GstBaseSink * sink, + GstEvent * event); +static GstFlowReturn gst_inter_audio_sink_preroll (GstBaseSink * sink, + GstBuffer * buffer); +static GstFlowReturn gst_inter_audio_sink_render (GstBaseSink * sink, + GstBuffer * buffer); +static GstStateChangeReturn gst_inter_audio_sink_async_play (GstBaseSink * + sink); +static gboolean gst_inter_audio_sink_activate_pull (GstBaseSink * sink, + gboolean active); +static gboolean gst_inter_audio_sink_unlock_stop (GstBaseSink * sink); + +enum +{ + PROP_0 +}; + +/* pad templates */ + +static GstStaticPadTemplate gst_inter_audio_sink_sink_template = +GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("audio/x-raw-int, " + "endianness = (int) BYTE_ORDER, " + "signed = (boolean) true, " + "width = (int) 16, " + "depth = (int) 16, " + "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]") + ); + + +/* class initialization */ + +#define DEBUG_INIT(bla) \ + GST_DEBUG_CATEGORY_INIT (gst_inter_audio_sink_debug_category, "interaudiosink", 0, \ + "debug category for interaudiosink element"); + +GST_BOILERPLATE_FULL (GstInterAudioSink, gst_inter_audio_sink, GstBaseSink, + GST_TYPE_BASE_SINK, DEBUG_INIT); + +static void +gst_inter_audio_sink_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_inter_audio_sink_sink_template)); + + gst_element_class_set_details_simple (element_class, "FIXME Long name", + "Generic", "FIXME Description", "FIXME "); +} + +static void +gst_inter_audio_sink_class_init (GstInterAudioSinkClass * klass) +{ + GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + GstBaseSinkClass *base_sink_class = GST_BASE_SINK_CLASS (klass); + + gobject_class->set_property = gst_inter_audio_sink_set_property; + gobject_class->get_property = gst_inter_audio_sink_get_property; + gobject_class->dispose = gst_inter_audio_sink_dispose; + gobject_class->finalize = gst_inter_audio_sink_finalize; + base_sink_class->get_caps = GST_DEBUG_FUNCPTR (gst_inter_audio_sink_get_caps); + base_sink_class->set_caps = GST_DEBUG_FUNCPTR (gst_inter_audio_sink_set_caps); + if (0) + 
base_sink_class->buffer_alloc = + GST_DEBUG_FUNCPTR (gst_inter_audio_sink_buffer_alloc); + base_sink_class->get_times = + GST_DEBUG_FUNCPTR (gst_inter_audio_sink_get_times); + base_sink_class->start = GST_DEBUG_FUNCPTR (gst_inter_audio_sink_start); + base_sink_class->stop = GST_DEBUG_FUNCPTR (gst_inter_audio_sink_stop); + base_sink_class->unlock = GST_DEBUG_FUNCPTR (gst_inter_audio_sink_unlock); + if (0) + base_sink_class->event = GST_DEBUG_FUNCPTR (gst_inter_audio_sink_event); + //if (0) + base_sink_class->preroll = GST_DEBUG_FUNCPTR (gst_inter_audio_sink_preroll); + base_sink_class->render = GST_DEBUG_FUNCPTR (gst_inter_audio_sink_render); + if (0) + base_sink_class->async_play = + GST_DEBUG_FUNCPTR (gst_inter_audio_sink_async_play); + if (0) + base_sink_class->activate_pull = + GST_DEBUG_FUNCPTR (gst_inter_audio_sink_activate_pull); + base_sink_class->unlock_stop = + GST_DEBUG_FUNCPTR (gst_inter_audio_sink_unlock_stop); + +} + +static void +gst_inter_audio_sink_init (GstInterAudioSink * interaudiosink, + GstInterAudioSinkClass * interaudiosink_class) +{ + + interaudiosink->sinkpad = + gst_pad_new_from_static_template (&gst_inter_audio_sink_sink_template, + "sink"); + + interaudiosink->surface = gst_inter_surface_get ("default"); +} + +void +gst_inter_audio_sink_set_property (GObject * object, guint property_id, + const GValue * value, GParamSpec * pspec) +{ + /* GstInterAudioSink *interaudiosink = GST_INTER_AUDIO_SINK (object); */ + + switch (property_id) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); + break; + } +} + +void +gst_inter_audio_sink_get_property (GObject * object, guint property_id, + GValue * value, GParamSpec * pspec) +{ + /* GstInterAudioSink *interaudiosink = GST_INTER_AUDIO_SINK (object); */ + + switch (property_id) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); + break; + } +} + +void +gst_inter_audio_sink_dispose (GObject * object) +{ + /* GstInterAudioSink *interaudiosink = GST_INTER_AUDIO_SINK (object); */ + + /* clean up as possible. 
may be called multiple times */ + + G_OBJECT_CLASS (parent_class)->dispose (object); +} + +void +gst_inter_audio_sink_finalize (GObject * object) +{ + /* GstInterAudioSink *interaudiosink = GST_INTER_AUDIO_SINK (object); */ + + /* clean up object here */ + + G_OBJECT_CLASS (parent_class)->finalize (object); +} + + + +static GstCaps * +gst_inter_audio_sink_get_caps (GstBaseSink * sink) +{ + + return NULL; +} + +static gboolean +gst_inter_audio_sink_set_caps (GstBaseSink * sink, GstCaps * caps) +{ + + return TRUE; +} + +static GstFlowReturn +gst_inter_audio_sink_buffer_alloc (GstBaseSink * sink, guint64 offset, + guint size, GstCaps * caps, GstBuffer ** buf) +{ + + return GST_FLOW_ERROR; +} + +static void +gst_inter_audio_sink_get_times (GstBaseSink * sink, GstBuffer * buffer, + GstClockTime * start, GstClockTime * end) +{ + GstInterAudioSink *interaudiosink = GST_INTER_AUDIO_SINK (sink); + + if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) { + *start = GST_BUFFER_TIMESTAMP (buffer); + if (GST_BUFFER_DURATION_IS_VALID (buffer)) { + *end = *start + GST_BUFFER_DURATION (buffer); + } else { + if (interaudiosink->fps_n > 0) { + *end = *start + + gst_util_uint64_scale_int (GST_SECOND, interaudiosink->fps_d, + interaudiosink->fps_n); + } + } + } + + +} + +static gboolean +gst_inter_audio_sink_start (GstBaseSink * sink) +{ + + return TRUE; +} + +static gboolean +gst_inter_audio_sink_stop (GstBaseSink * sink) +{ + GstInterAudioSink *interaudiosink = GST_INTER_AUDIO_SINK (sink); + + GST_DEBUG ("stop"); + + g_mutex_lock (interaudiosink->surface->mutex); + gst_adapter_clear (interaudiosink->surface->audio_adapter); + g_mutex_unlock (interaudiosink->surface->mutex); + + return TRUE; +} + +static gboolean +gst_inter_audio_sink_unlock (GstBaseSink * sink) +{ + + return TRUE; +} + +static gboolean +gst_inter_audio_sink_event (GstBaseSink * sink, GstEvent * event) +{ + + return TRUE; +} + +static GstFlowReturn +gst_inter_audio_sink_preroll (GstBaseSink * sink, GstBuffer * buffer) +{ + + return GST_FLOW_OK; +} + +static GstFlowReturn +gst_inter_audio_sink_render (GstBaseSink * sink, GstBuffer * buffer) +{ + GstInterAudioSink *interaudiosink = GST_INTER_AUDIO_SINK (sink); + int n; + + GST_DEBUG ("render %d", GST_BUFFER_SIZE (buffer)); + + g_mutex_lock (interaudiosink->surface->mutex); + n = gst_adapter_available (interaudiosink->surface->audio_adapter) / 4; + if (n > (800 * 2 * 2)) { + GST_INFO ("flushing 800 samples"); + gst_adapter_flush (interaudiosink->surface->audio_adapter, 800 * 4); + n -= 800; + } + gst_adapter_push (interaudiosink->surface->audio_adapter, + gst_buffer_ref (buffer)); + g_mutex_unlock (interaudiosink->surface->mutex); + + return GST_FLOW_OK; +} + +static GstStateChangeReturn +gst_inter_audio_sink_async_play (GstBaseSink * sink) +{ + + return GST_STATE_CHANGE_SUCCESS; +} + +static gboolean +gst_inter_audio_sink_activate_pull (GstBaseSink * sink, gboolean active) +{ + + return TRUE; +} + +static gboolean +gst_inter_audio_sink_unlock_stop (GstBaseSink * sink) +{ + + return TRUE; +} diff --git a/gst/inter/gstinteraudiosink.h b/gst/inter/gstinteraudiosink.h new file mode 100644 index 0000000000..53597e7c9b --- /dev/null +++ b/gst/inter/gstinteraudiosink.h @@ -0,0 +1,58 @@ +/* GStreamer + * Copyright (C) 2011 David A. Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. 
+ * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef _GST_INTER_AUDIO_SINK_H_ +#define _GST_INTER_AUDIO_SINK_H_ + +#include +#include "gstintersurface.h" + +G_BEGIN_DECLS + +#define GST_TYPE_INTER_AUDIO_SINK (gst_inter_audio_sink_get_type()) +#define GST_INTER_AUDIO_SINK(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_INTER_AUDIO_SINK,GstInterAudioSink)) +#define GST_INTER_AUDIO_SINK_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_INTER_AUDIO_SINK,GstInterAudioSinkClass)) +#define GST_IS_INTER_AUDIO_SINK(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_INTER_AUDIO_SINK)) +#define GST_IS_INTER_AUDIO_SINK_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_INTER_AUDIO_SINK)) + +typedef struct _GstInterAudioSink GstInterAudioSink; +typedef struct _GstInterAudioSinkClass GstInterAudioSinkClass; + +struct _GstInterAudioSink +{ + GstBaseSink base_interaudiosink; + + GstInterSurface *surface; + + GstPad *sinkpad; + + int fps_n; + int fps_d; +}; + +struct _GstInterAudioSinkClass +{ + GstBaseSinkClass base_interaudiosink_class; +}; + +GType gst_inter_audio_sink_get_type (void); + +G_END_DECLS + +#endif diff --git a/gst/inter/gstinteraudiosrc.c b/gst/inter/gstinteraudiosrc.c new file mode 100644 index 0000000000..df7c16f70d --- /dev/null +++ b/gst/inter/gstinteraudiosrc.c @@ -0,0 +1,481 @@ +/* GStreamer + * Copyright (C) 2011 David A. Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. + */ +/** + * SECTION:element-gstinteraudiosrc + * + * The interaudiosrc element does FIXME stuff. + * + * + * Example launch line + * |[ + * gst-launch -v fakesrc ! interaudiosrc ! FIXME ! fakesink + * ]| + * FIXME Describe what the pipeline does. 
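+ *
+ * An illustrative (untested) receiver pipeline, assuming an interaudiosink
+ * in another pipeline is feeding the shared "default" surface:
+ * |[
+ * gst-launch -v interaudiosrc ! audioconvert ! autoaudiosink
+ * ]|
+ * The element pulls fixed-size chunks from the surface's adapter and pads
+ * them with silence when the sender has not produced enough data.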
+ * + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include +#include "gstinteraudiosrc.h" +#include + +GST_DEBUG_CATEGORY_STATIC (gst_inter_audio_src_debug_category); +#define GST_CAT_DEFAULT gst_inter_audio_src_debug_category + +/* prototypes */ + + +static void gst_inter_audio_src_set_property (GObject * object, + guint property_id, const GValue * value, GParamSpec * pspec); +static void gst_inter_audio_src_get_property (GObject * object, + guint property_id, GValue * value, GParamSpec * pspec); +static void gst_inter_audio_src_dispose (GObject * object); +static void gst_inter_audio_src_finalize (GObject * object); + +static GstCaps *gst_inter_audio_src_get_caps (GstBaseSrc * src); +static gboolean gst_inter_audio_src_set_caps (GstBaseSrc * src, GstCaps * caps); +static gboolean gst_inter_audio_src_negotiate (GstBaseSrc * src); +static gboolean gst_inter_audio_src_newsegment (GstBaseSrc * src); +static gboolean gst_inter_audio_src_start (GstBaseSrc * src); +static gboolean gst_inter_audio_src_stop (GstBaseSrc * src); +static void +gst_inter_audio_src_get_times (GstBaseSrc * src, GstBuffer * buffer, + GstClockTime * start, GstClockTime * end); +static gboolean gst_inter_audio_src_is_seekable (GstBaseSrc * src); +static gboolean gst_inter_audio_src_unlock (GstBaseSrc * src); +static gboolean gst_inter_audio_src_event (GstBaseSrc * src, GstEvent * event); +static GstFlowReturn +gst_inter_audio_src_create (GstBaseSrc * src, guint64 offset, guint size, + GstBuffer ** buf); +static gboolean gst_inter_audio_src_do_seek (GstBaseSrc * src, + GstSegment * segment); +static gboolean gst_inter_audio_src_query (GstBaseSrc * src, GstQuery * query); +static gboolean gst_inter_audio_src_check_get_range (GstBaseSrc * src); +static void gst_inter_audio_src_fixate (GstBaseSrc * src, GstCaps * caps); +static gboolean gst_inter_audio_src_unlock_stop (GstBaseSrc * src); +static gboolean +gst_inter_audio_src_prepare_seek_segment (GstBaseSrc * src, GstEvent * seek, + GstSegment * segment); + +enum +{ + PROP_0 +}; + +/* pad templates */ + +static GstStaticPadTemplate gst_inter_audio_src_src_template = +GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS ("audio/x-raw-int, " + "endianness = (int) BYTE_ORDER, " + "signed = (boolean) true, " + "width = (int) 16, " + "depth = (int) 16, " + "rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]") + ); + + +/* class initialization */ + +#define DEBUG_INIT(bla) \ + GST_DEBUG_CATEGORY_INIT (gst_inter_audio_src_debug_category, "interaudiosrc", 0, \ + "debug category for interaudiosrc element"); + +GST_BOILERPLATE_FULL (GstInterAudioSrc, gst_inter_audio_src, GstBaseSrc, + GST_TYPE_BASE_SRC, DEBUG_INIT); + +static void +gst_inter_audio_src_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_inter_audio_src_src_template)); + + gst_element_class_set_details_simple (element_class, "FIXME Long name", + "Generic", "FIXME Description", "FIXME "); +} + +static void +gst_inter_audio_src_class_init (GstInterAudioSrcClass * klass) +{ + GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + GstBaseSrcClass *base_src_class = GST_BASE_SRC_CLASS (klass); + + gobject_class->set_property = gst_inter_audio_src_set_property; + gobject_class->get_property = gst_inter_audio_src_get_property; + gobject_class->dispose = gst_inter_audio_src_dispose; + gobject_class->finalize = gst_inter_audio_src_finalize; 
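+  /* the vfunc assignments wrapped in "if (0)" below are compiled but never
+   * executed, so those vfuncs keep their base-class defaults; only the
+   * unguarded assignments take effect */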
+ base_src_class->get_caps = GST_DEBUG_FUNCPTR (gst_inter_audio_src_get_caps); + base_src_class->set_caps = GST_DEBUG_FUNCPTR (gst_inter_audio_src_set_caps); + if (0) + base_src_class->negotiate = + GST_DEBUG_FUNCPTR (gst_inter_audio_src_negotiate); + base_src_class->newsegment = + GST_DEBUG_FUNCPTR (gst_inter_audio_src_newsegment); + base_src_class->start = GST_DEBUG_FUNCPTR (gst_inter_audio_src_start); + base_src_class->stop = GST_DEBUG_FUNCPTR (gst_inter_audio_src_stop); + base_src_class->get_times = GST_DEBUG_FUNCPTR (gst_inter_audio_src_get_times); + if (0) + base_src_class->is_seekable = + GST_DEBUG_FUNCPTR (gst_inter_audio_src_is_seekable); + base_src_class->unlock = GST_DEBUG_FUNCPTR (gst_inter_audio_src_unlock); + base_src_class->event = GST_DEBUG_FUNCPTR (gst_inter_audio_src_event); + base_src_class->create = GST_DEBUG_FUNCPTR (gst_inter_audio_src_create); + if (0) + base_src_class->do_seek = GST_DEBUG_FUNCPTR (gst_inter_audio_src_do_seek); + base_src_class->query = GST_DEBUG_FUNCPTR (gst_inter_audio_src_query); + if (0) + base_src_class->check_get_range = + GST_DEBUG_FUNCPTR (gst_inter_audio_src_check_get_range); + base_src_class->fixate = GST_DEBUG_FUNCPTR (gst_inter_audio_src_fixate); + if (0) + base_src_class->unlock_stop = + GST_DEBUG_FUNCPTR (gst_inter_audio_src_unlock_stop); + if (0) + base_src_class->prepare_seek_segment = + GST_DEBUG_FUNCPTR (gst_inter_audio_src_prepare_seek_segment); + + +} + +static void +gst_inter_audio_src_init (GstInterAudioSrc * interaudiosrc, + GstInterAudioSrcClass * interaudiosrc_class) +{ + + interaudiosrc->srcpad = + gst_pad_new_from_static_template (&gst_inter_audio_src_src_template, + "src"); + + gst_base_src_set_live (GST_BASE_SRC (interaudiosrc), TRUE); + gst_base_src_set_blocksize (GST_BASE_SRC (interaudiosrc), -1); + + interaudiosrc->surface = gst_inter_surface_get ("default"); +} + +void +gst_inter_audio_src_set_property (GObject * object, guint property_id, + const GValue * value, GParamSpec * pspec) +{ + /* GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (object); */ + + switch (property_id) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); + break; + } +} + +void +gst_inter_audio_src_get_property (GObject * object, guint property_id, + GValue * value, GParamSpec * pspec) +{ + /* GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (object); */ + + switch (property_id) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); + break; + } +} + +void +gst_inter_audio_src_dispose (GObject * object) +{ + /* GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (object); */ + + /* clean up as possible. 
may be called multiple times */ + + G_OBJECT_CLASS (parent_class)->dispose (object); +} + +void +gst_inter_audio_src_finalize (GObject * object) +{ + /* GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (object); */ + + /* clean up object here */ + + G_OBJECT_CLASS (parent_class)->finalize (object); +} + + +static GstCaps * +gst_inter_audio_src_get_caps (GstBaseSrc * src) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + + GST_DEBUG_OBJECT (interaudiosrc, "get_caps"); + + return NULL; +} + +static gboolean +gst_inter_audio_src_set_caps (GstBaseSrc * src, GstCaps * caps) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + const GstStructure *structure; + gboolean ret; + int sample_rate; + + GST_DEBUG_OBJECT (interaudiosrc, "set_caps"); + + structure = gst_caps_get_structure (caps, 0); + + ret = gst_structure_get_int (structure, "rate", &sample_rate); + if (ret) { + interaudiosrc->sample_rate = sample_rate; + } + + return ret; +} + +static gboolean +gst_inter_audio_src_negotiate (GstBaseSrc * src) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + + GST_DEBUG_OBJECT (interaudiosrc, "negotiate"); + + return TRUE; +} + +static gboolean +gst_inter_audio_src_newsegment (GstBaseSrc * src) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + + GST_DEBUG_OBJECT (interaudiosrc, "newsegment"); + + return TRUE; +} + +static gboolean +gst_inter_audio_src_start (GstBaseSrc * src) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + + GST_DEBUG_OBJECT (interaudiosrc, "start"); + + return TRUE; +} + +static gboolean +gst_inter_audio_src_stop (GstBaseSrc * src) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + + GST_DEBUG_OBJECT (interaudiosrc, "stop"); + + return TRUE; +} + +static void +gst_inter_audio_src_get_times (GstBaseSrc * src, GstBuffer * buffer, + GstClockTime * start, GstClockTime * end) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + + GST_DEBUG_OBJECT (interaudiosrc, "get_times"); + + /* for live sources, sync on the timestamp of the buffer */ + if (gst_base_src_is_live (src)) { + GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer); + + if (GST_CLOCK_TIME_IS_VALID (timestamp)) { + /* get duration to calculate end time */ + GstClockTime duration = GST_BUFFER_DURATION (buffer); + + if (GST_CLOCK_TIME_IS_VALID (duration)) { + *end = timestamp + duration; + } + *start = timestamp; + } + } else { + *start = -1; + *end = -1; + } +} + +static gboolean +gst_inter_audio_src_is_seekable (GstBaseSrc * src) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + + GST_DEBUG_OBJECT (interaudiosrc, "is_seekable"); + + return FALSE; +} + +static gboolean +gst_inter_audio_src_unlock (GstBaseSrc * src) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + + GST_DEBUG_OBJECT (interaudiosrc, "unlock"); + + return TRUE; +} + +static gboolean +gst_inter_audio_src_event (GstBaseSrc * src, GstEvent * event) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + + GST_DEBUG_OBJECT (interaudiosrc, "event"); + + return TRUE; +} + +static GstFlowReturn +gst_inter_audio_src_create (GstBaseSrc * src, guint64 offset, guint size, + GstBuffer ** buf) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + GstBuffer *buffer; + int n; + + GST_DEBUG_OBJECT (interaudiosrc, "create"); + + buffer = NULL; + + g_mutex_lock (interaudiosrc->surface->mutex); + n = gst_adapter_available (interaudiosrc->surface->audio_adapter) / 4; + if (n > 1600 * 2) { + 
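+    /* n counts 4-byte units (one frame of 16-bit stereo audio); if more
+     * than two 1600-frame periods have accumulated in the shared adapter,
+     * drop 800 frames to keep latency bounded */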
GST_DEBUG ("flushing %d samples", 800); + gst_adapter_flush (interaudiosrc->surface->audio_adapter, 800 * 4); + n -= 800; + } + if (n > 1600) + n = 1600; + if (n > 0) { + buffer = gst_adapter_take_buffer (interaudiosrc->surface->audio_adapter, + n * 4); + } + g_mutex_unlock (interaudiosrc->surface->mutex); + + if (n < 1600) { + GstBuffer *newbuf = gst_buffer_new_and_alloc (1600 * 4); + + GST_DEBUG ("creating %d samples of silence", 1600 - n); + memset (GST_BUFFER_DATA (newbuf) + n * 4, 0, 1600 * 4 - n * 4); + if (buffer) { + memcpy (GST_BUFFER_DATA (newbuf), GST_BUFFER_DATA (buffer), n * 4); + gst_buffer_unref (buffer); + } + buffer = newbuf; + } + n = 1600; + + GST_BUFFER_OFFSET (buffer) = interaudiosrc->n_samples; + GST_BUFFER_OFFSET_END (buffer) = interaudiosrc->n_samples + n; + GST_BUFFER_TIMESTAMP (buffer) = + gst_util_uint64_scale_int (interaudiosrc->n_samples, GST_SECOND, + interaudiosrc->sample_rate); + GST_DEBUG_OBJECT (interaudiosrc, "create ts %" GST_TIME_FORMAT, + GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer))); + GST_BUFFER_DURATION (buffer) = + gst_util_uint64_scale_int (interaudiosrc->n_samples + n, GST_SECOND, + interaudiosrc->sample_rate) - GST_BUFFER_TIMESTAMP (buffer); + GST_BUFFER_OFFSET (buffer) = interaudiosrc->n_samples; + GST_BUFFER_OFFSET_END (buffer) = -1; + GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT); + if (interaudiosrc->n_samples == 0) { + GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT); + } + gst_buffer_set_caps (buffer, GST_PAD_CAPS (GST_BASE_SRC_PAD (interaudiosrc))); + interaudiosrc->n_samples += n; + + *buf = buffer; + + return GST_FLOW_OK; +} + +static gboolean +gst_inter_audio_src_do_seek (GstBaseSrc * src, GstSegment * segment) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + + GST_DEBUG_OBJECT (interaudiosrc, "do_seek"); + + return FALSE; +} + +static gboolean +gst_inter_audio_src_query (GstBaseSrc * src, GstQuery * query) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + + GST_DEBUG_OBJECT (interaudiosrc, "query"); + + return TRUE; +} + +static gboolean +gst_inter_audio_src_check_get_range (GstBaseSrc * src) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + + GST_DEBUG_OBJECT (interaudiosrc, "get_range"); + + return FALSE; +} + +static void +gst_inter_audio_src_fixate (GstBaseSrc * src, GstCaps * caps) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + GstStructure *structure; + + structure = gst_caps_get_structure (caps, 0); + + GST_DEBUG_OBJECT (interaudiosrc, "fixate"); + + gst_structure_fixate_field_nearest_int (structure, "channels", 2); + gst_structure_fixate_field_nearest_int (structure, "rate", 48000); + +} + +static gboolean +gst_inter_audio_src_unlock_stop (GstBaseSrc * src) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + + GST_DEBUG_OBJECT (interaudiosrc, "stop"); + + return TRUE; +} + +static gboolean +gst_inter_audio_src_prepare_seek_segment (GstBaseSrc * src, GstEvent * seek, + GstSegment * segment) +{ + GstInterAudioSrc *interaudiosrc = GST_INTER_AUDIO_SRC (src); + + GST_DEBUG_OBJECT (interaudiosrc, "seek_segment"); + + return FALSE; +} diff --git a/gst/inter/gstinteraudiosrc.h b/gst/inter/gstinteraudiosrc.h new file mode 100644 index 0000000000..cac928f702 --- /dev/null +++ b/gst/inter/gstinteraudiosrc.h @@ -0,0 +1,57 @@ +/* GStreamer + * Copyright (C) 2011 David A. 
Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef _GST_INTER_AUDIO_SRC_H_ +#define _GST_INTER_AUDIO_SRC_H_ + +#include +#include "gstintersurface.h" + +G_BEGIN_DECLS + +#define GST_TYPE_INTER_AUDIO_SRC (gst_inter_audio_src_get_type()) +#define GST_INTER_AUDIO_SRC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_INTER_AUDIO_SRC,GstInterAudioSrc)) +#define GST_INTER_AUDIO_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_INTER_AUDIO_SRC,GstInterAudioSrcClass)) +#define GST_IS_INTER_AUDIO_SRC(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_INTER_AUDIO_SRC)) +#define GST_IS_INTER_AUDIO_SRC_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_INTER_AUDIO_SRC)) + +typedef struct _GstInterAudioSrc GstInterAudioSrc; +typedef struct _GstInterAudioSrcClass GstInterAudioSrcClass; + +struct _GstInterAudioSrc +{ + GstBaseSrc base_interaudiosrc; + + GstPad *srcpad; + GstInterSurface *surface; + + guint64 n_samples; + int sample_rate; +}; + +struct _GstInterAudioSrcClass +{ + GstBaseSrcClass base_interaudiosrc_class; +}; + +GType gst_inter_audio_src_get_type (void); + +G_END_DECLS + +#endif diff --git a/gst/inter/gstintersurface.c b/gst/inter/gstintersurface.c new file mode 100644 index 0000000000..9a43fb9a48 --- /dev/null +++ b/gst/inter/gstintersurface.c @@ -0,0 +1,42 @@ +/* GStreamer + * Copyright (C) 2011 FIXME + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstintersurface.h" + +static GstInterSurface *surface; + + +GstInterSurface * +gst_inter_surface_get (const char *name) +{ + return surface; + +} + +void +gst_inter_surface_init (void) +{ + surface = g_malloc0 (sizeof (GstInterSurface)); + surface->mutex = g_mutex_new (); + surface->audio_adapter = gst_adapter_new (); +} diff --git a/gst/inter/gstintersurface.h b/gst/inter/gstintersurface.h new file mode 100644 index 0000000000..92440448ad --- /dev/null +++ b/gst/inter/gstintersurface.h @@ -0,0 +1,58 @@ +/* GStreamer + * Copyright (C) 2011 David A. 
Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef _GST_INTER_SURFACE_H_ +#define _GST_INTER_SURFACE_H_ + +#include +#include + +G_BEGIN_DECLS + +typedef struct _GstInterSurface GstInterSurface; + +struct _GstInterSurface +{ + GMutex *mutex; + + /* video */ + GstVideoFormat format; + int fps_n; + int fps_d; + int width; + int height; + int n_frames; + int video_buffer_count; + + /* audio */ + int sample_rate; + int n_channels; + + GstBuffer *video_buffer; + GstAdapter *audio_adapter; +}; + + +GstInterSurface * gst_inter_surface_get (const char *name); +void gst_inter_surface_init (void); + + +G_END_DECLS + +#endif diff --git a/gst/inter/gstintertest.c b/gst/inter/gstintertest.c new file mode 100644 index 0000000000..cb7b08c668 --- /dev/null +++ b/gst/inter/gstintertest.c @@ -0,0 +1,502 @@ +/* GstInterTest + * Copyright (C) 2011 FIXME + * Copyright (C) 2010 Entropy Wave Inc + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AS IS'' AND ANY EXPRESS OR + * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, + * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES + * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) + * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, + * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING + * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE + * POSSIBILITY OF SUCH DAMAGE. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include + +//#define GETTEXT_PACKAGE "intertest" + + +typedef struct _GstInterTest GstInterTest; +struct _GstInterTest +{ + GstElement *pipeline; + GstBus *bus; + GMainLoop *main_loop; + + GstElement *source_element; + GstElement *sink_element; + + gboolean paused_for_buffering; + guint timer_id; +}; + +GstInterTest *gst_inter_test_new (void); +void gst_inter_test_free (GstInterTest * intertest); +void gst_inter_test_create_pipeline_server (GstInterTest * intertest); +void gst_inter_test_create_pipeline_vts (GstInterTest * intertest); +void gst_inter_test_create_pipeline_playbin (GstInterTest * intertest, + const char *uri); +void gst_inter_test_start (GstInterTest * intertest); +void gst_inter_test_stop (GstInterTest * intertest); + +static gboolean gst_inter_test_handle_message (GstBus * bus, + GstMessage * message, gpointer data); +static gboolean onesecond_timer (gpointer priv); + + +gboolean verbose; + +static GOptionEntry entries[] = { + {"verbose", 'v', 0, G_OPTION_ARG_NONE, &verbose, "Be verbose", NULL}, + + {NULL} + +}; + +int +main (int argc, char *argv[]) +{ + GError *error = NULL; + GOptionContext *context; + GstInterTest *intertest1; + GstInterTest *intertest2; + GMainLoop *main_loop; + + if (!g_thread_supported ()) + g_thread_init (NULL); + + context = g_option_context_new ("- FIXME"); + g_option_context_add_main_entries (context, entries, GETTEXT_PACKAGE); + g_option_context_add_group (context, gst_init_get_option_group ()); + if (!g_option_context_parse (context, &argc, &argv, &error)) { + g_print ("option parsing failed: %s\n", error->message); + exit (1); + } + g_option_context_free (context); + + intertest1 = gst_inter_test_new (); + gst_inter_test_create_pipeline_server (intertest1); + gst_inter_test_start (intertest1); + + intertest2 = gst_inter_test_new (); + gst_inter_test_create_pipeline_playbin (intertest2, NULL); + gst_inter_test_start (intertest2); + + main_loop = g_main_loop_new (NULL, TRUE); + intertest1->main_loop = main_loop; + intertest2->main_loop = main_loop; + + g_main_loop_run (main_loop); + + exit (0); +} + + +GstInterTest * +gst_inter_test_new (void) +{ + GstInterTest *intertest; + + intertest = g_new0 (GstInterTest, 1); + + return intertest; +} + +void +gst_inter_test_free (GstInterTest * intertest) +{ + if (intertest->source_element) { + gst_object_unref (intertest->source_element); + intertest->source_element = NULL; + } + if (intertest->sink_element) { + gst_object_unref (intertest->sink_element); + intertest->sink_element = NULL; + } + + if (intertest->pipeline) { + gst_element_set_state (intertest->pipeline, GST_STATE_NULL); + gst_object_unref (intertest->pipeline); + intertest->pipeline = NULL; + } + g_free (intertest); +} + +void +gst_inter_test_create_pipeline_playbin (GstInterTest * intertest, + const char *uri) +{ + GstElement *pipeline; + GError *error = NULL; + + if (uri == NULL) { + gst_inter_test_create_pipeline_vts (intertest); + return; + } + + pipeline = gst_pipeline_new (NULL); + gst_bin_add (GST_BIN (pipeline), + gst_element_factory_make ("playbin2", "source")); + + if (error) { + g_print ("pipeline parsing error: %s\n", error->message); + gst_object_unref (pipeline); + return; + } + + intertest->pipeline = pipeline; + + gst_pipeline_set_auto_flush_bus (GST_PIPELINE (pipeline), FALSE); + intertest->bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); + gst_bus_add_watch (intertest->bus, gst_inter_test_handle_message, intertest); + + intertest->source_element = 
+ gst_bin_get_by_name (GST_BIN (pipeline), "source"); + g_print ("source_element is %p\n", intertest->source_element); + + g_print ("setting uri to %s\n", uri); + g_object_set (intertest->source_element, "uri", uri, NULL); +} + +void +gst_inter_test_create_pipeline_vts (GstInterTest * intertest) +{ + GString *pipe_desc; + GstElement *pipeline; + GError *error = NULL; + + pipe_desc = g_string_new (""); + + g_string_append (pipe_desc, "videotestsrc name=source num-buffers=10000 ! "); + g_string_append (pipe_desc, + "video/x-raw-yuv,format=(fourcc)I420,width=320,height=240 ! "); + g_string_append (pipe_desc, "timeoverlay ! "); + g_string_append (pipe_desc, "intervideosink name=sink sync=true "); + g_string_append (pipe_desc, + "audiotestsrc samplesperbuffer=1600 num-buffers=100 ! "); + g_string_append (pipe_desc, "interaudiosink "); + + if (verbose) + g_print ("pipeline: %s\n", pipe_desc->str); + + pipeline = (GstElement *) gst_parse_launch (pipe_desc->str, &error); + g_string_free (pipe_desc, FALSE); + + if (error) { + g_print ("pipeline parsing error: %s\n", error->message); + gst_object_unref (pipeline); + return; + } + + intertest->pipeline = pipeline; + + gst_pipeline_set_auto_flush_bus (GST_PIPELINE (pipeline), FALSE); + intertest->bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); + gst_bus_add_watch (intertest->bus, gst_inter_test_handle_message, intertest); + + intertest->source_element = + gst_bin_get_by_name (GST_BIN (pipeline), "source"); + intertest->sink_element = gst_bin_get_by_name (GST_BIN (pipeline), "sink"); +} + +void +gst_inter_test_create_pipeline_server (GstInterTest * intertest) +{ + GString *pipe_desc; + GstElement *pipeline; + GError *error = NULL; + + pipe_desc = g_string_new (""); + + g_string_append (pipe_desc, "intervideosrc ! queue ! "); + g_string_append (pipe_desc, "xvimagesink name=sink "); + g_string_append (pipe_desc, "interaudiosrc ! queue ! 
"); + g_string_append (pipe_desc, "alsasink latency-time=100000000 "); + + if (verbose) + g_print ("pipeline: %s\n", pipe_desc->str); + + pipeline = (GstElement *) gst_parse_launch (pipe_desc->str, &error); + g_string_free (pipe_desc, FALSE); + + if (error) { + g_print ("pipeline parsing error: %s\n", error->message); + gst_object_unref (pipeline); + return; + } + + intertest->pipeline = pipeline; + + gst_pipeline_set_auto_flush_bus (GST_PIPELINE (pipeline), FALSE); + intertest->bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); + gst_bus_add_watch (intertest->bus, gst_inter_test_handle_message, intertest); + + intertest->source_element = + gst_bin_get_by_name (GST_BIN (pipeline), "source"); + intertest->sink_element = gst_bin_get_by_name (GST_BIN (pipeline), "sink"); +} + +void +gst_inter_test_start (GstInterTest * intertest) +{ + gst_element_set_state (intertest->pipeline, GST_STATE_READY); + + intertest->timer_id = g_timeout_add (1000, onesecond_timer, intertest); +} + +void +gst_inter_test_stop (GstInterTest * intertest) +{ + gst_element_set_state (intertest->pipeline, GST_STATE_NULL); + + g_source_remove (intertest->timer_id); +} + +static void +gst_inter_test_handle_eos (GstInterTest * intertest) +{ + gst_inter_test_stop (intertest); +} + +static void +gst_inter_test_handle_error (GstInterTest * intertest, GError * error, + const char *debug) +{ + g_print ("error: %s\n", error->message); + gst_inter_test_stop (intertest); +} + +static void +gst_inter_test_handle_warning (GstInterTest * intertest, GError * error, + const char *debug) +{ + g_print ("warning: %s\n", error->message); +} + +static void +gst_inter_test_handle_info (GstInterTest * intertest, GError * error, + const char *debug) +{ + g_print ("info: %s\n", error->message); +} + +static void +gst_inter_test_handle_null_to_ready (GstInterTest * intertest) +{ + gst_element_set_state (intertest->pipeline, GST_STATE_PAUSED); + +} + +static void +gst_inter_test_handle_ready_to_paused (GstInterTest * intertest) +{ + if (!intertest->paused_for_buffering) { + gst_element_set_state (intertest->pipeline, GST_STATE_PLAYING); + } +} + +static void +gst_inter_test_handle_paused_to_playing (GstInterTest * intertest) +{ + +} + +static void +gst_inter_test_handle_playing_to_paused (GstInterTest * intertest) +{ + +} + +static void +gst_inter_test_handle_paused_to_ready (GstInterTest * intertest) +{ + +} + +static void +gst_inter_test_handle_ready_to_null (GstInterTest * intertest) +{ + g_main_loop_quit (intertest->main_loop); + +} + + +static gboolean +gst_inter_test_handle_message (GstBus * bus, GstMessage * message, + gpointer data) +{ + GstInterTest *intertest = (GstInterTest *) data; + + switch (GST_MESSAGE_TYPE (message)) { + case GST_MESSAGE_EOS: + gst_inter_test_handle_eos (intertest); + break; + case GST_MESSAGE_ERROR: + { + GError *error = NULL; + gchar *debug; + + gst_message_parse_error (message, &error, &debug); + gst_inter_test_handle_error (intertest, error, debug); + } + break; + case GST_MESSAGE_WARNING: + { + GError *error = NULL; + gchar *debug; + + gst_message_parse_warning (message, &error, &debug); + gst_inter_test_handle_warning (intertest, error, debug); + } + break; + case GST_MESSAGE_INFO: + { + GError *error = NULL; + gchar *debug; + + gst_message_parse_info (message, &error, &debug); + gst_inter_test_handle_info (intertest, error, debug); + } + break; + case GST_MESSAGE_TAG: + { + GstTagList *tag_list; + + gst_message_parse_tag (message, &tag_list); + if (verbose) + g_print ("tag\n"); + } + break; + case 
GST_MESSAGE_STATE_CHANGED: + { + GstState oldstate, newstate, pending; + + gst_message_parse_state_changed (message, &oldstate, &newstate, &pending); + if (GST_ELEMENT (message->src) == intertest->pipeline) { + if (verbose) + g_print ("state change from %s to %s\n", + gst_element_state_get_name (oldstate), + gst_element_state_get_name (newstate)); + switch (GST_STATE_TRANSITION (oldstate, newstate)) { + case GST_STATE_CHANGE_NULL_TO_READY: + gst_inter_test_handle_null_to_ready (intertest); + break; + case GST_STATE_CHANGE_READY_TO_PAUSED: + gst_inter_test_handle_ready_to_paused (intertest); + break; + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: + gst_inter_test_handle_paused_to_playing (intertest); + break; + case GST_STATE_CHANGE_PLAYING_TO_PAUSED: + gst_inter_test_handle_playing_to_paused (intertest); + break; + case GST_STATE_CHANGE_PAUSED_TO_READY: + gst_inter_test_handle_paused_to_ready (intertest); + break; + case GST_STATE_CHANGE_READY_TO_NULL: + gst_inter_test_handle_ready_to_null (intertest); + break; + default: + if (verbose) + g_print ("unknown state change from %s to %s\n", + gst_element_state_get_name (oldstate), + gst_element_state_get_name (newstate)); + } + } + } + break; + case GST_MESSAGE_BUFFERING: + { + int percent; + gst_message_parse_buffering (message, &percent); + //g_print("buffering %d\n", percent); + if (!intertest->paused_for_buffering && percent < 100) { + g_print ("pausing for buffing\n"); + intertest->paused_for_buffering = TRUE; + gst_element_set_state (intertest->pipeline, GST_STATE_PAUSED); + } else if (intertest->paused_for_buffering && percent == 100) { + g_print ("unpausing for buffing\n"); + intertest->paused_for_buffering = FALSE; + gst_element_set_state (intertest->pipeline, GST_STATE_PLAYING); + } + } + break; + case GST_MESSAGE_STATE_DIRTY: + case GST_MESSAGE_CLOCK_PROVIDE: + case GST_MESSAGE_CLOCK_LOST: + case GST_MESSAGE_NEW_CLOCK: + case GST_MESSAGE_STRUCTURE_CHANGE: + case GST_MESSAGE_STREAM_STATUS: + break; + case GST_MESSAGE_STEP_DONE: + case GST_MESSAGE_APPLICATION: + case GST_MESSAGE_ELEMENT: + case GST_MESSAGE_SEGMENT_START: + case GST_MESSAGE_SEGMENT_DONE: + case GST_MESSAGE_DURATION: + case GST_MESSAGE_LATENCY: + case GST_MESSAGE_ASYNC_START: + case GST_MESSAGE_ASYNC_DONE: + case GST_MESSAGE_REQUEST_STATE: + case GST_MESSAGE_STEP_START: + default: + if (verbose) { + g_print ("message: %s\n", GST_MESSAGE_TYPE_NAME (message)); + } + break; + case GST_MESSAGE_QOS: + break; + } + + return TRUE; +} + + + +static gboolean +onesecond_timer (gpointer priv) +{ + //GstInterTest *intertest = (GstInterTest *)priv; + + g_print (".\n"); + + return TRUE; +} + + + +/* helper functions */ + +#if 0 +gboolean +have_element (const gchar * element_name) +{ + GstPluginFeature *feature; + + feature = gst_default_registry_find_feature (element_name, + GST_TYPE_ELEMENT_FACTORY); + if (feature) { + g_object_unref (feature); + return TRUE; + } + return FALSE; +} +#endif diff --git a/gst/inter/gstintervideosink.c b/gst/inter/gstintervideosink.c new file mode 100644 index 0000000000..940d7b27e7 --- /dev/null +++ b/gst/inter/gstintervideosink.c @@ -0,0 +1,332 @@ +/* GStreamer + * Copyright (C) 2011 David A. Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. 
+ * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. + */ +/** + * SECTION:element-gstintervideosink + * + * The intervideosink element does FIXME stuff. + * + * + * Example launch line + * |[ + * gst-launch -v fakesrc ! intervideosink ! FIXME ! fakesink + * ]| + * FIXME Describe what the pipeline does. + * + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include +#include +#include "gstintervideosink.h" + +GST_DEBUG_CATEGORY_STATIC (gst_inter_video_sink_debug_category); +#define GST_CAT_DEFAULT gst_inter_video_sink_debug_category + +/* prototypes */ + + +static void gst_inter_video_sink_set_property (GObject * object, + guint property_id, const GValue * value, GParamSpec * pspec); +static void gst_inter_video_sink_get_property (GObject * object, + guint property_id, GValue * value, GParamSpec * pspec); +static void gst_inter_video_sink_dispose (GObject * object); +static void gst_inter_video_sink_finalize (GObject * object); + +static GstCaps *gst_inter_video_sink_get_caps (GstBaseSink * sink); +static gboolean gst_inter_video_sink_set_caps (GstBaseSink * sink, + GstCaps * caps); +static GstFlowReturn gst_inter_video_sink_buffer_alloc (GstBaseSink * sink, + guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf); +static void gst_inter_video_sink_get_times (GstBaseSink * sink, + GstBuffer * buffer, GstClockTime * start, GstClockTime * end); +static gboolean gst_inter_video_sink_start (GstBaseSink * sink); +static gboolean gst_inter_video_sink_stop (GstBaseSink * sink); +static gboolean gst_inter_video_sink_unlock (GstBaseSink * sink); +static gboolean gst_inter_video_sink_event (GstBaseSink * sink, + GstEvent * event); +static GstFlowReturn gst_inter_video_sink_preroll (GstBaseSink * sink, + GstBuffer * buffer); +static GstFlowReturn gst_inter_video_sink_render (GstBaseSink * sink, + GstBuffer * buffer); +static GstStateChangeReturn gst_inter_video_sink_async_play (GstBaseSink * + sink); +static gboolean gst_inter_video_sink_activate_pull (GstBaseSink * sink, + gboolean active); +static gboolean gst_inter_video_sink_unlock_stop (GstBaseSink * sink); + +enum +{ + PROP_0 +}; + +/* pad templates */ + +static GstStaticPadTemplate gst_inter_video_sink_sink_template = +GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420")) + ); + + +/* class initialization */ + +#define DEBUG_INIT(bla) \ + GST_DEBUG_CATEGORY_INIT (gst_inter_video_sink_debug_category, "intervideosink", 0, \ + "debug category for intervideosink element"); + +GST_BOILERPLATE_FULL (GstInterVideoSink, gst_inter_video_sink, GstBaseSink, + GST_TYPE_BASE_SINK, DEBUG_INIT); + +static void +gst_inter_video_sink_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_inter_video_sink_sink_template)); + + gst_element_class_set_details_simple (element_class, "FIXME Long name", + "Generic", "FIXME Description", "FIXME "); +} + +static void +gst_inter_video_sink_class_init (GstInterVideoSinkClass 
* klass) +{ + GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + GstBaseSinkClass *base_sink_class = GST_BASE_SINK_CLASS (klass); + + gobject_class->set_property = gst_inter_video_sink_set_property; + gobject_class->get_property = gst_inter_video_sink_get_property; + gobject_class->dispose = gst_inter_video_sink_dispose; + gobject_class->finalize = gst_inter_video_sink_finalize; + base_sink_class->get_caps = GST_DEBUG_FUNCPTR (gst_inter_video_sink_get_caps); + base_sink_class->set_caps = GST_DEBUG_FUNCPTR (gst_inter_video_sink_set_caps); + if (0) + base_sink_class->buffer_alloc = + GST_DEBUG_FUNCPTR (gst_inter_video_sink_buffer_alloc); + base_sink_class->get_times = + GST_DEBUG_FUNCPTR (gst_inter_video_sink_get_times); + base_sink_class->start = GST_DEBUG_FUNCPTR (gst_inter_video_sink_start); + base_sink_class->stop = GST_DEBUG_FUNCPTR (gst_inter_video_sink_stop); + base_sink_class->unlock = GST_DEBUG_FUNCPTR (gst_inter_video_sink_unlock); + if (0) + base_sink_class->event = GST_DEBUG_FUNCPTR (gst_inter_video_sink_event); + //if (0) + base_sink_class->preroll = GST_DEBUG_FUNCPTR (gst_inter_video_sink_preroll); + base_sink_class->render = GST_DEBUG_FUNCPTR (gst_inter_video_sink_render); + if (0) + base_sink_class->async_play = + GST_DEBUG_FUNCPTR (gst_inter_video_sink_async_play); + if (0) + base_sink_class->activate_pull = + GST_DEBUG_FUNCPTR (gst_inter_video_sink_activate_pull); + base_sink_class->unlock_stop = + GST_DEBUG_FUNCPTR (gst_inter_video_sink_unlock_stop); + +} + +static void +gst_inter_video_sink_init (GstInterVideoSink * intervideosink, + GstInterVideoSinkClass * intervideosink_class) +{ + + intervideosink->sinkpad = + gst_pad_new_from_static_template (&gst_inter_video_sink_sink_template, + "sink"); + + intervideosink->surface = gst_inter_surface_get ("default"); +} + +void +gst_inter_video_sink_set_property (GObject * object, guint property_id, + const GValue * value, GParamSpec * pspec) +{ + /* GstInterVideoSink *intervideosink = GST_INTER_VIDEO_SINK (object); */ + + switch (property_id) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); + break; + } +} + +void +gst_inter_video_sink_get_property (GObject * object, guint property_id, + GValue * value, GParamSpec * pspec) +{ + /* GstInterVideoSink *intervideosink = GST_INTER_VIDEO_SINK (object); */ + + switch (property_id) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); + break; + } +} + +void +gst_inter_video_sink_dispose (GObject * object) +{ + /* GstInterVideoSink *intervideosink = GST_INTER_VIDEO_SINK (object); */ + + /* clean up as possible. 
may be called multiple times */ + + G_OBJECT_CLASS (parent_class)->dispose (object); +} + +void +gst_inter_video_sink_finalize (GObject * object) +{ + /* GstInterVideoSink *intervideosink = GST_INTER_VIDEO_SINK (object); */ + + /* clean up object here */ + + G_OBJECT_CLASS (parent_class)->finalize (object); +} + + + +static GstCaps * +gst_inter_video_sink_get_caps (GstBaseSink * sink) +{ + + return NULL; +} + +static gboolean +gst_inter_video_sink_set_caps (GstBaseSink * sink, GstCaps * caps) +{ + + return TRUE; +} + +static GstFlowReturn +gst_inter_video_sink_buffer_alloc (GstBaseSink * sink, guint64 offset, + guint size, GstCaps * caps, GstBuffer ** buf) +{ + + return GST_FLOW_ERROR; +} + +static void +gst_inter_video_sink_get_times (GstBaseSink * sink, GstBuffer * buffer, + GstClockTime * start, GstClockTime * end) +{ + GstInterVideoSink *intervideosink = GST_INTER_VIDEO_SINK (sink); + + if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer)) { + *start = GST_BUFFER_TIMESTAMP (buffer); + if (GST_BUFFER_DURATION_IS_VALID (buffer)) { + *end = *start + GST_BUFFER_DURATION (buffer); + } else { + if (intervideosink->fps_n > 0) { + *end = *start + + gst_util_uint64_scale_int (GST_SECOND, intervideosink->fps_d, + intervideosink->fps_n); + } + } + } + + +} + +static gboolean +gst_inter_video_sink_start (GstBaseSink * sink) +{ + + return TRUE; +} + +static gboolean +gst_inter_video_sink_stop (GstBaseSink * sink) +{ + GstInterVideoSink *intervideosink = GST_INTER_VIDEO_SINK (sink); + + g_mutex_lock (intervideosink->surface->mutex); + if (intervideosink->surface->video_buffer) { + gst_buffer_unref (intervideosink->surface->video_buffer); + } + intervideosink->surface->video_buffer = NULL; + g_mutex_unlock (intervideosink->surface->mutex); + + return TRUE; +} + +static gboolean +gst_inter_video_sink_unlock (GstBaseSink * sink) +{ + + return TRUE; +} + +static gboolean +gst_inter_video_sink_event (GstBaseSink * sink, GstEvent * event) +{ + + return TRUE; +} + +static GstFlowReturn +gst_inter_video_sink_preroll (GstBaseSink * sink, GstBuffer * buffer) +{ + //return gst_inter_video_sink_render (sink, buffer); + + return GST_FLOW_OK; +} + +static GstFlowReturn +gst_inter_video_sink_render (GstBaseSink * sink, GstBuffer * buffer) +{ + GstInterVideoSink *intervideosink = GST_INTER_VIDEO_SINK (sink); + + g_mutex_lock (intervideosink->surface->mutex); + if (intervideosink->surface->video_buffer) { + gst_buffer_unref (intervideosink->surface->video_buffer); + } + intervideosink->surface->video_buffer = gst_buffer_ref (buffer); + intervideosink->surface->video_buffer_count = 0; + g_mutex_unlock (intervideosink->surface->mutex); + + return GST_FLOW_OK; +} + +static GstStateChangeReturn +gst_inter_video_sink_async_play (GstBaseSink * sink) +{ + + return GST_STATE_CHANGE_SUCCESS; +} + +static gboolean +gst_inter_video_sink_activate_pull (GstBaseSink * sink, gboolean active) +{ + + return TRUE; +} + +static gboolean +gst_inter_video_sink_unlock_stop (GstBaseSink * sink) +{ + + return TRUE; +} diff --git a/gst/inter/gstintervideosink.h b/gst/inter/gstintervideosink.h new file mode 100644 index 0000000000..00bbd6e857 --- /dev/null +++ b/gst/inter/gstintervideosink.h @@ -0,0 +1,58 @@ +/* GStreamer + * Copyright (C) 2011 David A. Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. 
+ * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef _GST_INTER_VIDEO_SINK_H_ +#define _GST_INTER_VIDEO_SINK_H_ + +#include +#include "gstintersurface.h" + +G_BEGIN_DECLS + +#define GST_TYPE_INTER_VIDEO_SINK (gst_inter_video_sink_get_type()) +#define GST_INTER_VIDEO_SINK(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_INTER_VIDEO_SINK,GstInterVideoSink)) +#define GST_INTER_VIDEO_SINK_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_INTER_VIDEO_SINK,GstInterVideoSinkClass)) +#define GST_IS_INTER_VIDEO_SINK(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_INTER_VIDEO_SINK)) +#define GST_IS_INTER_VIDEO_SINK_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_INTER_VIDEO_SINK)) + +typedef struct _GstInterVideoSink GstInterVideoSink; +typedef struct _GstInterVideoSinkClass GstInterVideoSinkClass; + +struct _GstInterVideoSink +{ + GstBaseSink base_intervideosink; + + GstInterSurface *surface; + + GstPad *sinkpad; + + int fps_n; + int fps_d; +}; + +struct _GstInterVideoSinkClass +{ + GstBaseSinkClass base_intervideosink_class; +}; + +GType gst_inter_video_sink_get_type (void); + +G_END_DECLS + +#endif diff --git a/gst/inter/gstintervideosrc.c b/gst/inter/gstintervideosrc.c new file mode 100644 index 0000000000..04d9655305 --- /dev/null +++ b/gst/inter/gstintervideosrc.c @@ -0,0 +1,510 @@ +/* GStreamer + * Copyright (C) 2011 David A. Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin Street, Suite 500, + * Boston, MA 02110-1335, USA. + */ +/** + * SECTION:element-gstintervideosrc + * + * The intervideosrc element does FIXME stuff. + * + * + * Example launch line + * |[ + * gst-launch -v fakesrc ! intervideosrc ! FIXME ! fakesink + * ]| + * FIXME Describe what the pipeline does. 
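+ *
+ * A plausible example, assumed from the code in this patch rather than taken
+ * from existing documentation: intervideosrc reads video frames out of the
+ * same shared surface that intervideosink writes into, so the two elements
+ * are typically used in separate pipelines instead of being linked directly.
+ * Mirroring the pipelines used by gstintertest.c in this patch:
+ * |[
+ * gst-launch videotestsrc ! video/x-raw-yuv,format=(fourcc)I420,width=320,height=240 ! intervideosink
+ * gst-launch intervideosrc ! queue ! xvimagesink
+ * ]|
+ * The first pipeline pushes frames into the shared surface; the second pulls
+ * them back out and displays them. Element names and caps here follow the
+ * test code and are illustrative only.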
+ * + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include +#include +#include "gstintervideosrc.h" +#include + +GST_DEBUG_CATEGORY_STATIC (gst_inter_video_src_debug_category); +#define GST_CAT_DEFAULT gst_inter_video_src_debug_category + +/* prototypes */ + + +static void gst_inter_video_src_set_property (GObject * object, + guint property_id, const GValue * value, GParamSpec * pspec); +static void gst_inter_video_src_get_property (GObject * object, + guint property_id, GValue * value, GParamSpec * pspec); +static void gst_inter_video_src_dispose (GObject * object); +static void gst_inter_video_src_finalize (GObject * object); + +static GstCaps *gst_inter_video_src_get_caps (GstBaseSrc * src); +static gboolean gst_inter_video_src_set_caps (GstBaseSrc * src, GstCaps * caps); +static gboolean gst_inter_video_src_negotiate (GstBaseSrc * src); +static gboolean gst_inter_video_src_newsegment (GstBaseSrc * src); +static gboolean gst_inter_video_src_start (GstBaseSrc * src); +static gboolean gst_inter_video_src_stop (GstBaseSrc * src); +static void +gst_inter_video_src_get_times (GstBaseSrc * src, GstBuffer * buffer, + GstClockTime * start, GstClockTime * end); +static gboolean gst_inter_video_src_is_seekable (GstBaseSrc * src); +static gboolean gst_inter_video_src_unlock (GstBaseSrc * src); +static gboolean gst_inter_video_src_event (GstBaseSrc * src, GstEvent * event); +static GstFlowReturn +gst_inter_video_src_create (GstBaseSrc * src, guint64 offset, guint size, + GstBuffer ** buf); +static gboolean gst_inter_video_src_do_seek (GstBaseSrc * src, + GstSegment * segment); +static gboolean gst_inter_video_src_query (GstBaseSrc * src, GstQuery * query); +static gboolean gst_inter_video_src_check_get_range (GstBaseSrc * src); +static void gst_inter_video_src_fixate (GstBaseSrc * src, GstCaps * caps); +static gboolean gst_inter_video_src_unlock_stop (GstBaseSrc * src); +static gboolean +gst_inter_video_src_prepare_seek_segment (GstBaseSrc * src, GstEvent * seek, + GstSegment * segment); + +enum +{ + PROP_0 +}; + +/* pad templates */ + +static GstStaticPadTemplate gst_inter_video_src_src_template = +GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("I420")) + ); + + +/* class initialization */ + +#define DEBUG_INIT(bla) \ + GST_DEBUG_CATEGORY_INIT (gst_inter_video_src_debug_category, "intervideosrc", 0, \ + "debug category for intervideosrc element"); + +GST_BOILERPLATE_FULL (GstInterVideoSrc, gst_inter_video_src, GstBaseSrc, + GST_TYPE_BASE_SRC, DEBUG_INIT); + +static void +gst_inter_video_src_base_init (gpointer g_class) +{ + GstElementClass *element_class = GST_ELEMENT_CLASS (g_class); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&gst_inter_video_src_src_template)); + + gst_element_class_set_details_simple (element_class, "FIXME Long name", + "Generic", "FIXME Description", "FIXME "); +} + +static void +gst_inter_video_src_class_init (GstInterVideoSrcClass * klass) +{ + GObjectClass *gobject_class = G_OBJECT_CLASS (klass); + GstBaseSrcClass *base_src_class = GST_BASE_SRC_CLASS (klass); + + gobject_class->set_property = gst_inter_video_src_set_property; + gobject_class->get_property = gst_inter_video_src_get_property; + gobject_class->dispose = gst_inter_video_src_dispose; + gobject_class->finalize = gst_inter_video_src_finalize; + if (0) + base_src_class->get_caps = GST_DEBUG_FUNCPTR (gst_inter_video_src_get_caps); + base_src_class->set_caps = GST_DEBUG_FUNCPTR 
(gst_inter_video_src_set_caps); + if (0) + base_src_class->negotiate = + GST_DEBUG_FUNCPTR (gst_inter_video_src_negotiate); + if (0) + base_src_class->newsegment = + GST_DEBUG_FUNCPTR (gst_inter_video_src_newsegment); + base_src_class->start = GST_DEBUG_FUNCPTR (gst_inter_video_src_start); + base_src_class->stop = GST_DEBUG_FUNCPTR (gst_inter_video_src_stop); + base_src_class->get_times = GST_DEBUG_FUNCPTR (gst_inter_video_src_get_times); + if (0) + base_src_class->is_seekable = + GST_DEBUG_FUNCPTR (gst_inter_video_src_is_seekable); + base_src_class->unlock = GST_DEBUG_FUNCPTR (gst_inter_video_src_unlock); + base_src_class->event = GST_DEBUG_FUNCPTR (gst_inter_video_src_event); + base_src_class->create = GST_DEBUG_FUNCPTR (gst_inter_video_src_create); + if (0) + base_src_class->do_seek = GST_DEBUG_FUNCPTR (gst_inter_video_src_do_seek); + base_src_class->query = GST_DEBUG_FUNCPTR (gst_inter_video_src_query); + if (0) + base_src_class->check_get_range = + GST_DEBUG_FUNCPTR (gst_inter_video_src_check_get_range); + base_src_class->fixate = GST_DEBUG_FUNCPTR (gst_inter_video_src_fixate); + if (0) + base_src_class->unlock_stop = + GST_DEBUG_FUNCPTR (gst_inter_video_src_unlock_stop); + if (0) + base_src_class->prepare_seek_segment = + GST_DEBUG_FUNCPTR (gst_inter_video_src_prepare_seek_segment); + + +} + +static void +gst_inter_video_src_init (GstInterVideoSrc * intervideosrc, + GstInterVideoSrcClass * intervideosrc_class) +{ + + intervideosrc->srcpad = + gst_pad_new_from_static_template (&gst_inter_video_src_src_template, + "src"); + + gst_base_src_set_format (GST_BASE_SRC (intervideosrc), GST_FORMAT_TIME); + gst_base_src_set_live (GST_BASE_SRC (intervideosrc), TRUE); + + intervideosrc->surface = gst_inter_surface_get ("default"); +} + +void +gst_inter_video_src_set_property (GObject * object, guint property_id, + const GValue * value, GParamSpec * pspec) +{ + /* GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (object); */ + + switch (property_id) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); + break; + } +} + +void +gst_inter_video_src_get_property (GObject * object, guint property_id, + GValue * value, GParamSpec * pspec) +{ + /* GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (object); */ + + switch (property_id) { + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); + break; + } +} + +void +gst_inter_video_src_dispose (GObject * object) +{ + /* GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (object); */ + + /* clean up as possible. 
may be called multiple times */ + + G_OBJECT_CLASS (parent_class)->dispose (object); +} + +void +gst_inter_video_src_finalize (GObject * object) +{ + /* GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (object); */ + + /* clean up object here */ + + G_OBJECT_CLASS (parent_class)->finalize (object); +} + + +static GstCaps * +gst_inter_video_src_get_caps (GstBaseSrc * src) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + + GST_DEBUG_OBJECT (intervideosrc, "get_caps"); + + return NULL; +} + +static gboolean +gst_inter_video_src_set_caps (GstBaseSrc * src, GstCaps * caps) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + gboolean ret; + GstVideoFormat format; + int width, height; + int fps_n, fps_d; + + GST_DEBUG_OBJECT (intervideosrc, "set_caps"); + + ret = gst_video_format_parse_caps (caps, &format, &width, &height); + ret &= gst_video_parse_caps_framerate (caps, &fps_n, &fps_d); + + if (ret) { + intervideosrc->format = format; + intervideosrc->width = width; + intervideosrc->height = height; + intervideosrc->fps_n = fps_n; + intervideosrc->fps_d = fps_d; + GST_DEBUG ("fps %d/%d", fps_n, fps_d); + } + + return ret; +} + +static gboolean +gst_inter_video_src_negotiate (GstBaseSrc * src) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + + GST_DEBUG_OBJECT (intervideosrc, "negotiate"); + + return TRUE; +} + +static gboolean +gst_inter_video_src_newsegment (GstBaseSrc * src) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + + GST_DEBUG_OBJECT (intervideosrc, "newsegment"); + + return TRUE; +} + +static gboolean +gst_inter_video_src_start (GstBaseSrc * src) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + + GST_DEBUG_OBJECT (intervideosrc, "start"); + + return TRUE; +} + +static gboolean +gst_inter_video_src_stop (GstBaseSrc * src) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + + GST_DEBUG_OBJECT (intervideosrc, "stop"); + + return TRUE; +} + +static void +gst_inter_video_src_get_times (GstBaseSrc * src, GstBuffer * buffer, + GstClockTime * start, GstClockTime * end) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + + GST_DEBUG_OBJECT (intervideosrc, "get_times"); + + /* for live sources, sync on the timestamp of the buffer */ + if (gst_base_src_is_live (src)) { + GstClockTime timestamp = GST_BUFFER_TIMESTAMP (buffer); + + if (GST_CLOCK_TIME_IS_VALID (timestamp)) { + /* get duration to calculate end time */ + GstClockTime duration = GST_BUFFER_DURATION (buffer); + + if (GST_CLOCK_TIME_IS_VALID (duration)) { + *end = timestamp + duration; + } + *start = timestamp; + } + } else { + *start = -1; + *end = -1; + } +} + +static gboolean +gst_inter_video_src_is_seekable (GstBaseSrc * src) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + + GST_DEBUG_OBJECT (intervideosrc, "is_seekable"); + + return FALSE; +} + +static gboolean +gst_inter_video_src_unlock (GstBaseSrc * src) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + + GST_DEBUG_OBJECT (intervideosrc, "unlock"); + + return TRUE; +} + +static gboolean +gst_inter_video_src_event (GstBaseSrc * src, GstEvent * event) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + + GST_DEBUG_OBJECT (intervideosrc, "event"); + + return TRUE; +} + +static GstFlowReturn +gst_inter_video_src_create (GstBaseSrc * src, guint64 offset, guint size, + GstBuffer ** buf) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + GstBuffer *buffer; + guint8 *data; + + 
GST_DEBUG_OBJECT (intervideosrc, "create"); + + buffer = NULL; + + g_mutex_lock (intervideosrc->surface->mutex); + if (intervideosrc->surface->video_buffer) { + buffer = gst_buffer_ref (intervideosrc->surface->video_buffer); + intervideosrc->surface->video_buffer_count++; + if (intervideosrc->surface->video_buffer_count >= 30) { + gst_buffer_unref (intervideosrc->surface->video_buffer); + intervideosrc->surface->video_buffer = NULL; + } + } + g_mutex_unlock (intervideosrc->surface->mutex); + + if (buffer == NULL) { + buffer = + gst_buffer_new_and_alloc (gst_video_format_get_size + (intervideosrc->format, intervideosrc->width, intervideosrc->height)); + + data = GST_BUFFER_DATA (buffer); + memset (data, 16, + gst_video_format_get_row_stride (intervideosrc->format, 0, + intervideosrc->width) * + gst_video_format_get_component_height (intervideosrc->format, 0, + intervideosrc->height)); + + memset (data + gst_video_format_get_component_offset (intervideosrc->format, + 1, intervideosrc->width, intervideosrc->height), + 128, + 2 * gst_video_format_get_row_stride (intervideosrc->format, 1, + intervideosrc->width) * + gst_video_format_get_component_height (intervideosrc->format, 1, + intervideosrc->height)); + +#if 0 + { + int i; + for (i = 0; i < 10000; i++) { + data[i] = g_random_int () & 0xff; + } + } +#endif + } + + buffer = gst_buffer_make_metadata_writable (buffer); + + GST_BUFFER_TIMESTAMP (buffer) = + gst_util_uint64_scale_int (GST_SECOND * intervideosrc->n_frames, + intervideosrc->fps_d, intervideosrc->fps_n); + GST_DEBUG_OBJECT (intervideosrc, "create ts %" GST_TIME_FORMAT, + GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer))); + GST_BUFFER_DURATION (buffer) = + gst_util_uint64_scale_int (GST_SECOND * (intervideosrc->n_frames + 1), + intervideosrc->fps_d, + intervideosrc->fps_n) - GST_BUFFER_TIMESTAMP (buffer); + GST_BUFFER_OFFSET (buffer) = intervideosrc->n_frames; + GST_BUFFER_OFFSET_END (buffer) = -1; + GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT); + if (intervideosrc->n_frames == 0) { + GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT); + } + gst_buffer_set_caps (buffer, GST_PAD_CAPS (GST_BASE_SRC_PAD (intervideosrc))); + intervideosrc->n_frames++; + + *buf = buffer; + + return GST_FLOW_OK; +} + +static gboolean +gst_inter_video_src_do_seek (GstBaseSrc * src, GstSegment * segment) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + + GST_DEBUG_OBJECT (intervideosrc, "do_seek"); + + return FALSE; +} + +static gboolean +gst_inter_video_src_query (GstBaseSrc * src, GstQuery * query) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + + GST_DEBUG_OBJECT (intervideosrc, "query"); + + return TRUE; +} + +static gboolean +gst_inter_video_src_check_get_range (GstBaseSrc * src) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + + GST_DEBUG_OBJECT (intervideosrc, "get_range"); + + return FALSE; +} + +static void +gst_inter_video_src_fixate (GstBaseSrc * src, GstCaps * caps) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + GstStructure *structure; + + GST_DEBUG_OBJECT (intervideosrc, "fixate"); + + structure = gst_caps_get_structure (caps, 0); + + gst_structure_fixate_field_nearest_int (structure, "width", 320); + gst_structure_fixate_field_nearest_int (structure, "height", 240); + gst_structure_fixate_field_nearest_fraction (structure, "framerate", 30, 1); + if (gst_structure_has_field (structure, "pixel-aspect-ratio")) + gst_structure_fixate_field_nearest_fraction (structure, + "pixel-aspect-ratio", 1, 1); + if 
(gst_structure_has_field (structure, "color-matrix")) + gst_structure_fixate_field_string (structure, "color-matrix", "sdtv"); + if (gst_structure_has_field (structure, "chroma-site")) + gst_structure_fixate_field_string (structure, "chroma-site", "mpeg2"); + + if (gst_structure_has_field (structure, "interlaced")) + gst_structure_fixate_field_boolean (structure, "interlaced", FALSE); + +} + +static gboolean +gst_inter_video_src_unlock_stop (GstBaseSrc * src) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + + GST_DEBUG_OBJECT (intervideosrc, "stop"); + + return TRUE; +} + +static gboolean +gst_inter_video_src_prepare_seek_segment (GstBaseSrc * src, GstEvent * seek, + GstSegment * segment) +{ + GstInterVideoSrc *intervideosrc = GST_INTER_VIDEO_SRC (src); + + GST_DEBUG_OBJECT (intervideosrc, "seek_segment"); + + return FALSE; +} diff --git a/gst/inter/gstintervideosrc.h b/gst/inter/gstintervideosrc.h new file mode 100644 index 0000000000..909410a186 --- /dev/null +++ b/gst/inter/gstintervideosrc.h @@ -0,0 +1,62 @@ +/* GStreamer + * Copyright (C) 2011 David A. Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifndef _GST_INTER_VIDEO_SRC_H_ +#define _GST_INTER_VIDEO_SRC_H_ + +#include +#include +#include "gstintersurface.h" + +G_BEGIN_DECLS + +#define GST_TYPE_INTER_VIDEO_SRC (gst_inter_video_src_get_type()) +#define GST_INTER_VIDEO_SRC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_INTER_VIDEO_SRC,GstInterVideoSrc)) +#define GST_INTER_VIDEO_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_INTER_VIDEO_SRC,GstInterVideoSrcClass)) +#define GST_IS_INTER_VIDEO_SRC(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_INTER_VIDEO_SRC)) +#define GST_IS_INTER_VIDEO_SRC_CLASS(obj) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_INTER_VIDEO_SRC)) + +typedef struct _GstInterVideoSrc GstInterVideoSrc; +typedef struct _GstInterVideoSrcClass GstInterVideoSrcClass; + +struct _GstInterVideoSrc +{ + GstBaseSrc base_intervideosrc; + + GstPad *srcpad; + GstInterSurface *surface; + + GstVideoFormat format; + int fps_n; + int fps_d; + int n_frames; + int width; + int height; +}; + +struct _GstInterVideoSrcClass +{ + GstBaseSrcClass base_intervideosrc_class; +}; + +GType gst_inter_video_src_get_type (void); + +G_END_DECLS + +#endif diff --git a/gst/mpeg4videoparse/mpeg4parse.c b/gst/mpeg4videoparse/mpeg4parse.c index bfdb6e4378..50fd3c7482 100644 --- a/gst/mpeg4videoparse/mpeg4parse.c +++ b/gst/mpeg4videoparse/mpeg4parse.c @@ -47,7 +47,7 @@ GST_DEBUG_CATEGORY_EXTERN (mpeg4v_parse_debug); static inline gboolean next_start_code (GstBitReader * b) { - guint32 bits; + guint32 bits = 0; GET_BITS (b, 1, &bits); if (bits != 0) @@ -69,7 +69,7 @@ static inline gboolean skip_user_data (GstBitReader * bs, guint32 * bits) { while (*bits == MPEG4_USER_DATA_STARTCODE_MARKER) { - guint32 b; + guint32 b = 0; do { GET_BITS (bs, 8, &b); @@ -205,7 +205,7 @@ failed: static gboolean gst_mpeg4_params_parse_vos (MPEG4Params * params, GstBitReader * br) { - guint32 bits; + guint32 bits = 0; GET_BITS (br, 32, &bits); if (bits != MPEG4_VOS_STARTCODE_MARKER) diff --git a/gst/mpegdemux/mpegtspacketizer.c b/gst/mpegdemux/mpegtspacketizer.c index 94ffd9472d..a9a69c9d3c 100644 --- a/gst/mpegdemux/mpegtspacketizer.c +++ b/gst/mpegdemux/mpegtspacketizer.c @@ -336,7 +336,7 @@ static gboolean mpegts_packetizer_parse_descriptors (MpegTSPacketizer * packetizer, guint8 ** buffer, guint8 * buffer_end, GValueArray * descriptors) { - guint8 tag, length; + guint8 length; guint8 *data; GValue value = { 0 }; GString *desc; @@ -344,7 +344,7 @@ mpegts_packetizer_parse_descriptors (MpegTSPacketizer * packetizer, data = *buffer; while (data < buffer_end) { - tag = *data++; + data++; /* skip tag */ length = *data++; if (data + length > buffer_end) { @@ -353,7 +353,7 @@ mpegts_packetizer_parse_descriptors (MpegTSPacketizer * packetizer, goto error; } - /* include tag and length */ + /* include length */ desc = g_string_new_len ((gchar *) data - 2, length + 2); data += length; /* G_TYPE_GSTRING is a GBoxed type and is used so properly marshalled from @@ -1285,7 +1285,6 @@ mpegts_packetizer_parse_sdt (MpegTSPacketizer * packetizer, guint16 transport_stream_id, original_network_id, service_id; guint tmp; guint sdt_info_length; - gboolean EIT_schedule, EIT_present_following; guint8 running_status; gboolean scrambled; guint descriptors_loop_length; @@ -1356,8 +1355,8 @@ mpegts_packetizer_parse_sdt (MpegTSPacketizer * packetizer, service_id = GST_READ_UINT16_BE (data); data += 2; - EIT_schedule = ((*data & 0x02) == 2); - EIT_present_following = (*data & 0x01) == 1; + /* EIT_schedule = ((*data & 0x02) == 2); */ + /* EIT_present_following = (*data & 0x01) == 1; */ 
data += 1; tmp = GST_READ_UINT16_BE (data); @@ -1489,7 +1488,6 @@ mpegts_packetizer_parse_eit (MpegTSPacketizer * packetizer, guint transport_stream_id, original_network_id; gboolean free_ca_mode; guint event_id, running_status; - guint64 start_and_duration; guint16 mjd; guint year, month, day, hour, minute, second; guint duration; @@ -1566,7 +1564,7 @@ mpegts_packetizer_parse_eit (MpegTSPacketizer * packetizer, event_id = GST_READ_UINT16_BE (data); data += 2; - start_and_duration = GST_READ_UINT64_BE (data); + /* start_and_duration = GST_READ_UINT64_BE (data); */ duration_ptr = data + 5; utc_ptr = data + 2; mjd = GST_READ_UINT16_BE (data); @@ -1710,7 +1708,7 @@ mpegts_packetizer_parse_eit (MpegTSPacketizer * packetizer, GValue component_value = { 0 }; gint widescreen = 0; /* 0 for 4:3, 1 for 16:9, 2 for > 16:9 */ gint freq = 25; /* 25 or 30 measured in Hertz */ - gboolean highdef = FALSE; + /* gboolean highdef = FALSE; */ gboolean panvectors = FALSE; const gchar *comptype = ""; @@ -1757,46 +1755,46 @@ mpegts_packetizer_parse_eit (MpegTSPacketizer * packetizer, break; case 0x09: widescreen = 0; - highdef = TRUE; + /* highdef = TRUE; */ freq = 25; break; case 0x0A: widescreen = 1; - highdef = TRUE; + /* highdef = TRUE; */ panvectors = TRUE; freq = 25; break; case 0x0B: widescreen = 1; - highdef = TRUE; + /* highdef = TRUE; */ panvectors = FALSE; freq = 25; break; case 0x0C: widescreen = 2; - highdef = TRUE; + /* highdef = TRUE; */ freq = 25; break; case 0x0D: widescreen = 0; - highdef = TRUE; + /* highdef = TRUE; */ freq = 30; break; case 0x0E: widescreen = 1; - highdef = TRUE; + /* highdef = TRUE; */ panvectors = TRUE; freq = 30; break; case 0x0F: widescreen = 1; - highdef = TRUE; + /* highdef = TRUE; */ panvectors = FALSE; freq = 30; break; case 0x10: widescreen = 2; - highdef = TRUE; + /* highdef = TRUE; */ freq = 30; break; } @@ -2386,14 +2384,15 @@ get_encoding (const gchar * text, guint * start_text, gboolean * is_multibyte) *start_text = 1; *is_multibyte = TRUE; } else if (firstbyte == 0x12) { - // That's korean encoding. - // The spec says it's encoded in KSC 5601, but iconv only knows KSC 5636. - // Couldn't find any information about either of them. + /* That's korean encoding. + * The spec says it's encoded in KSC 5601, but iconv only knows KSC 5636. + * Couldn't find any information about either of them. 
+ */ encoding = NULL; *start_text = 1; *is_multibyte = TRUE; } else { - // reserved + /* reserved */ encoding = NULL; *start_text = 0; *is_multibyte = FALSE; @@ -2443,7 +2442,7 @@ convert_to_utf8 (const gchar * text, gint length, guint start, /* skip it */ break; case 0xE08A:{ - guint8 nl[] = { 0x0A, 0x00 }; // new line + guint8 nl[] = { 0x0A, 0x00 }; /* new line */ g_byte_array_append (sb, nl, 2); break; } @@ -2464,7 +2463,7 @@ convert_to_utf8 (const gchar * text, gint length, guint start, /* skip it */ break; case 0xE08A:{ - guint8 nl[] = { 0x0A, 0x00 }; // new line + guint8 nl[] = { 0x0A, 0x00 }; /* new line */ g_byte_array_append (sb, nl, 2); break; } diff --git a/gst/mpegtsdemux/Makefile.am b/gst/mpegtsdemux/Makefile.am index 7d3e663256..55b57d9600 100644 --- a/gst/mpegtsdemux/Makefile.am +++ b/gst/mpegtsdemux/Makefile.am @@ -7,7 +7,8 @@ libgstmpegtsdemux_la_SOURCES = \ mpegtspacketizer.c \ mpegtsparse.c \ payload_parsers.c \ - tsdemux.c + tsdemux.c \ + pesparse.c libgstmpegtsdemux_la_CFLAGS = \ $(GST_PLUGINS_BAD_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) \ @@ -25,7 +26,8 @@ noinst_HEADERS = \ mpegtspacketizer.h \ mpegtsparse.h \ payload_parsers.h \ - tsdemux.h + tsdemux.h \ + pesparse.h Android.mk: Makefile.am $(BUILT_SOURCES) androgenizer \ diff --git a/gst/mpegtsdemux/gstmpegdefs.h b/gst/mpegtsdemux/gstmpegdefs.h index 60ab5800a1..be6209aaf7 100644 --- a/gst/mpegtsdemux/gstmpegdefs.h +++ b/gst/mpegtsdemux/gstmpegdefs.h @@ -44,7 +44,11 @@ * 1111 0111 ITU-T Rec. H.222.1 type D * 1111 1000 ITU-T Rec. H.222.1 type E * 1111 1001 ancillary_stream - * 1111 1010 E 1111 1110 reserved data stream + * 1111 1010 ISO/IEC 14496-1_SL-packetized_stream + * 1111 1011 ISO/IEC 14496-1_FlexMux_stream + * 1111 1100 metadata stream + * 1111 1101 extended_stream_id + * 1111 1110 reserved data stream * 1111 1111 program_stream_directory */ @@ -69,9 +73,9 @@ #define ID_ITU_TREC_H222_TYPE_D_STREAM 0x000001F7 #define ID_ITU_TREC_H222_TYPE_E_STREAM 0x000001F8 #define ID_ANCILLARY_STREAM 0x000001F9 -#define ID_RESERVED_STREAM_1 0x000001FA -#define ID_RESERVED_STREAM_2 0x000001FB -#define ID_EXTENDED_METADATA 0x000001FC +#define ID_14496_1_SL_PACKETIZED_STREAM 0x000001FA +#define ID_14496_1_SL_FLEXMUX_STREAM 0x000001FB +#define ID_METADATA_STREAM 0x000001FC #define ID_EXTENDED_STREAM_ID 0x000001FD #define ID_RESERVED_STREAM_3 0x000001FE #define ID_PROGRAM_STREAM_DIRECTORY 0x000001FF diff --git a/gst/mpegtsdemux/mpegtsbase.c b/gst/mpegtsdemux/mpegtsbase.c index 866f0d4ccd..2a81f75939 100644 --- a/gst/mpegtsdemux/mpegtsbase.c +++ b/gst/mpegtsdemux/mpegtsbase.c @@ -192,11 +192,11 @@ mpegts_base_reset (MpegTSBase * base) MpegTSBaseClass *klass = GST_MPEGTS_BASE_GET_CLASS (base); mpegts_packetizer_clear (base->packetizer); - memset (base->is_pes, 0, 8192); - memset (base->known_psi, 0, 8192); + memset (base->is_pes, 0, 1024); + memset (base->known_psi, 0, 1024); /* PAT */ - base->known_psi[0] = TRUE; + MPEGTS_BIT_SET (base->known_psi, 0); /* FIXME : Commenting the Following lines is to be in sync with the following * commit @@ -210,6 +210,12 @@ mpegts_base_reset (MpegTSBase * base) /* base->pat = NULL; */ /* pmt pids will be added and removed dynamically */ + gst_segment_init (&base->segment, GST_FORMAT_UNDEFINED); + + base->mode = BASE_MODE_STREAMING; + base->seen_pat = FALSE; + base->first_pat_offset = -1; + if (klass->reset) klass->reset (base); } @@ -232,14 +238,12 @@ mpegts_base_init (MpegTSBase * base, MpegTSBaseClass * klass) base->programs = g_hash_table_new_full (g_direct_hash, g_direct_equal, NULL, 
(GDestroyNotify) mpegts_base_free_program); - base->is_pes = g_new0 (gboolean, 8192); - base->known_psi = g_new0 (gboolean, 8192); - mpegts_base_reset (base); + base->is_pes = g_new0 (guint8, 1024); + base->known_psi = g_new0 (guint8, 1024); base->program_size = sizeof (MpegTSBaseProgram); base->stream_size = sizeof (MpegTSBaseStream); - base->mode = BASE_MODE_STREAMING; - base->first_pat_offset = -1; + mpegts_base_reset (base); } static void @@ -326,6 +330,35 @@ mpegts_get_descriptor_from_stream (MpegTSBaseStream * stream, guint8 tag) return retval; } +typedef struct +{ + gboolean res; + guint16 pid; +} PIDLookup; + +static void +foreach_pid_in_program (gpointer key, MpegTSBaseProgram * program, + PIDLookup * lookup) +{ + if (!program->active) + return; + if (program->streams[lookup->pid]) + lookup->res = TRUE; +} + +static gboolean +mpegts_pid_in_active_programs (MpegTSBase * base, guint16 pid) +{ + PIDLookup lookup; + + lookup.res = FALSE; + lookup.pid = pid; + g_hash_table_foreach (base->programs, (GHFunc) foreach_pid_in_program, + &lookup); + + return lookup.res; +} + /* returns NULL if no matching descriptor found * * otherwise returns a descriptor that needs to * * be freed */ @@ -358,12 +391,15 @@ mpegts_get_descriptor_from_program (MpegTSBaseProgram * program, guint8 tag) return retval; } -MpegTSBaseProgram * -mpegts_base_add_program (MpegTSBase * base, +static MpegTSBaseProgram * +mpegts_base_new_program (MpegTSBase * base, gint program_number, guint16 pmt_pid) { MpegTSBaseProgram *program; + GST_DEBUG_OBJECT (base, "program_number : %d, pmt_pid : %d", + program_number, pmt_pid); + program = g_malloc0 (base->program_size); program->program_number = program_number; program->pmt_pid = pmt_pid; @@ -371,6 +407,23 @@ mpegts_base_add_program (MpegTSBase * base, program->streams = g_new0 (MpegTSBaseStream *, 0x2000); program->patcount = 0; + return program; +} + +MpegTSBaseProgram * +mpegts_base_add_program (MpegTSBase * base, + gint program_number, guint16 pmt_pid) +{ + MpegTSBaseProgram *program; + + GST_DEBUG_OBJECT (base, "program_number : %d, pmt_pid : %d", + program_number, pmt_pid); + + program = mpegts_base_new_program (base, program_number, pmt_pid); + + /* Mark the PMT PID as being a known PSI PID */ + MPEGTS_BIT_SET (base->known_psi, pmt_pid); + g_hash_table_insert (base->programs, GINT_TO_POINTER (program_number), program); @@ -388,53 +441,34 @@ mpegts_base_get_program (MpegTSBase * base, gint program_number) return program; } -#if 0 -static GstPad * -mpegts_base_activate_program (MpegTSBase * base, MpegTSBaseProgram * program) +static MpegTSBaseProgram * +mpegts_base_steal_program (MpegTSBase * base, gint program_number) { - MpegTSBasePad *tspad; - gchar *pad_name; + MpegTSBaseProgram *program; - pad_name = g_strdup_printf ("program_%d", program->program_number); + program = (MpegTSBaseProgram *) g_hash_table_lookup (base->programs, + GINT_TO_POINTER ((gint) program_number)); - tspad = mpegts_base_create_tspad (base, pad_name); - tspad->program_number = program->program_number; - tspad->program = program; - program->tspad = tspad; - g_free (pad_name); - gst_pad_set_active (tspad->pad, TRUE); - program->active = TRUE; + if (program) + g_hash_table_steal (base->programs, + GINT_TO_POINTER ((gint) program_number)); - return tspad->pad; + return program; } -static GstPad * -mpegts_base_deactivate_program (MpegTSBase * base, MpegTSBaseProgram * program) -{ - MpegTSBasePad *tspad; - - tspad = program->tspad; - gst_pad_set_active (tspad->pad, FALSE); - program->active = FALSE; - - 
/* tspad will be destroyed in GstElementClass::pad_removed */ - - return tspad->pad; -} -#endif - - static void mpegts_base_free_program (MpegTSBaseProgram * program) { - guint i; + GList *tmp; if (program->pmt_info) gst_structure_free (program->pmt_info); - for (i = 0; i < 0x2000; i++) - if (program->streams[i]) - mpegts_base_free_stream (program->streams[i]); + for (tmp = program->stream_list; tmp; tmp = tmp->next) + mpegts_base_free_stream ((MpegTSBaseStream *) tmp->data); + if (program->stream_list) + g_list_free (program->stream_list); + g_free (program->streams); if (program->tags) @@ -443,12 +477,16 @@ mpegts_base_free_program (MpegTSBaseProgram * program) g_free (program); } +/* FIXME : This is being called by tsdemux::find_timestamps() + * We need to avoid re-entrant code like that */ void mpegts_base_remove_program (MpegTSBase * base, gint program_number) { MpegTSBaseProgram *program; MpegTSBaseClass *klass = GST_MPEGTS_BASE_GET_CLASS (base); + GST_DEBUG_OBJECT (base, "program_number : %d", program_number); + if (klass->program_stopped) { program = (MpegTSBaseProgram *) g_hash_table_lookup (base->programs, @@ -457,7 +495,6 @@ mpegts_base_remove_program (MpegTSBase * base, gint program_number) klass->program_stopped (base, program); } g_hash_table_remove (base->programs, GINT_TO_POINTER (program_number)); - } static MpegTSBaseStream * @@ -471,12 +508,18 @@ mpegts_base_program_add_stream (MpegTSBase * base, GST_DEBUG ("pid:0x%04x, stream_type:0x%03x, stream_info:%" GST_PTR_FORMAT, pid, stream_type, stream_info); + if (G_UNLIKELY (program->streams[pid])) { + GST_WARNING ("Stream already present !"); + return NULL; + } + stream = g_malloc0 (base->stream_size); stream->pid = pid; stream->stream_type = stream_type; stream->stream_info = stream_info; program->streams[pid] = stream; + program->stream_list = g_list_append (program->stream_list, stream); if (klass->stream_added) klass->stream_added (base, stream, program); @@ -494,48 +537,131 @@ void mpegts_base_program_remove_stream (MpegTSBase * base, MpegTSBaseProgram * program, guint16 pid) { - MpegTSBaseClass *klass = GST_MPEGTS_BASE_GET_CLASS (base); + MpegTSBaseClass *klass; + MpegTSBaseStream *stream = program->streams[pid]; + + GST_DEBUG ("pid:0x%04x", pid); + + if (G_UNLIKELY (stream == NULL)) { + /* Can happen if the PCR PID is the same as a audio/video PID */ + GST_DEBUG ("Stream already removed"); + return; + } + + klass = GST_MPEGTS_BASE_GET_CLASS (base); /* If subclass needs it, inform it of the stream we are about to remove */ if (klass->stream_removed) - klass->stream_removed (base, program->streams[pid]); + klass->stream_removed (base, stream); - mpegts_base_free_stream (program->streams[pid]); + program->stream_list = g_list_remove_all (program->stream_list, stream); + mpegts_base_free_stream (stream); program->streams[pid] = NULL; } static void -mpegts_base_deactivate_pmt (MpegTSBase * base, MpegTSBaseProgram * program) +mpegts_base_deactivate_program (MpegTSBase * base, MpegTSBaseProgram * program) { - gint i; + gint i, nbstreams; guint pid; - guint stream_type; GstStructure *stream; const GValue *streams; const GValue *value; MpegTSBaseClass *klass = GST_MPEGTS_BASE_GET_CLASS (base); + if (G_UNLIKELY (program->active == FALSE)) + return; + + GST_DEBUG_OBJECT (base, "Deactivating PMT"); + + program->active = FALSE; + if (program->pmt_info) { /* Inform subclasses we're deactivating this program */ if (klass->program_stopped) klass->program_stopped (base, program); streams = gst_structure_id_get_value (program->pmt_info, 
QUARK_STREAMS); + nbstreams = gst_value_list_get_size (streams); - for (i = 0; i < gst_value_list_get_size (streams); ++i) { + for (i = 0; i < nbstreams; ++i) { value = gst_value_list_get_value (streams, i); stream = g_value_get_boxed (value); - gst_structure_id_get (stream, QUARK_PID, G_TYPE_UINT, &pid, - QUARK_STREAM_TYPE, G_TYPE_UINT, &stream_type, NULL); + + gst_structure_id_get (stream, QUARK_PID, G_TYPE_UINT, &pid, NULL); mpegts_base_program_remove_stream (base, program, (guint16) pid); - base->is_pes[pid] = FALSE; + + /* Only unset the is_pes bit if the PID isn't used in any other active + * program */ + if (!mpegts_pid_in_active_programs (base, pid)) + MPEGTS_BIT_UNSET (base->is_pes, pid); } + /* remove pcr stream */ + /* FIXME : This might actually be shared with another stream ? */ mpegts_base_program_remove_stream (base, program, program->pcr_pid); - base->is_pes[program->pcr_pid] = FALSE; + if (!mpegts_pid_in_active_programs (base, program->pcr_pid)) + MPEGTS_BIT_UNSET (base->is_pes, program->pcr_pid); + + GST_DEBUG ("program stream_list is now %p", program->stream_list); } } +static void +mpegts_base_activate_program (MpegTSBase * base, MpegTSBaseProgram * program, + guint16 pmt_pid, GstStructure * pmt_info) +{ + guint i, nbstreams; + guint pcr_pid; + guint pid; + guint stream_type; + GstStructure *stream; + const GValue *new_streams; + const GValue *value; + MpegTSBaseClass *klass; + + if (G_UNLIKELY (program->active)) + return; + + GST_DEBUG ("Activating program %d", program->program_number); + + gst_structure_id_get (pmt_info, QUARK_PCR_PID, G_TYPE_UINT, &pcr_pid, NULL); + + /* activate new pmt */ + if (program->pmt_info) + gst_structure_free (program->pmt_info); + program->pmt_info = gst_structure_copy (pmt_info); + program->pmt_pid = pmt_pid; + program->pcr_pid = pcr_pid; + + new_streams = gst_structure_id_get_value (pmt_info, QUARK_STREAMS); + nbstreams = gst_value_list_get_size (new_streams); + + for (i = 0; i < nbstreams; ++i) { + value = gst_value_list_get_value (new_streams, i); + stream = g_value_get_boxed (value); + + gst_structure_id_get (stream, QUARK_PID, G_TYPE_UINT, &pid, + QUARK_STREAM_TYPE, G_TYPE_UINT, &stream_type, NULL); + MPEGTS_BIT_SET (base->is_pes, pid); + mpegts_base_program_add_stream (base, program, + (guint16) pid, (guint8) stream_type, stream); + + } + /* We add the PCR pid last. 
If that PID is already used by one of the media + * streams above, no new stream will be created */ + mpegts_base_program_add_stream (base, program, (guint16) pcr_pid, -1, NULL); + MPEGTS_BIT_SET (base->is_pes, pcr_pid); + + + program->active = TRUE; + + klass = GST_MPEGTS_BASE_GET_CLASS (base); + if (klass->program_started != NULL) + klass->program_started (base, program); + + GST_DEBUG_OBJECT (base, "new pmt %" GST_PTR_FORMAT, pmt_info); +} gboolean mpegts_base_is_psi (MpegTSBase * base, MpegTSPacketizerPacket * packet) @@ -551,11 +677,11 @@ mpegts_base_is_psi (MpegTSBase * base, MpegTSPacketizerPacket * packet) 0x72, 0x73, 0x7E, 0x7F, TABLE_ID_UNSET }; - if (base->known_psi[packet->pid]) + if (MPEGTS_BIT_IS_SET (base->known_psi, packet->pid)) retval = TRUE; /* check is it is a pes pid */ - if (base->is_pes[packet->pid]) + if (MPEGTS_BIT_IS_SET (base->is_pes, packet->pid)) return FALSE; if (!retval) { @@ -602,22 +728,32 @@ mpegts_base_apply_pat (MpegTSBase * base, GstStructure * pat_info) guint program_number; guint pid; MpegTSBaseProgram *program; - gint i; + gint i, nbprograms; const GValue *programs; - MpegTSBaseClass *klass = GST_MPEGTS_BASE_GET_CLASS (base); + + GST_INFO_OBJECT (base, "PAT %" GST_PTR_FORMAT, pat_info); + + /* Applying a new PAT does two things: + * * It adds the new programs to the list of programs this element handles + * and increments at the same time the number of times a program is referenced. + * + * * If there was a previously active PAT, It decrements the reference count + * of all program it used. If a program is no longer needed, it is removed. + */ old_pat = base->pat; base->pat = gst_structure_copy (pat_info); - GST_INFO_OBJECT (base, "PAT %" GST_PTR_FORMAT, pat_info); - gst_element_post_message (GST_ELEMENT_CAST (base), gst_message_new_element (GST_OBJECT (base), gst_structure_copy (pat_info))); - programs = gst_structure_id_get_value (pat_info, QUARK_PROGRAMS); + + GST_LOG ("Activating new Program Association Table"); /* activate the new table */ - for (i = 0; i < gst_value_list_get_size (programs); ++i) { + programs = gst_structure_id_get_value (pat_info, QUARK_PROGRAMS); + nbprograms = gst_value_list_get_size (programs); + for (i = 0; i < nbprograms; ++i) { value = gst_value_list_get_value (programs, i); program_info = g_value_get_boxed (value); @@ -626,30 +762,34 @@ mpegts_base_apply_pat (MpegTSBase * base, GstStructure * pat_info) program = mpegts_base_get_program (base, program_number); if (program) { + /* IF the program already existed, just check if the PMT PID changed */ if (program->pmt_pid != pid) { if (program->pmt_pid != G_MAXUINT16) { /* pmt pid changed */ /* FIXME: when this happens it may still be pmt pid of another * program, so setting to False may make it go through expensive * path in is_psi unnecessarily */ - base->known_psi[program->pmt_pid] = FALSE; + MPEGTS_BIT_UNSET (base->known_psi, program->pmt_pid); } program->pmt_pid = pid; - base->known_psi[pid] = TRUE; + MPEGTS_BIT_SET (base->known_psi, pid); } } else { - base->known_psi[pid] = TRUE; + /* Create a new program */ program = mpegts_base_add_program (base, program_number, pid); } + /* We mark this program as being referenced by one PAT */ program->patcount += 1; } if (old_pat) { /* deactivate the old table */ + GST_LOG ("Deactivating old Program Association Table"); programs = gst_structure_id_get_value (old_pat, QUARK_PROGRAMS); - for (i = 0; i < gst_value_list_get_size (programs); ++i) { + nbprograms = gst_value_list_get_size (programs); + for (i = 0; i < nbprograms; ++i) { 
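mpegts_base_deactivate_program() above only clears the is_pes bit when mpegts_pid_in_active_programs() reports that no other active program still uses the PID. That helper is not shown in this excerpt (it is presumably added elsewhere in the patch); a plausible sketch, assuming it simply walks the base->programs hash table and checks the new `active` flag together with the PID-indexed streams array, would be:

    /* Hypothetical sketch of the helper used by mpegts_base_deactivate_program()
     * above; the real implementation may differ. Returns TRUE if any *active*
     * program still declares a stream on @pid. */
    static gboolean
    mpegts_pid_in_active_programs (MpegTSBase * base, guint16 pid)
    {
      GHashTableIter iter;
      gpointer key, value;

      g_hash_table_iter_init (&iter, base->programs);
      while (g_hash_table_iter_next (&iter, &key, &value)) {
        MpegTSBaseProgram *program = (MpegTSBaseProgram *) value;

        if (program->active && program->streams[pid])
          return TRUE;
      }

      return FALSE;
    }
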
value = gst_value_list_get_value (programs, i); program_info = g_value_get_boxed (value); @@ -658,7 +798,7 @@ mpegts_base_apply_pat (MpegTSBase * base, GstStructure * pat_info) QUARK_PID, G_TYPE_UINT, &pid, NULL); program = mpegts_base_get_program (base, program_number); - if (program == NULL) { + if (G_UNLIKELY (program == NULL)) { GST_DEBUG_OBJECT (base, "broken PAT, duplicated entry for program %d", program_number); continue; @@ -671,93 +811,84 @@ mpegts_base_apply_pat (MpegTSBase * base, GstStructure * pat_info) GST_INFO_OBJECT (base, "PAT removing program %" GST_PTR_FORMAT, program_info); - if (klass->program_stopped) { - klass->program_stopped (base, program); - } - mpegts_base_deactivate_pmt (base, program); + mpegts_base_deactivate_program (base, program); mpegts_base_remove_program (base, program_number); /* FIXME: when this happens it may still be pmt pid of another * program, so setting to False may make it go through expensive * path in is_psi unnecessarily */ - base->known_psi[pid] = TRUE; + MPEGTS_BIT_SET (base->known_psi, pid); mpegts_packetizer_remove_stream (base->packetizer, pid); } gst_structure_free (old_pat); } -#if 0 - mpegts_base_sync_program_pads (base); -#endif } static void mpegts_base_apply_pmt (MpegTSBase * base, guint16 pmt_pid, GstStructure * pmt_info) { - MpegTSBaseProgram *program; + MpegTSBaseProgram *program, *old_program; guint program_number; - guint pcr_pid; - guint pid; - guint stream_type; - GstStructure *stream; - gint i; - const GValue *new_streams; - const GValue *value; - MpegTSBaseClass *klass = GST_MPEGTS_BASE_GET_CLASS (base); + gboolean deactivate_old_program = FALSE; - if (G_UNLIKELY (base->first_pat_offset == -1)) { + /* FIXME : not so sure this is valid anymore */ + if (G_UNLIKELY (base->seen_pat == FALSE)) { GST_WARNING ("Got pmt without pat first. Returning"); /* remove the stream since we won't get another PMT otherwise */ mpegts_packetizer_remove_stream (base->packetizer, pmt_pid); return; } - gst_structure_id_get (pmt_info, - QUARK_PROGRAM_NUMBER, G_TYPE_UINT, &program_number, - QUARK_PCR_PID, G_TYPE_UINT, &pcr_pid, NULL); - new_streams = gst_structure_id_get_value (pmt_info, QUARK_STREAMS); + gst_structure_id_get (pmt_info, QUARK_PROGRAM_NUMBER, G_TYPE_UINT, + &program_number, NULL); - program = mpegts_base_get_program (base, program_number); - if (program) { + GST_DEBUG ("Applying PMT (program_number:%d, pid:0x%04x)", + program_number, pmt_pid); + + /* In order for stream switching to happen properly in decodebin(2), + * we need to first add the new pads (i.e. activate the new program) + * before removing the old ones (i.e. deactivating the old program) + */ + + old_program = mpegts_base_get_program (base, program_number); + if (G_UNLIKELY (old_program == NULL)) + goto no_program; + + /* If the current program is active, this means we have a new program */ + if (old_program->active) { + old_program = mpegts_base_steal_program (base, program_number); + program = mpegts_base_new_program (base, program_number, pmt_pid); + g_hash_table_insert (base->programs, + GINT_TO_POINTER (program_number), program); + deactivate_old_program = TRUE; + } else + program = old_program; + + /* First activate program */ + mpegts_base_activate_program (base, program, pmt_pid, pmt_info); + + if (deactivate_old_program) { /* deactivate old pmt */ ; - mpegts_base_deactivate_pmt (base, program); - if (program->pmt_info) - gst_structure_free (program->pmt_info); - program->pmt_info = NULL; - } else { - /* no PAT?? 
*/ - base->known_psi[pmt_pid] = TRUE; - program = mpegts_base_add_program (base, program_number, pid); + mpegts_base_deactivate_program (base, old_program); + mpegts_base_free_program (old_program); } - /* activate new pmt */ - program->pmt_info = gst_structure_copy (pmt_info); - program->pmt_pid = pmt_pid; - program->pcr_pid = pcr_pid; - mpegts_base_program_add_stream (base, program, (guint16) pcr_pid, -1, NULL); - base->is_pes[pcr_pid] = TRUE; - - for (i = 0; i < gst_value_list_get_size (new_streams); ++i) { - value = gst_value_list_get_value (new_streams, i); - stream = g_value_get_boxed (value); - - gst_structure_id_get (stream, QUARK_PID, G_TYPE_UINT, &pid, - QUARK_STREAM_TYPE, G_TYPE_UINT, &stream_type, NULL); - base->is_pes[pid] = TRUE; - mpegts_base_program_add_stream (base, program, - (guint16) pid, (guint8) stream_type, stream); - - } - - if (klass->program_started != NULL) { - klass->program_started (base, program); - } - - GST_DEBUG_OBJECT (base, "new pmt %" GST_PTR_FORMAT, pmt_info); + /* if (program->pmt_info) */ + /* gst_structure_free (program->pmt_info); */ + /* program->pmt_info = NULL; */ gst_element_post_message (GST_ELEMENT_CAST (base), gst_message_new_element (GST_OBJECT (base), gst_structure_copy (pmt_info))); + + return; + +no_program: + { + GST_ERROR ("Attempted to apply a PMT on a program that wasn't created"); + return; + } } static void @@ -832,8 +963,8 @@ mpegts_base_handle_psi (MpegTSBase * base, MpegTSPacketizerSection * section) structure = mpegts_packetizer_parse_pat (base->packetizer, section); if (G_LIKELY (structure)) { mpegts_base_apply_pat (base, structure); - if (base->first_pat_offset == -1) { - + if (base->seen_pat == FALSE) { + base->seen_pat = TRUE; base->first_pat_offset = GST_BUFFER_OFFSET (section->buffer); GST_DEBUG ("First PAT offset: %" G_GUINT64_FORMAT, base->first_pat_offset); @@ -1003,11 +1134,28 @@ mpegts_base_get_tags_from_eit (MpegTSBase * base, GstStructure * eit_info) } } +static void +remove_each_program (gpointer key, MpegTSBaseProgram * program, + MpegTSBase * base) +{ + /* First deactivate it */ + mpegts_base_deactivate_program (base, program); + /* Then remove it */ + mpegts_base_remove_program (base, program->program_number); +} + +static gboolean +gst_mpegts_base_handle_eos (MpegTSBase * base) +{ + g_hash_table_foreach (base->programs, (GHFunc) remove_each_program, base); + /* finally remove */ + return TRUE; +} static gboolean mpegts_base_sink_event (GstPad * pad, GstEvent * event) { - gboolean res; + gboolean res = TRUE; MpegTSBase *base = GST_MPEGTS_BASE (gst_object_get_parent (GST_OBJECT (pad))); GST_WARNING_OBJECT (base, "Got event %s", @@ -1015,11 +1163,30 @@ mpegts_base_sink_event (GstPad * pad, GstEvent * event) switch (GST_EVENT_TYPE (event)) { case GST_EVENT_NEWSEGMENT: - /* FIXME : STORE NEW SEGMENT ! 
*/ + { + gboolean update; + gdouble rate, applied_rate; + GstFormat format; + gint64 start, stop, position; + + gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate, + &format, &start, &stop, &position); + GST_DEBUG_OBJECT (base, + "Segment update:%d, rate:%f, applied_rate:%f, format:%s", update, + rate, applied_rate, gst_format_get_name (format)); + GST_DEBUG_OBJECT (base, + " start:%" G_GINT64_FORMAT ", stop:%" G_GINT64_FORMAT + ", position:%" G_GINT64_FORMAT, start, stop, position); + gst_segment_set_newsegment_full (&base->segment, update, rate, + applied_rate, format, start, stop, position); gst_event_unref (event); - res = FALSE; + } + break; + case GST_EVENT_EOS: + res = gst_mpegts_base_handle_eos (base); break; case GST_EVENT_FLUSH_START: + gst_segment_init (&base->segment, GST_FORMAT_UNDEFINED); mpegts_packetizer_flush (base->packetizer); /* Passthrough */ default: @@ -1087,7 +1254,7 @@ mpegts_base_chain (GstPad * pad, GstBuffer * buf) /* we need to push section packet downstream */ res = mpegts_base_push (base, &packet, §ion); - } else if (base->is_pes[packet.pid]) { + } else if (MPEGTS_BIT_IS_SET (base->is_pes, packet.pid)) { /* push the packet downstream */ res = mpegts_base_push (base, &packet, NULL); } else diff --git a/gst/mpegtsdemux/mpegtsbase.h b/gst/mpegtsdemux/mpegtsbase.h index a8e66721a1..369078d476 100644 --- a/gst/mpegtsdemux/mpegtsbase.h +++ b/gst/mpegtsdemux/mpegtsbase.h @@ -64,17 +64,24 @@ struct _MpegTSBaseProgram guint16 pcr_pid; GstStructure *pmt_info; MpegTSBaseStream **streams; + GList *stream_list; gint patcount; /* Pending Tags for the program */ GstTagList *tags; guint event_id; + + /* TRUE if the program is currently being used */ + gboolean active; }; typedef enum { - BASE_MODE_SCANNING, - BASE_MODE_SEEKING, - BASE_MODE_STREAMING, + /* PULL MODE */ + BASE_MODE_SCANNING, /* Looking for PAT/PMT */ + BASE_MODE_SEEKING, /* Seeking */ + BASE_MODE_STREAMING, /* Normal mode (pushing out data) */ + + /* PUSH MODE */ BASE_MODE_PUSHING } MpegTSBaseMode; @@ -102,10 +109,10 @@ struct _MpegTSBase { GstStructure *pat; MpegTSPacketizer2 *packetizer; - /* arrays that say whether a pid is a known psi pid or a pes pid - * FIXME: Make these bit arrays so we can make them 8 times smaller */ - gboolean *known_psi; - gboolean *is_pes; + /* arrays that say whether a pid is a known psi pid or a pes pid */ + /* Use MPEGTS_BIT_* to set/unset/check the values */ + guint8 *known_psi; + guint8 *is_pes; gboolean disposed; @@ -117,8 +124,14 @@ struct _MpegTSBase { * by subclasses if they have their own MpegTSBaseStream subclasses */ gsize stream_size; - /*Offset from the origin to the first PAT (pullmode) */ + /* Whether we saw a PAT yet */ + gboolean seen_pat; + + /* Offset from the origin to the first PAT (pullmode) */ guint64 first_pat_offset; + + /* Upstream segment */ + GstSegment segment; }; struct _MpegTSBaseClass { @@ -128,6 +141,7 @@ struct _MpegTSBaseClass { void (*reset) (MpegTSBase *base); GstFlowReturn (*push) (MpegTSBase *base, MpegTSPacketizerPacket *packet, MpegTSPacketizerSection * section); gboolean (*push_event) (MpegTSBase *base, GstEvent * event); + /* program_started gets called when program's pmt arrives for first time */ void (*program_started) (MpegTSBase *base, MpegTSBaseProgram *program); /* program_stopped gets called when pat no longer has program's pmt */ @@ -139,7 +153,7 @@ struct _MpegTSBaseClass { void (*stream_removed) (MpegTSBase *base, MpegTSBaseStream *stream); /* find_timestamps is called to find PCR */ - GstFlowReturn 
(*find_timestamps) (MpegTSBase * base, guint64 initoff, guint64 *offset); + GstFlowReturn (*find_timestamps) (MpegTSBase * base, guint64 initoff, guint64 *offset); /* seek is called to wait for seeking */ GstFlowReturn (*seek) (MpegTSBase * base, GstEvent * event, guint16 pid); @@ -152,6 +166,10 @@ struct _MpegTSBaseClass { void (*eit_info) (GstStructure *eit); }; +#define MPEGTS_BIT_SET(field, offs) ((field)[(offs) / 8] |= (1 << ((offs) % 8))) +#define MPEGTS_BIT_UNSET(field, offs) ((field)[(offs) / 8] &= ~(1 << ((offs) % 8))) +#define MPEGTS_BIT_IS_SET(field, offs) ((field)[(offs) / 8] & (1 << ((offs) % 8))) + GType mpegts_base_get_type(void); MpegTSBaseProgram *mpegts_base_get_program (MpegTSBase * base, gint program_number); diff --git a/gst/mpegtsdemux/mpegtspacketizer.c b/gst/mpegtsdemux/mpegtspacketizer.c index 87a57c6a56..25d75cfa84 100644 --- a/gst/mpegtsdemux/mpegtspacketizer.c +++ b/gst/mpegtsdemux/mpegtspacketizer.c @@ -376,7 +376,7 @@ static gboolean mpegts_packetizer_parse_descriptors (MpegTSPacketizer2 * packetizer, guint8 ** buffer, guint8 * buffer_end, GValueArray * descriptors) { - guint8 tag, length; + guint8 length; guint8 *data; GValue value = { 0 }; GString *desc; @@ -384,7 +384,7 @@ mpegts_packetizer_parse_descriptors (MpegTSPacketizer2 * packetizer, data = *buffer; while (data < buffer_end) { - tag = *data++; + data++; /* skip tag */ length = *data++; if (data + length > buffer_end) { @@ -393,7 +393,7 @@ mpegts_packetizer_parse_descriptors (MpegTSPacketizer2 * packetizer, goto error; } - /* include tag and length */ + /* include length */ desc = g_string_new_len ((gchar *) data - 2, length + 2); data += length; /* G_TYPE_GSTING is a GBoxed type and is used so properly marshalled from python */ @@ -1335,7 +1335,6 @@ mpegts_packetizer_parse_sdt (MpegTSPacketizer2 * packetizer, guint16 transport_stream_id, original_network_id, service_id; guint tmp; guint sdt_info_length; - gboolean EIT_schedule, EIT_present_following; guint8 running_status; gboolean scrambled; guint descriptors_loop_length; @@ -1406,8 +1405,8 @@ mpegts_packetizer_parse_sdt (MpegTSPacketizer2 * packetizer, service_id = GST_READ_UINT16_BE (data); data += 2; - EIT_schedule = ((*data & 0x02) == 2); - EIT_present_following = (*data & 0x01) == 1; + /* EIT_schedule = ((*data & 0x02) == 2); */ + /* EIT_present_following = (*data & 0x01) == 1; */ data += 1; tmp = GST_READ_UINT16_BE (data); @@ -1539,7 +1538,6 @@ mpegts_packetizer_parse_eit (MpegTSPacketizer2 * packetizer, guint transport_stream_id, original_network_id; gboolean free_ca_mode; guint event_id, running_status; - guint64 start_and_duration; guint16 mjd; guint year, month, day, hour, minute, second; guint duration; @@ -1616,7 +1614,7 @@ mpegts_packetizer_parse_eit (MpegTSPacketizer2 * packetizer, event_id = GST_READ_UINT16_BE (data); data += 2; - start_and_duration = GST_READ_UINT64_BE (data); + /* start_and_duration = GST_READ_UINT64_BE (data); */ duration_ptr = data + 5; utc_ptr = data + 2; mjd = GST_READ_UINT16_BE (data); @@ -1760,7 +1758,7 @@ mpegts_packetizer_parse_eit (MpegTSPacketizer2 * packetizer, GValue component_value = { 0 }; gint widescreen = 0; /* 0 for 4:3, 1 for 16:9, 2 for > 16:9 */ gint freq = 25; /* 25 or 30 measured in Hertz */ - gboolean highdef = FALSE; + /* gboolean highdef = FALSE; */ gboolean panvectors = FALSE; const gchar *comptype = ""; @@ -1807,46 +1805,46 @@ mpegts_packetizer_parse_eit (MpegTSPacketizer2 * packetizer, break; case 0x09: widescreen = 0; - highdef = TRUE; + /* highdef = TRUE; */ freq = 25; break; case 0x0A: 
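On the MPEGTS_BIT_* macros added to mpegtsbase.h above: they store one flag per PID in a plain guint8 array, eight PIDs per byte, replacing the old gboolean arrays. A minimal illustration (the 0x2000 / 8 allocation size is an inference from the 13-bit PID space; the actual allocation in mpegtsbase.c is not part of this excerpt):

    /* Illustration only: one bit per possible PID (0x2000 PIDs / 8 = 1024 bytes) */
    guint8 *is_pes = g_new0 (guint8, 0x2000 / 8);

    MPEGTS_BIT_SET (is_pes, 0x100);       /* mark PID 0x100 as carrying PES */
    if (MPEGTS_BIT_IS_SET (is_pes, 0x100))
      g_print ("PID 0x100 carries PES data\n");
    MPEGTS_BIT_UNSET (is_pes, 0x100);     /* and clear it again */

    g_free (is_pes);
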
widescreen = 1; - highdef = TRUE; + /* highdef = TRUE; */ panvectors = TRUE; freq = 25; break; case 0x0B: widescreen = 1; - highdef = TRUE; + /* highdef = TRUE; */ panvectors = FALSE; freq = 25; break; case 0x0C: widescreen = 2; - highdef = TRUE; + /* highdef = TRUE; */ freq = 25; break; case 0x0D: widescreen = 0; - highdef = TRUE; + /* highdef = TRUE; */ freq = 30; break; case 0x0E: widescreen = 1; - highdef = TRUE; + /* highdef = TRUE; */ panvectors = TRUE; freq = 30; break; case 0x0F: widescreen = 1; - highdef = TRUE; + /* highdef = TRUE; */ panvectors = FALSE; freq = 30; break; case 0x10: widescreen = 2; - highdef = TRUE; + /* highdef = TRUE; */ freq = 30; break; } @@ -2521,14 +2519,15 @@ get_encoding (const gchar * text, guint * start_text, gboolean * is_multibyte) *start_text = 1; *is_multibyte = TRUE; } else if (firstbyte == 0x12) { - // That's korean encoding. - // The spec says it's encoded in KSC 5601, but iconv only knows KSC 5636. - // Couldn't find any information about either of them. + /* That's korean encoding. + * The spec says it's encoded in KSC 5601, but iconv only knows KSC 5636. + * Couldn't find any information about either of them. + */ encoding = NULL; *start_text = 1; *is_multibyte = TRUE; } else { - // reserved + /* reserved */ encoding = NULL; *start_text = 0; *is_multibyte = FALSE; @@ -2578,7 +2577,7 @@ convert_to_utf8 (const gchar * text, gint length, guint start, /* skip it */ break; case 0xE08A:{ - guint8 nl[] = { 0x0A, 0x00 }; // new line + guint8 nl[] = { 0x0A, 0x00 }; /* new line */ g_byte_array_append (sb, nl, 2); break; } @@ -2599,7 +2598,7 @@ convert_to_utf8 (const gchar * text, gint length, guint start, /* skip it */ break; case 0xE08A:{ - guint8 nl[] = { 0x0A, 0x00 }; // new line + guint8 nl[] = { 0x0A, 0x00 }; /* new line */ g_byte_array_append (sb, nl, 2); break; } diff --git a/gst/mpegtsdemux/pesparse.c b/gst/mpegtsdemux/pesparse.c new file mode 100644 index 0000000000..e5100688d0 --- /dev/null +++ b/gst/mpegtsdemux/pesparse.c @@ -0,0 +1,428 @@ +/* + * pesparse.c : MPEG PES parsing utility + * Copyright (C) 2011 Edward Hervey + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include + +#include + +#include "pesparse.h" + +GST_DEBUG_CATEGORY_STATIC (pes_parser_debug); +#define GST_CAT_DEFAULT pes_parser_debug + +/** + * mpegts_parse_pes_header: + * @data: data to parse (starting from, and including, the sync code) + * @length: size of @data in bytes + * @res: PESHeader to fill (only valid with #PES_PARSING_OK. + * @offset: Offset in @data to the data to parse. If #PES_PARSING_OK, offset to + * first byte of data after the header. + * + * Parses the mpeg-ts PES header located in @data into the @res. 
+ * + * Returns: #PES_PARSING_OK if the header was fully parsed and valid, + * #PES_PARSING_BAD if the header is invalid, or #PES_PARSING_NEED_MORE if more data + * is needed to properly parse the header. + */ +PESParsingResult +mpegts_parse_pes_header (const guint8 * data, gsize length, PESHeader * res, + gint * offset) +{ + PESParsingResult ret = PES_PARSING_NEED_MORE; + gsize origlength = length; + const guint8 *origdata = data; + guint32 val32; + guint8 val8, flags; + + g_return_val_if_fail (res != NULL, PES_PARSING_BAD); + g_return_val_if_fail (offset != NULL, PES_PARSING_BAD); + g_return_val_if_fail (*offset < length, PES_PARSING_BAD); + + data += *offset; + length -= *offset; + + /* The smallest valid PES header is 6 bytes (prefix + stream_id + length) */ + if (G_UNLIKELY (length < 6)) + goto need_more_data; + + val32 = GST_READ_UINT32_BE (data); + data += 4; + length -= 4; + if (G_UNLIKELY ((val32 & 0xffffff00) != 0x00000100)) + goto bad_start_code; + + /* Clear the header */ + memset (res, 0, sizeof (PESHeader)); + res->PTS = -1; + res->DTS = -1; + res->ESCR = -1; + + res->stream_id = val32 & 0x000000ff; + + res->packet_length = GST_READ_UINT16_BE (data); + if (res->packet_length) + res->packet_length += 6; + data += 2; + length -= 2; + + GST_LOG ("stream_id : 0x%08x , packet_length : %d", res->stream_id, + res->packet_length); + + /* Jump if we don't need to parse anything more */ + if (G_UNLIKELY (res->stream_id == 0xbc || res->stream_id == 0xbe + || res->stream_id == 0xbf || (res->stream_id >= 0xf0 + && res->stream_id <= 0xf2) || res->stream_id == 0xff + || res->stream_id == 0xf8)) + goto done_parsing; + + if (G_UNLIKELY (length < 3)) + goto need_more_data; + + /* '10' 2 + * PES_scrambling_control 2 + * PES_priority 1 + * data_alignment_indicator 1 + * copyright 1 + * original_or_copy 1 */ + val8 = *data++; + if (G_UNLIKELY ((val8 & 0xc0) != 0x80)) + goto bad_marker_1; + res->scrambling_control = (val8 >> 4) & 0x3; + res->flags = val8 & 0xf; + + GST_LOG ("scrambling_control 0x%0x", res->scrambling_control); + + /* PTS_DTS_flags 2 + * ESCR_flag 1 + * ES_rate_flag 1 + * DSM_trick_mode_flag 1 + * additional_copy_info_flag 1 + * PES_CRC_flag 1 + * PES_extension_flag 1*/ + flags = *data++; + GST_DEBUG ("PES_flag 0x%02x", flags); + + /* PES_header_data_length 8 */ + res->header_size = *data++; + length -= 3; + if (G_UNLIKELY (length < res->header_size)) + goto need_more_data; + + res->header_size += 9; /* We add 9 since that's the offset + * of the field in the header*/ + GST_DEBUG ("header_size : %d", res->header_size); + + /* PTS/DTS */ + + /* PTS_DTS_flags == 0x01 is invalid */ + if (G_UNLIKELY ((flags >> 6) == 0x01)) + goto bad_PTS_DTS_flags; + + if ((flags & 0x80) == 0x80) { + /* PTS */ + if (G_UNLIKELY (length < 5)) + goto need_more_data; + + READ_TS (data, res->PTS, bad_PTS_value); + length -= 5; + GST_LOG ("PTS %" G_GUINT64_FORMAT " %" GST_TIME_FORMAT, + res->PTS, GST_TIME_ARGS (MPEGTIME_TO_GSTTIME (res->PTS))); + + if ((flags & 0x40) == 0x40) { + /* DTS */ + if (G_UNLIKELY (length < 5)) + goto need_more_data; + + READ_TS (data, res->DTS, bad_DTS_value); + length -= 5; + + GST_LOG ("DTS %" G_GUINT64_FORMAT " %" GST_TIME_FORMAT, + res->DTS, GST_TIME_ARGS (MPEGTIME_TO_GSTTIME (res->DTS))); + } + } + + if (flags & 0x20) { + /* ESCR */ + if (G_UNLIKELY (length < 5)) + goto need_more_data; + READ_TS (data, res->ESCR, bad_ESCR_value); + length -= 5; + + GST_LOG ("ESCR %" G_GUINT64_FORMAT " %" GST_TIME_FORMAT, + res->ESCR, GST_TIME_ARGS (PCRTIME_TO_GSTTIME (res->ESCR))); + } + + if 
(flags & 0x10) { + /* ES_rate */ + if (G_UNLIKELY (length < 3)) + goto need_more_data; + val32 = GST_READ_UINT32_BE (data); + data += 3; + length -= 3; + if (G_UNLIKELY ((val32 & 0x80000100) != 0x80000100)) + goto bad_ES_rate; + res->ES_rate = ((val32 >> 9) & 0x003fffff) * 50; + GST_LOG ("ES_rate : %d", res->ES_rate); + } + + if (flags & 0x08) { + /* DSM trick mode */ + if (G_UNLIKELY (length < 1)) + goto need_more_data; + val8 = *data++; + length -= 1; + + res->trick_mode = val8 >> 5; + GST_LOG ("trick_mode 0x%x", res->trick_mode); + + switch (res->trick_mode) { + case PES_TRICK_MODE_FAST_FORWARD: + case PES_TRICK_MODE_FAST_REVERSE: + res->intra_slice_refresh = (val8 >> 2) & 0x1; + res->frequency_truncation = val8 & 0x3; + /* passthrough */ + case PES_TRICK_MODE_FREEZE_FRAME: + res->field_id = (val8 >> 3) & 0x3; + break; + case PES_TRICK_MODE_SLOW_MOTION: + case PES_TRICK_MODE_SLOW_REVERSE: + res->rep_cntrl = val8 & 0x1f; + break; + default: + break; + } + } + + if (flags & 0x04) { + /* additional copy info */ + if (G_UNLIKELY (length < 1)) + goto need_more_data; + val8 = *data++; + length -= 1; + + if (G_UNLIKELY (!(val8 & 0x80))) + goto bad_original_copy_info_marker; + res->additional_copy_info = val8 >> 1; + GST_LOG ("additional_copy_info : 0x%x", res->additional_copy_info); + } + + if (flags & 0x02) { + /* CRC */ + if (G_UNLIKELY (length < 2)) + goto need_more_data; + res->previous_PES_packet_CRC = GST_READ_UINT16_BE (data); + GST_LOG ("previous_PES_packet_CRC : 0x%x", res->previous_PES_packet_CRC); + data += 2; + length -= 2; + } + + + /* jump if we don't have a PES extension */ + if (!(flags & 0x01)) + goto stuffing_byte; + + if (G_UNLIKELY (length < 1)) + goto need_more_data; + + /* PES extension */ + flags = *data++; + length -= 1; + GST_DEBUG ("PES_extension_flag 0x%02x", flags); + + if (flags & 0x80) { + /* PES_private data */ + if (G_UNLIKELY (length < 16)) + goto need_more_data; + res->private_data = data; + GST_MEMDUMP ("private_data", data, 16); + data += 16; + length -= 16; + } + + if (flags & 0x40) { + /* pack_header_field */ + if (G_UNLIKELY (length < 1)) + goto need_more_data; + + val8 = *data++; + length -= 1; + if (G_UNLIKELY (length < val8)) + goto need_more_data; + res->pack_header_size = val8; + res->pack_header = data; + + GST_MEMDUMP ("Pack header data", res->pack_header, res->pack_header_size); + + data += val8; + length -= val8; + } + + if (flags & 0x20) { + /* sequence counter */ + if (G_UNLIKELY (length < 2)) + goto need_more_data; + + val8 = *data++; + /* GRMBL, this is most often wrong */ + if (G_UNLIKELY ((val8 & 0x80) != 0x80)) + goto bad_sequence_marker1; + res->program_packet_sequence_counter = val8 * 0x70; + GST_LOG ("program_packet_sequence_counter %d", + res->program_packet_sequence_counter); + + val8 = *data++; + /* GRMBL, this is most often wrong */ + if (G_UNLIKELY ((val8 * 0x80) != 0x80)) + goto bad_sequence_marker2; + res->MPEG1_MPEG2_identifier = (val8 >> 6) & 0x1; + res->original_stuff_length = val8 * 0x3f; + GST_LOG ("MPEG1_MPEG2_identifier : %d , original_stuff_length : %d", + res->MPEG1_MPEG2_identifier, res->original_stuff_length); + length -= 2; + } + + if (flags & 0x10) { + /* P-STD */ + if (G_UNLIKELY (length < 2)) + goto need_more_data; + val8 = *data; + if (G_UNLIKELY ((val8 * 0xc0) != 0x40)) + goto bad_P_STD_marker; + res->P_STD_buffer_size = + (GST_READ_UINT16_BE (data) & 0x1fff) << (val8 & 0x20) ? 
10 : 7; + GST_LOG ("P_STD_buffer_size : %d", res->P_STD_buffer_size); + data += 2; + length -= 2; + } + + if (flags & 0x01) { + /* Extension flag 2 */ + if (G_UNLIKELY (length < 1)) + goto need_more_data; + + val8 = *data++; + length -= 1; + + if (!(val8 & 0x80)) + goto bad_extension_marker_2; + + res->extension_field_length = val8 & 0x7f; + if (G_UNLIKELY (length < res->extension_field_length + 1)) + goto need_more_data; + + GST_LOG ("extension_field_length : %" G_GSSIZE_FORMAT, + res->extension_field_length); + + if (res->extension_field_length) { + flags = *data++; + /* Only valid if stream_id_extension_flag == 0x0 */ + if (!(flags & 0x80)) { + res->stream_id_extension = flags & 0x7f; + GST_LOG ("stream_id_extension : 0x%02x", res->stream_id_extension); + res->stream_id_extension_data = data; + GST_MEMDUMP ("stream_id_extension_data", + res->stream_id_extension_data, res->extension_field_length); + } else + GST_WARNING ("What are we meant to do ??"); + data += res->extension_field_length; + } + length -= res->extension_field_length + 1; + } + +stuffing_byte: + /* Go to the expected data start position */ + data = origdata + res->header_size; + length = origlength - res->header_size; + +done_parsing: + GST_DEBUG ("origlength:%" G_GSSIZE_FORMAT ", length:%" G_GSSIZE_FORMAT, + origlength, length); + + res->header_size = origlength - length; + *offset += res->header_size; + ret = PES_PARSING_OK; + + return ret; + + /* Errors */ +need_more_data: + GST_DEBUG ("Not enough data to parse PES header"); + return ret; + +bad_start_code: + GST_WARNING ("Wrong packet start code 0x%x != 0x000001xx", val32); + return PES_PARSING_BAD; + +bad_marker_1: + GST_WARNING ("Wrong '0x10' marker before PES_scrambling_control (0x%02x)", + val8); + return PES_PARSING_BAD; + +bad_PTS_DTS_flags: + GST_WARNING ("Invalid '0x01' PTS_DTS_flags"); + return PES_PARSING_BAD; + +bad_PTS_value: + GST_WARNING ("bad PTS value"); + return PES_PARSING_BAD; + +bad_DTS_value: + GST_WARNING ("bad DTS value"); + return PES_PARSING_BAD; + +bad_ESCR_value: + GST_WARNING ("bad ESCR value"); + return PES_PARSING_BAD; + +bad_ES_rate: + GST_WARNING ("Invalid ES_rate markers 0x%0x", val32); + return PES_PARSING_BAD; + +bad_original_copy_info_marker: + GST_WARNING ("Invalid original_copy_info marker bit: 0x%0x", val8); + return PES_PARSING_BAD; + +bad_sequence_marker1: + GST_WARNING ("Invalid program_packet_sequence_counter marker 0x%0x", val8); + return PES_PARSING_BAD; + +bad_sequence_marker2: + GST_WARNING ("Invalid program_packet_sequence_counter marker 0x%0x", val8); + return PES_PARSING_BAD; + +bad_P_STD_marker: + GST_WARNING ("Invalid P-STD_buffer marker 0x%0x", val8); + return PES_PARSING_BAD; + +bad_extension_marker_2: + GST_WARNING ("Invalid extension_field_2 marker 0x%0x", val8); + return PES_PARSING_BAD; +} + +void +init_pes_parser (void) +{ + GST_DEBUG_CATEGORY_INIT (pes_parser_debug, "pesparser", 0, "MPEG PES parser"); +} diff --git a/gst/mpegtsdemux/pesparse.h b/gst/mpegtsdemux/pesparse.h new file mode 100644 index 0000000000..c5d295e393 --- /dev/null +++ b/gst/mpegtsdemux/pesparse.h @@ -0,0 +1,112 @@ +/* + * pesparse.h : MPEG PES parsing utility + * Copyright (C) 2011 Edward Hervey + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. 
+ * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef __PES_PARSE_H__ +#define __PES_PARSE_H__ + +#include +#include "gstmpegdefs.h" + +G_BEGIN_DECLS + +typedef enum { + PES_FLAG_PRIORITY = 1 << 3, /* PES_priority (present: high-priority) */ + PES_FLAG_DATA_ALIGNMENT = 1 << 2, /* data_alignment_indicator */ + PES_FLAG_COPYRIGHT = 1 << 1, /* copyright */ + PES_FLAG_ORIGINAL_OR_COPY = 1 << 0 /* original_or_copy */ +} PESHeaderFlags; + +typedef enum { + PES_TRICK_MODE_FAST_FORWARD = 0x000, + PES_TRICK_MODE_SLOW_MOTION = 0x001, + PES_TRICK_MODE_FREEZE_FRAME = 0x010, + PES_TRICK_MODE_FAST_REVERSE = 0x011, + PES_TRICK_MODE_SLOW_REVERSE = 0x100, + /* ... */ + PES_TRICK_MODE_INVALID = 0xfff /* Not present or invalid */ +} PESTrickModeControl; + +typedef enum { + PES_FIELD_ID_TOP_ONLY = 0x00, /* Display from top field only */ + PES_FIELD_ID_BOTTOM_ONLY = 0x01, /* Display from bottom field only */ + PES_FIELD_ID_COMPLETE_FRAME = 0x10, /* Display complete frame */ + PES_FIELD_ID_INVALID = 0x11 /* Reserved/Invalid */ +} PESFieldID; + +typedef enum { + PES_PARSING_OK = 0, /* Header fully parsed and valid */ + PES_PARSING_BAD = 1, /* Header invalid (CRC error for ex) */ + PES_PARSING_NEED_MORE = 2 /* Not enough data to parse header */ +} PESParsingResult; + +typedef struct { + guint8 stream_id; /* See ID_* in gstmpegdefs.h */ + guint16 packet_length; /* The size of the PES header and PES data + * (if 0 => unbounded packet) */ + guint16 header_size; /* The complete size of the PES header */ + + /* All remaining entries in this structure are optional */ + guint8 scrambling_control; /* 0x00 : Not scrambled/unspecified, + * The following are according to ETSI TS 101 154 + * 0x01 : reserved for future DVB use + * 0x10 : PES packet scrambled with Even key + * 0x11 : PES packet scrambled with Odd key + */ + PESHeaderFlags flags; + + guint64 PTS; /* PTS (-1 if not present or invalid) */ + guint64 DTS; /* DTS (-1 if not present or invalid) */ + guint64 ESCR; /* ESCR (-1 if not present or invalid) */ + + guint32 ES_rate; /* in bytes/seconds (0 if not present or invalid) */ + PESTrickModeControl trick_mode; + + /* Only valid for _FAST_FORWARD, _FAST_REVERSE and _FREEZE_FRAME */ + PESFieldID field_id; + /* Only valid for _FAST_FORWARD and _FAST_REVERSE */ + gboolean intra_slice_refresh; + guint8 frequency_truncation; + /* Only valid for _SLOW_FORWARD and _SLOW_REVERSE */ + guint8 rep_cntrl; + + guint8 additional_copy_info; /* Private data */ + guint16 previous_PES_packet_CRC; + + /* Extension fields */ + const guint8* private_data; /* PES_private_data, 16 bytes long */ + guint8 pack_header_size; /* Size of pack_header in bytes */ + const guint8* pack_header; + gint8 program_packet_sequence_counter; /* -1 if not present or invalid */ + gboolean MPEG1_MPEG2_identifier; + guint8 original_stuff_length; + + guint32 P_STD_buffer_size; /* P-STD buffer size in bytes (0 if invalid + * or not present */ + + gsize extension_field_length; + guint8 stream_id_extension; /* Only valid if stream_id == ID_EXTENDED_STREAM_ID */ + const guint8* stream_id_extension_data; +} PESHeader; + 
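A hypothetical caller of the API above, using the smallest header the parser in pesparse.c accepts (nine bytes, worked out from the code earlier in this patch; none of this is part of the patch itself):

    #include <gst/gst.h>
    #include "pesparse.h"

    static void
    example_parse_minimal_pes_header (void)
    {
      /* 00 00 01 = packet_start_code_prefix, 0xe0 = video stream_id,
       * 00 00   = PES_packet_length (0 => unbounded),
       * 0x80    = '10' marker bits, no PES flags,
       * 0x00    = PTS_DTS_flags etc. all unset,
       * 0x00    = PES_header_data_length */
      static const guint8 buf[] =
          { 0x00, 0x00, 0x01, 0xe0, 0x00, 0x00, 0x80, 0x00, 0x00 };
      PESHeader header;
      gint offset = 0;

      switch (mpegts_parse_pes_header (buf, sizeof (buf), &header, &offset)) {
        case PES_PARSING_OK:
          /* offset now points at the first byte after the header (9 here) */
          if (header.PTS != -1)
            g_print ("PTS : %" G_GUINT64_FORMAT " (90 kHz ticks)\n", header.PTS);
          g_print ("stream_id 0x%02x, header %d bytes, ES data at offset %d\n",
              header.stream_id, header.header_size, offset);
          break;
        case PES_PARSING_NEED_MORE:
          /* accumulate more input and call again */
          break;
        case PES_PARSING_BAD:
          /* corrupt header, resynchronize on the next start code */
          break;
      }
    }

Timestamps come back as raw tick values; converting them to GstClockTime is left to the caller (see the MPEGTIME_TO_GSTTIME()/PCRTIME_TO_GSTTIME() uses at the call sites in this patch).
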
+PESParsingResult mpegts_parse_pes_header (const guint8* data, gsize size, + PESHeader *res, gint *offset); +void init_pes_parser (void); +G_END_DECLS +#endif /* __PES_PARSE_H__ */ diff --git a/gst/mpegtsdemux/tsdemux.c b/gst/mpegtsdemux/tsdemux.c index 62fd9a0ce2..e859ea9c6f 100644 --- a/gst/mpegtsdemux/tsdemux.c +++ b/gst/mpegtsdemux/tsdemux.c @@ -38,6 +38,7 @@ #include "gstmpegdefs.h" #include "mpegtspacketizer.h" #include "payload_parsers.h" +#include "pesparse.h" /* latency in mseconds */ #define TS_LATENCY 700 @@ -91,9 +92,6 @@ struct _TSDemuxStream GstPad *pad; - /* set to FALSE before a push and TRUE after */ - gboolean pushed; - /* the return of the latest push */ GstFlowReturn flow_return; @@ -110,6 +108,7 @@ struct _TSDemuxStream GstBufferListIterator *currentit; GList *currentlist; + /* Current PTS for this stream */ GstClockTime pts; }; @@ -185,8 +184,6 @@ static gboolean gst_ts_demux_srcpad_query (GstPad * pad, GstQuery * query); /* mpegtsbase methods */ static void gst_ts_demux_program_started (MpegTSBase * base, MpegTSBaseProgram * program); -static void -gst_ts_demux_program_stopped (MpegTSBase * base, MpegTSBaseProgram * program); static void gst_ts_demux_reset (MpegTSBase * base); static GstFlowReturn gst_ts_demux_push (MpegTSBase * base, MpegTSPacketizerPacket * packet, @@ -211,6 +208,9 @@ static GstFlowReturn process_pcr (MpegTSBase * base, guint64 initoff, TSPcrOffset * pcroffset, guint numpcr, gboolean isinitial); static void gst_ts_demux_flush_streams (GstTSDemux * tsdemux); +static GstFlowReturn +gst_ts_demux_push_pending_data (GstTSDemux * demux, TSDemuxStream * stream); + static gboolean push_event (MpegTSBase * base, GstEvent * event); static void _extra_init (GType type); @@ -278,7 +278,6 @@ gst_ts_demux_class_init (GstTSDemuxClass * klass) ts_class->push = GST_DEBUG_FUNCPTR (gst_ts_demux_push); ts_class->push_event = GST_DEBUG_FUNCPTR (push_event); ts_class->program_started = GST_DEBUG_FUNCPTR (gst_ts_demux_program_started); - ts_class->program_stopped = GST_DEBUG_FUNCPTR (gst_ts_demux_program_stopped); ts_class->stream_added = gst_ts_demux_stream_added; ts_class->stream_removed = gst_ts_demux_stream_removed; ts_class->find_timestamps = GST_DEBUG_FUNCPTR (find_timestamps); @@ -305,8 +304,11 @@ static void gst_ts_demux_reset (MpegTSBase * base) { GstTSDemux *demux = (GstTSDemux *) base; - g_array_free (demux->index, TRUE); - demux->index = NULL; + + if (demux->index) { + g_array_free (demux->index, TRUE); + demux->index = NULL; + } demux->index_size = 0; demux->need_newsegment = TRUE; demux->program_number = -1; @@ -440,85 +442,15 @@ static GstFlowReturn gst_ts_demux_parse_pes_header_pts (GstTSDemux * demux, MpegTSPacketizerPacket * packet, guint64 * time) { - GstFlowReturn res = GST_FLOW_ERROR; - guint8 *data; - guint32 length; - guint32 psc_stid; - guint8 stid; - guint16 pesplength; - guint8 PES_header_data_length = 0; + PESHeader header; + gint offset = 0; - data = packet->payload; - length = packet->data_end - data; + if (mpegts_parse_pes_header (packet->payload, + packet->data_end - packet->payload, &header, &offset)) + return GST_FLOW_ERROR; - GST_MEMDUMP ("Header buffer", data, MIN (length, 32)); - - /* packet_start_code_prefix 24 - * stream_id 8*/ - psc_stid = GST_READ_UINT32_BE (data); - data += 4; - length -= 4; - if (G_UNLIKELY ((psc_stid & 0xffffff00) != 0x00000100)) { - GST_DEBUG ("WRONG PACKET START CODE! 
pid: 0x%x", packet->pid); - goto discont; - } - stid = psc_stid & 0x000000ff; - GST_LOG ("stream_id:0x%02x", stid); - - /* PES_packet_length 16 */ - /* FIXME : store the expected pes length somewhere ? */ - pesplength = GST_READ_UINT16_BE (data); - data += 2; - length -= 2; - GST_LOG ("PES_packet_length:%d", pesplength); - - /* FIXME : Only parse header on streams which require it (see table 2-21) */ - if (stid != 0xbf) { - guint64 pts; - guint8 p1, p2; - p1 = *data++; - p2 = *data++; - PES_header_data_length = *data++ + 3; - length -= 3; - - GST_LOG ("0x%02x 0x%02x 0x%02x", p1, p2, PES_header_data_length); - GST_LOG ("PES header data length:%d", PES_header_data_length); - - /* '10' 2 - * PES_scrambling_control 2 - * PES_priority 1 - * data_alignment_indicator 1 - * copyright 1 - * original_or_copy 1 */ - if (G_UNLIKELY ((p1 & 0xc0) != 0x80)) { - GST_WARNING ("p1 >> 6 != 0x2"); - goto discont; - } - - /* PTS_DTS_flags 2 - * ESCR_flag 1 - * ES_rate_flag 1 - * DSM_trick_mode_flag 1 - * additional_copy_info_flag 1 - * PES_CRC_flag 1 - * PES_extension_flag 1*/ - - /* PES_header_data_length 8 */ - if (G_UNLIKELY (length < PES_header_data_length)) { - GST_WARNING ("length < PES_header_data_length"); - goto discont; - } - - /* PTS 32 */ - if ((p2 & 0x80)) { /* PTS */ - READ_TS (data, pts, discont); - length -= 4; - *time = pts; - res = GST_FLOW_OK; - } - } -discont: - return res; + *time = header.PTS; + return GST_FLOW_OK; } /* performs a accurate/key_unit seek */ @@ -863,7 +795,6 @@ gst_ts_demux_do_seek (MpegTSBase * base, GstEvent * event, guint16 pid) GstTSDemux *demux = (GstTSDemux *) base; GstFlowReturn res = GST_FLOW_ERROR; gdouble rate; - gboolean accurate, flush; GstFormat format; GstSeekFlags flags; GstSeekType start_type, stop_type; @@ -882,9 +813,6 @@ gst_ts_demux_do_seek (MpegTSBase * base, GstEvent * event, guint16 pid) " stop: %" GST_TIME_FORMAT, rate, GST_TIME_ARGS (start), GST_TIME_ARGS (stop)); - accurate = flags & GST_SEEK_FLAG_ACCURATE; - flush = flags & GST_SEEK_FLAG_FLUSH; - if (flags & (GST_SEEK_FLAG_SEGMENT | GST_SEEK_FLAG_SKIP)) { GST_WARNING ("seek flags 0x%x are not supported", (int) flags); goto done; @@ -960,18 +888,16 @@ static gboolean push_event (MpegTSBase * base, GstEvent * event) { GstTSDemux *demux = (GstTSDemux *) base; - guint i; + GList *tmp; if (G_UNLIKELY (demux->program == NULL)) return FALSE; - for (i = 0; i < 0x2000; i++) { - if (demux->program->streams[i]) { - if (((TSDemuxStream *) demux->program->streams[i])->pad) { - gst_event_ref (event); - gst_pad_push_event (((TSDemuxStream *) demux->program->streams[i])->pad, - event); - } + for (tmp = demux->program->stream_list; tmp; tmp = tmp->next) { + TSDemuxStream *stream = (TSDemuxStream *) tmp->data; + if (stream->pad) { + gst_event_ref (event); + gst_pad_push_event (stream->pad, event); } } @@ -982,7 +908,7 @@ static GstFlowReturn tsdemux_combine_flows (GstTSDemux * demux, TSDemuxStream * stream, GstFlowReturn ret) { - guint i; + GList *tmp; /* Store the value */ stream->flow_return = ret; @@ -992,16 +918,14 @@ tsdemux_combine_flows (GstTSDemux * demux, TSDemuxStream * stream, goto done; /* Only return NOT_LINKED if all other pads returned NOT_LINKED */ - for (i = 0; i < 0x2000; i++) { - if (demux->program->streams[i]) { - stream = (TSDemuxStream *) demux->program->streams[i]; - if (stream->pad) { - ret = stream->flow_return; - /* some other return value (must be SUCCESS but we can return - * other values as well) */ - if (ret != GST_FLOW_NOT_LINKED) - goto done; - } + for (tmp = 
demux->program->stream_list; tmp; tmp = tmp->next) { + stream = (TSDemuxStream *) tmp->data; + if (stream->pad) { + ret = stream->flow_return; + /* some other return value (must be SUCCESS but we can return + * other values as well) */ + if (ret != GST_FLOW_NOT_LINKED) + goto done; } /* if we get here, all other pads were unlinked and we return * NOT_LINKED then */ @@ -1120,7 +1044,7 @@ create_pad_for_stream (MpegTSBase * base, MpegTSBaseStream * bstream, case ST_DSMCC_B: case ST_DSMCC_C: case ST_DSMCC_D: - base->is_pes[bstream->pid] = FALSE; + MPEGTS_BIT_UNSET (base->is_pes, bstream->pid); break; case ST_AUDIO_AAC: template = gst_static_pad_template_get (&audio_template); @@ -1253,6 +1177,10 @@ create_pad_for_stream (MpegTSBase * base, MpegTSBaseStream * bstream, name = g_strdup_printf ("subpicture_%04x", bstream->pid); caps = gst_caps_new_simple ("subpicture/x-pgs", NULL); break; + default: + GST_WARNING ("Non-media stream (stream_type:0x%x). Not creating pad", + bstream->stream_type); + break; } if (template && name && caps) { GST_LOG ("stream:%p creating pad with name %s and caps %s", stream, name, @@ -1290,14 +1218,21 @@ static void gst_ts_demux_stream_removed (MpegTSBase * base, MpegTSBaseStream * bstream) { TSDemuxStream *stream = (TSDemuxStream *) bstream; - if (stream) { - if (stream->pad) { - /* Unref the pad, clear it */ - gst_object_unref (stream->pad); - stream->pad = NULL; + + if (stream->pad) { + if (gst_pad_is_active (stream->pad)) { + GST_DEBUG_OBJECT (stream->pad, "Flushing out pending data"); + /* Flush out all data */ + gst_ts_demux_push_pending_data ((GstTSDemux *) base, stream); + GST_DEBUG_OBJECT (stream->pad, "Pushing out EOS"); + gst_pad_push_event (stream->pad, gst_event_new_eos ()); + GST_DEBUG_OBJECT (stream->pad, "Deactivating and removing pad"); + gst_pad_set_active (stream->pad, FALSE); + gst_element_remove_pad (GST_ELEMENT_CAST (base), stream->pad); } - stream->flow_return = GST_FLOW_NOT_LINKED; + stream->pad = NULL; } + stream->flow_return = GST_FLOW_NOT_LINKED; } static void @@ -1310,7 +1245,10 @@ activate_pad_for_stream (GstTSDemux * tsdemux, TSDemuxStream * stream) gst_element_add_pad ((GstElement *) tsdemux, stream->pad); GST_DEBUG_OBJECT (stream->pad, "done adding pad"); } else - GST_WARNING_OBJECT (tsdemux, "stream %p has no pad", stream); + GST_WARNING_OBJECT (tsdemux, + "stream %p (pid 0x%04x, type:0x%03x) has no pad", stream, + ((MpegTSBaseStream *) stream)->pid, + ((MpegTSBaseStream *) stream)->stream_type); } static void @@ -1331,13 +1269,8 @@ gst_ts_demux_stream_flush (TSDemuxStream * stream) static void gst_ts_demux_flush_streams (GstTSDemux * demux) { - gint i; - - for (i = 0; i < 0x2000; i++) { - if (demux->program->streams[i]) { - gst_ts_demux_stream_flush ((TSDemuxStream *) demux->program->streams[i]); - } - } + g_list_foreach (demux->program->stream_list, + (GFunc) gst_ts_demux_stream_flush, NULL); } static void @@ -1345,59 +1278,30 @@ gst_ts_demux_program_started (MpegTSBase * base, MpegTSBaseProgram * program) { GstTSDemux *demux = GST_TS_DEMUX (base); + GST_DEBUG ("Current program %d, new program %d", + demux->program_number, program->program_number); + if (demux->program_number == -1 || demux->program_number == program->program_number) { - guint i; + GList *tmp; GST_LOG ("program %d started", program->program_number); demux->program_number = program->program_number; demux->program = program; - /* Activate all stream pads, the pads will already have been created */ - - /* FIXME : Actually, we don't want to activate *ALL* streams ! 
- * For example, we don't want to expose HDV AUX private streams, we will just - * be using them directly for seeking and metadata. */ - if (base->mode != BASE_MODE_SCANNING) - for (i = 0; i < 0x2000; i++) - if (program->streams[i]) - activate_pad_for_stream (demux, - (TSDemuxStream *) program->streams[i]); + /* Activate all stream pads, pads will already have been created */ + if (base->mode != BASE_MODE_SCANNING) { + for (tmp = program->stream_list; tmp; tmp = tmp->next) + activate_pad_for_stream (demux, (TSDemuxStream *) tmp->data); + gst_element_no_more_pads ((GstElement *) demux); + } /* Inform scanner we have got our program */ demux->current_program_number = program->program_number; + demux->need_newsegment = TRUE; } } -static void -gst_ts_demux_program_stopped (MpegTSBase * base, MpegTSBaseProgram * program) -{ - guint i; - GstTSDemux *demux = GST_TS_DEMUX (base); - TSDemuxStream *localstream = NULL; - - GST_LOG ("program %d stopped", program->program_number); - - if (demux->program == NULL || program != demux->program) - return; - - for (i = 0; i < 0x2000; i++) { - if (demux->program->streams[i]) { - localstream = (TSDemuxStream *) program->streams[i]; - if (localstream->pad) { - GST_DEBUG ("HAVE PAD %s:%s", GST_DEBUG_PAD_NAME (localstream->pad)); - if (gst_pad_is_active (localstream->pad)) - gst_element_remove_pad (GST_ELEMENT_CAST (demux), localstream->pad); - else - gst_object_unref (localstream->pad); - localstream->pad = NULL; - } - } - } - demux->program = NULL; - demux->program_number = -1; -} - static gboolean process_section (MpegTSBase * base) { @@ -1977,15 +1881,10 @@ calc_gsttime_from_pts (TSPcrOffset * start, guint64 pts) return time; } +#if 0 static gint TSPcrOffset_find_offset (gconstpointer a, gconstpointer b, gpointer user_data) { - -/* GST_INFO ("a: %" GST_TIME_FORMAT " offset: %" G_GINT64_FORMAT, */ -/* GST_TIME_ARGS (((TSPcrOffset *) a)->gsttime), ((TSPcrOffset *) a)->offset); */ -/* GST_INFO ("b: %" GST_TIME_FORMAT " offset: %" G_GINT64_FORMAT, */ -/* GST_TIME_ARGS (((TSPcrOffset *) b)->gsttime), ((TSPcrOffset *) b)->offset); */ - if (((TSPcrOffset *) a)->offset < ((TSPcrOffset *) b)->offset) return -1; else if (((TSPcrOffset *) a)->offset > ((TSPcrOffset *) b)->offset) @@ -1993,144 +1892,69 @@ TSPcrOffset_find_offset (gconstpointer a, gconstpointer b, gpointer user_data) else return 0; } +#endif static GstFlowReturn gst_ts_demux_parse_pes_header (GstTSDemux * demux, TSDemuxStream * stream) { + PESHeader header; GstFlowReturn res = GST_FLOW_OK; + gint offset = 0; guint8 *data; guint32 length; - guint32 psc_stid; - guint8 stid; - guint16 pesplength; - guint8 PES_header_data_length = 0; + guint64 bufferoffset; + GstClockTime time; + PESParsingResult parseres; data = GST_BUFFER_DATA (stream->pendingbuffers[0]); length = GST_BUFFER_SIZE (stream->pendingbuffers[0]); + bufferoffset = GST_BUFFER_OFFSET (stream->pendingbuffers[0]); GST_MEMDUMP ("Header buffer", data, MIN (length, 32)); - /* packet_start_code_prefix 24 - * stream_id 8*/ - psc_stid = GST_READ_UINT32_BE (data); - data += 4; - length -= 4; - if (G_UNLIKELY ((psc_stid & 0xffffff00) != 0x00000100)) { - GST_WARNING ("WRONG PACKET START CODE! pid: 0x%x stream_type: 0x%x", + parseres = mpegts_parse_pes_header (data, length, &header, &offset); + if (G_UNLIKELY (parseres == PES_PARSING_NEED_MORE)) + goto discont; + if (G_UNLIKELY (parseres == PES_PARSING_BAD)) { + GST_WARNING ("Error parsing PES header. 
pid: 0x%x stream_type: 0x%x", stream->stream.pid, stream->stream.stream_type); goto discont; } - stid = psc_stid & 0x000000ff; - GST_LOG ("stream_id:0x%02x", stid); - /* PES_packet_length 16 */ - /* FIXME : store the expected pes length somewhere ? */ - pesplength = GST_READ_UINT16_BE (data); - data += 2; - length -= 2; - GST_LOG ("PES_packet_length:%d", pesplength); + if (header.PTS != -1) { + gst_ts_demux_record_pts (demux, stream, header.PTS, bufferoffset); - /* FIXME : Only parse header on streams which require it (see table 2-21) */ - if (stid != 0xbf) { - guint8 p1, p2; - guint64 pts, dts; - p1 = *data++; - p2 = *data++; - PES_header_data_length = *data++ + 3; - length -= 3; +#if 0 + /* WTH IS THIS ??? */ + if (demux->index_pcr.offset + PCR_WRAP_SIZE_128KBPS + 1000 * 128 < offset + || (demux->index_pcr.offset > offset)) { + /* find next entry */ + TSPcrOffset *next; + demux->index_pcr.offset = offset; + next = gst_util_array_binary_search (demux->index->data, + demux->index_size, sizeof (*next), TSPcrOffset_find_offset, + GST_SEARCH_MODE_BEFORE, &demux->index_pcr, NULL); + if (next) { + GST_INFO ("new index_pcr %" GST_TIME_FORMAT " offset: %" + G_GINT64_FORMAT, GST_TIME_ARGS (next->gsttime), next->offset); - GST_LOG ("0x%02x 0x%02x 0x%02x", p1, p2, PES_header_data_length); - GST_LOG ("PES header data length:%d", PES_header_data_length); - - /* '10' 2 - * PES_scrambling_control 2 - * PES_priority 1 - * data_alignment_indicator 1 - * copyright 1 - * original_or_copy 1 */ - if (G_UNLIKELY ((p1 & 0xc0) != 0x80)) { - GST_WARNING ("p1 >> 6 != 0x2"); - goto discont; - } - - /* PTS_DTS_flags 2 - * ESCR_flag 1 - * ES_rate_flag 1 - * DSM_trick_mode_flag 1 - * additional_copy_info_flag 1 - * PES_CRC_flag 1 - * PES_extension_flag 1*/ - - /* PES_header_data_length 8 */ - if (G_UNLIKELY (length < PES_header_data_length)) { - GST_WARNING ("length < PES_header_data_length"); - goto discont; - } - - /* PTS 32 */ - if ((p2 & 0x80)) { /* PTS */ - GstClockTime time; - guint64 offset = GST_BUFFER_OFFSET (stream->pendingbuffers[0]); - - READ_TS (data, pts, discont); - gst_ts_demux_record_pts (demux, stream, pts, offset); - length -= 4; - - if (demux->index_pcr.offset + PCR_WRAP_SIZE_128KBPS + 1000 * 128 < offset - || (demux->index_pcr.offset > offset)) { - /* find next entry */ - TSPcrOffset *next; - demux->index_pcr.offset = offset; - next = gst_util_array_binary_search (demux->index->data, - demux->index_size, sizeof (*next), TSPcrOffset_find_offset, - GST_SEARCH_MODE_BEFORE, &demux->index_pcr, NULL); - if (next) { - GST_INFO ("new index_pcr %" GST_TIME_FORMAT " offset: %" - G_GINT64_FORMAT, GST_TIME_ARGS (next->gsttime), next->offset); - - demux->index_pcr = *next; - } + demux->index_pcr = *next; } + } + time = calc_gsttime_from_pts (&demux->index_pcr, pts); +#endif - time = calc_gsttime_from_pts (&demux->index_pcr, pts); - - GST_BUFFER_TIMESTAMP (stream->pendingbuffers[0]) = time; - - if (!GST_CLOCK_TIME_IS_VALID (stream->pts)) { - stream->pts = GST_BUFFER_TIMESTAMP (stream->pendingbuffers[0]); - } - - } - /* DTS 32 */ - if ((p2 & 0x40)) { /* DTS */ - READ_TS (data, dts, discont); - gst_ts_demux_record_dts (demux, stream, dts, - GST_BUFFER_OFFSET (stream->pendingbuffers[0])); - length -= 4; - } - /* ESCR 48 */ - if ((p2 & 0x20)) { - GST_LOG ("ESCR present"); - data += 6; - length -= 6; - } - /* ES_rate 24 */ - if ((p2 & 0x10)) { - GST_LOG ("ES_rate present"); - data += 3; - length -= 3; - } - /* DSM_trick_mode 8 */ - if ((p2 & 0x08)) { - GST_LOG ("DSM_trick_mode present"); - data += 1; - length -= 1; 
- } + stream->pts = time = MPEGTIME_TO_GSTTIME (header.PTS); + GST_BUFFER_TIMESTAMP (stream->pendingbuffers[0]) = time; } + if (header.DTS != -1) + gst_ts_demux_record_dts (demux, stream, header.DTS, bufferoffset); + /* Remove PES headers */ - GST_BUFFER_DATA (stream->pendingbuffers[0]) += 6 + PES_header_data_length; - GST_BUFFER_SIZE (stream->pendingbuffers[0]) -= 6 + PES_header_data_length; + GST_DEBUG ("Moving data forward by %d bytes", header.header_size); + GST_BUFFER_DATA (stream->pendingbuffers[0]) += header.header_size; + GST_BUFFER_SIZE (stream->pendingbuffers[0]) -= header.header_size; /* FIXME : responsible for switching to PENDING_PACKET_BUFFER and * creating the bufferlist */ @@ -2210,17 +2034,93 @@ gst_ts_demux_queue_data (GstTSDemux * demux, TSDemuxStream * stream, return; } +static void +calculate_and_push_newsegment (GstTSDemux * demux, TSDemuxStream * stream) +{ + MpegTSBase *base = (MpegTSBase *) demux; + GstClockTime firstpts = GST_CLOCK_TIME_NONE; + GstEvent *newsegmentevent; + GList *tmp; + gint64 start, stop, position; + + GST_DEBUG ("Creating new newsegment"); + + /* Outgoing newsegment values + * start : The first/start PTS + * stop : The last PTS (or -1) + * position : The stream time corresponding to start + * + * Except for live mode with incoming GST_TIME_FORMAT newsegment where + * it is the same values as that incoming newsegment (and we convert the + * PTS to that remote clock). + */ + + /* Find the earliest current PTS we're going to push */ + for (tmp = demux->program->stream_list; tmp; tmp = tmp->next) { + TSDemuxStream *pstream = (TSDemuxStream *) tmp->data; + if (!GST_CLOCK_TIME_IS_VALID (firstpts) || pstream->pts < firstpts) + firstpts = pstream->pts; + } + + if (base->mode == BASE_MODE_PUSHING) { + /* FIXME : We're just ignore the upstream format for the time being */ + /* FIXME : We should use base->segment.format and a upstream latency query + * to decide if we need to use live values or not */ + GST_DEBUG ("push-based. base Segment start:%" GST_TIME_FORMAT " duration:%" + GST_TIME_FORMAT ", time:%" GST_TIME_FORMAT, + GST_TIME_ARGS (base->segment.start), + GST_TIME_ARGS (base->segment.duration), + GST_TIME_ARGS (base->segment.time)); + GST_DEBUG ("push-based. demux Segment start:%" GST_TIME_FORMAT " duration:%" + GST_TIME_FORMAT ", time:%" GST_TIME_FORMAT, + GST_TIME_ARGS (demux->segment.start), + GST_TIME_ARGS (demux->segment.duration), + GST_TIME_ARGS (demux->segment.time)); + + if (demux->segment.time == 0 && base->segment.format == GST_FORMAT_TIME) + demux->segment.time = base->segment.time; + + start = firstpts; + stop = GST_CLOCK_TIME_NONE; + position = demux->segment.time ? firstpts - demux->segment.time : 0; + demux->segment.time = start; + } else { + /* pull mode */ + GST_DEBUG ("pull-based. Segment start:%" GST_TIME_FORMAT " duration:%" + GST_TIME_FORMAT ", time:%" GST_TIME_FORMAT, + GST_TIME_ARGS (demux->segment.start), + GST_TIME_ARGS (demux->segment.duration), + GST_TIME_ARGS (demux->segment.time)); + + GST_DEBUG ("firstpcr gsttime : %" GST_TIME_FORMAT, + GST_TIME_ARGS (demux->first_pcr.gsttime)); + + /* FIXME : This is not entirely correct. We should be using the PTS time + * realm and not the PCR one. Doesn't matter *too* much if PTS/PCR values + * aren't too far apart, but still. 
*/ + start = demux->first_pcr.gsttime + demux->segment.start; + stop = demux->first_pcr.gsttime + demux->segment.duration; + position = demux->segment.time; + } + + GST_DEBUG ("new segment: start: %" GST_TIME_FORMAT " stop: %" + GST_TIME_FORMAT " time: %" GST_TIME_FORMAT, GST_TIME_ARGS (start), + GST_TIME_ARGS (stop), GST_TIME_ARGS (position)); + newsegmentevent = + gst_event_new_new_segment (0, 1.0, GST_FORMAT_TIME, start, stop, + position); + + push_event ((MpegTSBase *) demux, newsegmentevent); + + demux->need_newsegment = FALSE; +} + static GstFlowReturn gst_ts_demux_push_pending_data (GstTSDemux * demux, TSDemuxStream * stream) { GstFlowReturn res = GST_FLOW_OK; MpegTSBaseStream *bs = (MpegTSBaseStream *) stream; - - guint i; - GstClockTime tinypts = GST_CLOCK_TIME_NONE; - GstEvent *newsegmentevent; - GST_DEBUG ("stream:%p, pid:0x%04x stream_type:%d state:%d pad:%s:%s", stream, bs->pid, bs->stream_type, stream->state, GST_DEBUG_PAD_NAME (stream->pad)); @@ -2235,82 +2135,44 @@ gst_ts_demux_push_pending_data (GstTSDemux * demux, TSDemuxStream * stream) goto beach; } - /* We have a confirmed buffer, let's push it out */ - if (stream->state == PENDING_PACKET_BUFFER) { - GST_LOG ("BUFFER: pushing out pending data"); - stream->currentlist = g_list_reverse (stream->currentlist); - gst_buffer_list_iterator_add_list (stream->currentit, stream->currentlist); + if (G_UNLIKELY (stream->state != PENDING_PACKET_BUFFER)) + goto beach; + + if (G_UNLIKELY (stream->pad == NULL)) { + g_list_foreach (stream->currentlist, (GFunc) gst_buffer_unref, NULL); + g_list_free (stream->currentlist); gst_buffer_list_iterator_free (stream->currentit); - - - if (stream->pad) { - - if (demux->need_newsegment) { - - for (i = 0; i < 0x2000; i++) { - - if (demux->program->streams[i]) { - if ((!GST_CLOCK_TIME_IS_VALID (tinypts)) - || (((TSDemuxStream *) demux->program->streams[i])->pts < - tinypts)) - tinypts = ((TSDemuxStream *) demux->program->streams[i])->pts; - } - } - - GST_DEBUG ("old segment: tinypts: %" GST_TIME_FORMAT " stop: %" - GST_TIME_FORMAT " time: %" GST_TIME_FORMAT, - GST_TIME_ARGS (tinypts), - GST_TIME_ARGS (demux->first_pcr.gsttime + demux->duration), - GST_TIME_ARGS (tinypts - demux->first_pcr.gsttime)); -/* newsegmentevent = */ -/* gst_event_new_new_segment (0, 1.0, GST_FORMAT_TIME, tinypts, */ -/* demux->first_pcr.gsttime + demux->duration, */ -/* tinypts - demux->first_pcr.gsttime); */ - GST_DEBUG ("new segment: start: %" GST_TIME_FORMAT " stop: %" - GST_TIME_FORMAT " time: %" GST_TIME_FORMAT, - GST_TIME_ARGS (demux->first_pcr.gsttime + demux->segment.start), - GST_TIME_ARGS (demux->first_pcr.gsttime + demux->segment.duration), - GST_TIME_ARGS (demux->segment.time)); - newsegmentevent = - gst_event_new_new_segment (0, 1.0, GST_FORMAT_TIME, - demux->first_pcr.gsttime + demux->segment.start, - demux->first_pcr.gsttime + demux->segment.duration, - demux->segment.time); - - push_event ((MpegTSBase *) demux, newsegmentevent); - - demux->need_newsegment = FALSE; - } - - GST_DEBUG_OBJECT (stream->pad, - "Pushing buffer list with timestamp: %" GST_TIME_FORMAT, - GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (gst_buffer_list_get - (stream->current, 0, 0)))); - - res = gst_pad_push_list (stream->pad, stream->current); - GST_DEBUG_OBJECT (stream->pad, "Returned %s", gst_flow_get_name (res)); - /* FIXME : combine flow returns */ - res = tsdemux_combine_flows (demux, stream, res); - GST_DEBUG_OBJECT (stream->pad, "combined %s", gst_flow_get_name (res)); - } else { - gst_buffer_list_unref (stream->current); - } + 
gst_buffer_list_unref (stream->current); + goto beach; } + if (G_UNLIKELY (demux->need_newsegment)) + calculate_and_push_newsegment (demux, stream); + + /* We have a confirmed buffer, let's push it out */ + GST_LOG ("Putting pending data into GstBufferList"); + stream->currentlist = g_list_reverse (stream->currentlist); + gst_buffer_list_iterator_add_list (stream->currentit, stream->currentlist); + gst_buffer_list_iterator_free (stream->currentit); + + GST_DEBUG_OBJECT (stream->pad, + "Pushing buffer list with timestamp: %" GST_TIME_FORMAT, + GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (gst_buffer_list_get + (stream->current, 0, 0)))); + + res = gst_pad_push_list (stream->pad, stream->current); + GST_DEBUG_OBJECT (stream->pad, "Returned %s", gst_flow_get_name (res)); + res = tsdemux_combine_flows (demux, stream, res); + GST_DEBUG_OBJECT (stream->pad, "combined %s", gst_flow_get_name (res)); + beach: /* Reset everything */ GST_LOG ("Resetting to EMPTY"); stream->state = PENDING_PACKET_EMPTY; - - /* for (i = 0; i < stream->nbpending; i++) */ - /* gst_buffer_unref (stream->pendingbuffers[i]); */ memset (stream->pendingbuffers, 0, TS_MAX_PENDING_BUFFERS); stream->nbpending = 0; - stream->current = NULL; - - return res; } @@ -2383,6 +2245,7 @@ gst_ts_demux_plugin_init (GstPlugin * plugin) { GST_DEBUG_CATEGORY_INIT (ts_demux_debug, "tsdemux", 0, "MPEG transport stream demuxer"); + init_pes_parser (); return gst_element_register (plugin, "tsdemux", GST_RANK_SECONDARY, GST_TYPE_TS_DEMUX); diff --git a/gst/mpegtsdemux/tsdemux.h b/gst/mpegtsdemux/tsdemux.h index 636bc3a67d..395cf3e134 100644 --- a/gst/mpegtsdemux/tsdemux.h +++ b/gst/mpegtsdemux/tsdemux.h @@ -70,6 +70,7 @@ struct _GstTSDemux MpegTSBaseProgram *program; /* Current program */ guint current_program_number; gboolean need_newsegment; + /* Downstream segment */ GstSegment segment; GstClockTime duration; /* Total duration */ diff --git a/gst/mpegtsmux/mpegtsmux.c b/gst/mpegtsmux/mpegtsmux.c index 6ba24ec987..606e1d7f3a 100644 --- a/gst/mpegtsmux/mpegtsmux.c +++ b/gst/mpegtsmux/mpegtsmux.c @@ -642,7 +642,9 @@ mpegtsmux_choose_best_stream (MpegTsMux * mux) } } if (c_best) { - gst_buffer_unref (gst_collect_pads_pop (mux->collect, c_best)); + GstBuffer *buffer; + if ((buffer = gst_collect_pads_pop (mux->collect, c_best))) + gst_buffer_unref (buffer); } return best; diff --git a/gst/mpegvideoparse/mpegpacketiser.c b/gst/mpegvideoparse/mpegpacketiser.c index 694c4761d1..78721daea1 100644 --- a/gst/mpegvideoparse/mpegpacketiser.c +++ b/gst/mpegvideoparse/mpegpacketiser.c @@ -511,8 +511,6 @@ mpeg_util_parse_extension_packet (MPEGSeqHdr * hdr, guint8 * data, guint8 * end) case MPEG_PACKET_EXT_SEQUENCE: { /* Parse a Sequence Extension */ - gboolean low_delay; - guint8 chroma_format; guint8 horiz_size_ext, vert_size_ext; guint8 fps_n_ext, fps_d_ext; @@ -523,10 +521,10 @@ mpeg_util_parse_extension_packet (MPEGSeqHdr * hdr, guint8 * data, guint8 * end) hdr->profile = data[0] & 0x0f; /* profile (0:2) + escape bit (3) */ hdr->level = (data[1] >> 4) & 0x0f; hdr->progressive = data[1] & 0x08; - chroma_format = (data[1] >> 2) & 0x03; + /* chroma_format = (data[1] >> 2) & 0x03; */ horiz_size_ext = ((data[1] << 1) & 0x02) | ((data[2] >> 7) & 0x01); vert_size_ext = (data[2] >> 5) & 0x03; - low_delay = data[5] >> 7; + /* low_delay = data[5] >> 7; */ fps_n_ext = (data[5] >> 5) & 0x03; fps_d_ext = data[5] & 0x1f; @@ -549,7 +547,6 @@ mpeg_util_parse_sequence_hdr (MPEGSeqHdr * hdr, guint8 * data, guint8 * end) guint32 code; guint8 dar_idx, fps_idx; guint32 sync_word = 
0xffffffff; - gboolean constrained_flag; gboolean load_intra_flag; gboolean load_non_intra_flag; @@ -584,7 +581,7 @@ mpeg_util_parse_sequence_hdr (MPEGSeqHdr * hdr, guint8 * data, guint8 * end) hdr->bitrate *= 400; } - constrained_flag = (data[7] >> 2) & 0x01; + /* constrained_flag = (data[7] >> 2) & 0x01; */ load_intra_flag = (data[7] >> 1) & 0x01; if (load_intra_flag) { if (G_UNLIKELY ((end - data) < 64)) diff --git a/gst/mxf/mxfdemux.c b/gst/mxf/mxfdemux.c index 9036c4cae7..496ee38f7e 100644 --- a/gst/mxf/mxfdemux.c +++ b/gst/mxf/mxfdemux.c @@ -3358,7 +3358,7 @@ gst_mxf_demux_seek_pull (GstMXFDemux * demux, GstEvent * event) GST_PAD_STREAM_UNLOCK (demux->sinkpad); - return TRUE; + return ret; /* ERRORS */ wrong_format: diff --git a/gst/pcapparse/gstpcapparse.c b/gst/pcapparse/gstpcapparse.c index 2d3436cc60..5963916679 100644 --- a/gst/pcapparse/gstpcapparse.c +++ b/gst/pcapparse/gstpcapparse.c @@ -20,8 +20,8 @@ /** * SECTION:element-pcapparse * - * Extracts payloads from Ethernet-encapsulated IP packets, currently limited - * to UDP. Use #GstPcapParse:src-ip, #GstPcapParse:dst-ip, + * Extracts payloads from Ethernet-encapsulated IP packets. + * Use #GstPcapParse:src-ip, #GstPcapParse:dst-ip, * #GstPcapParse:src-port and #GstPcapParse:dst-port to restrict which packets * should be included. * @@ -64,6 +64,7 @@ enum PROP_SRC_PORT, PROP_DST_PORT, PROP_CAPS, + PROP_TS_OFFSET, PROP_LAST }; @@ -143,6 +144,11 @@ gst_pcap_parse_class_init (GstPcapParseClass * klass) "The caps of the source pad", GST_TYPE_CAPS, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_TS_OFFSET, + g_param_spec_int64 ("ts-offset", "Timestamp Offset", + "Relative timestamp offset (ns) to apply (-1 = use absolute packet time)", + -1, G_MAXINT64, -1, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + GST_DEBUG_CATEGORY_INIT (gst_pcap_parse_debug, "pcapparse", 0, "pcap parser"); } @@ -165,6 +171,7 @@ gst_pcap_parse_init (GstPcapParse * self, GstPcapParseClass * gclass) self->dst_ip = -1; self->src_port = -1; self->dst_port = -1; + self->offset = -1; self->adapter = gst_adapter_new (); @@ -234,6 +241,10 @@ gst_pcap_parse_get_property (GObject * object, guint prop_id, gst_value_set_caps (value, self->caps); break; + case PROP_TS_OFFSET: + g_value_set_int64 (value, self->offset); + break; + default: G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); break; @@ -283,6 +294,11 @@ gst_pcap_parse_set_property (GObject * object, guint prop_id, gst_pad_set_caps (self->src_pad, new_caps); break; } + + case PROP_TS_OFFSET: + self->offset = g_value_get_int64 (value); + break; + default: G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); break; @@ -297,6 +313,7 @@ gst_pcap_parse_reset (GstPcapParse * self) self->cur_packet_size = -1; self->buffer_offset = 0; self->cur_ts = GST_CLOCK_TIME_NONE; + self->base_ts = GST_CLOCK_TIME_NONE; self->newsegment_sent = FALSE; gst_adapter_clear (self->adapter); @@ -324,6 +341,8 @@ gst_pcap_parse_read_uint32 (GstPcapParse * self, const guint8 * p) #define UDP_HEADER_LEN 8 #define IP_PROTO_UDP 17 +#define IP_PROTO_TCP 6 + static gboolean gst_pcap_parse_scan_frame (GstPcapParse * self, @@ -331,16 +350,16 @@ gst_pcap_parse_scan_frame (GstPcapParse * self, gint buf_size, const guint8 ** payload, gint * payload_size) { const guint8 *buf_ip = 0; - const guint8 *buf_udp; + const guint8 *buf_proto; guint16 eth_type; guint8 b; guint8 ip_header_size; guint8 ip_protocol; guint32 ip_src_addr; guint32 ip_dst_addr; - guint16 udp_src_port; - guint16 
udp_dst_port; - guint16 udp_len; + guint16 src_port; + guint16 dst_port; + guint16 len; switch (self->linktype) { case DLT_ETHER: @@ -348,24 +367,22 @@ gst_pcap_parse_scan_frame (GstPcapParse * self, return FALSE; eth_type = GUINT16_FROM_BE (*((guint16 *) (buf + 12))); - if (eth_type != 0x800) - return FALSE; - buf_ip = buf + ETH_HEADER_LEN; break; case DLT_SLL: if (buf_size < SLL_HEADER_LEN + IP_HEADER_MIN_LEN + UDP_HEADER_LEN) return FALSE; - eth_type = GUINT16_FROM_BE (*((guint16 *) (buf + 2))); - - if (eth_type != 1) - return FALSE; - + eth_type = GUINT16_FROM_BE (*((guint16 *) (buf + 14))); buf_ip = buf + SLL_HEADER_LEN; break; + default: + return FALSE; } + if (eth_type != 0x800) + return FALSE; + b = *buf_ip; if (((b >> 4) & 0x0f) != 4) return FALSE; @@ -375,34 +392,53 @@ gst_pcap_parse_scan_frame (GstPcapParse * self, return FALSE; ip_protocol = *(buf_ip + 9); - if (ip_protocol != IP_PROTO_UDP) + GST_LOG_OBJECT (self, "ip proto %d", (gint) ip_protocol); + + if (ip_protocol != IP_PROTO_UDP && ip_protocol != IP_PROTO_TCP) return FALSE; + /* ip info */ ip_src_addr = *((guint32 *) (buf_ip + 12)); + ip_dst_addr = *((guint32 *) (buf_ip + 16)); + buf_proto = buf_ip + ip_header_size; + + /* ok for tcp and udp */ + src_port = GUINT16_FROM_BE (*((guint16 *) (buf_proto + 0))); + dst_port = GUINT16_FROM_BE (*((guint16 *) (buf_proto + 2))); + + /* extract some params and data according to protocol */ + if (ip_protocol == IP_PROTO_UDP) { + len = GUINT16_FROM_BE (*((guint16 *) (buf_proto + 4))); + if (len < UDP_HEADER_LEN || buf_proto + len > buf + buf_size) + return FALSE; + + *payload = buf_proto + UDP_HEADER_LEN; + *payload_size = len - UDP_HEADER_LEN; + } else { + if (buf_proto + 12 >= buf + buf_size) + return FALSE; + len = (buf_proto[12] >> 4) * 4; + if (buf_proto + len > buf + buf_size) + return FALSE; + + /* all remaining data following tcp header is payload */ + *payload = buf_proto + len; + *payload_size = self->cur_packet_size - (buf_proto - buf) - len; + } + + /* but still filter as configured */ if (self->src_ip >= 0 && ip_src_addr != self->src_ip) return FALSE; - ip_dst_addr = *((guint32 *) (buf_ip + 16)); if (self->dst_ip >= 0 && ip_dst_addr != self->dst_ip) return FALSE; - buf_udp = buf_ip + ip_header_size; - - udp_src_port = GUINT16_FROM_BE (*((guint16 *) (buf_udp + 0))); - if (self->src_port >= 0 && udp_src_port != self->src_port) + if (self->src_port >= 0 && src_port != self->src_port) return FALSE; - udp_dst_port = GUINT16_FROM_BE (*((guint16 *) (buf_udp + 2))); - if (self->dst_port >= 0 && udp_dst_port != self->dst_port) + if (self->dst_port >= 0 && dst_port != self->dst_port) return FALSE; - udp_len = GUINT16_FROM_BE (*((guint16 *) (buf_udp + 4))); - if (udp_len < UDP_HEADER_LEN || buf_udp + udp_len > buf + buf_size) - return FALSE; - - *payload = buf_udp + UDP_HEADER_LEN; - *payload_size = udp_len - UDP_HEADER_LEN; - return TRUE; } @@ -431,6 +467,9 @@ gst_pcap_parse_chain (GstPad * pad, GstBuffer * buffer) data = gst_adapter_peek (self->adapter, self->cur_packet_size); + GST_LOG_OBJECT (self, "examining packet size %" G_GINT64_FORMAT, + self->cur_packet_size); + if (gst_pcap_parse_scan_frame (self, data, self->cur_packet_size, &payload_data, &payload_size)) { GstBuffer *out_buf; @@ -440,6 +479,15 @@ gst_pcap_parse_chain (GstPad * pad, GstBuffer * buffer) if (ret == GST_FLOW_OK) { + if (GST_CLOCK_TIME_IS_VALID (self->cur_ts)) { + if (!GST_CLOCK_TIME_IS_VALID (self->base_ts)) + self->base_ts = self->cur_ts; + if (self->offset >= 0) { + self->cur_ts -= self->base_ts; + 
self->cur_ts += self->offset; + } + } + memcpy (GST_BUFFER_DATA (out_buf), payload_data, payload_size); GST_BUFFER_TIMESTAMP (out_buf) = self->cur_ts; @@ -466,7 +514,6 @@ gst_pcap_parse_chain (GstPad * pad, GstBuffer * buffer) guint32 ts_sec; guint32 ts_usec; guint32 incl_len; - guint32 orig_len; if (avail < 16) break; @@ -476,7 +523,7 @@ gst_pcap_parse_chain (GstPad * pad, GstBuffer * buffer) ts_sec = gst_pcap_parse_read_uint32 (self, data + 0); ts_usec = gst_pcap_parse_read_uint32 (self, data + 4); incl_len = gst_pcap_parse_read_uint32 (self, data + 8); - orig_len = gst_pcap_parse_read_uint32 (self, data + 12); + /* orig_len = gst_pcap_parse_read_uint32 (self, data + 12); */ gst_adapter_flush (self->adapter, 16); @@ -525,6 +572,7 @@ gst_pcap_parse_chain (GstPad * pad, GstBuffer * buffer) goto out; } + GST_DEBUG_OBJECT (self, "linktype %u", linktype); self->linktype = linktype; gst_adapter_flush (self->adapter, 24); diff --git a/gst/pcapparse/gstpcapparse.h b/gst/pcapparse/gstpcapparse.h index 35742394ef..49e432c776 100644 --- a/gst/pcapparse/gstpcapparse.h +++ b/gst/pcapparse/gstpcapparse.h @@ -71,6 +71,7 @@ struct _GstPcapParse gint32 src_port; gint32 dst_port; GstCaps *caps; + gint64 offset; /* state */ GstAdapter * adapter; @@ -78,6 +79,7 @@ struct _GstPcapParse gboolean swap_endian; gint64 cur_packet_size; GstClockTime cur_ts; + GstClockTime base_ts; GstPcapParseLinktype linktype; gboolean newsegment_sent; diff --git a/gst/rtpvp8/gstrtpvp8pay.c b/gst/rtpvp8/gstrtpvp8pay.c index b54ec28985..c6c773dcaf 100644 --- a/gst/rtpvp8/gstrtpvp8pay.c +++ b/gst/rtpvp8/gstrtpvp8pay.c @@ -141,6 +141,11 @@ gst_rtp_vp8_pay_parse_frame (GstRtpVP8Pay * self, GstBuffer * buffer) self->is_keyframe = keyframe = ((data[0] & 0x1) == 0); version = (data[0] >> 1) & 0x7; + if (G_UNLIKELY (version > 3)) { + GST_ERROR_OBJECT (self, "Unknown VP8 version %u", version); + goto error; + } + /* keyframe, version and show_frame use 5 bits */ header_size = data[2] << 11 | data[1] << 3 | (data[0] >> 5); diff --git a/gst/scaletempo/gstscaletempo.c b/gst/scaletempo/gstscaletempo.c index ba82a85823..a057321f41 100644 --- a/gst/scaletempo/gstscaletempo.c +++ b/gst/scaletempo/gstscaletempo.c @@ -21,41 +21,39 @@ /** * SECTION:element-scaletempo * - * - * * Scale tempo while maintaining pitch * (WSOLA-like technique with cross correlation) * Inspired by SoundTouch library by Olli Parviainen - * - * + * * Use Sceletempo to apply playback rates without the chipmunk effect. - * + * + * * Example pipelines * - * + * |[ * filesrc location=media.ext ! decodebin name=d \ * d. ! queue ! audioconvert ! audioresample ! scaletempo ! audioconvert ! audioresample ! autoaudiosink \ * d. ! queue ! ffmpegcolorspace ! autovideosink - * + * ]| * OR - * + * |[ * playbin uri=... audio_sink="scaletempo ! audioconvert ! audioresample ! autoaudiosink" - * + * ]| * When an application sends a seek event with rate != 1.0, Scaletempo applies * the rate change by scaling the tempo without scaling the pitch. - * - * - * Scaletempo works by producing audio in constant sized chunks (a "stride") but - * consuming chunks proportional to the playback rate. - * - * + * + * Scaletempo works by producing audio in constant sized chunks + * (#GstScaletempo:stride) but consuming chunks proportional to the playback + * rate. + * * Scaletempo then smooths the output by blending the end of one stride with - * the next ("overlap"). - * - * + * the next (#GstScaletempo:overlap). 
+ * * Scaletempo smooths the overlap further by searching within the input buffer - for the best overlap position. Scaletempo uses a statistical cross correlation - (roughly a dot-product). Scaletempo consumes most of its CPU cycles here. + for the best overlap position. Scaletempo uses a statistical cross + correlation (roughly a dot-product). Scaletempo consumes most of its CPU + cycles here. One can use the #GstScaletempo:search property to tune how far + the algorithm looks. * * */ diff --git a/gst/sdi/gstsdidemux.c b/gst/sdi/gstsdidemux.c index 1636b5a94f..36b54f0ea5 100644 --- a/gst/sdi/gstsdidemux.c +++ b/gst/sdi/gstsdidemux.c @@ -296,7 +296,6 @@ copy_line (GstSdiDemux * sdidemux, guint8 * line) if (sdidemux->line == format->lines) { ret = gst_pad_push (sdidemux->srcpad, sdidemux->output_buffer); gst_sdi_demux_get_output_buffer (sdidemux); - output_data = GST_BUFFER_DATA (sdidemux->output_buffer); sdidemux->line = 0; } @@ -318,7 +317,6 @@ gst_sdi_demux_chain (GstPad * pad, GstBuffer * buffer) int offset = 0; guint8 *data = GST_BUFFER_DATA (buffer); int size = GST_BUFFER_SIZE (buffer); - guint8 *output_data; GstFlowReturn ret = GST_FLOW_OK; GstSdiFormat *format; @@ -366,8 +364,6 @@ gst_sdi_demux_chain (GstPad * pad, GstBuffer * buffer) if (sdidemux->output_buffer == NULL) { gst_sdi_demux_get_output_buffer (sdidemux); } - output_data = GST_BUFFER_DATA (sdidemux->output_buffer); - #if 0 if (sdidemux->offset) { int n; @@ -488,7 +484,7 @@ out: static gboolean gst_sdi_demux_sink_event (GstPad * pad, GstEvent * event) { - gboolean res; + gboolean res = TRUE; GstSdiDemux *sdidemux; sdidemux = GST_SDI_DEMUX (gst_pad_get_parent (pad)); @@ -514,7 +510,7 @@ gst_sdi_demux_sink_event (GstPad * pad, GstEvent * event) } gst_object_unref (sdidemux); - return TRUE; + return res; } static gboolean @@ -537,5 +533,5 @@ gst_sdi_demux_src_event (GstPad * pad, GstEvent * event) } gst_object_unref (sdidemux); - return TRUE; + return res; } diff --git a/gst/sdi/gstsdimux.c b/gst/sdi/gstsdimux.c index 779add42a1..d7d490bd3a 100644 --- a/gst/sdi/gstsdimux.c +++ b/gst/sdi/gstsdimux.c @@ -285,7 +285,7 @@ gst_sdi_mux_sink_event (GstPad * pad, GstEvent * event) } gst_object_unref (sdimux); - return TRUE; + return res; } static gboolean @@ -308,5 +308,5 @@ gst_sdi_mux_src_event (GstPad * pad, GstEvent * event) } gst_object_unref (sdimux); - return TRUE; + return res; } diff --git a/gst/tta/gstttadec.c b/gst/tta/gstttadec.c index e2c1f5a37a..da7693f048 100644 --- a/gst/tta/gstttadec.c +++ b/gst/tta/gstttadec.c @@ -89,7 +89,7 @@ static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src", "depth = (int) { 8, 16, 24 }, " "channels = (int) { 1, 2 }, " "rate = (int) [ 8000, 96000 ], " - "endianness = (int) LITTLE_ENDIAN, " "signed = (boolean) true") + "endianness = (int) BYTE_ORDER, " "signed = (boolean) true") ); static void gst_tta_dec_class_init (GstTtaDecClass * klass); @@ -106,7 +106,6 @@ gst_tta_dec_setcaps (GstPad * pad, GstCaps * caps) GstTtaDec *ttadec = GST_TTA_DEC (gst_pad_get_parent (pad)); GstStructure *structure = gst_caps_get_structure (caps, 0); GstCaps *srccaps; - guint64 outsize; gint bits, channels; gint32 samplerate; @@ -125,7 +124,7 @@ gst_tta_dec_setcaps (GstPad * pad, GstCaps * caps) "channels", G_TYPE_INT, ttadec->channels, "depth", G_TYPE_INT, bits, "width", G_TYPE_INT, bits, - "endianness", G_TYPE_INT, G_LITTLE_ENDIAN, + "endianness", G_TYPE_INT, G_BYTE_ORDER, "signed", G_TYPE_BOOLEAN, TRUE, NULL); if (!gst_pad_set_caps (ttadec->srcpad, srccaps)) @@ -136,8 +135,6 @@
gst_tta_dec_setcaps (GstPad * pad, GstCaps * caps) ttadec->tta = g_malloc (ttadec->channels * sizeof (decoder)); ttadec->cache = g_malloc (ttadec->channels * sizeof (long)); - outsize = ttadec->channels * ttadec->frame_length * ttadec->bytes; - ttadec->decdata = (guchar *) g_malloc (ttadec->channels * ttadec->frame_length * ttadec->bytes * sizeof (guchar)); @@ -445,5 +442,5 @@ gboolean gst_tta_dec_plugin_init (GstPlugin * plugin) { return gst_element_register (plugin, "ttadec", - GST_RANK_PRIMARY, GST_TYPE_TTA_DEC); + GST_RANK_NONE, GST_TYPE_TTA_DEC); } diff --git a/gst/tta/gstttaparse.c b/gst/tta/gstttaparse.c index fc2b02b308..069408ab60 100644 --- a/gst/tta/gstttaparse.c +++ b/gst/tta/gstttaparse.c @@ -495,7 +495,7 @@ gboolean gst_tta_parse_plugin_init (GstPlugin * plugin) { if (!gst_element_register (plugin, "ttaparse", - GST_RANK_PRIMARY, GST_TYPE_TTA_PARSE)) { + GST_RANK_NONE, GST_TYPE_TTA_PARSE)) { return FALSE; } diff --git a/gst/videomeasure/gstvideomeasure_ssim.c b/gst/videomeasure/gstvideomeasure_ssim.c index 19d03d4ded..499fcf2ac0 100644 --- a/gst/videomeasure/gstvideomeasure_ssim.c +++ b/gst/videomeasure/gstvideomeasure_ssim.c @@ -215,8 +215,6 @@ calculate_mu (GstSSim * ssim, gfloat * outmu, guint8 * buf) gint winstart_x; gint wghstart_x; gint winend_x; - gint winlen_x; - gint winstride_x; gfloat weight; gint source_offset; @@ -228,8 +226,6 @@ calculate_mu (GstSSim * ssim, gfloat * outmu, guint8 * buf) winstart_y = ssim->windows[source_offset].y_window_start; wghstart_y = ssim->windows[source_offset].y_weight_start; winend_y = ssim->windows[source_offset].y_window_end; - winlen_x = winend_x - winstart_x + 1; - winstride_x = sizeof (gfloat) * winlen_x; elsumm = ssim->windows[source_offset].element_summ; switch (ssim->windowtype) { @@ -389,8 +385,6 @@ calcssim_canonical (GstSSim * ssim, guint8 * org, gfloat * orgmu, guint8 * mod, gint winstart_x; gint wghstart_x; gint winend_x; - gint winlen_x; - gint winstride_x; gfloat weight; gint source_offset; @@ -402,8 +396,6 @@ calcssim_canonical (GstSSim * ssim, guint8 * org, gfloat * orgmu, guint8 * mod, winstart_y = ssim->windows[source_offset].y_window_start; wghstart_y = ssim->windows[source_offset].y_weight_start; winend_y = ssim->windows[source_offset].y_window_end; - winlen_x = winend_x - winstart_x + 1; - winstride_x = sizeof (gfloat) * winlen_x; elsumm = ssim->windows[source_offset].element_summ; switch (ssim->windowtype) { @@ -1191,7 +1183,7 @@ gst_ssim_request_new_pad (GstElement * element, GstPadTemplate * templ, if (num >= 0) { GstSSimOutputContext *c; - GObject *asobject; + template = gst_static_pad_template_get (&gst_ssim_src_template); name = g_strdup_printf ("src%d", num); newsrc = gst_pad_new_from_template (template, name); @@ -1209,7 +1201,6 @@ gst_ssim_request_new_pad (GstElement * element, GstPadTemplate * templ, c = g_new (GstSSimOutputContext, 1); c->pad = newsrc; - asobject = G_OBJECT (newsrc); g_object_set_data (G_OBJECT (newpad), "ssim-match-output-context", c); g_ptr_array_add (ssim->src, (gpointer) c); } @@ -1427,7 +1418,6 @@ gst_ssim_collected (GstCollectPads * pads, gpointer user_data) gpointer outdata = NULL; guint outsize = 0; gfloat mssim = 0, lowest = 1, highest = -1; - gboolean empty = TRUE; gboolean ready = TRUE; gint padnumber = 0; @@ -1504,7 +1494,6 @@ gst_ssim_collected (GstCollectPads * pads, gpointer user_data) GstCollectData *collect_data; GstBuffer *inbuf; guint8 *indata; - guint insize; collect_data = (GstCollectData *) collected->data; @@ -1512,7 +1501,6 @@ gst_ssim_collected (GstCollectPads * 
pads, gpointer user_data) inbuf = gst_collect_pads_pop (pads, collect_data); indata = GST_BUFFER_DATA (inbuf); - insize = GST_BUFFER_SIZE (inbuf); GST_DEBUG_OBJECT (ssim, "Modified stream - flags(0x%x), timestamp(%" GST_TIME_FORMAT "), duration(%" GST_TIME_FORMAT ")", @@ -1609,8 +1597,6 @@ gst_ssim_collected (GstCollectPads * pads, gpointer user_data) GST_BUFFER_TIMESTAMP (inbuf), "SSIM", &vmean, &vlowest, &vhighest); gst_pad_push_event (c->pad, measured); - empty = FALSE; - /* send it out */ GST_DEBUG_OBJECT (ssim, "pushing outbuf, timestamp %" GST_TIME_FORMAT ", size %d", GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)), diff --git a/gst/videoparsers/gstdiracparse.c b/gst/videoparsers/gstdiracparse.c index fbd157abb5..3feaa41926 100644 --- a/gst/videoparsers/gstdiracparse.c +++ b/gst/videoparsers/gstdiracparse.c @@ -36,7 +36,9 @@ #include #include +#include #include "gstdiracparse.h" +#include "dirac_parse.h" /* prototypes */ @@ -190,6 +192,8 @@ gst_dirac_parse_finalize (GObject * object) static gboolean gst_dirac_parse_start (GstBaseParse * parse) { + gst_base_parse_set_min_frame_size (parse, 13); + return TRUE; } @@ -206,102 +210,133 @@ gst_dirac_parse_set_sink_caps (GstBaseParse * parse, GstCaps * caps) return TRUE; } -static gboolean -gst_dirac_parse_frame_header (GstDiracParse * diracparse, - GstBuffer * buffer, guint * framesize) -{ - int next_header; - - next_header = GST_READ_UINT32_BE (GST_BUFFER_DATA (buffer) + 5); - - *framesize = next_header; - return TRUE; -} - static gboolean gst_dirac_parse_check_valid_frame (GstBaseParse * parse, GstBaseParseFrame * frame, guint * framesize, gint * skipsize) { GstByteReader reader = GST_BYTE_READER_INIT_FROM_BUFFER (frame->buffer); - GstDiracParse *diracparse = GST_DIRAC_PARSE (parse); int off; guint32 next_header; - gboolean lost_sync; - gboolean draining; + guint8 *data; + int size; + gboolean have_picture = FALSE; + int offset; - if (G_UNLIKELY (GST_BUFFER_SIZE (frame->buffer) < 13)) + data = GST_BUFFER_DATA (frame->buffer); + size = GST_BUFFER_SIZE (frame->buffer); + + if (G_UNLIKELY (size < 13)) return FALSE; - off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff, - 0x42424344, 0, GST_BUFFER_SIZE (frame->buffer)); + GST_DEBUG ("%d: %02x %02x %02x %02x", size, data[0], data[1], data[2], + data[3]); - if (off < 0) { - *skipsize = GST_BUFFER_SIZE (frame->buffer) - 3; - return FALSE; - } + if (GST_READ_UINT32_BE (data) != 0x42424344) { + off = gst_byte_reader_masked_scan_uint32 (&reader, 0xffffffff, + 0x42424344, 0, GST_BUFFER_SIZE (frame->buffer)); - GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off); + if (off < 0) { + *skipsize = GST_BUFFER_SIZE (frame->buffer) - 3; + return FALSE; + } - if (off > 0) { - GST_ERROR ("skipping %d", off); + GST_LOG_OBJECT (parse, "possible sync at buffer offset %d", off); + + GST_DEBUG ("skipping %d", off); *skipsize = off; return FALSE; } - if (!gst_dirac_parse_frame_header (diracparse, frame->buffer, framesize)) { - GST_ERROR ("bad header"); - *skipsize = 3; - return FALSE; - } + /* have sync, parse chunks */ - GST_LOG ("framesize %d", *framesize); + offset = 0; + while (!have_picture) { + GST_DEBUG ("offset %d:", offset); - lost_sync = GST_BASE_PARSE_LOST_SYNC (frame); - draining = GST_BASE_PARSE_DRAINING (frame); - - if (lost_sync && !draining) { - guint32 next_sync_word = 0; - - next_header = GST_READ_UINT32_BE (GST_BUFFER_DATA (frame->buffer) + 5); - GST_LOG ("next header %d", next_header); - - if (!gst_byte_reader_skip (&reader, next_header) || - !gst_byte_reader_get_uint32_be 
(&reader, &next_sync_word)) { - gst_base_parse_set_min_frame_size (parse, next_header + 4); - *skipsize = 0; + if (offset + 13 >= size) { + *framesize = offset + 13; return FALSE; - } else { - if (next_sync_word != 0x42424344) { - *skipsize = 3; - return FALSE; - } else { - gst_base_parse_set_min_frame_size (parse, next_header); + } - } + GST_DEBUG ("chunk type %02x", data[offset + 4]); + + if (GST_READ_UINT32_BE (data + offset) != 0x42424344) { + GST_DEBUG ("bad header"); + *skipsize = 3; + return FALSE; + } + + next_header = GST_READ_UINT32_BE (data + offset + 5); + GST_DEBUG ("next_header %d", next_header); + if (next_header == 0) + next_header = 13; + + if (SCHRO_PARSE_CODE_IS_PICTURE (data[offset + 4])) { + have_picture = TRUE; + } + + offset += next_header; + if (offset >= size) { + *framesize = offset; + return FALSE; } } + *framesize = offset; + GST_DEBUG ("framesize %d", *framesize); + return TRUE; } static GstFlowReturn gst_dirac_parse_parse_frame (GstBaseParse * parse, GstBaseParseFrame * frame) { - //GstDiracParse * diracparse = GST_DIRAC_PARSE (parse); + GstDiracParse *diracparse = GST_DIRAC_PARSE (parse); + guint8 *data; + int size; /* Called when processing incoming buffers. Function should parse a checked frame. */ /* MUST implement */ - if (GST_PAD_CAPS (GST_BASE_PARSE_SRC_PAD (parse)) == NULL) { - GstCaps *caps = gst_caps_new_simple ("video/x-dirac", NULL); + data = GST_BUFFER_DATA (frame->buffer); + size = GST_BUFFER_SIZE (frame->buffer); - gst_buffer_set_caps (frame->buffer, caps); - gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps); - gst_caps_unref (caps); + //GST_ERROR("got here %d", size); + if (data[4] == SCHRO_PARSE_CODE_SEQUENCE_HEADER) { + GstCaps *caps; + DiracSequenceHeader sequence_header; + int ret; + + ret = dirac_sequence_header_parse (&sequence_header, data + 13, size - 13); + if (ret) { + memcpy (&diracparse->sequence_header, &sequence_header, + sizeof (sequence_header)); + caps = gst_caps_new_simple ("video/x-dirac", + "width", G_TYPE_INT, sequence_header.width, + "height", G_TYPE_INT, sequence_header.height, + "framerate", GST_TYPE_FRACTION, + sequence_header.frame_rate_numerator, + sequence_header.frame_rate_denominator, + "pixel-aspect-ratio", GST_TYPE_FRACTION, + sequence_header.aspect_ratio_numerator, + sequence_header.aspect_ratio_denominator, + "interlaced", G_TYPE_BOOLEAN, sequence_header.interlaced, + "profile", G_TYPE_INT, sequence_header.profile, + "level", G_TYPE_INT, sequence_header.level, NULL); + gst_pad_set_caps (GST_BASE_PARSE_SRC_PAD (parse), caps); + gst_caps_unref (caps); + + gst_base_parse_set_frame_rate (parse, + sequence_header.frame_rate_numerator, + sequence_header.frame_rate_denominator, 0, 0); + } } + gst_buffer_set_caps (frame->buffer, + GST_PAD_CAPS (GST_BASE_PARSE_SRC_PAD (parse))); + gst_base_parse_set_min_frame_size (parse, 13); return GST_FLOW_OK; diff --git a/gst/videoparsers/gstdiracparse.h b/gst/videoparsers/gstdiracparse.h index abf3821f15..7691c5790a 100644 --- a/gst/videoparsers/gstdiracparse.h +++ b/gst/videoparsers/gstdiracparse.h @@ -22,6 +22,7 @@ #include #include +#include "dirac_parse.h" G_BEGIN_DECLS @@ -38,6 +39,9 @@ struct _GstDiracParse { GstBaseParse base_diracparse; + DiracSequenceHeader sequence_header; + + guint32 frame_number; }; struct _GstDiracParseClass diff --git a/gst/videoparsers/gsth264parse.c b/gst/videoparsers/gsth264parse.c index d5b539e427..089b5a7878 100644 --- a/gst/videoparsers/gsth264parse.c +++ b/gst/videoparsers/gsth264parse.c @@ -507,6 +507,8 @@ 
gst_h264_parse_check_valid_frame (GstBaseParse * parse, if (sc_pos == -1) { /* SC not found, need more data */ sc_pos = GST_BUFFER_SIZE (buffer) - 3; + /* avoid going < 0 later on */ + nal_pos = next_sc_pos = sc_pos; goto more; } @@ -950,8 +952,12 @@ gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps) gst_structure_get_fraction (str, "framerate", &h264parse->fps_num, &h264parse->fps_den); + /* get upstream format and align from caps */ + gst_h264_parse_format_from_caps (caps, &format, &align); + /* packetized video has a codec_data */ - if ((value = gst_structure_get_value (str, "codec_data"))) { + if (format != GST_H264_PARSE_FORMAT_BYTE && + (value = gst_structure_get_value (str, "codec_data"))) { guint8 *data; guint num_sps, num_pps, profile, len; gint i; @@ -1011,41 +1017,41 @@ gst_h264_parse_set_caps (GstBaseParse * parse, GstCaps * caps) } h264parse->codec_data = gst_buffer_ref (codec_data); + + /* if upstream sets codec_data without setting stream-format and alignment, we + * assume stream-format=avc,alignment=au */ + if (format == GST_H264_PARSE_FORMAT_NONE) { + format = GST_H264_PARSE_FORMAT_AVC; + align = GST_H264_PARSE_ALIGN_AU; + } } else { GST_DEBUG_OBJECT (h264parse, "have bytestream h264"); /* nothing to pre-process */ h264parse->packetized = FALSE; /* we have 4 sync bytes */ h264parse->nal_length_size = 4; + + if (format == GST_H264_PARSE_FORMAT_NONE) { + format = GST_H264_PARSE_FORMAT_BYTE; + align = GST_H264_PARSE_ALIGN_AU; + } } /* negotiate with downstream, sets ->format and ->align */ gst_h264_parse_negotiate (h264parse); - /* get upstream format and align from caps */ - gst_h264_parse_format_from_caps (caps, &format, &align); - - /* if upstream sets codec_data without setting stream-format and alignment, we - * assume stream-format=avc,alignment=au */ - if (format == GST_H264_PARSE_FORMAT_NONE) { - if (codec_data == NULL) - goto unknown_input_format; - - format = GST_H264_PARSE_FORMAT_AVC; - align = GST_H264_PARSE_ALIGN_AU; - } - if (format == h264parse->format && align == h264parse->align) { gst_base_parse_set_passthrough (parse, TRUE); /* we did parse codec-data and might supplement src caps */ gst_h264_parse_update_src_caps (h264parse, caps); - } else if (format == GST_H264_PARSE_FORMAT_AVC && - h264parse->format == GST_H264_PARSE_FORMAT_BYTE) { + } else if (format == GST_H264_PARSE_FORMAT_AVC) { + /* if input != output, and input is avc, must split before anything else */ /* arrange to insert codec-data in-stream if needed. 
* src caps are only arranged for later on */ h264parse->push_codec = TRUE; h264parse->split_packetized = TRUE; + h264parse->packetized = TRUE; } return TRUE; @@ -1066,11 +1072,6 @@ wrong_type: GST_DEBUG_OBJECT (h264parse, "wrong codec-data type"); goto refuse_caps; } -unknown_input_format: - { - GST_DEBUG_OBJECT (h264parse, "unknown stream-format and no codec_data"); - goto refuse_caps; - } refuse_caps: { GST_WARNING_OBJECT (h264parse, "refused caps %" GST_PTR_FORMAT, caps); diff --git a/gst/videoparsers/h263parse.c b/gst/videoparsers/h263parse.c index 09cbbd4f36..14911e79f0 100644 --- a/gst/videoparsers/h263parse.c +++ b/gst/videoparsers/h263parse.c @@ -21,7 +21,7 @@ */ #ifdef HAVE_CONFIG_H -# include "config.h" +#include "config.h" #endif #include @@ -455,7 +455,7 @@ beach: gint gst_h263_parse_get_profile (const H263Params * params) { - gboolean c, d, d1, d21, d22, e, f, f2, g, h, i, j, k, k0, k1, k2, l, m, n, o, + gboolean c, d, d1, d21, e, f, f2, g, h, i, j, k, k0, k1, l, m, n, o, p, q, r, s, t, u, v, w; /* FIXME: some parts of Annex C can be discovered, others can not */ @@ -464,7 +464,7 @@ gst_h263_parse_get_profile (const H263Params * params) /* d1: Annex D.1; d21: Annex D.2 with UUI=1; d22: Annex D.2 with UUI=01 */ d1 = (d && params->uui == UUI_ABSENT); d21 = (d && params->uui == UUI_IS_1); - d22 = (d && params->uui == UUI_IS_01); + /* d22 = (d && params->uui == UUI_IS_01); */ e = (params->features & H263_OPTION_SAC_MODE) != 0; /* f:Annex F.2 or F.3 may be used; f2: only Annex F.2 is used (we have no * way of detecting this right now */ @@ -478,7 +478,7 @@ gst_h263_parse_get_profile (const H263Params * params) /* k0: Annex K without submodes; k1: Annex K with ASO; k2: Annex K with RS */ k0 = (k && params->sss == 0x0); k1 = (k && params->sss == 0x2); - k2 = (k && params->sss == 0x1); + /* k2 = (k && params->sss == 0x1); */ l = FALSE; m = (params->type == PICTURE_IMPROVED_PB); n = (params->features & H263_OPTION_RPS_MODE) != 0; diff --git a/gst/y4m/gsty4mdec.c b/gst/y4m/gsty4mdec.c index 8065bd3d51..e7c3caf42a 100644 --- a/gst/y4m/gsty4mdec.c +++ b/gst/y4m/gsty4mdec.c @@ -530,7 +530,7 @@ gst_y4m_dec_chain (GstPad * pad, GstBuffer * buffer) GST_BUFFER_TIMESTAMP (buffer) = gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index); GST_BUFFER_DURATION (buffer) = - gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index) - + gst_y4m_dec_frames_to_timestamp (y4mdec, y4mdec->frame_index + 1) - GST_BUFFER_TIMESTAMP (buffer); if (y4mdec->interlaced && y4mdec->tff) { GST_BUFFER_FLAG_SET (buffer, GST_VIDEO_BUFFER_TFF); diff --git a/sys/Makefile.am b/sys/Makefile.am index 4c7486e44a..b7a99126d9 100644 --- a/sys/Makefile.am +++ b/sys/Makefile.am @@ -113,7 +113,7 @@ else AVC_DIR= endif -SUBDIRS = $(ACM_DIR) $(APPLE_MEDIA_DIR) $(AVC_DIR) $(D3DVIDEOSINK) $(DECKLINK_DIR) $(DIRECTDRAW_DIR) $(DIRECTSOUND_DIR) $(DVB_DIR) $(FBDEV_DIR) $(LINSYS_DIR) $(OSX_VIDEO_DIR) $(QT_DIR) $(SHM_DIR) $(VCD_DIR) $(VDPAU_DIR) $(WININET_DIR) +SUBDIRS = $(ACM_DIR) $(APPLE_MEDIA_DIR) $(AVC_DIR) $(D3DVIDEOSINK_DIR) $(DECKLINK_DIR) $(DIRECTDRAW_DIR) $(DIRECTSOUND_DIR) $(DVB_DIR) $(FBDEV_DIR) $(LINSYS_DIR) $(OSX_VIDEO_DIR) $(QT_DIR) $(SHM_DIR) $(VCD_DIR) $(VDPAU_DIR) $(WININET_DIR) DIST_SUBDIRS = acmenc acmmp3dec applemedia avc d3dvideosink decklink directdraw directsound dvb linsys fbdev dshowdecwrapper dshowsrcwrapper dshowvideosink \ osxvideo qtwrapper shm vcd vdpau wasapi wininet winks winscreencap diff --git a/sys/d3dvideosink/Makefile.am b/sys/d3dvideosink/Makefile.am index 6e0ecf316c..183e460cf6 100644 --- 
a/sys/d3dvideosink/Makefile.am +++ b/sys/d3dvideosink/Makefile.am @@ -6,8 +6,9 @@ libgstd3dvideosink_la_SOURCES = d3dvideosink.c directx/d3d.c directx/dx.c \ directx/directx11/dx11.c directx/directx11/dx11_d3d.c libgstd3dvideosink_la_CFLAGS = $(GST_PLUGINS_BAD_CFLAGS) $(GST_CFLAGS) libgstd3dvideosink_la_LIBADD = $(GST_BASE_LIBS) $(GST_PLUGINS_BASE_LIBS) \ - -lgstvideo-$(GST_MAJORMINOR) -lgstinterfaces-$(GST_MAJORMINOR) -libgstd3dvideosink_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) -lgdi32 + -lgstvideo-$(GST_MAJORMINOR) -lgstinterfaces-$(GST_MAJORMINOR) \ + $(DIRECT3D_LIBS) +libgstd3dvideosink_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS) libgstd3dvideosink_la_LIBTOOLFLAGS = --tag=disable-static noinst_HEADERS = d3dvideosink.h directx/d3d.h directx/dx.h directx/directx.h \ diff --git a/sys/decklink/DeckLinkAPI.h b/sys/decklink/DeckLinkAPI.h index 19eb07e9cd..e9c1066e35 100644 --- a/sys/decklink/DeckLinkAPI.h +++ b/sys/decklink/DeckLinkAPI.h @@ -1,5 +1,5 @@ /* -LICENSE-START- -** Copyright (c) 2009 Blackmagic Design +** Copyright (c) 2011 Blackmagic Design ** ** Permission is hereby granted, free of charge, to any person or organization ** obtaining a copy of the software and accompanying documentation covered by @@ -70,7 +70,7 @@ typedef uint32_t BMDTimecodeUserBits; #define IID_IDeckLinkKeyer /* 89AFCAF5-65F8-421E-98F7-96FE5F5BFBA3 */ (REFIID){0x89,0xAF,0xCA,0xF5,0x65,0xF8,0x42,0x1E,0x98,0xF7,0x96,0xFE,0x5F,0x5B,0xFB,0xA3} #define IID_IDeckLinkVideoConversion /* 3BBCB8A2-DA2C-42D9-B5D8-88083644E99A */ (REFIID){0x3B,0xBC,0xB8,0xA2,0xDA,0x2C,0x42,0xD9,0xB5,0xD8,0x88,0x08,0x36,0x44,0xE9,0x9A} #define IID_IDeckLinkDeckControlStatusCallback /* E5F693C1-4283-4716-B18F-C1431521955B */ (REFIID){0xE5,0xF6,0x93,0xC1,0x42,0x83,0x47,0x16,0xB1,0x8F,0xC1,0x43,0x15,0x21,0x95,0x5B} -#define IID_IDeckLinkDeckControl /* A4D81043-0619-42B7-8ED6-602D29041DF7 */ (REFIID){0xA4,0xD8,0x10,0x43,0x06,0x19,0x42,0xB7,0x8E,0xD6,0x60,0x2D,0x29,0x04,0x1D,0xF7} +#define IID_IDeckLinkDeckControl /* 522A9E39-0F3C-4742-94EE-D80DE335DA1D */ (REFIID){0x52,0x2A,0x9E,0x39,0x0F,0x3C,0x47,0x42,0x94,0xEE,0xD8,0x0D,0xE3,0x35,0xDA,0x1D} /* Enum BMDDisplayMode - Video display modes */ @@ -264,7 +264,9 @@ enum _BMDDisplayModeSupport { typedef uint32_t BMDTimecodeFormat; enum _BMDTimecodeFormat { bmdTimecodeRP188 = /* 'rp18' */ 0x72703138, + bmdTimecodeRP188Field2 = /* 'rp12' */ 0x72703132, bmdTimecodeVITC = /* 'vitc' */ 0x76697463, + bmdTimecodeVITCField2 = /* 'vit2' */ 0x76697432, bmdTimecodeSerial = /* 'seri' */ 0x73657269 }; @@ -367,11 +369,24 @@ enum _BMDVideo3DPackingFormat { }; +/* Enum BMDIdleVideoOutputOperation - Video output operation when not playing video */ + +typedef uint32_t BMDIdleVideoOutputOperation; +enum _BMDIdleVideoOutputOperation { + bmdIdleVideoOutputBlack = /* 'blac' */ 0x626C6163, + bmdIdleVideoOutputLastFrame = /* 'lafa' */ 0x6C616661 +}; + + /* Enum BMDDeckLinkConfigurationID - DeckLink Configuration ID */ typedef uint32_t BMDDeckLinkConfigurationID; enum _BMDDeckLinkConfigurationID { + /* Serial port Flags */ + + bmdDeckLinkConfigSwapSerialRxTx = /* 'ssrt' */ 0x73737274, + /* Video Input/Output Flags */ bmdDeckLinkConfigUse1080pNotPsF = /* 'fpro' */ 0x6670726F, @@ -379,6 +394,7 @@ enum _BMDDeckLinkConfigurationID { /* Video Input/Output Integers */ bmdDeckLinkConfigHDMI3DPackingFormat = /* '3dpf' */ 0x33647066, + bmdDeckLinkConfigBypass = /* 'byps' */ 0x62797073, /* Audio Input/Output Flags */ @@ -399,6 +415,17 @@ enum _BMDDeckLinkConfigurationID { bmdDeckLinkConfigVideoOutputConversionMode = /* 'vocm' */ 0x766F636D, 
bmdDeckLinkConfigAnalogVideoOutputFlags = /* 'avof' */ 0x61766F66, bmdDeckLinkConfigReferenceInputTimingOffset = /* 'glot' */ 0x676C6F74, + bmdDeckLinkConfigVideoOutputIdleOperation = /* 'voio' */ 0x766F696F, + + /* Video Output Floats */ + + bmdDeckLinkConfigVideoOutputComponentLumaGain = /* 'oclg' */ 0x6F636C67, + bmdDeckLinkConfigVideoOutputComponentChromaBlueGain = /* 'occb' */ 0x6F636362, + bmdDeckLinkConfigVideoOutputComponentChromaRedGain = /* 'occr' */ 0x6F636372, + bmdDeckLinkConfigVideoOutputCompositeLumaGain = /* 'oilg' */ 0x6F696C67, + bmdDeckLinkConfigVideoOutputCompositeChromaGain = /* 'oicg' */ 0x6F696367, + bmdDeckLinkConfigVideoOutputSVideoLumaGain = /* 'oslg' */ 0x6F736C67, + bmdDeckLinkConfigVideoOutputSVideoChromaGain = /* 'oscg' */ 0x6F736367, /* Video Input Integers */ @@ -410,6 +437,16 @@ enum _BMDDeckLinkConfigurationID { bmdDeckLinkConfigVANCSourceLine2Mapping = /* 'vsl2' */ 0x76736C32, bmdDeckLinkConfigVANCSourceLine3Mapping = /* 'vsl3' */ 0x76736C33, + /* Video Input Floats */ + + bmdDeckLinkConfigVideoInputComponentLumaGain = /* 'iclg' */ 0x69636C67, + bmdDeckLinkConfigVideoInputComponentChromaBlueGain = /* 'iccb' */ 0x69636362, + bmdDeckLinkConfigVideoInputComponentChromaRedGain = /* 'iccr' */ 0x69636372, + bmdDeckLinkConfigVideoInputCompositeLumaGain = /* 'iilg' */ 0x69696C67, + bmdDeckLinkConfigVideoInputCompositeChromaGain = /* 'iicg' */ 0x69696367, + bmdDeckLinkConfigVideoInputSVideoLumaGain = /* 'islg' */ 0x69736C67, + bmdDeckLinkConfigVideoInputSVideoChromaGain = /* 'iscg' */ 0x69736367, + /* Audio Input Integers */ bmdDeckLinkConfigAudioInputConnection = /* 'aicn' */ 0x6169636E, @@ -449,6 +486,10 @@ enum _BMDDeckLinkAttributeID { BMDDeckLinkSupportsInputFormatDetection = /* 'infd' */ 0x696E6664, BMDDeckLinkHasReferenceInput = /* 'hrin' */ 0x6872696E, BMDDeckLinkHasSerialPort = /* 'hspt' */ 0x68737074, + BMDDeckLinkHasAnalogVideoOutputGain = /* 'avog' */ 0x61766F67, + BMDDeckLinkCanOnlyAdjustOverallVideoOutputGain = /* 'ovog' */ 0x6F766F67, + BMDDeckLinkHasVideoInputAntiAliasingFilter = /* 'aafl' */ 0x6161666C, + BMDDeckLinkHasBypass = /* 'byps' */ 0x62797073, /* Integers */ @@ -458,6 +499,13 @@ enum _BMDDeckLinkAttributeID { BMDDeckLinkVideoOutputConnections = /* 'vocn' */ 0x766F636E, BMDDeckLinkVideoInputConnections = /* 'vicn' */ 0x7669636E, + /* Floats */ + + BMDDeckLinkVideoInputGainMinimum = /* 'vigm' */ 0x7669676D, + BMDDeckLinkVideoInputGainMaximum = /* 'vigx' */ 0x76696778, + BMDDeckLinkVideoOutputGainMinimum = /* 'vogm' */ 0x766F676D, + BMDDeckLinkVideoOutputGainMaximum = /* 'vogx' */ 0x766F6778, + /* Strings */ BMDDeckLinkSerialPortDeviceName = /* 'slpn' */ 0x736C706E @@ -566,6 +614,8 @@ enum _BMDDeckControlError { bmdDeckControlNoTapeInDeckError = /* 'nter' */ 0x6E746572, bmdDeckControlNoVideoFromCardError = /* 'nvfc' */ 0x6E766663, bmdDeckControlNoCommunicationError = /* 'ncom' */ 0x6E636F6D, + bmdDeckControlBufferTooSmallError = /* 'btsm' */ 0x6274736D, + bmdDeckControlBadChecksumError = /* 'chks' */ 0x63686B73, bmdDeckControlUnknownError = /* 'uner' */ 0x756E6572 }; @@ -1046,6 +1096,7 @@ public: virtual HRESULT Close (/* in */ bool standbyOn) = 0; virtual HRESULT GetCurrentState (/* out */ BMDDeckControlMode *mode, /* out */ BMDDeckControlVTRControlState *vtrControlState, /* out */ BMDDeckControlStatusFlags *flags) = 0; virtual HRESULT SetStandby (/* in */ bool standbyOn) = 0; + virtual HRESULT SendCommand (/* in */ uint8_t *inBuffer, /* in */ uint32_t inBufferSize, /* out */ uint8_t *outBuffer, /* out */ uint32_t *outDataSize, /* in */ 
uint32_t outBufferSize, /* out */ BMDDeckControlError *error) = 0; virtual HRESULT Play (/* out */ BMDDeckControlError *error) = 0; virtual HRESULT Stop (/* out */ BMDDeckControlError *error) = 0; virtual HRESULT TogglePlayStop (/* out */ BMDDeckControlError *error) = 0; @@ -1085,6 +1136,7 @@ protected: extern "C" { IDeckLinkIterator* CreateDeckLinkIteratorInstance (void); + IDeckLinkAPIInformation* CreateDeckLinkAPIInformationInstance (void); IDeckLinkGLScreenPreviewHelper* CreateOpenGLScreenPreviewHelper (void); IDeckLinkVideoConversion* CreateVideoConversionInstance (void); diff --git a/sys/decklink/DeckLinkAPIDispatch.cpp b/sys/decklink/DeckLinkAPIDispatch.cpp index 72607782dc..957f9a4cb0 100644 --- a/sys/decklink/DeckLinkAPIDispatch.cpp +++ b/sys/decklink/DeckLinkAPIDispatch.cpp @@ -34,77 +34,116 @@ #define kDeckLinkAPI_Name "libDeckLinkAPI.so" #define KDeckLinkPreviewAPI_Name "libDeckLinkPreviewAPI.so" -typedef IDeckLinkIterator* (*CreateIteratorFunc)(void); -typedef IDeckLinkGLScreenPreviewHelper* (*CreateOpenGLScreenPreviewHelperFunc)(void); -typedef IDeckLinkVideoConversion* (*CreateVideoConversionInstanceFunc)(void); +typedef IDeckLinkIterator *(*CreateIteratorFunc) (void); +typedef IDeckLinkAPIInformation *(*CreateAPIInformationFunc) (void); +typedef IDeckLinkGLScreenPreviewHelper + *(*CreateOpenGLScreenPreviewHelperFunc) (void); +typedef IDeckLinkVideoConversion *(*CreateVideoConversionInstanceFunc) (void); -static pthread_once_t gDeckLinkOnceControl = PTHREAD_ONCE_INIT; -static pthread_once_t gPreviewOnceControl = PTHREAD_ONCE_INIT; +static pthread_once_t gDeckLinkOnceControl = PTHREAD_ONCE_INIT; +static pthread_once_t gPreviewOnceControl = PTHREAD_ONCE_INIT; -static CreateIteratorFunc gCreateIteratorFunc = NULL; -static CreateOpenGLScreenPreviewHelperFunc gCreateOpenGLPreviewFunc = NULL; -static CreateVideoConversionInstanceFunc gCreateVideoConversionFunc = NULL; +static bool gLoadedDeckLinkAPI = false; -static -void InitDeckLinkAPI (void) +static CreateIteratorFunc gCreateIteratorFunc = NULL; +static CreateAPIInformationFunc gCreateAPIInformationFunc = NULL; +static CreateOpenGLScreenPreviewHelperFunc gCreateOpenGLPreviewFunc = NULL; +static CreateVideoConversionInstanceFunc gCreateVideoConversionFunc = NULL; + +void InitDeckLinkAPI (void); +void +InitDeckLinkAPI (void) { - void *libraryHandle; - - libraryHandle = dlopen(kDeckLinkAPI_Name, RTLD_NOW|RTLD_GLOBAL); - if (!libraryHandle) - { - fprintf(stderr, "%s\n", dlerror()); - return; - } - gCreateIteratorFunc = (CreateIteratorFunc)dlsym(libraryHandle, "CreateDeckLinkIteratorInstance_0001"); - if (!gCreateIteratorFunc) - fprintf(stderr, "%s\n", dlerror()); - gCreateVideoConversionFunc = (CreateVideoConversionInstanceFunc)dlsym(libraryHandle, "CreateVideoConversionInstance_0001"); - if (!gCreateVideoConversionFunc) - fprintf(stderr, "%s\n", dlerror()); + void *libraryHandle; + + libraryHandle = dlopen (kDeckLinkAPI_Name, RTLD_NOW | RTLD_GLOBAL); + if (!libraryHandle) { + fprintf (stderr, "%s\n", dlerror ()); + return; + } + + gLoadedDeckLinkAPI = true; + + gCreateIteratorFunc = + (CreateIteratorFunc) dlsym (libraryHandle, + "CreateDeckLinkIteratorInstance_0001"); + if (!gCreateIteratorFunc) + fprintf (stderr, "%s\n", dlerror ()); + gCreateAPIInformationFunc = + (CreateAPIInformationFunc) dlsym (libraryHandle, + "CreateDeckLinkAPIInformationInstance_0001"); + if (!gCreateAPIInformationFunc) + fprintf (stderr, "%s\n", dlerror ()); + gCreateVideoConversionFunc = + (CreateVideoConversionInstanceFunc) dlsym (libraryHandle, + 
"CreateVideoConversionInstance_0001"); + if (!gCreateVideoConversionFunc) + fprintf (stderr, "%s\n", dlerror ()); } -static -void InitDeckLinkPreviewAPI (void) +void InitDeckLinkPreviewAPI (void); +void +InitDeckLinkPreviewAPI (void) { - void *libraryHandle; - - libraryHandle = dlopen(KDeckLinkPreviewAPI_Name, RTLD_NOW|RTLD_GLOBAL); - if (!libraryHandle) - { - fprintf(stderr, "%s\n", dlerror()); - return; - } - gCreateOpenGLPreviewFunc = (CreateOpenGLScreenPreviewHelperFunc)dlsym(libraryHandle, "CreateOpenGLScreenPreviewHelper_0001"); - if (!gCreateOpenGLPreviewFunc) - fprintf(stderr, "%s\n", dlerror()); + void *libraryHandle; + + libraryHandle = dlopen (KDeckLinkPreviewAPI_Name, RTLD_NOW | RTLD_GLOBAL); + if (!libraryHandle) { + fprintf (stderr, "%s\n", dlerror ()); + return; + } + gCreateOpenGLPreviewFunc = + (CreateOpenGLScreenPreviewHelperFunc) dlsym (libraryHandle, + "CreateOpenGLScreenPreviewHelper_0001"); + if (!gCreateOpenGLPreviewFunc) + fprintf (stderr, "%s\n", dlerror ()); } -IDeckLinkIterator* CreateDeckLinkIteratorInstance (void) +bool IsDeckLinkAPIPresent (void); +bool +IsDeckLinkAPIPresent (void) { - pthread_once(&gDeckLinkOnceControl, InitDeckLinkAPI); - - if (gCreateIteratorFunc == NULL) - return NULL; - return gCreateIteratorFunc(); + // If the DeckLink API dynamic library was successfully loaded, return this knowledge to the caller + return gLoadedDeckLinkAPI; } -IDeckLinkGLScreenPreviewHelper* CreateOpenGLScreenPreviewHelper (void) +IDeckLinkIterator * +CreateDeckLinkIteratorInstance (void) { - pthread_once(&gDeckLinkOnceControl, InitDeckLinkAPI); - pthread_once(&gPreviewOnceControl, InitDeckLinkPreviewAPI); - - if (gCreateOpenGLPreviewFunc == NULL) - return NULL; - return gCreateOpenGLPreviewFunc(); + pthread_once (&gDeckLinkOnceControl, InitDeckLinkAPI); + + if (gCreateIteratorFunc == NULL) + return NULL; + return gCreateIteratorFunc (); } -IDeckLinkVideoConversion* CreateVideoConversionInstance (void) +IDeckLinkAPIInformation * +CreateDeckLinkAPIInformationInstance (void) { - pthread_once(&gDeckLinkOnceControl, InitDeckLinkAPI); - - if (gCreateVideoConversionFunc == NULL) - return NULL; - return gCreateVideoConversionFunc(); + pthread_once (&gDeckLinkOnceControl, InitDeckLinkAPI); + + if (gCreateAPIInformationFunc == NULL) + return NULL; + return gCreateAPIInformationFunc (); } +IDeckLinkGLScreenPreviewHelper * +CreateOpenGLScreenPreviewHelper (void) +{ + pthread_once (&gDeckLinkOnceControl, InitDeckLinkAPI); + pthread_once (&gPreviewOnceControl, InitDeckLinkPreviewAPI); + + if (gCreateOpenGLPreviewFunc == NULL) + return NULL; + return gCreateOpenGLPreviewFunc (); +} + +IDeckLinkVideoConversion * +CreateVideoConversionInstance (void) +{ + pthread_once (&gDeckLinkOnceControl, InitDeckLinkAPI); + + if (gCreateVideoConversionFunc == NULL) + return NULL; + return gCreateVideoConversionFunc (); +} diff --git a/sys/decklink/capture.cpp b/sys/decklink/capture.cpp index 20bd660c9c..9a4b8bcf8c 100644 --- a/sys/decklink/capture.cpp +++ b/sys/decklink/capture.cpp @@ -83,15 +83,20 @@ DeckLinkCaptureDelegate::Release (void) } HRESULT -DeckLinkCaptureDelegate::VideoInputFrameArrived (IDeckLinkVideoInputFrame * + DeckLinkCaptureDelegate::VideoInputFrameArrived (IDeckLinkVideoInputFrame * videoFrame, IDeckLinkAudioInputPacket * audioFrame) { - GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (priv); + GstDecklinkSrc *decklinksrc; + + g_return_val_if_fail (priv != NULL, S_OK); + g_return_val_if_fail (GST_IS_DECKLINK_SRC (priv), S_OK); + + decklinksrc = GST_DECKLINK_SRC (priv); // 
Handle Video Frame if (videoFrame) { if (videoFrame->GetFlags () & bmdFrameHasNoInputSource) { - GST_DEBUG("Frame received - No input signal detected"); + GST_DEBUG ("Frame received - No input signal detected"); } else { const char *timecodeString = NULL; if (g_timecodeFormat != 0) { @@ -101,10 +106,9 @@ DeckLinkCaptureDelegate::VideoInputFrameArrived (IDeckLinkVideoInputFrame * } } - GST_DEBUG("Frame received [%s] - %s - Size: %li bytes", + GST_DEBUG ("Frame received [%s] - %s - Size: %li bytes", timecodeString != NULL ? timecodeString : "No timecode", - "Valid Frame", - videoFrame->GetRowBytes () * videoFrame->GetHeight ()); + "Valid Frame", videoFrame->GetRowBytes () * videoFrame->GetHeight ()); if (timecodeString) free ((void *) timecodeString); @@ -113,10 +117,10 @@ DeckLinkCaptureDelegate::VideoInputFrameArrived (IDeckLinkVideoInputFrame * if (decklinksrc->video_frame != NULL) { decklinksrc->dropped_frames++; } else { - videoFrame->AddRef(); + videoFrame->AddRef (); decklinksrc->video_frame = videoFrame; if (audioFrame) { - audioFrame->AddRef(); + audioFrame->AddRef (); decklinksrc->audio_frame = audioFrame; } } @@ -128,303 +132,9 @@ DeckLinkCaptureDelegate::VideoInputFrameArrived (IDeckLinkVideoInputFrame * } HRESULT -DeckLinkCaptureDelegate:: -VideoInputFormatChanged (BMDVideoInputFormatChangedEvents events, - IDeckLinkDisplayMode * mode, BMDDetectedVideoInputFormatFlags) -{ - GST_ERROR("moo"); + DeckLinkCaptureDelegate::VideoInputFormatChanged + (BMDVideoInputFormatChangedEvents events, IDeckLinkDisplayMode * mode, + BMDDetectedVideoInputFormatFlags) { + GST_ERROR ("moo"); return S_OK; } - -#ifdef unused -int -usage (int status) -{ - HRESULT result; - IDeckLinkDisplayMode *displayMode; - int displayModeCount = 0; - - fprintf (stderr, - "Usage: Capture -m [OPTIONS]\n" "\n" " -m :\n"); - - while (displayModeIterator->Next (&displayMode) == S_OK) { - char *displayModeString = NULL; - - result = displayMode->GetName ((const char **) &displayModeString); - if (result == S_OK) { - BMDTimeValue frameRateDuration, frameRateScale; - displayMode->GetFrameRate (&frameRateDuration, &frameRateScale); - - fprintf (stderr, " %2d: %-20s \t %li x %li \t %g FPS\n", - displayModeCount, displayModeString, displayMode->GetWidth (), - displayMode->GetHeight (), - (double) frameRateScale / (double) frameRateDuration); - - free (displayModeString); - displayModeCount++; - } - // Release the IDeckLinkDisplayMode object to prevent a leak - displayMode->Release (); - } - - fprintf (stderr, - " -p \n" - " 0: 8 bit YUV (4:2:2) (default)\n" - " 1: 10 bit YUV (4:2:2)\n" - " 2: 10 bit RGB (4:4:4)\n" - " -t Print timecode\n" - " rp188: RP 188\n" - " vitc: VITC\n" - " serial: Serial Timecode\n" - " -f Filename raw video will be written to\n" - " -a Filename raw audio will be written to\n" - " -c Audio Channels (2, 8 or 16 - default is 2)\n" - " -s Audio Sample Depth (16 or 32 - default is 16)\n" - " -n Number of frames to capture (default is unlimited)\n" - " -3 Capture Stereoscopic 3D (Requires 3D Hardware support)\n" - "\n" - "Capture video and/or audio to a file. 
Raw video and/or audio can be viewed with mplayer eg:\n" - "\n" - " Capture -m2 -n 50 -f video.raw -a audio.raw\n" - " mplayer video.raw -demuxer rawvideo -rawvideo pal:uyvy -audiofile audio.raw -audio-demuxer 20 -rawaudio rate=48000\n"); - - exit (status); -} - -int -main (int argc, char *argv[]) -{ - IDeckLinkIterator *deckLinkIterator = CreateDeckLinkIteratorInstance (); - DeckLinkCaptureDelegate *delegate; - IDeckLinkDisplayMode *displayMode; - BMDVideoInputFlags inputFlags = 0; - BMDDisplayMode selectedDisplayMode = bmdModeNTSC; - BMDPixelFormat pixelFormat = bmdFormat8BitYUV; - int displayModeCount = 0; - int exitStatus = 1; - int ch; - bool foundDisplayMode = false; - HRESULT result; - - pthread_mutex_init (&sleepMutex, NULL); - pthread_cond_init (&sleepCond, NULL); - - if (!deckLinkIterator) { - fprintf (stderr, - "This application requires the DeckLink drivers installed.\n"); - goto bail; - } - - /* Connect to the first DeckLink instance */ - result = deckLinkIterator->Next (&deckLink); - if (result != S_OK) { - fprintf (stderr, "No DeckLink PCI cards found.\n"); - goto bail; - } - - if (deckLink->QueryInterface (IID_IDeckLinkInput, - (void **) &deckLinkInput) != S_OK) - goto bail; - - delegate = new DeckLinkCaptureDelegate (); - deckLinkInput->SetCallback (delegate); - - // Obtain an IDeckLinkDisplayModeIterator to enumerate the display modes supported on output - result = deckLinkInput->GetDisplayModeIterator (&displayModeIterator); - if (result != S_OK) { - fprintf (stderr, - "Could not obtain the video output display mode iterator - result = %08x\n", - result); - goto bail; - } - // Parse command line options - while ((ch = getopt (argc, argv, "?h3c:s:f:a:m:n:p:t:")) != -1) { - switch (ch) { - case 'm': - g_videoModeIndex = atoi (optarg); - break; - case 'c': - g_audioChannels = atoi (optarg); - if (g_audioChannels != 2 && - g_audioChannels != 8 && g_audioChannels != 16) { - fprintf (stderr, - "Invalid argument: Audio Channels must be either 2, 8 or 16\n"); - goto bail; - } - break; - case 's': - g_audioSampleDepth = atoi (optarg); - if (g_audioSampleDepth != 16 && g_audioSampleDepth != 32) { - fprintf (stderr, - "Invalid argument: Audio Sample Depth must be either 16 bits or 32 bits\n"); - goto bail; - } - break; - case 'f': - g_videoOutputFile = optarg; - break; - case 'a': - g_audioOutputFile = optarg; - break; - case 'n': - g_maxFrames = atoi (optarg); - break; - case '3': - inputFlags |= bmdVideoInputDualStream3D; - break; - case 'p': - switch (atoi (optarg)) { - case 0: - pixelFormat = bmdFormat8BitYUV; - break; - case 1: - pixelFormat = bmdFormat10BitYUV; - break; - case 2: - pixelFormat = bmdFormat10BitRGB; - break; - default: - fprintf (stderr, "Invalid argument: Pixel format %d is not valid", - atoi (optarg)); - goto bail; - } - break; - case 't': - if (!strcmp (optarg, "rp188")) - g_timecodeFormat = bmdTimecodeRP188; - else if (!strcmp (optarg, "vitc")) - g_timecodeFormat = bmdTimecodeVITC; - else if (!strcmp (optarg, "serial")) - g_timecodeFormat = bmdTimecodeSerial; - else { - fprintf (stderr, - "Invalid argument: Timecode format \"%s\" is invalid\n", optarg); - goto bail; - } - break; - case '?': - case 'h': - usage (0); - } - } - - if (g_videoModeIndex < 0) { - fprintf (stderr, "No video mode specified\n"); - usage (0); - } - - if (g_videoOutputFile != NULL) { - videoOutputFile = - open (g_videoOutputFile, O_WRONLY | O_CREAT | O_TRUNC, 0664); - if (videoOutputFile < 0) { - fprintf (stderr, "Could not open video output file \"%s\"\n", - g_videoOutputFile); - goto 
bail; - } - } - if (g_audioOutputFile != NULL) { - audioOutputFile = - open (g_audioOutputFile, O_WRONLY | O_CREAT | O_TRUNC, 0664); - if (audioOutputFile < 0) { - fprintf (stderr, "Could not open audio output file \"%s\"\n", - g_audioOutputFile); - goto bail; - } - } - - while (displayModeIterator->Next (&displayMode) == S_OK) { - if (g_videoModeIndex == displayModeCount) { - BMDDisplayModeSupport result; - const char *displayModeName; - - foundDisplayMode = true; - displayMode->GetName (&displayModeName); - selectedDisplayMode = displayMode->GetDisplayMode (); - - deckLinkInput->DoesSupportVideoMode (selectedDisplayMode, pixelFormat, - bmdVideoInputFlagDefault, &result, NULL); - - if (result == bmdDisplayModeNotSupported) { - fprintf (stderr, - "The display mode %s is not supported with the selected pixel format\n", - displayModeName); - goto bail; - } - - if (inputFlags & bmdVideoInputDualStream3D) { - if (!(displayMode->GetFlags () & bmdDisplayModeSupports3D)) { - fprintf (stderr, "The display mode %s is not supported with 3D\n", - displayModeName); - goto bail; - } - } - - break; - } - displayModeCount++; - displayMode->Release (); - } - - if (!foundDisplayMode) { - fprintf (stderr, "Invalid mode %d specified\n", g_videoModeIndex); - goto bail; - } - - result = - deckLinkInput->EnableVideoInput (selectedDisplayMode, pixelFormat, - inputFlags); - if (result != S_OK) { - fprintf (stderr, - "Failed to enable video input. Is another application using the card?\n"); - goto bail; - } - - result = - deckLinkInput->EnableAudioInput (bmdAudioSampleRate48kHz, - g_audioSampleDepth, g_audioChannels); - if (result != S_OK) { - goto bail; - } - - result = deckLinkInput->StartStreams (); - if (result != S_OK) { - goto bail; - } - // All Okay. - exitStatus = 0; - - // Block main thread until signal occurs - pthread_mutex_lock (&sleepMutex); - pthread_cond_wait (&sleepCond, &sleepMutex); - pthread_mutex_unlock (&sleepMutex); - fprintf (stderr, "Stopping Capture\n"); - -bail: - - if (videoOutputFile) - close (videoOutputFile); - if (audioOutputFile) - close (audioOutputFile); - - if (displayModeIterator != NULL) { - displayModeIterator->Release (); - displayModeIterator = NULL; - } - - if (deckLinkInput != NULL) { - deckLinkInput->Release (); - deckLinkInput = NULL; - } - - if (deckLink != NULL) { - deckLink->Release (); - deckLink = NULL; - } - - if (deckLinkIterator != NULL) - deckLinkIterator->Release (); - - return exitStatus; -} -#endif - diff --git a/sys/decklink/gstdecklink.cpp b/sys/decklink/gstdecklink.cpp index 4d4ed445ae..e720b9a4d4 100644 --- a/sys/decklink/gstdecklink.cpp +++ b/sys/decklink/gstdecklink.cpp @@ -22,9 +22,151 @@ #endif #include +#include "gstdecklink.h" #include "gstdecklinksrc.h" #include "gstdecklinksink.h" +GType +gst_decklink_mode_get_type (void) +{ + static GType type; + + if (!type) { + static const GEnumValue modes[] = { + {GST_DECKLINK_MODE_NTSC, "ntsc", "NTSC SD 60i"}, + {GST_DECKLINK_MODE_NTSC2398, "ntsc2398", "NTSC SD 60i (24 fps)"}, + {GST_DECKLINK_MODE_PAL, "pal", "PAL SD 50i"}, + {GST_DECKLINK_MODE_NTSC_P, "ntsc-p", "NTSC SD 60p"}, + {GST_DECKLINK_MODE_PAL_P, "pal-p", "PAL SD 50p"}, + + {GST_DECKLINK_MODE_1080p2398, "1080p2398", "HD1080 23.98p"}, + {GST_DECKLINK_MODE_1080p24, "1080p24", "HD1080 24p"}, + {GST_DECKLINK_MODE_1080p25, "1080p25", "HD1080 25p"}, + {GST_DECKLINK_MODE_1080p2997, "1080p2997", "HD1080 29.97p"}, + {GST_DECKLINK_MODE_1080p30, "1080p30", "HD1080 30p"}, + + {GST_DECKLINK_MODE_1080i50, "1080i50", "HD1080 50i"}, + {GST_DECKLINK_MODE_1080i5994, 
"1080i5994", "HD1080 59.94i"}, + {GST_DECKLINK_MODE_1080i60, "1080i60", "HD1080 60i"}, + + {GST_DECKLINK_MODE_1080p50, "1080p50", "HD1080 50p"}, + {GST_DECKLINK_MODE_1080p5994, "1080p5994", "HD1080 59.94p"}, + {GST_DECKLINK_MODE_1080p60, "1080p60", "HD1080 60p"}, + + {GST_DECKLINK_MODE_720p50, "720p50", "HD720 50p"}, + {GST_DECKLINK_MODE_720p5994, "720p5994", "HD720 59.94p"}, + {GST_DECKLINK_MODE_720p60, "720p60", "HD720 60p"}, + + {0, NULL, NULL} + }; + + type = g_enum_register_static ("GstDecklinkModes", modes); + } + return type; +} + +GType +gst_decklink_connection_get_type (void) +{ + static GType type; + + if (!type) { + static const GEnumValue connections[] = { + {GST_DECKLINK_CONNECTION_SDI, "sdi", "SDI"}, + {GST_DECKLINK_CONNECTION_HDMI, "hdmi", "HDMI"}, + {GST_DECKLINK_CONNECTION_OPTICAL_SDI, "optical-sdi", "Optical SDI"}, + {GST_DECKLINK_CONNECTION_COMPONENT, "component", "Component"}, + {GST_DECKLINK_CONNECTION_COMPOSITE, "composite", "Composite"}, + {GST_DECKLINK_CONNECTION_SVIDEO, "svideo", "S-Video"}, + {0, NULL, NULL} + }; + + type = g_enum_register_static ("GstDecklinkConnection", connections); + } + return type; +} + +#define NTSC 10, 11, false, false +#define PAL 12, 11, true, false +#define HD 1, 1, false, true + +static const GstDecklinkMode modes[] = { + {bmdModeNTSC, 720, 486, 30000, 1001, true, NTSC }, + {bmdModeNTSC2398, 720, 486, 24000, 1001, true, NTSC }, + {bmdModePAL, 720, 576, 25, 1, true, PAL }, + {bmdModeNTSCp, 720, 486, 30000, 1001, false, NTSC }, + {bmdModePALp, 720, 576, 25, 1, false, PAL }, + + {bmdModeHD1080p2398, 1920, 1080, 24000, 1001, false, HD }, + {bmdModeHD1080p24, 1920, 1080, 24, 1, false, HD }, + {bmdModeHD1080p25, 1920, 1080, 25, 1, false, HD }, + {bmdModeHD1080p2997, 1920, 1080, 30000, 1001, false, HD }, + {bmdModeHD1080p30, 1920, 1080, 30, 1, false, HD }, + + {bmdModeHD1080i50, 1920, 1080, 25, 1, true, HD }, + {bmdModeHD1080i5994, 1920, 1080, 30000, 1001, true, HD }, + {bmdModeHD1080i6000, 1920, 1080, 30, 1, true, HD }, + + {bmdModeHD1080p50, 1920, 1080, 50, 1, false, HD }, + {bmdModeHD1080p5994, 1920, 1080, 30000, 1001, false, HD }, + {bmdModeHD1080p6000, 1920, 1080, 60, 1, false, HD }, + + {bmdModeHD720p50, 1280, 720, 50, 1, false, HD }, + {bmdModeHD720p5994, 1280, 720, 60000, 1001, false, HD }, + {bmdModeHD720p60, 1280, 720, 60, 1, false, HD } + +}; + +const GstDecklinkMode * +gst_decklink_get_mode (GstDecklinkModeEnum e) +{ + return &modes[e]; +} + +static GstStructure * +gst_decklink_mode_get_structure (GstDecklinkModeEnum e) +{ + const GstDecklinkMode *mode = &modes[e]; + + return gst_structure_new ("video/x-raw-yuv", + "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'), + "width", G_TYPE_INT, mode->width, + "height", G_TYPE_INT, mode->height, + "framerate", GST_TYPE_FRACTION, mode->fps_n, mode->fps_d, + "interlaced", G_TYPE_BOOLEAN, mode->interlaced, + "pixel-aspect-ratio", GST_TYPE_FRACTION, mode->par_n, mode->par_d, + "color-matrix", G_TYPE_STRING, mode->is_hdtv ? 
"hdtv" : "sdtv", + "chroma-site", G_TYPE_STRING, "mpeg2", + NULL); +} + +GstCaps * +gst_decklink_mode_get_caps (GstDecklinkModeEnum e) +{ + GstCaps *caps; + + caps = gst_caps_new_empty (); + gst_caps_append_structure (caps, gst_decklink_mode_get_structure (e)); + + return caps; +} + +GstCaps * +gst_decklink_mode_get_template_caps (void) +{ + int i; + GstCaps *caps; + GstStructure *s; + + caps = gst_caps_new_empty (); + for(i=0;i<(int)G_N_ELEMENTS(modes);i++) { + s = gst_decklink_mode_get_structure ((GstDecklinkModeEnum)i); + gst_caps_append_structure (caps, s); + } + + return caps; +} + static gboolean plugin_init (GstPlugin * plugin) { diff --git a/sys/decklink/gstdecklink.h b/sys/decklink/gstdecklink.h new file mode 100644 index 0000000000..2363dd47c4 --- /dev/null +++ b/sys/decklink/gstdecklink.h @@ -0,0 +1,114 @@ +/* GStreamer + * Copyright (C) 2011 David Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef _GST_DECKLINK_H_ +#define _GST_DECKLINK_H_ + +#include +#include "DeckLinkAPI.h" + + +typedef enum { + GST_DECKLINK_MODE_NTSC, + GST_DECKLINK_MODE_NTSC2398, + GST_DECKLINK_MODE_PAL, + GST_DECKLINK_MODE_NTSC_P, + GST_DECKLINK_MODE_PAL_P, + + GST_DECKLINK_MODE_1080p2398, + GST_DECKLINK_MODE_1080p24, + GST_DECKLINK_MODE_1080p25, + GST_DECKLINK_MODE_1080p2997, + GST_DECKLINK_MODE_1080p30, + + GST_DECKLINK_MODE_1080i50, + GST_DECKLINK_MODE_1080i5994, + GST_DECKLINK_MODE_1080i60, + + GST_DECKLINK_MODE_1080p50, + GST_DECKLINK_MODE_1080p5994, + GST_DECKLINK_MODE_1080p60, + + GST_DECKLINK_MODE_720p50, + GST_DECKLINK_MODE_720p5994, + GST_DECKLINK_MODE_720p60 +} GstDecklinkModeEnum; +#define GST_TYPE_DECKLINK_MODE (gst_decklink_mode_get_type ()) +GType gst_decklink_mode_get_type (void); + +typedef enum { + GST_DECKLINK_CONNECTION_SDI, + GST_DECKLINK_CONNECTION_HDMI, + GST_DECKLINK_CONNECTION_OPTICAL_SDI, + GST_DECKLINK_CONNECTION_COMPONENT, + GST_DECKLINK_CONNECTION_COMPOSITE, + GST_DECKLINK_CONNECTION_SVIDEO +} GstDecklinkConnectionEnum; +#define GST_TYPE_DECKLINK_CONNECTION (gst_decklink_connection_get_type ()) +GType gst_decklink_connection_get_type (void); + +typedef struct _GstDecklinkMode GstDecklinkMode; +struct _GstDecklinkMode { + BMDDisplayMode mode; + int width; + int height; + int fps_n; + int fps_d; + gboolean interlaced; + int par_n; + int par_d; + gboolean tff; + gboolean is_hdtv; +}; + +const GstDecklinkMode * gst_decklink_get_mode (GstDecklinkModeEnum e); +GstCaps * gst_decklink_mode_get_caps (GstDecklinkModeEnum e); +GstCaps * gst_decklink_mode_get_template_caps (void); + +#define GST_DECKLINK_MODE_CAPS(w,h,n,d,i) \ + "video/x-raw-yuv,format=(fourcc)UYVY,width=" #w ",height=" #h \ + ",framerate=" #n "/" #d ",interlaced=" #i + +#define GST_DECKLINK_CAPS \ + GST_DECKLINK_MODE_CAPS(720,486,30000,1001,true) ";" \ + GST_DECKLINK_MODE_CAPS(720,486,24000,1001,true) 
";" \ + GST_DECKLINK_MODE_CAPS(720,576,25,1,true) ";" \ + GST_DECKLINK_MODE_CAPS(720,486,30000,1001,false) ";" \ + GST_DECKLINK_MODE_CAPS(720,576,25,1,false) ";" \ + \ + GST_DECKLINK_MODE_CAPS(1920,1080,24000,1001,false) ";" \ + GST_DECKLINK_MODE_CAPS(1920,1080,24,1,false) ";" \ + GST_DECKLINK_MODE_CAPS(1920,1080,25,1,false) ";" \ + GST_DECKLINK_MODE_CAPS(1920,1080,30000,1001,false) ";" \ + GST_DECKLINK_MODE_CAPS(1920,1080,30,1,false) ";" \ + \ + GST_DECKLINK_MODE_CAPS(1920,1080,25,1,true) ";" \ + GST_DECKLINK_MODE_CAPS(1920,1080,30000,1001,true) ";" \ + GST_DECKLINK_MODE_CAPS(1920,1080,30,1,true) ";" \ + \ + GST_DECKLINK_MODE_CAPS(1920,1080,50,1,false) ";" \ + GST_DECKLINK_MODE_CAPS(1920,1080,60000,1001,false) ";" \ + GST_DECKLINK_MODE_CAPS(1920,1080,60,1,false) ";" \ + \ + GST_DECKLINK_MODE_CAPS(1280,720,50,1,false) ";" \ + GST_DECKLINK_MODE_CAPS(1280,720,60000,1001,false) ";" \ + GST_DECKLINK_MODE_CAPS(1280,720,60,1,false) + + +#endif diff --git a/sys/decklink/gstdecklinksink.cpp b/sys/decklink/gstdecklinksink.cpp index 43f78ebcf1..720c90d74f 100644 --- a/sys/decklink/gstdecklinksink.cpp +++ b/sys/decklink/gstdecklinksink.cpp @@ -36,6 +36,7 @@ #include #include +#include "gstdecklink.h" #include "gstdecklinksink.h" #include @@ -67,7 +68,8 @@ static gboolean gst_decklink_sink_query (GstElement * element, GstQuery * query); static GstCaps *gst_decklink_sink_videosink_getcaps (GstPad * pad); -static gboolean gst_decklink_sink_videosink_setcaps (GstPad * pad, GstCaps * caps); +static gboolean gst_decklink_sink_videosink_setcaps (GstPad * pad, + GstCaps * caps); static gboolean gst_decklink_sink_videosink_acceptcaps (GstPad * pad, GstCaps * caps); static gboolean gst_decklink_sink_videosink_activate (GstPad * pad); @@ -81,15 +83,18 @@ static GstFlowReturn gst_decklink_sink_videosink_chain (GstPad * pad, GstBuffer * buffer); static GstFlowReturn gst_decklink_sink_videosink_chainlist (GstPad * pad, GstBufferList * bufferlist); -static gboolean gst_decklink_sink_videosink_event (GstPad * pad, GstEvent * event); -static gboolean gst_decklink_sink_videosink_query (GstPad * pad, GstQuery * query); +static gboolean gst_decklink_sink_videosink_event (GstPad * pad, + GstEvent * event); +static gboolean gst_decklink_sink_videosink_query (GstPad * pad, + GstQuery * query); static GstFlowReturn gst_decklink_sink_videosink_bufferalloc (GstPad * pad, guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf); static GstIterator *gst_decklink_sink_videosink_iterintlink (GstPad * pad); static GstCaps *gst_decklink_sink_audiosink_getcaps (GstPad * pad); -static gboolean gst_decklink_sink_audiosink_setcaps (GstPad * pad, GstCaps * caps); +static gboolean gst_decklink_sink_audiosink_setcaps (GstPad * pad, + GstCaps * caps); static gboolean gst_decklink_sink_audiosink_acceptcaps (GstPad * pad, GstCaps * caps); static gboolean gst_decklink_sink_audiosink_activate (GstPad * pad); @@ -103,8 +108,10 @@ static GstFlowReturn gst_decklink_sink_audiosink_chain (GstPad * pad, GstBuffer * buffer); static GstFlowReturn gst_decklink_sink_audiosink_chainlist (GstPad * pad, GstBufferList * bufferlist); -static gboolean gst_decklink_sink_audiosink_event (GstPad * pad, GstEvent * event); -static gboolean gst_decklink_sink_audiosink_query (GstPad * pad, GstQuery * query); +static gboolean gst_decklink_sink_audiosink_event (GstPad * pad, + GstEvent * event); +static gboolean gst_decklink_sink_audiosink_query (GstPad * pad, + GstQuery * query); static GstFlowReturn gst_decklink_sink_audiosink_bufferalloc (GstPad * pad, guint64 
offset, guint size, GstCaps * caps, GstBuffer ** buf); static GstIterator *gst_decklink_sink_audiosink_iterintlink (GstPad * pad); @@ -112,37 +119,17 @@ static GstIterator *gst_decklink_sink_audiosink_iterintlink (GstPad * pad); enum { - PROP_0 + PROP_0, + PROP_MODE }; /* pad templates */ -#define MODE(w,h,n,d,i) \ - "video/x-raw-yuv,format=(fourcc)UYVY,width=" #w ",height=" #h \ - ",framerate=" #n "/" #d ",interlaced=" #i - static GstStaticPadTemplate gst_decklink_sink_videosink_template = GST_STATIC_PAD_TEMPLATE ("videosink", GST_PAD_SINK, GST_PAD_ALWAYS, - GST_STATIC_CAPS ( - MODE(720,486,30000,1001,true) - )); -#if 0 - MODE(720,486,24000,1001,true) ";" - MODE(720,576,25,1,true) ";" - MODE(1920,1080,24000,1001,false) ";" - MODE(1920,1080,24,1,false) ";" - MODE(1920,1080,25,1,false) ";" - MODE(1920,1080,30000,1001,false) ";" - MODE(1920,1080,30,1,false) ";" - MODE(1920,1080,25,1,true) ";" - MODE(1920,1080,30000,1001,true) ";" - MODE(1920,1080,30,1,true) ";" - MODE(1280,720,50,1,true) ";" - MODE(1280,720,60000,1001,true) ";" - MODE(1280,720,60,1,true) -#endif + GST_STATIC_CAPS (GST_DECKLINK_CAPS)); static GstStaticPadTemplate gst_decklink_sink_audiosink_template = GST_STATIC_PAD_TEMPLATE ("audiosink", @@ -151,33 +138,6 @@ GST_STATIC_PAD_TEMPLATE ("audiosink", GST_STATIC_CAPS ("audio/x-raw-int,width=16,depth=16,channels=2,rate=48000") ); -typedef struct _DecklinkMode DecklinkMode; -struct _DecklinkMode { - BMDDisplayMode mode; - int width; - int height; - int fps_n; - int fps_d; - gboolean interlaced; -}; - -static DecklinkMode modes[] = { - { bmdModeNTSC, 720,486,30000,1001,true }, - { bmdModeNTSC2398, 720,486,24000,1001,true }, - { bmdModePAL, 720,576,25,1,true }, - { bmdModeHD1080p2398, 1920,1080,24000,1001,false }, - { bmdModeHD1080p24, 1920,1080,24,1,false }, - { bmdModeHD1080p25, 1920,1080,25,1,false }, - { bmdModeHD1080p2997, 1920,1080,30000,1001,false }, - { bmdModeHD1080p30, 1920,1080,30,1,false }, - { bmdModeHD1080i50, 1920,1080,25,1,true }, - { bmdModeHD1080i5994, 1920,1080,30000,1001,true }, - { bmdModeHD1080i6000, 1920,1080,30,1,true }, - { bmdModeHD720p50, 1280,720,50,1,true }, - { bmdModeHD720p5994, 1280,720,60000,1001,true }, - { bmdModeHD720p60, 1280,720,60,1,true } -}; - /* class initialization */ @@ -222,6 +182,12 @@ gst_decklink_sink_class_init (GstDecklinkSinkClass * klass) element_class->send_event = GST_DEBUG_FUNCPTR (gst_decklink_sink_send_event); element_class->query = GST_DEBUG_FUNCPTR (gst_decklink_sink_query); + g_object_class_install_property (gobject_class, PROP_MODE, + g_param_spec_enum ("mode", "Mode", "Mode", + GST_TYPE_DECKLINK_MODE, GST_DECKLINK_MODE_NTSC, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + } static void @@ -294,17 +260,11 @@ gst_decklink_sink_init (GstDecklinkSink * decklinksink, gst_element_add_pad (GST_ELEMENT (decklinksink), decklinksink->audiosinkpad); - decklinksink->cond = g_cond_new(); - decklinksink->mutex = g_mutex_new(); + decklinksink->cond = g_cond_new (); + decklinksink->mutex = g_mutex_new (); + decklinksink->audio_mutex = g_mutex_new (); - decklinksink->mode = 0; - - decklinksink->width = modes[decklinksink->mode].width; - decklinksink->height = modes[decklinksink->mode].height; - decklinksink->fps_n = modes[decklinksink->mode].fps_n; - decklinksink->fps_d = modes[decklinksink->mode].fps_d; - decklinksink->interlaced = modes[decklinksink->mode].interlaced; - decklinksink->bmd_mode = modes[decklinksink->mode].mode; + decklinksink->mode = GST_DECKLINK_MODE_NTSC; decklinksink->callback = 
new Output; decklinksink->callback->decklinksink = decklinksink; @@ -314,9 +274,15 @@ void gst_decklink_sink_set_property (GObject * object, guint property_id, const GValue * value, GParamSpec * pspec) { + GstDecklinkSink *decklinksink; + g_return_if_fail (GST_IS_DECKLINK_SINK (object)); + decklinksink = GST_DECKLINK_SINK (object); switch (property_id) { + case PROP_MODE: + decklinksink->mode = (GstDecklinkModeEnum) g_value_get_enum (value); + break; default: G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); break; @@ -327,9 +293,15 @@ void gst_decklink_sink_get_property (GObject * object, guint property_id, GValue * value, GParamSpec * pspec) { + GstDecklinkSink *decklinksink; + g_return_if_fail (GST_IS_DECKLINK_SINK (object)); + decklinksink = GST_DECKLINK_SINK (object); switch (property_id) { + case PROP_MODE: + g_value_set_enum (value, decklinksink->mode); + break; default: G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); break; @@ -357,6 +329,7 @@ gst_decklink_sink_finalize (GObject * object) /* clean up object here */ g_cond_free (decklinksink->cond); g_mutex_free (decklinksink->mutex); + g_mutex_free (decklinksink->audio_mutex); delete decklinksink->callback; @@ -364,29 +337,26 @@ gst_decklink_sink_finalize (GObject * object) } static gboolean -gst_decklink_sink_start (GstDecklinkSink *decklinksink) +gst_decklink_sink_start (GstDecklinkSink * decklinksink) { IDeckLinkIterator *iterator; HRESULT ret; - IDeckLinkDisplayModeIterator *mode_iterator; - IDeckLinkDisplayMode *mode; - BMDTimeValue fps_n; - BMDTimeScale fps_d; + const GstDecklinkMode *mode; iterator = CreateDeckLinkIteratorInstance (); if (iterator == NULL) { - GST_ERROR("no driver"); + GST_ERROR ("no driver"); return FALSE; } ret = iterator->Next (&decklinksink->decklink); if (ret != S_OK) { - GST_ERROR("no card"); + GST_ERROR ("no card"); return FALSE; } ret = decklinksink->decklink->QueryInterface (IID_IDeckLinkOutput, - (void **)&decklinksink->output); + (void **) &decklinksink->output); if (ret != S_OK) { GST_ERROR ("no output"); return FALSE; @@ -394,29 +364,9 @@ gst_decklink_sink_start (GstDecklinkSink *decklinksink) decklinksink->output->SetAudioCallback (decklinksink->callback); - ret = decklinksink->output->GetDisplayModeIterator (&mode_iterator); - if (ret != S_OK) { - GST_ERROR ("failed to get display mode iterator"); - return FALSE; - } + mode = gst_decklink_get_mode (decklinksink->mode); - while (mode_iterator->Next (&mode) == S_OK) { - break; - } - if (!mode) { - GST_ERROR ("bad mode"); - return FALSE; - } - - decklinksink->width = mode->GetWidth (); - decklinksink->height = mode->GetHeight (); - mode->GetFrameRate (&fps_n, &fps_d); - decklinksink->fps_n = fps_n; - decklinksink->fps_d = fps_d; - - decklinksink->display_mode = mode->GetDisplayMode (); - - ret = decklinksink->output->EnableVideoOutput (decklinksink->display_mode, + ret = decklinksink->output->EnableVideoOutput (mode->mode, bmdVideoOutputFlagDefault); if (ret != S_OK) { GST_ERROR ("failed to enable video output"); @@ -424,16 +374,16 @@ gst_decklink_sink_start (GstDecklinkSink *decklinksink) } //decklinksink->video_enabled = TRUE; - decklinksink->output->SetScheduledFrameCompletionCallback (decklinksink->callback); + decklinksink->output-> + SetScheduledFrameCompletionCallback (decklinksink->callback); - if (0) { - ret = decklinksink->output->EnableAudioOutput (bmdAudioSampleRate48kHz, - 16, 2, bmdAudioOutputStreamContinuous); - if (ret != S_OK) { - GST_ERROR ("failed to enable audio output"); - return FALSE; - } + ret 
= decklinksink->output->EnableAudioOutput (bmdAudioSampleRate48kHz, + 16, 2, bmdAudioOutputStreamContinuous); + if (ret != S_OK) { + GST_ERROR ("failed to enable audio output"); + return FALSE; } + decklinksink->audio_buffer = gst_buffer_new (); decklinksink->num_frames = 0; @@ -441,7 +391,7 @@ gst_decklink_sink_start (GstDecklinkSink *decklinksink) } static gboolean -gst_decklink_sink_force_stop (GstDecklinkSink *decklinksink) +gst_decklink_sink_force_stop (GstDecklinkSink * decklinksink) { g_mutex_lock (decklinksink->mutex); decklinksink->stop = TRUE; @@ -452,7 +402,7 @@ gst_decklink_sink_force_stop (GstDecklinkSink *decklinksink) } static gboolean -gst_decklink_sink_stop (GstDecklinkSink *decklinksink) +gst_decklink_sink_stop (GstDecklinkSink * decklinksink) { decklinksink->output->StopScheduledPlayback (0, NULL, 0); decklinksink->output->DisableAudioOutput (); @@ -556,7 +506,7 @@ gst_decklink_sink_videosink_getcaps (GstPad * pad) GST_DEBUG_OBJECT (decklinksink, "getcaps"); - caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad)); + caps = gst_decklink_mode_get_caps (decklinksink->mode); gst_object_unref (decklinksink); return caps; @@ -661,6 +611,7 @@ gst_decklink_sink_videosink_chain (GstPad * pad, GstBuffer * buffer) IDeckLinkMutableVideoFrame *frame; void *data; GstFlowReturn ret; + const GstDecklinkMode *mode; decklinksink = GST_DECKLINK_SINK (gst_pad_get_parent (pad)); @@ -679,8 +630,10 @@ gst_decklink_sink_videosink_chain (GstPad * pad, GstBuffer * buffer) } #endif - decklinksink->output->CreateVideoFrame (decklinksink->width, - decklinksink->height, decklinksink->width * 2, bmdFormat8BitYUV, + mode = gst_decklink_get_mode (decklinksink->mode); + + decklinksink->output->CreateVideoFrame (mode->width, + mode->height, mode->width * 2, bmdFormat8BitYUV, bmdFrameFlagDefault, &frame); frame->GetBytes (&data); @@ -698,12 +651,11 @@ gst_decklink_sink_videosink_chain (GstPad * pad, GstBuffer * buffer) if (!decklinksink->stop) { decklinksink->output->ScheduleVideoFrame (frame, - decklinksink->num_frames * decklinksink->fps_n, - decklinksink->fps_n, decklinksink->fps_d); + decklinksink->num_frames * mode->fps_d, mode->fps_d, mode->fps_n); decklinksink->num_frames++; if (!decklinksink->sched_started) { - decklinksink->output->StartScheduledPlayback (0, 100, 1.0); + decklinksink->output->StartScheduledPlayback (0, mode->fps_d, 1.0); decklinksink->sched_started = TRUE; } @@ -773,8 +725,8 @@ gst_decklink_sink_videosink_query (GstPad * pad, GstQuery * query) } static GstFlowReturn -gst_decklink_sink_videosink_bufferalloc (GstPad * pad, guint64 offset, guint size, - GstCaps * caps, GstBuffer ** buf) +gst_decklink_sink_videosink_bufferalloc (GstPad * pad, guint64 offset, + guint size, GstCaps * caps, GstBuffer ** buf) { GstDecklinkSink *decklinksink; @@ -919,14 +871,24 @@ static GstFlowReturn gst_decklink_sink_audiosink_chain (GstPad * pad, GstBuffer * buffer) { GstDecklinkSink *decklinksink; + GstFlowReturn ret; decklinksink = GST_DECKLINK_SINK (gst_pad_get_parent (pad)); GST_DEBUG_OBJECT (decklinksink, "chain"); + // concatenate both buffers + g_mutex_lock (decklinksink->audio_mutex); + decklinksink->audio_buffer = + gst_buffer_join (decklinksink->audio_buffer, buffer); + g_mutex_unlock (decklinksink->audio_mutex); + + // GST_DEBUG("Audio Buffer Size: %d", GST_BUFFER_SIZE (decklinksink->audio_buffer)); gst_object_unref (decklinksink); - return GST_FLOW_OK; + + ret = GST_FLOW_OK; + return ret; } static GstFlowReturn @@ -984,8 +946,8 @@ gst_decklink_sink_audiosink_query (GstPad * pad, 
GstQuery * query) } static GstFlowReturn -gst_decklink_sink_audiosink_bufferalloc (GstPad * pad, guint64 offset, guint size, - GstCaps * caps, GstBuffer ** buf) +gst_decklink_sink_audiosink_bufferalloc (GstPad * pad, guint64 offset, + guint size, GstCaps * caps, GstBuffer ** buf) { GstDecklinkSink *decklinksink; @@ -1020,10 +982,10 @@ gst_decklink_sink_audiosink_iterintlink (GstPad * pad) HRESULT -Output::ScheduledFrameCompleted (IDeckLinkVideoFrame * completedFrame, + Output::ScheduledFrameCompleted (IDeckLinkVideoFrame * completedFrame, BMDOutputFrameCompletionResult result) -{ - GST_DEBUG("ScheduledFrameCompleted"); +{ + GST_DEBUG ("ScheduledFrameCompleted"); g_mutex_lock (decklinksink->mutex); g_cond_signal (decklinksink->cond); @@ -1033,18 +995,47 @@ Output::ScheduledFrameCompleted (IDeckLinkVideoFrame * completedFrame, return S_OK; } -HRESULT -Output::ScheduledPlaybackHasStopped () +HRESULT Output::ScheduledPlaybackHasStopped () { - GST_ERROR("ScheduledPlaybackHasStopped"); + GST_ERROR ("ScheduledPlaybackHasStopped"); return S_OK; } -HRESULT -Output::RenderAudioSamples (bool preroll) +HRESULT Output::RenderAudioSamples (bool preroll) { - GST_ERROR("RenderAudioSamples"); - + guint + samplesWritten; + GstBuffer * + buffer; + + // guint64 samplesToWrite; + + if (decklinksink->stop) { + GST_DEBUG ("decklinksink->stop set TRUE!"); + decklinksink->output->BeginAudioPreroll (); + // running = true; + } else { + g_mutex_lock (decklinksink->audio_mutex); + decklinksink->output->ScheduleAudioSamples (GST_BUFFER_DATA (decklinksink->audio_buffer), GST_BUFFER_SIZE (decklinksink->audio_buffer) / 4, // 2 bytes per sample, stereo + 0, 0, &samplesWritten); + + buffer = + gst_buffer_new_and_alloc (GST_BUFFER_SIZE (decklinksink->audio_buffer) - + (samplesWritten * 4)); + + memcpy (GST_BUFFER_DATA (buffer), + GST_BUFFER_DATA (decklinksink->audio_buffer) + (samplesWritten * 4), + GST_BUFFER_SIZE (decklinksink->audio_buffer) - (samplesWritten * 4)); + + gst_buffer_unref (decklinksink->audio_buffer); + + decklinksink->audio_buffer = buffer; + + g_mutex_unlock (decklinksink->audio_mutex); + + } + + GST_DEBUG ("RenderAudioSamples"); + return S_OK; } - diff --git a/sys/decklink/gstdecklinksink.h b/sys/decklink/gstdecklinksink.h index 2f0d1d799b..d1b3b48fb9 100644 --- a/sys/decklink/gstdecklinksink.h +++ b/sys/decklink/gstdecklinksink.h @@ -21,6 +21,7 @@ #define _GST_DECKLINK_SINK_H_ #include +#include "gstdecklink.h" #include "DeckLinkAPI.h" G_BEGIN_DECLS @@ -51,12 +52,15 @@ public IDeckLinkAudioOutputCallback struct _GstDecklinkSink { GstElement base_decklinksink; + GstBuffer *audio_buffer; GstPad *videosinkpad; GstPad *audiosinkpad; GMutex *mutex; GCond *cond; + GMutex *audio_mutex; +// GCond *audio_cond; int queued_frames; gboolean stop; @@ -68,15 +72,9 @@ struct _GstDecklinkSink gboolean sched_started; int num_frames; - int fps_n; - int fps_d; - int width; - int height; - gboolean interlaced; - BMDDisplayMode bmd_mode; /* properties */ - int mode; + GstDecklinkModeEnum mode; }; diff --git a/sys/decklink/gstdecklinksrc.cpp b/sys/decklink/gstdecklinksrc.cpp index 373cd96156..e918b1d2d7 100644 --- a/sys/decklink/gstdecklinksrc.cpp +++ b/sys/decklink/gstdecklinksrc.cpp @@ -36,6 +36,7 @@ #endif #include +#include "gstdecklink.h" #include "gstdecklinksrc.h" #include "capture.h" #include @@ -68,43 +69,57 @@ static gboolean gst_decklink_src_send_event (GstElement * element, static gboolean gst_decklink_src_query (GstElement * element, GstQuery * query); static GstCaps *gst_decklink_src_audio_src_getcaps (GstPad * 
pad); -static gboolean gst_decklink_src_audio_src_setcaps (GstPad * pad, GstCaps * caps); -static gboolean gst_decklink_src_audio_src_acceptcaps (GstPad * pad, GstCaps * caps); -static void gst_decklink_src_audio_src_fixatecaps (GstPad * pad, GstCaps * caps); +static gboolean gst_decklink_src_audio_src_setcaps (GstPad * pad, + GstCaps * caps); +static gboolean gst_decklink_src_audio_src_acceptcaps (GstPad * pad, + GstCaps * caps); +static void gst_decklink_src_audio_src_fixatecaps (GstPad * pad, + GstCaps * caps); static gboolean gst_decklink_src_audio_src_activate (GstPad * pad); static gboolean gst_decklink_src_audio_src_activatepush (GstPad * pad, gboolean active); static gboolean gst_decklink_src_audio_src_activatepull (GstPad * pad, gboolean active); -static GstPadLinkReturn gst_decklink_src_audio_src_link (GstPad * pad, GstPad * peer); +static GstPadLinkReturn gst_decklink_src_audio_src_link (GstPad * pad, + GstPad * peer); static GstFlowReturn gst_decklink_src_audio_src_getrange (GstPad * pad, guint64 offset, guint length, GstBuffer ** buffer); -static gboolean gst_decklink_src_audio_src_event (GstPad * pad, GstEvent * event); -static gboolean gst_decklink_src_audio_src_query (GstPad * pad, GstQuery * query); +static gboolean gst_decklink_src_audio_src_event (GstPad * pad, + GstEvent * event); +static gboolean gst_decklink_src_audio_src_query (GstPad * pad, + GstQuery * query); static GstIterator *gst_decklink_src_audio_src_iterintlink (GstPad * pad); static GstCaps *gst_decklink_src_video_src_getcaps (GstPad * pad); -static gboolean gst_decklink_src_video_src_setcaps (GstPad * pad, GstCaps * caps); -static gboolean gst_decklink_src_video_src_acceptcaps (GstPad * pad, GstCaps * caps); -static void gst_decklink_src_video_src_fixatecaps (GstPad * pad, GstCaps * caps); +static gboolean gst_decklink_src_video_src_setcaps (GstPad * pad, + GstCaps * caps); +static gboolean gst_decklink_src_video_src_acceptcaps (GstPad * pad, + GstCaps * caps); +static void gst_decklink_src_video_src_fixatecaps (GstPad * pad, + GstCaps * caps); static gboolean gst_decklink_src_video_src_activate (GstPad * pad); static gboolean gst_decklink_src_video_src_activatepush (GstPad * pad, gboolean active); static gboolean gst_decklink_src_video_src_activatepull (GstPad * pad, gboolean active); -static GstPadLinkReturn gst_decklink_src_video_src_link (GstPad * pad, GstPad * peer); +static GstPadLinkReturn gst_decklink_src_video_src_link (GstPad * pad, + GstPad * peer); static GstFlowReturn gst_decklink_src_video_src_getrange (GstPad * pad, guint64 offset, guint length, GstBuffer ** buffer); -static gboolean gst_decklink_src_video_src_event (GstPad * pad, GstEvent * event); -static gboolean gst_decklink_src_video_src_query (GstPad * pad, GstQuery * query); +static gboolean gst_decklink_src_video_src_event (GstPad * pad, + GstEvent * event); +static gboolean gst_decklink_src_video_src_query (GstPad * pad, + GstQuery * query); static GstIterator *gst_decklink_src_video_src_iterintlink (GstPad * pad); static void gst_decklink_src_task (void *priv); enum { - PROP_0 + PROP_0, + PROP_MODE, + PROP_CONNECTION }; /* pad templates */ @@ -116,58 +131,7 @@ GST_STATIC_PAD_TEMPLATE ("audiosrc", GST_STATIC_CAPS ("audio/x-raw-int,width=16,depth=16,channels=2,rate=48000") ); -#define MODE(w,h,n,d,i) \ - "video/x-raw-yuv,format=(fourcc)UYVY,width=" #w ",height=" #h \ - ",framerate=" #n "/" #d ",interlaced=" #i - -static GstStaticPadTemplate gst_decklink_src_video_src_template = -GST_STATIC_PAD_TEMPLATE ("videosrc", - GST_PAD_SRC, - 
GST_PAD_ALWAYS, - GST_STATIC_CAPS ( - MODE(720,486,30000,1001,true) ";" - MODE(720,486,24000,1001,true) ";" - MODE(720,576,25,1,true) ";" - MODE(1920,1080,24000,1001,false) ";" - MODE(1920,1080,24,1,false) ";" - MODE(1920,1080,25,1,false) ";" - MODE(1920,1080,30000,1001,false) ";" - MODE(1920,1080,30,1,false) ";" - MODE(1920,1080,25,1,true) ";" - MODE(1920,1080,30000,1001,true) ";" - MODE(1920,1080,30,1,true) ";" - MODE(1280,720,50,1,true) ";" - MODE(1280,720,60000,1001,true) ";" - MODE(1280,720,60,1,true) - )); - -typedef struct _DecklinkMode DecklinkMode; -struct _DecklinkMode { - BMDDisplayMode mode; - int width; - int height; - int fps_n; - int fps_d; - gboolean interlaced; -}; - -static DecklinkMode modes[] = { - { bmdModeNTSC, 720,486,30000,1001,true }, - { bmdModeNTSC2398, 720,486,24000,1001,true }, - { bmdModePAL, 720,576,25,1,true }, - { bmdModeHD1080p2398, 1920,1080,24000,1001,false }, - { bmdModeHD1080p24, 1920,1080,24,1,false }, - { bmdModeHD1080p25, 1920,1080,25,1,false }, - { bmdModeHD1080p2997, 1920,1080,30000,1001,false }, - { bmdModeHD1080p30, 1920,1080,30,1,false }, - { bmdModeHD1080i50, 1920,1080,25,1,true }, - { bmdModeHD1080i5994, 1920,1080,30000,1001,true }, - { bmdModeHD1080i6000, 1920,1080,30,1,true }, - { bmdModeHD720p50, 1280,720,50,1,true }, - { bmdModeHD720p5994, 1280,720,60000,1001,true }, - { bmdModeHD720p60, 1280,720,60,1,true } -}; - +/* the video source pad template is created on the fly */ /* class initialization */ @@ -186,7 +150,8 @@ gst_decklink_src_base_init (gpointer g_class) gst_element_class_add_pad_template (element_class, gst_static_pad_template_get (&gst_decklink_src_audio_src_template)); gst_element_class_add_pad_template (element_class, - gst_static_pad_template_get (&gst_decklink_src_video_src_template)); + gst_pad_template_new ("videosrc", GST_PAD_SRC, GST_PAD_ALWAYS, + gst_decklink_mode_get_template_caps ())); gst_element_class_set_details_simple (element_class, "Decklink source", "Source/Video", "DeckLink Source", "David Schleef "); @@ -215,6 +180,17 @@ gst_decklink_src_class_init (GstDecklinkSrcClass * klass) element_class->send_event = GST_DEBUG_FUNCPTR (gst_decklink_src_send_event); element_class->query = GST_DEBUG_FUNCPTR (gst_decklink_src_query); + g_object_class_install_property (gobject_class, PROP_MODE, + g_param_spec_enum ("mode", "Mode", "Mode", + GST_TYPE_DECKLINK_MODE, GST_DECKLINK_MODE_NTSC, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); + + g_object_class_install_property (gobject_class, PROP_CONNECTION, + g_param_spec_enum ("connection", "Connection", "Connection", + GST_TYPE_DECKLINK_CONNECTION, GST_DECKLINK_CONNECTION_SDI, + (GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS | + G_PARAM_CONSTRUCT))); } static void @@ -226,7 +202,8 @@ gst_decklink_src_init (GstDecklinkSrc * decklinksrc, gst_task_set_lock (decklinksrc->task, &decklinksrc->task_mutex); decklinksrc->audiosrcpad = - gst_pad_new_from_static_template (&gst_decklink_src_audio_src_template, "audiosrc"); + gst_pad_new_from_static_template (&gst_decklink_src_audio_src_template, + "audiosrc"); gst_pad_set_getcaps_function (decklinksrc->audiosrcpad, GST_DEBUG_FUNCPTR (gst_decklink_src_audio_src_getcaps)); gst_pad_set_setcaps_function (decklinksrc->audiosrcpad, @@ -255,8 +232,9 @@ gst_decklink_src_init (GstDecklinkSrc * decklinksrc, - decklinksrc->videosrcpad = - gst_pad_new_from_static_template (&gst_decklink_src_video_src_template, "videosrc"); + decklinksrc->videosrcpad = gst_pad_new_from_template ( + 
gst_element_class_get_pad_template(GST_ELEMENT_CLASS(decklinksrc_class), + "videosrc"), "videosrc"); gst_pad_set_getcaps_function (decklinksrc->videosrcpad, GST_DEBUG_FUNCPTR (gst_decklink_src_video_src_getcaps)); gst_pad_set_setcaps_function (decklinksrc->videosrcpad, @@ -284,18 +262,11 @@ gst_decklink_src_init (GstDecklinkSrc * decklinksrc, gst_element_add_pad (GST_ELEMENT (decklinksrc), decklinksrc->videosrcpad); - decklinksrc->cond = g_cond_new(); - decklinksrc->mutex = g_mutex_new(); + decklinksrc->cond = g_cond_new (); + decklinksrc->mutex = g_mutex_new (); decklinksrc->copy_data = TRUE; - decklinksrc->mode = 0; - - decklinksrc->width = modes[decklinksrc->mode].width; - decklinksrc->height = modes[decklinksrc->mode].height; - decklinksrc->fps_n = modes[decklinksrc->mode].fps_n; - decklinksrc->fps_d = modes[decklinksrc->mode].fps_d; - decklinksrc->interlaced = modes[decklinksrc->mode].interlaced; - decklinksrc->bmd_mode = modes[decklinksrc->mode].mode; + decklinksrc->mode = GST_DECKLINK_MODE_NTSC; } @@ -303,9 +274,19 @@ void gst_decklink_src_set_property (GObject * object, guint property_id, const GValue * value, GParamSpec * pspec) { + GstDecklinkSrc *decklinksrc; + g_return_if_fail (GST_IS_DECKLINK_SRC (object)); + decklinksrc = GST_DECKLINK_SRC (object); switch (property_id) { + case PROP_MODE: + decklinksrc->mode = (GstDecklinkModeEnum) g_value_get_enum (value); + break; + case PROP_CONNECTION: + decklinksrc->connection = + (GstDecklinkConnectionEnum) g_value_get_enum (value); + break; default: G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); break; @@ -316,9 +297,18 @@ void gst_decklink_src_get_property (GObject * object, guint property_id, GValue * value, GParamSpec * pspec) { + GstDecklinkSrc *decklinksrc; + g_return_if_fail (GST_IS_DECKLINK_SRC (object)); + decklinksrc = GST_DECKLINK_SRC (object); switch (property_id) { + case PROP_MODE: + g_value_set_enum (value, decklinksrc->mode); + break; + case PROP_CONNECTION: + g_value_set_enum (value, decklinksrc->connection); + break; default: G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec); break; @@ -381,27 +371,26 @@ gst_decklink_src_start (GstElement * element) GstDecklinkSrc *decklinksrc = GST_DECKLINK_SRC (element); IDeckLinkIterator *iterator; DeckLinkCaptureDelegate *delegate; - IDeckLinkDisplayModeIterator *mode_iterator; - IDeckLinkDisplayMode *mode; - int i; + //IDeckLinkDisplayModeIterator *mode_iterator; + //IDeckLinkDisplayMode *mode; int sample_depth; int channels; - BMDVideoInputFlags input_flags; - BMDDisplayMode selected_mode; - BMDPixelFormat pixel_format; HRESULT ret; + const GstDecklinkMode *mode; + IDeckLinkConfiguration *config; + BMDVideoConnection conn; GST_DEBUG_OBJECT (decklinksrc, "start"); iterator = CreateDeckLinkIteratorInstance (); if (iterator == NULL) { - GST_ERROR("no driver"); + GST_ERROR ("no driver"); return FALSE; } ret = iterator->Next (&decklinksrc->decklink); if (ret != S_OK) { - GST_ERROR("no card"); + GST_ERROR ("no card"); return FALSE; } @@ -416,9 +405,65 @@ gst_decklink_src_start (GstElement * element) delegate->priv = decklinksrc; decklinksrc->input->SetCallback (delegate); + ret = decklinksrc->decklink->QueryInterface (IID_IDeckLinkConfiguration, + (void **) &config); + if (ret != S_OK) { + GST_ERROR ("query interface failed"); + return FALSE; + } + + switch (decklinksrc->connection) { + default: + case GST_DECKLINK_CONNECTION_SDI: + conn = bmdVideoConnectionSDI; + break; + case GST_DECKLINK_CONNECTION_HDMI: + conn = bmdVideoConnectionHDMI; + break; + case 
GST_DECKLINK_CONNECTION_OPTICAL_SDI: + conn = bmdVideoConnectionOpticalSDI; + break; + case GST_DECKLINK_CONNECTION_COMPONENT: + conn = bmdVideoConnectionComponent; + break; + case GST_DECKLINK_CONNECTION_COMPOSITE: + conn = bmdVideoConnectionComposite; + break; + case GST_DECKLINK_CONNECTION_SVIDEO: + conn = bmdVideoConnectionSVideo; + break; + } + + ret = config->SetInt (bmdDeckLinkConfigVideoInputConnection, conn); + if (ret != S_OK) { + GST_ERROR ("set configuration (input source)"); + return FALSE; + } + + if (decklinksrc->connection == GST_DECKLINK_CONNECTION_COMPOSITE) { + ret = config->SetInt (bmdDeckLinkConfigAnalogVideoInputFlags, + bmdAnalogVideoFlagCompositeSetup75); + if (ret != S_OK) { + GST_ERROR ("set configuration (composite setup)"); + return FALSE; + } + } + + if (decklinksrc->connection == GST_DECKLINK_CONNECTION_COMPOSITE || + decklinksrc->connection == GST_DECKLINK_CONNECTION_COMPONENT || + decklinksrc->connection == GST_DECKLINK_CONNECTION_SVIDEO) { + ret = config->SetInt (bmdDeckLinkConfigAudioInputConnection, + bmdAudioConnectionAnalog); + if (ret != S_OK) { + GST_ERROR ("set configuration (audio input connection)"); + return FALSE; + } + } + +#if 0 ret = decklinksrc->input->GetDisplayModeIterator (&mode_iterator); if (ret != S_OK) { - GST_ERROR("failed to get display mode iterator"); + GST_ERROR ("failed to get display mode iterator"); return FALSE; } @@ -428,34 +473,33 @@ gst_decklink_src_start (GstElement * element) mode->GetName (&mode_name); - GST_ERROR("%d: mode name: %s", i, mode_name); + GST_DEBUG ("%d: mode name: %s", i, mode_name); mode->Release (); i++; } +#endif - pixel_format = bmdFormat8BitYUV; - selected_mode = decklinksrc->bmd_mode; - input_flags = 0; - ret = decklinksrc->input->EnableVideoInput (selected_mode, pixel_format, - input_flags); - if (ret != S_OK){ - GST_ERROR("enable video input failed"); + mode = gst_decklink_get_mode (decklinksrc->mode); + + ret = decklinksrc->input->EnableVideoInput (mode->mode, bmdFormat8BitYUV, 0); + if (ret != S_OK) { + GST_ERROR ("enable video input failed"); return FALSE; } sample_depth = 16; channels = 2; - ret = decklinksrc->input->EnableAudioInput (bmdAudioSampleRate48kHz, sample_depth, - channels); - if (ret != S_OK){ - GST_ERROR("enable video input failed"); + ret = decklinksrc->input->EnableAudioInput (bmdAudioSampleRate48kHz, + sample_depth, channels); + if (ret != S_OK) { + GST_ERROR ("enable video input failed"); return FALSE; } ret = decklinksrc->input->StartStreams (); if (ret != S_OK) { - GST_ERROR("start streams failed"); + GST_ERROR ("start streams failed"); return FALSE; } @@ -480,6 +524,13 @@ gst_decklink_src_stop (GstElement * element) gst_task_join (decklinksrc->task); + decklinksrc->input->StopStreams (); + decklinksrc->input->DisableVideoInput (); + decklinksrc->input->DisableAudioInput (); + + decklinksrc->input->Release (); + decklinksrc->input = NULL; + return TRUE; } @@ -774,7 +825,7 @@ gst_decklink_src_video_src_getcaps (GstPad * pad) GST_DEBUG_OBJECT (decklinksrc, "getcaps"); - caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad)); + caps = gst_decklink_mode_get_caps (decklinksrc->mode); gst_object_unref (decklinksrc); return caps; @@ -960,7 +1011,7 @@ gst_decklink_src_video_src_iterintlink (GstPad * pad) static void video_frame_free (void *data) { - IDeckLinkVideoInputFrame *video_frame = (IDeckLinkVideoInputFrame *)data; + IDeckLinkVideoInputFrame *video_frame = (IDeckLinkVideoInputFrame *) data; video_frame->Release (); } @@ -977,6 +1028,7 @@ gst_decklink_src_task (void *priv) 
void *data; int n_samples; GstFlowReturn ret; + const GstDecklinkMode *mode; GST_DEBUG_OBJECT (decklinksrc, "task"); @@ -992,94 +1044,86 @@ gst_decklink_src_task (void *priv) g_mutex_unlock (decklinksrc->mutex); if (decklinksrc->stop) { - GST_ERROR("stopping task"); + GST_DEBUG ("stopping task"); return; } if (dropped_frames > 0) { - GST_ELEMENT_ERROR(decklinksrc, RESOURCE, READ, (NULL), (NULL)); + GST_ELEMENT_ERROR (decklinksrc, RESOURCE, READ, (NULL), (NULL)); /* ERROR */ return; } + mode = gst_decklink_get_mode (decklinksrc->mode); + video_frame->GetBytes (&data); if (decklinksrc->copy_data) { - buffer = gst_buffer_new_and_alloc (decklinksrc->width * decklinksrc->height * 2); + buffer = gst_buffer_new_and_alloc (mode->width * mode->height * 2); - memcpy (GST_BUFFER_DATA (buffer), data, decklinksrc->width * decklinksrc->height * 2); + memcpy (GST_BUFFER_DATA (buffer), data, mode->width * mode->height * 2); video_frame->Release (); } else { buffer = gst_buffer_new (); - GST_BUFFER_SIZE (buffer) = decklinksrc->width * decklinksrc->height * 2; + GST_BUFFER_SIZE (buffer) = mode->width * mode->height * 2; - GST_BUFFER_DATA (buffer) = (guint8 *)data; + GST_BUFFER_DATA (buffer) = (guint8 *) data; GST_BUFFER_FREE_FUNC (buffer) = video_frame_free; - GST_BUFFER_MALLOCDATA (buffer) = (guint8 *)video_frame; + GST_BUFFER_MALLOCDATA (buffer) = (guint8 *) video_frame; } GST_BUFFER_TIMESTAMP (buffer) = - gst_util_uint64_scale_int (decklinksrc->num_frames * GST_SECOND, - decklinksrc->fps_d, decklinksrc->fps_n); + gst_util_uint64_scale_int (decklinksrc->num_frames * GST_SECOND, + mode->fps_d, mode->fps_n); GST_BUFFER_DURATION (buffer) = - gst_util_uint64_scale_int ((decklinksrc->num_frames + 1) * GST_SECOND, - decklinksrc->fps_d, decklinksrc->fps_n) - - GST_BUFFER_TIMESTAMP (buffer); + gst_util_uint64_scale_int ((decklinksrc->num_frames + 1) * GST_SECOND, + mode->fps_d, mode->fps_n) - GST_BUFFER_TIMESTAMP (buffer); GST_BUFFER_OFFSET (buffer) = decklinksrc->num_frames; if (decklinksrc->num_frames == 0) { - GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_FLAG_DISCONT); + GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT); } - decklinksrc->num_frames ++; + decklinksrc->num_frames++; if (decklinksrc->video_caps == NULL) { - decklinksrc->video_caps = gst_caps_new_simple ("video/x-raw-yuv", - "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC('U','Y','V','Y'), - "width", G_TYPE_INT, decklinksrc->width, - "height", G_TYPE_INT, decklinksrc->height, - "framerate", GST_TYPE_FRACTION, - decklinksrc->fps_n, decklinksrc->fps_d, - "interlaced", G_TYPE_BOOLEAN, decklinksrc->interlaced, - NULL); + decklinksrc->video_caps = gst_decklink_mode_get_caps (decklinksrc->mode); } gst_buffer_set_caps (buffer, decklinksrc->video_caps); ret = gst_pad_push (decklinksrc->videosrcpad, buffer); if (ret != GST_FLOW_OK) { - GST_ELEMENT_ERROR(decklinksrc, CORE, NEGOTIATION, (NULL), (NULL)); + GST_ELEMENT_ERROR (decklinksrc, CORE, NEGOTIATION, (NULL), (NULL)); } + if (gst_pad_is_linked (decklinksrc->audiosrcpad)) { + n_samples = audio_frame->GetSampleFrameCount (); + audio_frame->GetBytes (&data); + audio_buffer = gst_buffer_new_and_alloc (n_samples * 2 * 2); + memcpy (GST_BUFFER_DATA (audio_buffer), data, n_samples * 2 * 2); - n_samples = audio_frame->GetSampleFrameCount(); - audio_frame->GetBytes (&data); - audio_buffer = gst_buffer_new_and_alloc (n_samples * 2 * 2); - memcpy (GST_BUFFER_DATA (audio_buffer), data, n_samples * 2 * 2); - audio_frame->Release (); - - GST_BUFFER_TIMESTAMP (audio_buffer) = - gst_util_uint64_scale_int 
(decklinksrc->num_audio_samples * GST_SECOND, + GST_BUFFER_TIMESTAMP (audio_buffer) = + gst_util_uint64_scale_int (decklinksrc->num_audio_samples * GST_SECOND, 1, 48000); - GST_BUFFER_DURATION (audio_buffer) = - gst_util_uint64_scale_int ((decklinksrc->num_audio_samples + n_samples) * GST_SECOND, - 1, 48000) - GST_BUFFER_TIMESTAMP (audio_buffer); - decklinksrc->num_audio_samples += n_samples; + GST_BUFFER_DURATION (audio_buffer) = + gst_util_uint64_scale_int ((decklinksrc->num_audio_samples + + n_samples) * GST_SECOND, 1, + 48000) - GST_BUFFER_TIMESTAMP (audio_buffer); + decklinksrc->num_audio_samples += n_samples; - if (decklinksrc->audio_caps == NULL) { - decklinksrc->audio_caps = gst_caps_new_simple ("audio/x-raw-int", - "endianness", G_TYPE_INT, G_LITTLE_ENDIAN, - "signed", G_TYPE_BOOLEAN, TRUE, - "depth", G_TYPE_INT, 16, - "width", G_TYPE_INT, 16, - "channels", G_TYPE_INT, 2, - "rate", G_TYPE_INT, 48000, - NULL); - } - gst_buffer_set_caps (audio_buffer, decklinksrc->audio_caps); + if (decklinksrc->audio_caps == NULL) { + decklinksrc->audio_caps = gst_caps_new_simple ("audio/x-raw-int", + "endianness", G_TYPE_INT, LITTLE_ENDIAN, + "signed", G_TYPE_BOOLEAN, TRUE, + "depth", G_TYPE_INT, 16, + "width", G_TYPE_INT, 16, + "channels", G_TYPE_INT, 2, "rate", G_TYPE_INT, 48000, NULL); + } + gst_buffer_set_caps (audio_buffer, decklinksrc->audio_caps); - ret = gst_pad_push (decklinksrc->audiosrcpad, audio_buffer); - if (ret != GST_FLOW_OK) { - GST_ELEMENT_ERROR(decklinksrc, CORE, NEGOTIATION, (NULL), (NULL)); + ret = gst_pad_push (decklinksrc->audiosrcpad, audio_buffer); + if (ret != GST_FLOW_OK) { + GST_ELEMENT_ERROR (decklinksrc, CORE, NEGOTIATION, (NULL), (NULL)); + } } + audio_frame->Release (); } - - diff --git a/sys/decklink/gstdecklinksrc.h b/sys/decklink/gstdecklinksrc.h index e191675fc1..40ef4f10a1 100644 --- a/sys/decklink/gstdecklinksrc.h +++ b/sys/decklink/gstdecklinksrc.h @@ -21,6 +21,7 @@ #define _GST_DECKLINK_SRC_H_ #include +#include "gstdecklink.h" #include "DeckLinkAPI.h" G_BEGIN_DECLS @@ -69,7 +70,8 @@ struct _GstDecklinkSrc /* properties */ gboolean copy_data; - int mode; + GstDecklinkModeEnum mode; + GstDecklinkConnectionEnum connection; }; struct _GstDecklinkSrcClass diff --git a/sys/dvb/camresourcemanager.c b/sys/dvb/camresourcemanager.c index 2b6169fde1..f7b473a0e6 100644 --- a/sys/dvb/camresourcemanager.c +++ b/sys/dvb/camresourcemanager.c @@ -171,22 +171,22 @@ static CamReturn data_impl (CamALApplication * application, CamSLSession * session, guint tag, guint8 * buffer, guint length) { - CamReturn ret; CamResourceManager *mgr = CAM_RESOURCE_MANAGER (application); switch (tag) { case TAG_PROFILE_ENQUIRY: - ret = send_profile_reply (mgr, session); + send_profile_reply (mgr, session); break; case TAG_PROFILE_REPLY: - ret = handle_profile_reply (mgr, session, buffer, length); + handle_profile_reply (mgr, session, buffer, length); break; case TAG_PROFILE_CHANGE: - ret = send_profile_enquiry (mgr, session); + send_profile_enquiry (mgr, session); break; default: g_return_val_if_reached (CAM_RETURN_APPLICATION_ERROR); } + /* FIXME: Shouldn't this return the retval from the functions above ? 
*/ return CAM_RETURN_OK; } diff --git a/sys/dvb/camsession.c b/sys/dvb/camsession.c index 4fe728e363..4e4e391d7a 100644 --- a/sys/dvb/camsession.c +++ b/sys/dvb/camsession.c @@ -379,8 +379,6 @@ static CamReturn handle_create_session_response (CamSL * sl, CamTLConnection * connection, guint8 * spdu, guint spdu_length) { - guint8 status; - guint resource_id; guint16 session_nb; CamSLSession *session; @@ -398,8 +396,8 @@ handle_create_session_response (CamSL * sl, CamTLConnection * connection, } /* skip tag and length */ - status = spdu[2]; - resource_id = GST_READ_UINT32_BE (&spdu[3]); + /* status = spdu[2]; */ + /* resource_id = GST_READ_UINT32_BE (&spdu[3]); */ session_nb = GST_READ_UINT16_BE (&spdu[7]); session = g_hash_table_lookup (sl->sessions, diff --git a/sys/dvb/camtransport.c b/sys/dvb/camtransport.c index 98467f0643..30780db720 100644 --- a/sys/dvb/camtransport.c +++ b/sys/dvb/camtransport.c @@ -224,7 +224,6 @@ cam_tl_read_tpdu_next (CamTL * tl, CamTLConnection ** out_connection) { CamReturn ret; CamTLConnection *connection; - guint8 slot; guint8 connection_id; guint8 *tpdu; guint8 length_field_len; @@ -244,7 +243,7 @@ cam_tl_read_tpdu_next (CamTL * tl, CamTLConnection ** out_connection) } /* LPDU slot */ - slot = tpdu[0]; + /* slot = tpdu[0]; */ /* LPDU connection id */ connection_id = tpdu[1]; diff --git a/sys/shm/shmpipe.h b/sys/shm/shmpipe.h index c4475b8534..9cf0d6c61c 100644 --- a/sys/shm/shmpipe.h +++ b/sys/shm/shmpipe.h @@ -23,35 +23,39 @@ */ /* - * None of this code is thread safe, if you want to use it in a multi-threaded - * context, please protect it with a mutex. + * None of this code is thread safe, if you want to use it in a + * multi-threaded context, please protect it with a mutex. * - * First, create a writer with sp_writer_create() - * And selectes() on the socket from sp_get_fd() - * If the socket is closed or there are errors from any function, the app - * should call sp_close() and assume the writer is dead - * The server calls sp_writer_accept_client() when there is something to read - * from the server fd - * It then needs to select() on the socket from sp_writer_get_client_fd() - * If it gets an error on that socket, it call sp_writer_close_client(). - * If there is something to read, it calls sp_writer_recv(). + * First, create a writer with sp_writer_create(), then select() on + * the socket returned by sp_get_fd(). If the socket is closed or any + * function returns an error, the app should call sp_close() and + * assume the other side is dead. The writer calls + * sp_writer_accept_client() when there is something to read from the + * main server fd. This returns a new ShmClient (representing a client + * connection), the writer needs to do a select() on the socket + * returned by sp_writer_get_client_fd(). If it gets an error on that + * socket, it calls sp_writer_close_client(). If there is something to + * read, it calls sp_writer_recv(). * - * The writer allocates buffers with sp_writer_alloc_block(), - * writes something in the buffer (retrieved with sp_writer_block_get_buf(), - * then calls sp_writer_send_buf() to send the buffer or a subsection to - * the other side. When it is done with the block, it calls - * sp_writer_free_block(). - * If alloc fails, then the server must wait for events from the clients before - * trying again. 
+ * The writer allocates a block containing a free buffer with + * sp_writer_alloc_block(), then writes something in the buffer + * (retrieved with sp_writer_block_get_buf()), then calls + * sp_writer_send_buf() to send the buffer or a subsection to the + * other side. When it is done with the block, it calls + * sp_writer_free_block(). If alloc fails, then the server must wait + * for events on the client fd (the ones where sp_writer_recv() is + * called), and then try to re-alloc. * - * - * The clients connect with sp_client_open() - * And select() on the fd from sp_get_fd() until there is something to read. - * Then they must read using sp_client_recv() which will return > 0 if there - * is a valid buffer (which is read only). It will return 0 if it is an internal - * message and <0 if there was an error. If there was an error, one must close - * it with sp_close(). If was valid buffer was received, the client must release - * it with sp_client_recv_finish() when it is done reading from it. + * The reader (client) connects to the writer with sp_client_open() and + * select()s on the fd from sp_get_fd() until there is something to + * read. Then it must read using sp_client_recv() which will return + * the size of the buffer (positive) if there is a valid buffer (which + * is read only). It will return 0 if it is an internal message and a + * negative number if there was an error. If there was an error, the + * application must close the pipe with sp_close() and assume that all + * buffers are no longer valid. If a valid buffer was received, the + * client must release it with sp_client_recv_finish() when it is done + * reading from it. */ diff --git a/sys/vdpau/basevideodecoder/gstbasevideodecoder.c b/sys/vdpau/basevideodecoder/gstbasevideodecoder.c index 7bbfb4ea57..d2a1b145de 100644 --- a/sys/vdpau/basevideodecoder/gstbasevideodecoder.c +++ b/sys/vdpau/basevideodecoder/gstbasevideodecoder.c @@ -266,12 +266,9 @@ static gboolean gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event) { GstBaseVideoDecoder *base_video_decoder; - GstBaseVideoDecoderClass *base_video_decoder_class; gboolean res = FALSE; base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); - base_video_decoder_class = - GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); switch (GST_EVENT_TYPE (event)) { case GST_EVENT_EOS: @@ -634,7 +631,6 @@ static GstFlowReturn gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf) { GstBaseVideoDecoder *base_video_decoder; - GstBaseVideoDecoderClass *base_video_decoder_class; GstFlowReturn ret; GST_DEBUG ("chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT, @@ -651,8 +647,6 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf) #endif base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); - base_video_decoder_class = - GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); GST_DEBUG_OBJECT (base_video_decoder, "chain"); @@ -968,16 +962,11 @@ void gst_base_video_decoder_skip_frame (GstBaseVideoDecoder * base_video_decoder, GstVideoFrame * frame) { - GstBaseVideoDecoderClass *base_video_decoder_class; - GstClockTime presentation_timestamp; GstClockTime presentation_duration; GST_DEBUG ("skip frame"); - base_video_decoder_class = - GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); - gst_base_video_decoder_calculate_timestamps (base_video_decoder, frame, &presentation_timestamp, &presentation_duration); @@ -1112,11 +1101,9 @@ static void gst_base_video_decoder_finalize (GObject * object) { GstBaseVideoDecoder 
*base_video_decoder; - GstBaseVideoDecoderClass *base_video_decoder_class; g_return_if_fail (GST_IS_BASE_VIDEO_DECODER (object)); base_video_decoder = GST_BASE_VIDEO_DECODER (object); - base_video_decoder_class = GST_BASE_VIDEO_DECODER_GET_CLASS (object); g_object_unref (base_video_decoder->input_adapter); diff --git a/sys/vdpau/gstvdpsink.c b/sys/vdpau/gstvdpsink.c index 31a3d5926d..c3d097fdd4 100644 --- a/sys/vdpau/gstvdpsink.c +++ b/sys/vdpau/gstvdpsink.c @@ -1391,11 +1391,9 @@ static void gst_vdp_sink_class_init (VdpSinkClass * klass) { GObjectClass *gobject_class; - GstElementClass *gstelement_class; GstBaseSinkClass *gstbasesink_class; gobject_class = (GObjectClass *) klass; - gstelement_class = (GstElementClass *) klass; gstbasesink_class = (GstBaseSinkClass *) klass; parent_class = g_type_class_peek_parent (klass); diff --git a/sys/vdpau/h264/gsth264dpb.c b/sys/vdpau/h264/gsth264dpb.c index 71a04e9635..4fadc171c2 100644 --- a/sys/vdpau/h264/gsth264dpb.c +++ b/sys/vdpau/h264/gsth264dpb.c @@ -132,13 +132,10 @@ gst_h264_dpb_bump (GstH264DPB * dpb, guint poc, GstFlowReturn * ret) GstFlowReturn gst_h264_dpb_add (GstH264DPB * dpb, GstH264Frame * h264_frame) { - GstH264Frame **frames; GstFlowReturn ret; GST_DEBUG ("add frame with poc: %d", h264_frame->poc); - frames = dpb->frames; - if (h264_frame->is_reference && h264_frame->is_long_term && (h264_frame->frame_idx > dpb->max_longterm_frame_idx)) h264_frame->is_reference = FALSE; diff --git a/sys/vdpau/h264/gsth264parser.c b/sys/vdpau/h264/gsth264parser.c index b154a54cc7..a2a3cf0b82 100644 --- a/sys/vdpau/h264/gsth264parser.c +++ b/sys/vdpau/h264/gsth264parser.c @@ -768,7 +768,7 @@ gst_h264_slice_parse_ref_pic_list_reordering (GstH264Slice * slice, do { READ_UE_ALLOWED (reader, reordering_of_pic_nums_idc, 0, 3); if (reordering_of_pic_nums_idc == 0 || reordering_of_pic_nums_idc == 1) { - guint32 abs_diff_pic_num_minus1; + guint32 abs_diff_pic_num_minus1 G_GNUC_UNUSED; READ_UE_ALLOWED (reader, abs_diff_pic_num_minus1, 0, slice->MaxPicNum - 1); diff --git a/sys/vdpau/h264/gstvdph264dec.c b/sys/vdpau/h264/gstvdph264dec.c index 8d7e3fa4ec..6c181aff01 100644 --- a/sys/vdpau/h264/gstvdph264dec.c +++ b/sys/vdpau/h264/gstvdph264dec.c @@ -471,8 +471,8 @@ gst_vdp_h264_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder, GstH264Frame *h264_frame; GstH264Slice *slice; - GstH264Picture *pic; - GstH264Sequence *seq; + GstH264Picture *pic G_GNUC_UNUSED; + GstH264Sequence *seq G_GNUC_UNUSED; GstFlowReturn ret; GstVdpVideoBuffer *outbuf; diff --git a/tests/check/Makefile.am b/tests/check/Makefile.am index 8b76b2a2ae..430d5ac6a6 100644 --- a/tests/check/Makefile.am +++ b/tests/check/Makefile.am @@ -138,7 +138,8 @@ endif VALGRIND_TO_FIX = \ elements/mpeg2enc \ elements/mplex \ - elements/zbar + elements/zbar \ + pipeline/colorspace # valgrind testing VALGRIND_TESTS_DISABLE = \ @@ -183,6 +184,7 @@ check_PROGRAMS = \ elements/mxfmux \ elements/id3mux \ pipelines/mxf \ + pipelines/colorspace \ $(check_mimic) \ elements/rtpmux \ $(check_schro) \ diff --git a/tests/check/elements/.gitignore b/tests/check/elements/.gitignore index 4554222b92..6df3baedad 100644 --- a/tests/check/elements/.gitignore +++ b/tests/check/elements/.gitignore @@ -12,6 +12,8 @@ faac faad gdpdepay gdppay +h263parse +h264parse id3mux imagecapturebin interleave @@ -21,6 +23,8 @@ kate legacyresample logoinsert mpeg2enc +mpegvideoparse +mpeg4videoparse mplex mxfdemux mxfmux diff --git a/tests/check/elements/camerabin2.c b/tests/check/elements/camerabin2.c index 6f58a1e25a..5277b619a6 
100644 --- a/tests/check/elements/camerabin2.c +++ b/tests/check/elements/camerabin2.c @@ -164,6 +164,7 @@ gst_test_camera_src_init (GstTestCameraSrc * self, static GstElement *camera; static guint bus_source; static GMainLoop *main_loop; +static gint capture_count = 0; guint32 test_id = 0; static GstBuffer *preview_buffer; @@ -257,11 +258,8 @@ capture_bus_cb (GstBus * bus, GstMessage * message, gpointer data) break; default: st = gst_message_get_structure (message); - if (st && gst_structure_has_name (st, "image-captured")) { - gboolean ready = FALSE; + if (st && gst_structure_has_name (st, "image-done")) { GST_INFO ("image captured"); - g_object_get (camera, "ready-for-capture", &ready, NULL); - fail_if (!ready, "not ready for capture"); } else if (st && gst_structure_has_name (st, GST_BASE_CAMERA_SRC_PREVIEW_MESSAGE_NAME)) { GstBuffer *buf; @@ -359,6 +357,7 @@ setup_wrappercamerabinsrc_videotestsrc (void) gst_object_unref (bus); tags_found = NULL; + capture_count = 0; GST_INFO ("init finished"); } @@ -744,14 +743,6 @@ GST_START_TEST (test_image_video_cycle) if (!camera) return; - /* set filepaths for image and videos */ - g_object_set (camera, "mode", 1, NULL); - g_object_set (camera, "location", make_test_file_name (IMAGE_FILENAME, -1), - NULL); - g_object_set (camera, "mode", 2, NULL); - g_object_set (camera, "location", make_test_file_name (VIDEO_FILENAME, -1), - NULL); - if (gst_element_set_state (GST_ELEMENT (camera), GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) { GST_WARNING ("setting camerabin to PLAYING failed"); @@ -767,6 +758,8 @@ GST_START_TEST (test_image_video_cycle) /* take a picture */ g_object_set (camera, "mode", 1, NULL); + g_object_set (camera, "location", make_test_file_name (IMAGE_FILENAME, i), + NULL); g_signal_emit_by_name (camera, "start-capture", NULL); g_timeout_add_seconds (3, (GSourceFunc) g_main_loop_quit, main_loop); g_main_loop_run (main_loop); @@ -775,6 +768,8 @@ GST_START_TEST (test_image_video_cycle) /* now go to video */ g_object_set (camera, "mode", 2, NULL); + g_object_set (camera, "location", make_test_file_name (VIDEO_FILENAME, i), + NULL); g_signal_emit_by_name (camera, "start-capture", NULL); g_timeout_add_seconds (VIDEO_DURATION, (GSourceFunc) g_main_loop_quit, main_loop); @@ -1233,6 +1228,103 @@ GST_START_TEST (test_video_custom_filter) GST_END_TEST; +#define LOCATION_SWITCHING_FILENAMES_COUNT 5 + +static gboolean +image_location_switch_do_capture (gpointer data) +{ + gchar **filenames = data; + if (capture_count >= LOCATION_SWITCHING_FILENAMES_COUNT) { + g_main_loop_quit (main_loop); + } + + g_object_set (camera, "location", filenames[capture_count], NULL); + g_signal_emit_by_name (camera, "start-capture", NULL); + capture_count++; + return FALSE; +} + +static void +image_location_switch_readyforcapture (GObject * obj, GParamSpec * pspec, + gpointer user_data) +{ + gboolean ready; + + g_object_get (obj, "ready-for-capture", &ready, NULL); + if (ready) { + g_idle_add (image_location_switch_do_capture, user_data); + } +}; + +/* + * Tests that setting the location and then doing an image + * capture will set this capture resulting filename to the + * correct location. + * + * There was a bug in which setting the location, issuing a capture + * and then setting a new location would cause this capture to have + * the location set after this capture. This test should prevent it + * from happening again. 
+ */ +GST_START_TEST (test_image_location_switching) +{ + gchar *filenames[LOCATION_SWITCHING_FILENAMES_COUNT + 1]; + gint i; + glong notify_id; + GstCaps *caps; + GstElement *src; + + if (!camera) + return; + + g_object_get (camera, "camera-source", &src, NULL); + + for (i = 0; i < LOCATION_SWITCHING_FILENAMES_COUNT; i++) { + filenames[i] = + g_strdup (make_test_file_name ("image-switching-filename-test", i)); + } + filenames[LOCATION_SWITCHING_FILENAMES_COUNT] = NULL; + + /* set still image mode */ + g_object_set (camera, "mode", 1, NULL); + caps = gst_caps_new_simple ("video/x-raw-rgb", "width", G_TYPE_INT, + 800, "height", G_TYPE_INT, 600, NULL); + g_object_set (camera, "image-capture-caps", caps, NULL); + gst_caps_unref (caps); + + if (gst_element_set_state (GST_ELEMENT (camera), GST_STATE_PLAYING) == + GST_STATE_CHANGE_FAILURE) { + GST_WARNING ("setting camerabin to PLAYING failed"); + gst_element_set_state (GST_ELEMENT (camera), GST_STATE_NULL); + gst_object_unref (camera); + camera = NULL; + } + fail_unless (camera != NULL); + GST_INFO ("starting capture"); + + notify_id = g_signal_connect (G_OBJECT (src), + "notify::ready-for-capture", + G_CALLBACK (image_location_switch_readyforcapture), filenames); + + g_idle_add (image_location_switch_do_capture, filenames); + g_main_loop_run (main_loop); + + g_usleep (G_USEC_PER_SEC * 3); + gst_element_set_state (GST_ELEMENT (camera), GST_STATE_NULL); + + for (i = 0; i < LOCATION_SWITCHING_FILENAMES_COUNT; i++) { + GST_INFO ("Checking for file: %s", filenames[i]); + fail_unless (g_file_test (filenames[i], G_FILE_TEST_IS_REGULAR)); + } + + for (i = 0; i < LOCATION_SWITCHING_FILENAMES_COUNT; i++) { + g_free (filenames[i]); + } + g_signal_handler_disconnect (src, notify_id); +} + +GST_END_TEST; + typedef struct _TestCaseDef { @@ -1291,6 +1383,8 @@ camerabin_suite (void) tcase_add_test (tc_basic, test_image_custom_filter); tcase_add_test (tc_basic, test_video_custom_filter); + + tcase_add_test (tc_basic, test_image_location_switching); } end: diff --git a/tests/check/elements/faad.c b/tests/check/elements/faad.c index c911048e92..8478b51117 100644 --- a/tests/check/elements/faad.c +++ b/tests/check/elements/faad.c @@ -111,12 +111,10 @@ do_test (GstBuffer * inbuffer) /* clean up buffers */ for (i = 0; i < num_buffers; ++i) { gint size; - guint8 *data; outbuffer = GST_BUFFER (buffers->data); fail_if (outbuffer == NULL); - data = GST_BUFFER_DATA (outbuffer); size = GST_BUFFER_SIZE (outbuffer); /* 2 16-bit channels */ diff --git a/tests/check/pipelines/.gitignore b/tests/check/pipelines/.gitignore index ec9623ff33..909ffabee7 100644 --- a/tests/check/pipelines/.gitignore +++ b/tests/check/pipelines/.gitignore @@ -2,3 +2,4 @@ mxf mimic tagschecking +colorspace diff --git a/tests/check/pipelines/colorspace.c b/tests/check/pipelines/colorspace.c new file mode 100644 index 0000000000..3704e86e38 --- /dev/null +++ b/tests/check/pipelines/colorspace.c @@ -0,0 +1,310 @@ +/* GStreamer + * + * unit comparison test for colorspace + * + * Copyright 2011 Collabora Ltd. + * @author: Mark Nauwelaerts + * Copyright 2011 Nokia Corp. + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. 
+ * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#include + +static GMainLoop *loop; + +static void +message_cb (GstBus * bus, GstMessage * message, gpointer user_data) +{ + switch (GST_MESSAGE_TYPE (message)) { + case GST_MESSAGE_ERROR: + case GST_MESSAGE_WARNING: + g_assert_not_reached (); + break; + case GST_MESSAGE_EOS: + g_main_loop_quit (loop); + break; + case GST_MESSAGE_ELEMENT: + { + const GstStructure *s = gst_message_get_structure (message); + const gchar *name = gst_structure_get_name (s); + + fail_unless (strcmp (name, "delta") == 0); + break; + } + default: + break; + } +} + +/* compare output with ffmpegcolorspace */ +static void +colorspace_compare (gint width, gint height, gboolean comp) +{ + GstBus *bus; + GstElement *pipeline, *src, *filter1, *filter2, *csp, *fcsp, *fakesink; + GstElement *queue1, *queue2, *tee, *compare; + GstCaps *caps, *tcaps, *rcaps, *fcaps; + const GstCaps *ccaps; + GstPad *pad; + + gint i, j; + + /* create elements */ + pipeline = gst_pipeline_new ("pipeline"); + src = gst_element_factory_make ("videotestsrc", "videotestsrc"); + fail_unless (src != NULL); + filter1 = gst_element_factory_make ("capsfilter", "capsfilter1"); + fail_unless (filter1 != NULL); + csp = gst_element_factory_make ("colorspace", "colorspace"); + fail_unless (csp != NULL); + filter2 = gst_element_factory_make ("capsfilter", "capsfilter2"); + fail_unless (filter2 != NULL); + + if (comp) { + fcsp = gst_element_factory_make ("ffmpegcolorspace", "ffmpegcolorspace"); + fail_unless (fcsp != NULL); + tee = gst_element_factory_make ("tee", "tee"); + fail_unless (tee != NULL); + queue1 = gst_element_factory_make ("queue", "queue1"); + fail_unless (queue1 != NULL); + queue2 = gst_element_factory_make ("queue", "queue2"); + fail_unless (queue2 != NULL); + compare = gst_element_factory_make ("compare", "compare"); + fail_unless (compare != NULL); + } else { + fcsp = tee = queue1 = queue2 = compare = NULL; + } + + fakesink = gst_element_factory_make ("fakesink", "fakesink"); + fail_unless (fakesink != NULL); + + /* add and link */ + gst_bin_add_many (GST_BIN (pipeline), src, filter1, filter2, csp, fakesink, + tee, queue1, queue2, fcsp, compare, NULL); + + fail_unless (gst_element_link (src, filter1)); + + if (comp) { + fail_unless (gst_element_link (filter1, tee)); + + fail_unless (gst_element_link (tee, queue1)); + fail_unless (gst_element_link (queue1, fcsp)); + fail_unless (gst_element_link_pads (fcsp, NULL, compare, "sink")); + + fail_unless (gst_element_link (tee, queue2)); + fail_unless (gst_element_link (queue2, csp)); + fail_unless (gst_element_link_pads (csp, NULL, compare, "check")); + + fail_unless (gst_element_link (compare, filter2)); + } else { + fail_unless (gst_element_link (filter1, csp)); + fail_unless (gst_element_link (csp, filter2)); + } + fail_unless (gst_element_link (filter2, fakesink)); + + /* obtain possible caps combinations */ + if (comp) { + pad = gst_element_get_static_pad (fcsp, "sink"); + fail_unless (pad != NULL); + ccaps = gst_pad_get_pad_template_caps (pad); + fail_unless (ccaps != NULL); + fcaps = gst_caps_copy 
(ccaps); + gst_object_unref (pad); + } else { + fcaps = gst_caps_new_any (); + } + + pad = gst_element_get_static_pad (csp, "sink"); + fail_unless (pad != NULL); + ccaps = gst_pad_get_pad_template_caps (pad); + fail_unless (ccaps != NULL); + gst_object_unref (pad); + + /* handle videotestsrc limitations */ + pad = gst_element_get_static_pad (src, "src"); + fail_unless (pad != NULL); + caps = (GstCaps *) gst_pad_get_pad_template_caps (pad); + fail_unless (caps != NULL); + gst_object_unref (pad); + + rcaps = gst_caps_new_simple ("video/x-raw-yuv", + "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, + "framerate", GST_TYPE_FRACTION, 25, 1, + "color-matrix", G_TYPE_STRING, "sdtv", + "chroma-site", G_TYPE_STRING, "mpeg2", NULL); + gst_caps_append (rcaps, gst_caps_new_simple ("video/x-raw-rgb", + "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, + "framerate", GST_TYPE_FRACTION, 25, 1, + "depth", G_TYPE_INT, 32, NULL)); + + /* FIXME also allow x-raw-gray if/when colorspace actually handles those */ + + /* limit to supported compare types */ + if (comp) { + gst_caps_append (rcaps, gst_caps_new_simple ("video/x-raw-rgb", + "width", G_TYPE_INT, width, "height", G_TYPE_INT, height, + "framerate", GST_TYPE_FRACTION, 25, 1, + "depth", G_TYPE_INT, 24, NULL)); + } + + tcaps = gst_caps_intersect (fcaps, ccaps); + gst_caps_unref (fcaps); + caps = gst_caps_intersect (tcaps, caps); + gst_caps_unref (tcaps); + tcaps = caps; + caps = gst_caps_intersect (tcaps, rcaps); + gst_caps_unref (tcaps); + gst_caps_unref (rcaps); + + /* normalize to finally have a list of acceptable fixed formats */ + gst_caps_do_simplify (caps); + tcaps = caps; + caps = gst_caps_normalize (tcaps); + gst_caps_unref (tcaps); + + /* set up for running stuff */ + loop = g_main_loop_new (NULL, FALSE); + bus = gst_element_get_bus (pipeline); + gst_bus_add_signal_watch (bus); + g_signal_connect (bus, "message::eos", (GCallback) message_cb, NULL); + gst_object_unref (bus); + + g_object_set (src, "num-buffers", 5, NULL); + if (comp) { + /* set lower bound for ssim comparison, and allow slightly different caps */ + g_object_set (compare, "method", 2, NULL); + g_object_set (compare, "meta", 3, NULL); + g_object_set (compare, "threshold", 0.90, NULL); + g_object_set (compare, "upper", FALSE, NULL); + } + + GST_INFO ("possible caps to check %d", gst_caps_get_size (caps)); + + /* loop over all input and output combinations */ + for (i = 0; i < gst_caps_get_size (caps); i++) { + for (j = 0; j < gst_caps_get_size (caps); j++) { + GstCaps *in_caps, *out_caps; + GstStructure *s; + guint32 fourcc; + + in_caps = gst_caps_copy_nth (caps, i); + out_caps = gst_caps_copy_nth (caps, j); + + /* FIXME remove if videotestsrc and video format handle these properly */ + s = gst_caps_get_structure (in_caps, 0); + if (gst_structure_get_fourcc (s, "format", &fourcc)) { + if (fourcc == GST_MAKE_FOURCC ('Y', 'U', 'V', '9') || + fourcc == GST_MAKE_FOURCC ('Y', 'V', 'U', '9') || + fourcc == GST_MAKE_FOURCC ('v', '2', '1', '6')) { + gst_caps_unref (in_caps); + gst_caps_unref (out_caps); + continue; + } + } + + GST_INFO ("checking conversion from %" GST_PTR_FORMAT " (%d)" + " to %" GST_PTR_FORMAT " (%d)", in_caps, i, out_caps, j); + + g_object_set (filter1, "caps", in_caps, NULL); + g_object_set (filter2, "caps", out_caps, NULL); + + fail_unless (gst_element_set_state (pipeline, GST_STATE_PLAYING) + != GST_STATE_CHANGE_FAILURE); + + g_main_loop_run (loop); + + fail_unless (gst_element_set_state (pipeline, GST_STATE_NULL) + == GST_STATE_CHANGE_SUCCESS); + + 
gst_caps_unref (in_caps); + gst_caps_unref (out_caps); + } + } + + gst_caps_unref (caps); + gst_object_unref (pipeline); + g_main_loop_unref (loop); +} + +#define WIDTH 176 +#define HEIGHT 120 + +GST_START_TEST (test_colorspace_compare) +{ + colorspace_compare (WIDTH, HEIGHT, TRUE); +} + +GST_END_TEST; + +/* enable if you like stuff (ffmpegcolorspace) crashing */ +#ifdef TEST_ODD + +GST_START_TEST (test_colorspace_compare_odd_height) +{ + colorspace_compare (WIDTH, HEIGHT + 1, TRUE); +} + +GST_END_TEST; + +GST_START_TEST (test_colorspace_compare_odd_width) +{ + colorspace_compare (WIDTH + 1, HEIGHT, TRUE); +} + +GST_END_TEST; + +GST_START_TEST (test_colorspace_compare_odd) +{ + colorspace_compare (WIDTH + 1, HEIGHT + 1, TRUE); +} + +GST_END_TEST; + +#endif + +/* useful for crash and valgrind check */ + +GST_START_TEST (test_colorspace) +{ + colorspace_compare (WIDTH + 1, HEIGHT + 1, FALSE); +} + +GST_END_TEST; + +static Suite * +colorspace_suite (void) +{ + Suite *s = suite_create ("colorspace"); + TCase *tc_chain; + + tc_chain = tcase_create ("colorspace_compare"); + tcase_add_test (tc_chain, test_colorspace_compare); +#ifdef TEST_ODD + tcase_add_test (tc_chain, test_colorspace_compare_odd_height); + tcase_add_test (tc_chain, test_colorspace_compare_odd_width); + tcase_add_test (tc_chain, test_colorspace_compare_odd); +#endif + tcase_add_test (tc_chain, test_colorspace); + suite_add_tcase (s, tc_chain); + + /* test may take some time */ + tcase_set_timeout (tc_chain, 600); + + return s; +} + +GST_CHECK_MAIN (colorspace) diff --git a/tests/examples/Makefile.am b/tests/examples/Makefile.am index 26833fb619..84c178797b 100644 --- a/tests/examples/Makefile.am +++ b/tests/examples/Makefile.am @@ -16,7 +16,9 @@ else CAMERABIN2= endif -SUBDIRS= $(DIRECTFB_DIR) $(GTK_EXAMPLES) -DIST_SUBDIRS= camerabin camerabin2 directfb mxf scaletempo +OPENCV_EXAMPLES=opencv + +SUBDIRS= $(DIRECTFB_DIR) $(GTK_EXAMPLES) $(OPENCV_EXAMPLES) +DIST_SUBDIRS= camerabin camerabin2 directfb mxf scaletempo opencv include $(top_srcdir)/common/parallel-subdirs.mak diff --git a/tests/examples/camerabin/gst-camera-perf.c b/tests/examples/camerabin/gst-camera-perf.c index bb37ceecd0..c5554a0e3b 100644 --- a/tests/examples/camerabin/gst-camera-perf.c +++ b/tests/examples/camerabin/gst-camera-perf.c @@ -44,7 +44,7 @@ * Includes */ #ifdef HAVE_CONFIG_H -# include "config.h" +#include "config.h" #endif /* save the snapshot images @@ -300,8 +300,6 @@ img_capture_done (GstElement * camera, GString * fname, gpointer user_data) GstClockTime max = 0; GstClockTime min = -1; GstClockTime total = 0; - GstClockTime first_shot = 0; - GstClockTime snd_shot = 0; num_pics_cont = 0; signal_cont = FALSE; @@ -309,13 +307,11 @@ img_capture_done (GstElement * camera, GString * fname, gpointer user_data) DIFF_TIME (t_final[0], t_initial, diff); max < diff ? max = diff : max; min > diff ? min = diff : min; - first_shot = diff; total += diff; DIFF_TIME (t_final[1], t_final[0], diff); max < diff ? max = diff : max; min > diff ? min = diff : min; - snd_shot = diff; total += diff; for (i = 2; i < CONT_SHOTS; ++i) { diff --git a/tests/examples/camerabin2/gst-camerabin2-test.c b/tests/examples/camerabin2/gst-camerabin2-test.c index 55d1a04d1f..f112dbcb34 100644 --- a/tests/examples/camerabin2/gst-camerabin2-test.c +++ b/tests/examples/camerabin2/gst-camerabin2-test.c @@ -74,6 +74,24 @@ --image-capture-caps Image capture caps (e.g. video/x-raw-rgb,width=640,height=480) --viewfinder-caps Viewfinder caps (e.g. 
video/x-raw-rgb,width=640,height=480) --video-capture-caps Video capture caps (e.g. video/x-raw-rgb,width=640,height=480) + --performance-measure Collect timing information about the + captures and provides performance statistics at the end + --performance-targets A list of doubles that are the performance target + times for each of the measured timestamps. The order is + startup time, change mode time, shot to save, shot to snapshot, + shot to shot, preview to precapture, shot to buffer. + e.g. 3.5,1.0,5.0,2.5,5.0,1.5,1.0 + * Startup time -> time it takes for camerabin2 to reach playing + * Change mode time -> time it takes for camerabin2 to change to the selected + mode in playing + * Shot to save -> time it takes from start-capture to having the image saved + to disk + * Shot to snapshot -> time it takes from start-capture to getting a snapshot + * Shot to shot -> time from one start-capture to the next one + * Preview to precapture -> time it takes from getting the snapshot to the + next buffer that reaches the viewfinder + * Shot to buffer -> time it takes from start-capture to the moment a buffer + is pushed out of the camera source */ @@ -105,18 +123,78 @@ */ GST_DEBUG_CATEGORY_STATIC (camerabin_test); #define GST_CAT_DEFAULT camerabin_test -typedef struct _ResultType + +#define TIME_DIFF(a,b) ((((gint64)(a)) - ((gint64)(b))) / (gdouble) GST_SECOND) + +#define TIME_FORMAT "02d.%09u" +#define TIMEDIFF_FORMAT "0.6lf" + +#define TIME_ARGS(t) \ + (GST_CLOCK_TIME_IS_VALID (t) && (t) < 99 * GST_SECOND) ? \ + (gint) ((((GstClockTime)(t)) / GST_SECOND) % 60) : 99, \ + (GST_CLOCK_TIME_IS_VALID (t) && ((t) < 99 * GST_SECOND)) ? \ + (guint) (((GstClockTime)(t)) % GST_SECOND) : 999999999 + +#define TIMEDIFF_ARGS(t) (t) + +typedef struct _CaptureTiming { - GstClockTime avg; - GstClockTime min; - GstClockTime max; - guint32 times; -} ResultType; + GstClockTime start_capture; + GstClockTime got_preview; + GstClockTime capture_done; + GstClockTime precapture; + GstClockTime camera_capture; +} CaptureTiming; + +typedef struct _CaptureTimingStats +{ + GstClockTime shot_to_shot; + GstClockTime shot_to_save; + GstClockTime shot_to_snapshot; + GstClockTime preview_to_precapture; + GstClockTime shot_to_buffer; +} CaptureTimingStats; + +static void +capture_timing_stats_add (CaptureTimingStats * a, CaptureTimingStats * b) +{ + a->shot_to_shot += b->shot_to_shot; + a->shot_to_snapshot += b->shot_to_snapshot; + a->shot_to_save += b->shot_to_save; + a->preview_to_precapture += b->preview_to_precapture; + a->shot_to_buffer += b->shot_to_buffer; +} + +static void +capture_timing_stats_div (CaptureTimingStats * stats, gint div) +{ + stats->shot_to_shot /= div; + stats->shot_to_snapshot /= div; + stats->shot_to_save /= div; + stats->preview_to_precapture /= div; + stats->shot_to_buffer /= div; +} + +#define PRINT_STATS(d,s) g_print ("%02d | %" TIME_FORMAT " | %" \ + TIME_FORMAT " | %" TIME_FORMAT " | %" TIME_FORMAT \ + " | %" TIME_FORMAT "\n", d, \ + TIME_ARGS ((s)->shot_to_save), TIME_ARGS ((s)->shot_to_snapshot), \ + TIME_ARGS ((s)->shot_to_shot), \ + TIME_ARGS ((s)->preview_to_precapture), \ + TIME_ARGS ((s)->shot_to_buffer)) + +#define SHOT_TO_SAVE(t) ((t)->capture_done - (t)->start_capture) +#define SHOT_TO_SNAPSHOT(t) ((t)->got_preview - (t)->start_capture) +#define PREVIEW_TO_PRECAPTURE(t) ((t)->precapture - (t)->got_preview) +#define SHOT_TO_BUFFER(t) ((t)->camera_capture - (t)->start_capture) /* * Global vars */ static GstElement *camerabin = NULL; +static GstElement *viewfinder_sink = NULL; +static 
gulong camera_probe_id = 0; +static gulong viewfinder_probe_id = 0; static GMainLoop *loop = NULL; /* commandline options */ @@ -137,6 +215,10 @@ static gchar *gep_filename = NULL; static gchar *image_capture_caps_str = NULL; static gchar *viewfinder_caps_str = NULL; static gchar *video_capture_caps_str = NULL; +static gchar *audio_capture_caps_str = NULL; +static gboolean performance_measure = FALSE; +static gchar *performance_targets_str = NULL; +static gchar *camerabin2_flags = NULL; #define MODE_VIDEO 2 @@ -181,7 +263,21 @@ static gchar *preview_caps_name = NULL; static Display *display = NULL; static Window window = 0; -GTimer *timer = NULL; +/* timing data */ +static GstClockTime initial_time = 0; +static GstClockTime startup_time = 0; +static GstClockTime change_mode_before = 0; +static GstClockTime change_mode_after = 0; +static GList *capture_times = NULL; + +static GstClockTime target_startup; +static GstClockTime target_change_mode; +static GstClockTime target_shot_to_shot; +static GstClockTime target_shot_to_save; +static GstClockTime target_shot_to_snapshot; +static GstClockTime target_preview_to_precapture; +static GstClockTime target_shot_to_buffer; + /* * Prototypes @@ -215,6 +311,34 @@ create_host_window (void) } } +static gboolean +camera_src_get_timestamp_probe (GstPad * pad, GstMiniObject * obj, + gpointer udata) +{ + CaptureTiming *timing; + + timing = (CaptureTiming *) g_list_first (capture_times)->data; + timing->camera_capture = gst_util_get_timestamp (); + + gst_pad_remove_data_probe (pad, camera_probe_id); + + return TRUE; +} + +static gboolean +viewfinder_get_timestamp_probe (GstPad * pad, GstMiniObject * obj, + gpointer udata) +{ + CaptureTiming *timing; + + timing = (CaptureTiming *) g_list_first (capture_times)->data; + timing->precapture = gst_util_get_timestamp (); + + gst_pad_remove_data_probe (pad, viewfinder_probe_id); + + return TRUE; +} + static GstBusSyncReply sync_bus_callback (GstBus * bus, GstMessage * message, gpointer data) { @@ -241,7 +365,23 @@ sync_bus_callback (GstBus * bus, GstMessage * message, gpointer data) return GST_BUS_DROP; } } else if (gst_structure_has_name (st, "preview-image")) { + CaptureTiming *timing; + GST_DEBUG ("preview-image"); + + timing = (CaptureTiming *) g_list_first (capture_times)->data; + timing->got_preview = gst_util_get_timestamp (); + + { + /* set up probe to check when the viewfinder gets data */ + GstPad *pad = gst_element_get_static_pad (viewfinder_sink, "sink"); + + viewfinder_probe_id = gst_pad_add_buffer_probe (pad, + (GCallback) viewfinder_get_timestamp_probe, NULL); + + gst_object_unref (pad); + } + /* extract preview-image from msg */ image = gst_structure_get_value (st, "buffer"); if (image) { @@ -250,8 +390,6 @@ sync_bus_callback (GstBus * bus, GstMessage * message, gpointer data) size = GST_BUFFER_SIZE (buf); preview_filename = g_strdup_printf ("test_vga.rgb"); caps_string = gst_caps_to_string (GST_BUFFER_CAPS (buf)); - g_print ("writing buffer to %s, elapsed: %.2fs, buffer caps: %s\n", - preview_filename, g_timer_elapsed (timer, NULL), caps_string); g_free (caps_string); f = g_fopen (preview_filename, "w"); if (f) { @@ -269,6 +407,16 @@ sync_bus_callback (GstBus * bus, GstMessage * message, gpointer data) } break; } + case GST_MESSAGE_STATE_CHANGED: + if (GST_MESSAGE_SRC (message) == (GstObject *) camerabin) { + GstState newstate; + + gst_message_parse_state_changed (message, NULL, &newstate, NULL); + if (newstate == GST_STATE_PLAYING) { + startup_time = gst_util_get_timestamp (); + } + } + break; 
default: /* unhandled message */ break; @@ -316,11 +464,15 @@ bus_callback (GstBus * bus, GstMessage * message, gpointer data) const GstStructure *structure = gst_message_get_structure (message); if (gst_structure_has_name (structure, "image-done")) { + CaptureTiming *timing; #ifndef GST_DISABLE_GST_DEBUG const gchar *fname = gst_structure_get_string (structure, "filename"); GST_DEBUG ("image done: %s", fname); #endif + timing = (CaptureTiming *) g_list_first (capture_times)->data; + timing->capture_done = gst_util_get_timestamp (); + if (capture_count < capture_total) { g_idle_add ((GSourceFunc) run_pipeline, NULL); } else { @@ -499,6 +651,13 @@ set_camerabin2_caps_from_string (void) g_object_set (camerabin, "video-capture-caps", caps, NULL); gst_caps_unref (caps); } + + if (audio_capture_caps_str != NULL) { + caps = gst_caps_from_string (audio_capture_caps_str); + GST_DEBUG ("setting audio-capture-caps: %" GST_PTR_FORMAT, caps); + g_object_set (camerabin, "audio-capture-caps", caps, NULL); + gst_caps_unref (caps); + } } static gboolean @@ -508,6 +667,9 @@ setup_pipeline (void) GstBus *bus; GstElement *sink = NULL, *ipp = NULL; GstEncodingProfile *prof = NULL; + + initial_time = gst_util_get_timestamp (); + camerabin = gst_element_factory_make ("camerabin2", NULL); if (NULL == camerabin) { g_warning ("can't create camerabin element\n"); @@ -523,6 +685,8 @@ setup_pipeline (void) GST_INFO_OBJECT (camerabin, "camerabin2 created"); + gst_util_set_object_arg (G_OBJECT (camerabin), "flags", camerabin2_flags); + if (videosrc_name) { GstElement *wrapper; GstElement *videosrc; @@ -569,8 +733,20 @@ setup_pipeline (void) GST_INFO_OBJECT (camerabin, "elements created"); - if (sink) + if (sink) { g_object_set (sink, "sync", TRUE, NULL); + } else { + /* Get the inner viewfinder sink, this uses fixed names given + * by default in camerabin2 */ + sink = gst_bin_get_by_name (GST_BIN (camerabin), "vf-bin"); + g_assert (sink); + gst_object_unref (sink); + + sink = gst_bin_get_by_name (GST_BIN (sink), "vfbin-sink"); + g_assert (sink); + gst_object_unref (sink); + } + viewfinder_sink = sink; GST_INFO_OBJECT (camerabin, "elements configured"); @@ -614,6 +790,13 @@ setup_pipeline (void) set_camerabin2_caps_from_string (); + /* change to the wrong mode if timestamping if performance mode is on so + * we can change it back and measure the time after in playing */ + if (performance_measure) { + g_object_set (camerabin, "mode", + mode == MODE_VIDEO ? 
MODE_IMAGE : MODE_VIDEO, NULL); + } + if (GST_STATE_CHANGE_FAILURE == gst_element_set_state (camerabin, GST_STATE_READY)) { g_warning ("can't set camerabin to ready\n"); @@ -628,6 +811,14 @@ setup_pipeline (void) } GST_INFO_OBJECT (camerabin, "camera started"); + + /* do the mode change timestamping if performance mode is on */ + if (performance_measure) { + change_mode_before = gst_util_get_timestamp (); + g_object_set (camerabin, "mode", mode, NULL); + change_mode_after = gst_util_get_timestamp (); + } + return TRUE; error: cleanup_pipeline (); @@ -695,6 +886,7 @@ run_pipeline (gpointer user_data) gchar *filename_str = NULL; GstElement *video_source = NULL; const gchar *filename_suffix; + CaptureTiming *timing; g_object_set (camerabin, "mode", mode, NULL); @@ -745,13 +937,29 @@ run_pipeline (gpointer user_data) g_object_set (video_source, "colour-tone-mode", color_mode, NULL); } g_object_unref (video_source); + } else { + video_source = gst_bin_get_by_name (GST_BIN (camerabin), "camerasrc"); + gst_object_unref (video_source); } g_object_set (camerabin, "zoom", zoom / 100.0f, NULL); capture_count++; - g_timer_start (timer); - g_signal_emit_by_name (camerabin, "start-capture", 0); + timing = g_slice_new0 (CaptureTiming); + capture_times = g_list_prepend (capture_times, timing); + + /* set pad probe to check when buffer leaves the camera source */ + if (mode == MODE_IMAGE) { + GstPad *pad; + + pad = gst_element_get_static_pad (video_source, "imgsrc"); + camera_probe_id = gst_pad_add_buffer_probe (pad, + (GCallback) camera_src_get_timestamp_probe, NULL); + + gst_object_unref (pad); + } + timing->start_capture = gst_util_get_timestamp (); + g_signal_emit_by_name (camerabin, "start-capture", 0); if (mode == MODE_VIDEO) { g_timeout_add ((capture_time * 1000), (GSourceFunc) stop_capture, NULL); @@ -760,6 +968,183 @@ run_pipeline (gpointer user_data) return FALSE; } +static void +parse_target_values (void) +{ + gdouble startup = 0, change_mode = 0, shot_to_save = 0, shot_to_snapshot = 0; + gdouble shot_to_shot = 0, preview_to_precapture = 0, shot_to_buffer = 0; + + if (performance_targets_str == NULL) + return; + + /* + startup time, change mode time, shot to save, shot to snapshot, + shot to shot, preview to precapture, shot to buffer. 
+ */ + sscanf (performance_targets_str, "%lf,%lf,%lf,%lf,%lf,%lf,%lf", + &startup, &change_mode, &shot_to_save, + &shot_to_snapshot, &shot_to_shot, &preview_to_precapture, + &shot_to_buffer); + + target_startup = (GstClockTime) (startup * GST_SECOND); + target_change_mode = (GstClockTime) (change_mode * GST_SECOND); + target_shot_to_save = (GstClockTime) (shot_to_save * GST_SECOND); + target_shot_to_snapshot = (GstClockTime) (shot_to_snapshot * GST_SECOND); + target_shot_to_shot = (GstClockTime) (shot_to_shot * GST_SECOND); + target_preview_to_precapture = + (GstClockTime) (preview_to_precapture * GST_SECOND); + target_shot_to_buffer = (GstClockTime) (shot_to_buffer * GST_SECOND); +} + +static void +print_performance_data (void) +{ + GList *iter; + gint i = 0; + GstClockTime last_start = 0; + CaptureTimingStats max; + CaptureTimingStats min; + CaptureTimingStats avg; + CaptureTimingStats avg_wo_first; + GstClockTime shot_to_shot; + + if (!performance_measure) + return; + + parse_target_values (); + + /* Initialize stats */ + min.shot_to_shot = -1; + min.shot_to_save = -1; + min.shot_to_snapshot = -1; + min.preview_to_precapture = -1; + min.shot_to_buffer = -1; + memset (&avg, 0, sizeof (CaptureTimingStats)); + memset (&avg_wo_first, 0, sizeof (CaptureTimingStats)); + memset (&max, 0, sizeof (CaptureTimingStats)); + + g_print ("-- Performance results --\n"); + g_print ("Startup time: %" TIME_FORMAT "; Target: %" TIME_FORMAT "\n", + TIME_ARGS (startup_time - initial_time), TIME_ARGS (target_startup)); + g_print ("Change mode time: %" TIME_FORMAT "; Target: %" TIME_FORMAT "\n", + TIME_ARGS (change_mode_after - change_mode_before), + TIME_ARGS (target_change_mode)); + + g_print + ("\n | Shot to save |Shot to snapshot| Shot to shot |" + "Preview to precap| Shot to buffer\n"); + capture_times = g_list_reverse (capture_times); + for (iter = capture_times; iter; iter = g_list_next (iter)) { + CaptureTiming *t = (CaptureTiming *) iter->data; + CaptureTimingStats stats; + + stats.shot_to_save = SHOT_TO_SAVE (t); + stats.shot_to_snapshot = SHOT_TO_SNAPSHOT (t); + stats.shot_to_shot = i == 0 ? 
0 : t->start_capture - last_start; + stats.preview_to_precapture = PREVIEW_TO_PRECAPTURE (t); + stats.shot_to_buffer = SHOT_TO_BUFFER (t); + + PRINT_STATS (i, &stats); + + if (i != 0) { + capture_timing_stats_add (&avg_wo_first, &stats); + } + capture_timing_stats_add (&avg, &stats); + + if (stats.shot_to_save < min.shot_to_save) { + min.shot_to_save = stats.shot_to_save; + } + if (stats.shot_to_snapshot < min.shot_to_snapshot) { + min.shot_to_snapshot = stats.shot_to_snapshot; + } + if (stats.shot_to_shot < min.shot_to_shot && stats.shot_to_shot > 0) { + min.shot_to_shot = stats.shot_to_shot; + } + if (stats.preview_to_precapture < min.preview_to_precapture) { + min.preview_to_precapture = stats.preview_to_precapture; + } + if (stats.shot_to_buffer < min.shot_to_buffer) { + min.shot_to_buffer = stats.shot_to_buffer; + } + + + if (stats.shot_to_save > max.shot_to_save) { + max.shot_to_save = stats.shot_to_save; + } + if (stats.shot_to_snapshot > max.shot_to_snapshot) { + max.shot_to_snapshot = stats.shot_to_snapshot; + } + if (stats.shot_to_shot > max.shot_to_shot) { + max.shot_to_shot = stats.shot_to_shot; + } + if (stats.preview_to_precapture > max.preview_to_precapture) { + max.preview_to_precapture = stats.preview_to_precapture; + } + if (stats.shot_to_buffer > max.shot_to_buffer) { + max.shot_to_buffer = stats.shot_to_buffer; + } + + last_start = t->start_capture; + i++; + } + + if (i > 1) + shot_to_shot = avg.shot_to_shot / (i - 1); + else + shot_to_shot = GST_CLOCK_TIME_NONE; + capture_timing_stats_div (&avg, i); + avg.shot_to_shot = shot_to_shot; + if (i > 1) + capture_timing_stats_div (&avg_wo_first, i - 1); + else { + memset (&avg_wo_first, 0, sizeof (CaptureTimingStats)); + } + + g_print ("\n Stats | MIN | MAX |" + " AVG | AVG wo First | Target | Diff \n"); + g_print ("Shot to shot | %" TIME_FORMAT " | %" TIME_FORMAT + " | %" TIME_FORMAT " | %" TIME_FORMAT " | %" TIME_FORMAT + " | %" TIMEDIFF_FORMAT "\n", + TIME_ARGS (min.shot_to_shot), TIME_ARGS (max.shot_to_shot), + TIME_ARGS (avg.shot_to_shot), + TIME_ARGS (avg_wo_first.shot_to_shot), + TIME_ARGS (target_shot_to_shot), + TIMEDIFF_ARGS (TIME_DIFF (avg.shot_to_shot, target_shot_to_shot))); + g_print ("Shot to save | %" TIME_FORMAT " | %" TIME_FORMAT + " | %" TIME_FORMAT " | %" TIME_FORMAT " | %" TIME_FORMAT + " | %" TIMEDIFF_FORMAT "\n", + TIME_ARGS (min.shot_to_save), TIME_ARGS (max.shot_to_save), + TIME_ARGS (avg.shot_to_save), + TIME_ARGS (avg_wo_first.shot_to_save), + TIME_ARGS (target_shot_to_save), + TIMEDIFF_ARGS (TIME_DIFF (avg.shot_to_save, target_shot_to_save))); + g_print ("Shot to snapshot | %" TIME_FORMAT " | %" TIME_FORMAT + " | %" TIME_FORMAT " | %" TIME_FORMAT " | %" TIME_FORMAT + " | %" TIMEDIFF_FORMAT "\n", + TIME_ARGS (min.shot_to_snapshot), + TIME_ARGS (max.shot_to_snapshot), + TIME_ARGS (avg.shot_to_snapshot), + TIME_ARGS (avg_wo_first.shot_to_snapshot), + TIME_ARGS (target_shot_to_snapshot), + TIMEDIFF_ARGS (TIME_DIFF (avg.shot_to_snapshot, + target_shot_to_snapshot))); + g_print ("Preview to precapture | %" TIME_FORMAT " | %" TIME_FORMAT " | %" + TIME_FORMAT " | %" TIME_FORMAT " | %" TIME_FORMAT " | %" TIMEDIFF_FORMAT + "\n", TIME_ARGS (min.preview_to_precapture), + TIME_ARGS (max.preview_to_precapture), + TIME_ARGS (avg.preview_to_precapture), + TIME_ARGS (avg_wo_first.preview_to_precapture), + TIME_ARGS (target_preview_to_precapture), + TIMEDIFF_ARGS (TIME_DIFF (avg.preview_to_precapture, + target_preview_to_precapture))); + g_print ("Shot to buffer | %" TIME_FORMAT " | %" TIME_FORMAT " | %" + 
TIME_FORMAT " | %" TIME_FORMAT " | %" TIME_FORMAT " | %" TIMEDIFF_FORMAT + "\n", TIME_ARGS (min.shot_to_buffer), TIME_ARGS (max.shot_to_buffer), + TIME_ARGS (avg.shot_to_buffer), TIME_ARGS (avg_wo_first.shot_to_buffer), + TIME_ARGS (target_shot_to_buffer), + TIMEDIFF_ARGS (TIME_DIFF (avg.shot_to_buffer, target_shot_to_buffer))); +} + int main (int argc, char *argv[]) { @@ -849,6 +1234,23 @@ main (int argc, char *argv[]) {"video-capture-caps", '\0', 0, G_OPTION_ARG_STRING, &video_capture_caps_str, "Video capture caps (e.g. video/x-raw-rgb,width=640,height=480)", NULL}, + {"audio-capture-caps", '\0', 0, + G_OPTION_ARG_STRING, &audio_capture_caps_str, + "Audio capture caps (e.g. audio/x-raw-int,width=16,depth=16,rate=44100,channels=2)", + NULL}, + {"performance-measure", '\0', 0, + G_OPTION_ARG_NONE, &performance_measure, + "If performance information should be printed at the end of execution", + NULL}, + {"performance-targets", '\0', 0, + G_OPTION_ARG_STRING, &performance_targets_str, + "Comma separated list of doubles representing the target values in " + "seconds. The order is: startup time, change mode time, shot to save" + ", shot to snapshot, shot to shot, preview to shot, shot to buffer. " + "e.g. 3.5,1.0,5.0,2.5,5.0,1.5,1.0", + NULL}, + {"flags", '\0', 0, G_OPTION_ARG_STRING, &camerabin2_flags, + "camerabin2 element flags (default = 0)", NULL}, {NULL} }; @@ -885,8 +1287,6 @@ main (int argc, char *argv[]) if (filename->len == 0) filename = g_string_append (filename, "."); - timer = g_timer_new (); - /* init */ if (setup_pipeline ()) { loop = g_main_loop_new (NULL, FALSE); @@ -895,7 +1295,22 @@ main (int argc, char *argv[]) cleanup_pipeline (); g_main_loop_unref (loop); } + + /* performance */ + if (performance_measure) { + print_performance_data (); + } + /* free */ + { + GList *iter; + + for (iter = capture_times; iter; iter = g_list_next (iter)) { + g_slice_free (CaptureTiming, iter->data); + } + g_list_free (capture_times); + } + g_string_free (filename, TRUE); g_free (ev_option); g_free (wrappersrc_name); @@ -908,7 +1323,7 @@ main (int argc, char *argv[]) g_free (gep_targetname); g_free (gep_profilename); g_free (gep_filename); - g_timer_destroy (timer); + g_free (performance_targets_str); if (window) XDestroyWindow (display, window); diff --git a/tests/examples/opencv/Makefile.am b/tests/examples/opencv/Makefile.am new file mode 100644 index 0000000000..16af077a29 --- /dev/null +++ b/tests/examples/opencv/Makefile.am @@ -0,0 +1,7 @@ +noinst_PROGRAMS = gstmotioncells_dynamic_test + +gstmotioncells_dynamic_test_SOURCES = gstmotioncells_dynamic_test.c gst_element_print_properties.c +gstmotioncells_dynamic_test_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) +gstmotioncells_dynamic_test_LDFLAGS = $(GST_LIBS) $(GST_BASE_LIBS) $(GST_PLUGINS_BASE_LIBS) $(GSTPB_BASE_LIBS) + +noinst_HEADERS = gst_element_print_properties.h diff --git a/tests/examples/opencv/gst_element_print_properties.c b/tests/examples/opencv/gst_element_print_properties.c new file mode 100644 index 0000000000..4192fe84e1 --- /dev/null +++ b/tests/examples/opencv/gst_element_print_properties.c @@ -0,0 +1,599 @@ +/* GStreamer + * Copyright (C) 2010 Wesley Miller + * + * + * gst_element_print_properties(): a tool to inspect GStreamer + * element properties + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later 
version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + * + */ + + +#include +#include +#include +#include + +#include "gst_element_print_properties.h" + + +void +gst_element_print_properties (GstElement * element) +{ + ///////////////////////////////////////////////////////////////////////////// + // + // Formatting setup + // + // Change the values of c2w, c3w and c4w to adjust the 2nd, 3rd and 4th + // column widths, respectively. The gutter width is fixed at 3 and + // always prints as " | ". Column 1 has a fixed width of 3. + // + // The first two rows for each element's output are its element class + // name (e.g. "GstAudioResample") and its element factory name + // ("audioresample"). The long element factory name ("Audio resampler") + // is in column 4 following the element factory name. + // + // Most properties use this format. Multivalued items like CAPS, certain + // GST_TYPEs and enums are different. + // + // Column 1 contains the rwc, "readable", "writable", "controllable" + // flags of the property. + // Column 2 contains the property name + // Column 3 contains the current value + // Column 4 contains the property type, e.g. G_TYPE_INT + // Column 5 contains the range, if there is one, and the default. + // The range is enclosed in parentheses. e.g. "(1-10) 5" + // + // CAPS, enums, flags and some undefined items have no columns 4 or 5 and + // column 3 will contain a description of the item. Additional rows may + // list specific values (CAPS and flags). + // + // String values are enclosed in double quotes. A missing right quote + // indicates the string had been truncated. + // + // Screen column + // ----+----1----+----2----+----3----+----4----+----5----+----6----+----7----+----8----+----9---> + // + // formatted columns with built in gutters + // --- | ---------c2---------- | ---------c3-------- | -----------c4---------- | --> unspecified + // + // <-->|<--- property name --->|<-- current value -->|<-------- type --------->|<----- range and default -----> + // | ELEMENT CLASS NAME | GstAudioResample | | + // | ELEMENT FACTORY NAME | audioresample | Audio resampler | + // RW- | name | "audioResampler" | G_TYPE_STRING | null + // RW- | qos | false | G_TYPE_BOOLEAN | false + // RW- | quality | 8 | G_TYPE_INT | (0 - 10) 4 + // + ///////////////////////////////////////////////////////////////////////////// + + const guint c2w = 21; // column 2 width + const guint c3w = 19; // column 3 width + const guint c4w = 23; // column 4 width + + ///////////////////////////////////////////////////////////////////////////// + // end configuration variables. 
+ ///////////////////////////////////////////////////////////////////////////// + + GParamSpec **property_specs; + guint num_properties, i; + gboolean readable; + + + g_return_if_fail (element != NULL); + + property_specs = g_object_class_list_properties (G_OBJECT_GET_CLASS (element), + &num_properties); + + /*--- draw the header information ---*/ + print_column_titles (c2w, c3w, c4w); + print_element_info (element, c2w, c3w, c4w); + + + for (i = 0; i < num_properties; i++) { + gchar flags[4]; + GValue value = { 0, }; + GParamSpec *param = property_specs[i]; + + readable = FALSE; + + g_value_init (&value, param->value_type); + + flags[0] = '-'; + flags[1] = '-'; + flags[2] = '-'; + flags[3] = 0x0; + + if (param->flags & G_PARAM_READABLE) { + g_object_get_property (G_OBJECT (element), param->name, &value); + readable = TRUE; + flags[0] = 'r'; + } + + if (param->flags & G_PARAM_WRITABLE) + flags[1] = 'w'; + + if (param->flags & GST_PARAM_CONTROLLABLE) + flags[2] = 'c'; + + g_print ("%s |", flags); + g_print (" %-*s | ", c2w, g_param_spec_get_name (param)); + + switch (G_VALUE_TYPE (&value)) { + case G_TYPE_STRING: // String + { + GParamSpecString *pstring = G_PARAM_SPEC_STRING (param); + if (readable) { /* current */ + const char *string_val = g_value_get_string (&value); + gchar work_string[100]; + + if (string_val == NULL) + sprintf (work_string, "\"%s\"", "null"); + else + sprintf (work_string, "\"%s\"", string_val); + g_print ("%-*.*s", c3w, c3w, work_string); + } else { + g_print ("%-*s", c3w, ""); /* alt current */ + } + g_print (" | %-*s", c4w, "G_TYPE_STRING"); /* type */ + + if (pstring->default_value == NULL) + g_print (" | %s", "null"); /* default */ + else + g_print (" | \"%s\"", pstring->default_value); /* default */ + break; + } + + case G_TYPE_BOOLEAN: // Boolean + { + GParamSpecBoolean *pboolean = G_PARAM_SPEC_BOOLEAN (param); + if (readable) /* current */ + g_print ("%-*s", c3w, + (g_value_get_boolean (&value) ? "true" : "false")); + else + g_print ("%-*s", c3w, ""); + g_print (" | %-*s", c4w, "G_TYPE_BOOLEAN"); /* type */ + g_print (" | %s ", /* default */ + (pboolean->default_value ? 
"true" : "false")); + break; + } + + case G_TYPE_ULONG: // Unsigned Long + { + GParamSpecULong *pulong = G_PARAM_SPEC_ULONG (param); + if (readable) /* current */ + g_print ("%-*lu", c3w, g_value_get_ulong (&value)); + else + g_print ("%-*s", c3w, ""); + g_print (" | %-*s", c4w, "G_TYPE_ULONG"); /* type */ + g_print (" | (%lu - %lu) %lu ", pulong->minimum, pulong->maximum, /* range */ + pulong->default_value); /* default */ + break; + } + + case G_TYPE_LONG: // Long + { + GParamSpecLong *plong = G_PARAM_SPEC_LONG (param); + if (readable) /* current */ + g_print ("%-*ld", c3w, g_value_get_long (&value)); + else + g_print ("%-*s", c3w, ""); + g_print (" | %-*s", c4w, "G_TYPE_LONG"); /* type */ + g_print (" | (%ld - %ld) %ld ", plong->minimum, plong->maximum, /* range */ + plong->default_value); /* default */ + break; + } + + case G_TYPE_UINT: // Unsigned Integer + { + GParamSpecUInt *puint = G_PARAM_SPEC_UINT (param); + if (readable) /* current */ + g_print ("%-*u", c3w, g_value_get_uint (&value)); + else + g_print ("%-*s", c3w, ""); + g_print (" | %-*s", c4w, "G_TYPE_UINT"); /* type */ + g_print (" | (%u - %u) %u ", puint->minimum, puint->maximum, /* range */ + puint->default_value); /* default */ + break; + } + + case G_TYPE_INT: // Integer + { + GParamSpecInt *pint = G_PARAM_SPEC_INT (param); + if (readable) /* current */ + g_print ("%-*d", c3w, g_value_get_int (&value)); + else + g_print ("%-*s", c3w, ""); + g_print (" | %-*s", c4w, "G_TYPE_INT"); /* type */ + g_print (" | (%d - %d) %d ", pint->minimum, pint->maximum, /* range */ + pint->default_value); /* default */ + break; + } + + case G_TYPE_UINT64: // Unsigned Integer64. + { + GParamSpecUInt64 *puint64 = G_PARAM_SPEC_UINT64 (param); + if (readable) /* current */ + g_print ("%-*" G_GUINT64_FORMAT, c3w, g_value_get_uint64 (&value)); + else + g_print ("%-*s", c3w, ""); + g_print (" | %-*s", c4w, "G_TYPE_UINT64"); /* type */ + g_print (" | (%" G_GUINT64_FORMAT " - %" G_GUINT64_FORMAT ")" " %" G_GUINT64_FORMAT " ", puint64->minimum, puint64->maximum, /* range */ + puint64->default_value); /* default */ + break; + } + + case G_TYPE_INT64: // Integer64 + { + GParamSpecInt64 *pint64 = G_PARAM_SPEC_INT64 (param); + if (readable) /* current */ + g_print ("%-*" G_GINT64_FORMAT, c3w, g_value_get_int64 (&value)); + else + g_print ("%-*s", c3w, ""); + g_print (" | %-*s", c4w, "G_TYPE_INT64"); /* type */ + g_print (" | (%" G_GINT64_FORMAT " - %" G_GINT64_FORMAT ")" " %" G_GINT64_FORMAT " ", pint64->minimum, pint64->maximum, /* range */ + pint64->default_value); /* default */ + break; + } + + case G_TYPE_FLOAT: // Float. 
+ { + GParamSpecFloat *pfloat = G_PARAM_SPEC_FLOAT (param); + if (readable) /* current */ + g_print ("%-*g", c3w, g_value_get_float (&value)); + else + g_print ("%-*s", c3w, ""); + g_print (" | %-*s", c4w, "G_TYPE_FLOAT"); /* type */ + g_print (" | (%g - %g) %g ", pfloat->minimum, pfloat->maximum, /* range */ + pfloat->default_value); /* default */ + break; + } + + case G_TYPE_DOUBLE: // Double + { + GParamSpecDouble *pdouble = G_PARAM_SPEC_DOUBLE (param); + if (readable) /* current */ + g_print ("%-*g", c3w, g_value_get_double (&value)); + else + g_print ("%-*s", c3w, ""); + g_print (" | %-*s", c4w, "G_TYPE_DOUBLE"); /* type */ + g_print (" | (%g - %g) %g ", pdouble->minimum, pdouble->maximum, /* range */ + pdouble->default_value); /* default */ + break; + } + + default: + if (param->value_type == GST_TYPE_CAPS) { + const GstCaps *caps = gst_value_get_caps (&value); + if (!caps) + g_print ("%-*s | %-*.*s |", c3w, "Caps (NULL)", c4w, c4w, " "); + else { + gchar prefix_string[100]; + sprintf (prefix_string, " | %-*.*s | ", c2w, c2w, " "); + print_caps (caps, prefix_string); + } + } + + else if (G_IS_PARAM_SPEC_ENUM (param)) { + GParamSpecEnum *penum = G_PARAM_SPEC_ENUM (param); + GEnumValue *values; + guint j = 0; + gint enum_value; + const gchar *def_val_nick = "", *cur_val_nick = ""; + gchar work_string[100]; + + values = G_ENUM_CLASS (g_type_class_ref (param->value_type))->values; + enum_value = g_value_get_enum (&value); + + while (values[j].value_name) { + if (values[j].value == enum_value) + cur_val_nick = values[j].value_nick; + if (values[j].value == penum->default_value) + def_val_nick = values[j].value_nick; + j++; + } + + sprintf (work_string, "%d, \"%s\"", enum_value, cur_val_nick); + g_print ("%-*.*s", c3w, c3w, work_string); + g_print (" | Enum \"%s\" : %d, \"%s\"", + g_type_name (G_VALUE_TYPE (&value)), + penum->default_value, def_val_nick); + } + + else if (G_IS_PARAM_SPEC_FLAGS (param)) { + GParamSpecFlags *pflags = G_PARAM_SPEC_FLAGS (param); + GFlagsValue *vals; + gchar *cur, *def; + gchar work_string[100]; + + vals = pflags->flags_class->values; + cur = flags_to_string (vals, g_value_get_flags (&value)); /* current */ + def = flags_to_string (vals, pflags->default_value); /* default */ + + /* current */ + sprintf (work_string, "0x%08x, \"%s\"", + g_value_get_flags (&value), cur); + g_print ("%-*.*s", c3w, c3w, work_string); + + /* type */ + sprintf (work_string, "Flags \"%s\"", + g_type_name (G_VALUE_TYPE (&value))); + g_print ("%-*.*s", c4w, c4w, work_string); + + /* default */ + g_print (" | 0x%08x, \"%s\"", pflags->default_value, def); + + /* values list */ + while (vals[0].value_name) { + sprintf (work_string, "\n | %-*.*s | (0x%08x): %-16s - %s", + c2w, c2w, "", + vals[0].value, vals[0].value_nick, vals[0].value_name); + g_print ("%s", work_string); + ++vals; + } + + g_free (cur); + g_free (def); + } + + else if (G_IS_PARAM_SPEC_OBJECT (param)) { + g_print ("%-*.*s | Object of type \"%s\"", + c3w, c3w, + g_type_name (param->value_type), g_type_name (param->value_type)); + } + + else if (G_IS_PARAM_SPEC_BOXED (param)) { + g_print ("%-*.*s | Boxed pointer of type \"%s\"", + c3w, c3w, + g_type_name (param->value_type), g_type_name (param->value_type)); + } + + else if (G_IS_PARAM_SPEC_POINTER (param)) { + if (param->value_type != G_TYPE_POINTER) { + g_print ("%-*.*s | Pointer of type \"%s\"", + c3w, c3w, + g_type_name (param->value_type), + g_type_name (param->value_type)); + } else { + g_print ("%-*.*s |", c3w, c3w, "Pointer."); + } + } + + else if (param->value_type 
== G_TYPE_VALUE_ARRAY) { + GParamSpecValueArray *pvarray = G_PARAM_SPEC_VALUE_ARRAY (param); + if (pvarray->element_spec) { + g_print ("%-*.*s :Array of GValues of type \"%s\"", + c3w, c3w, + g_type_name (pvarray->element_spec->value_type), + g_type_name (pvarray->element_spec->value_type)); + } else { + g_print ("%-*.*s :", c3w, c3w, "Array of GValues"); + } + } + + else if (GST_IS_PARAM_SPEC_FRACTION (param)) { + GstParamSpecFraction *pfraction = GST_PARAM_SPEC_FRACTION (param); + gchar work_string[100]; + + if (readable) { /* current */ + sprintf (work_string, "%d/%d", + gst_value_get_fraction_numerator (&value), + gst_value_get_fraction_denominator (&value)); + g_print ("%-*.*s", c3w, c3w, work_string); + } else + g_print ("%-*s", c3w, ""); + + g_print (" | %-*.*s", /* type */ + c3w, c3w, " Fraction. "); + g_print (" | (%d/%d - %d/%d)", /* range */ + pfraction->min_num, pfraction->min_den, + pfraction->max_num, pfraction->max_den); + g_print (" %d/%d ", /* default */ + pfraction->def_num, pfraction->def_den); + } + + else if (GST_IS_PARAM_SPEC_MINI_OBJECT (param)) { + g_print ("%-*.*s | MiniObject of type \"%s\"", + c3w, c3w, + g_type_name (param->value_type), g_type_name (param->value_type)); + } + + else { + g_print ("Unknown type %ld \"%s\"", + (glong) param->value_type, g_type_name (param->value_type)); + + } + break; + } + + if (!readable) + g_print (" Write only\n"); + else + g_print ("\n"); + + g_value_reset (&value); + } + + if (0 == num_properties) + g_print (" none\n"); + + g_free (property_specs); +} + +//------------------------------------------------------------------------------ +void +print_column_titles (guint c2w, guint c3w, guint c4w) +{ + ////////////////////////////////////////////////////////////////////////// + // + // Create Header for property listing + // RWF | --- element name ---- | ---------c3-------- | -----------c4---------- | --> unspecified + // + ////////////////////////////////////////////////////////////////////////// + gchar work_string[200]; + gchar dashes[] = "-----------------------------"; + gint llen = 0; + gint rlen = 0; + + /*--- column 1 - RWC ---*/ + sprintf (work_string, "<-->|<"); + + /*--- column 2 - property name ---*/ + llen = (c2w - 15) / 2; /* width of " property name " = 15 */ + rlen = c2w - 15 - llen; + + strncat (work_string, dashes, llen); + strcat (work_string, " property name "); + strncat (work_string, dashes, rlen); + strcat (work_string, ">|<"); + + /*--- column 3 - current value ---*/ + llen = (c3w - 15) / 2; /* width of " current value " = 15 */ + rlen = c3w - 15 - llen; + + strncat (work_string, dashes, llen); + strcat (work_string, " current value "); + strncat (work_string, dashes, rlen); + strcat (work_string, ">|<"); + + /*--- column 4 - type ---*/ + llen = (c4w - 6) / 2; /* width of " type " = 6 */ + rlen = c4w - 6 - llen; + + strncat (work_string, dashes, llen); + strcat (work_string, " type "); + strncat (work_string, dashes, rlen); + strcat (work_string, ">|<"); + + /*--- column 5 - range and default ---*/ + strcat (work_string, "----- range and default ----->"); + + g_print ("\n%s\n", work_string); +} + +//------------------------------------------------------------------------------ +void +print_element_info (GstElement * element, guint c2w, guint c3w, guint c4w) +{ + ///////////////////////////////////////////////////////////////////////////// + // + // Print element factory and class information as part of each header + // + ///////////////////////////////////////////////////////////////////////////// + gchar 
work_string[100]; + GstElementFactory *factory = gst_element_get_factory (element); + + sprintf (work_string, "ELEMENT CLASS NAME"); + g_print (" | %-*s", c2w, work_string); + g_print (" | %-*s", c3w, g_type_name (G_OBJECT_TYPE (element))); + g_print (" | %-*s | \n", c4w, ""); + + + sprintf (work_string, "ELEMENT FACTORY NAME"); + g_print (" | %-*s", c2w, work_string); + + g_print (" | %-*s", c3w, + gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory))); + g_print (" | %-*s | \n", c4w, gst_element_factory_get_longname (factory)); + +// "Audio Resampler" g_print( " | %-*s", c3w, gst_element_factory_get_longname( gst_element_get_factory( element )) ); + + +} + +//------------------------------------------------------------------------------ +gchar * +flags_to_string (GFlagsValue * vals, guint flags) +{ + ///////////////////////////////////////////////////////////////////////////// + // + // List individual flags in separate rows + // + ///////////////////////////////////////////////////////////////////////////// + GString *s = NULL; + guint flags_left, i; + + /* first look for an exact match and count the number of values */ + for (i = 0; vals[i].value_name != NULL; ++i) { + if (vals[i].value == flags) + return g_strdup (vals[i].value_nick); + } + + s = g_string_new (NULL); + + /* we assume the values are sorted from lowest to highest value */ + flags_left = flags; + while (i > 0) { + --i; + if (0 != vals[i].value && (flags_left & vals[i].value) == vals[i].value) { + if (0 < s->len) + g_string_append (s, " | "); + g_string_append (s, vals[i].value_nick); + flags_left -= vals[i].value; + if (0 == flags_left) + break; + } + } + + if (0 == s->len) + g_string_assign (s, "(none)"); + + return g_string_free (s, FALSE); +} + + +//------------------------------------------------------------------------------ +void +print_caps (const GstCaps * caps, const gchar * pfx) +{ + ///////////////////////////////////////////////////////////////////////////// + // + // Print each caps value on a separate line + // + ///////////////////////////////////////////////////////////////////////////// + guint i; + + g_return_if_fail (caps != NULL); + + if (gst_caps_is_any (caps)) { + g_print ("%s | %s", pfx, "ANY | |"); + return; + } + if (gst_caps_is_empty (caps)) { + g_print ("%s | %s", pfx, "EMPTY | |"); + return; + } + + for (i = 0; i < gst_caps_get_size (caps); i++) { + GstStructure *structure = gst_caps_get_structure (caps, i); + g_print ("%s", gst_structure_get_name (structure)); + gst_structure_foreach (structure, print_field, (gpointer) pfx); + } +} + +//------------------------------------------------------------------------------ +gboolean +print_field (GQuark field, const GValue * value, gpointer pfx) +{ + ///////////////////////////////////////////////////////////////////////////// + // + // printing function for individual caps fields + // + ///////////////////////////////////////////////////////////////////////////// + gchar *str = gst_value_serialize (value); + g_print ("\n%s %-15.15s - %s", + (gchar *) pfx, g_quark_to_string (field), str); + g_free (str); + return TRUE; +} diff --git a/tests/examples/opencv/gst_element_print_properties.h b/tests/examples/opencv/gst_element_print_properties.h new file mode 100644 index 0000000000..49beee35ca --- /dev/null +++ b/tests/examples/opencv/gst_element_print_properties.h @@ -0,0 +1,34 @@ +/* GStreamer + * Copyright (C) 2010 Wesley Miller + * + * + * gst_element_print_properties(): a tool to inspect GStreamer + * element properties + * + * This program is 
free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + * + */ + +#ifndef GST_ELEMENT_PRINT_PROPERTIES_H +#define GST_ELEMENT_PRINT_PROPERTIES_H + +extern void gst_element_print_properties (GstElement * element); +extern void print_column_titles (guint c2w, guint c3w, guint c4w); +extern void print_element_info (GstElement * element, guint c2w, guint c3w, + guint c4w); +extern gchar *flags_to_string (GFlagsValue * vals, guint flags); +extern void print_caps (const GstCaps * caps, const gchar * pfx); +extern gboolean print_field (GQuark field, const GValue * value, gpointer pfx); + +#endif diff --git a/tests/examples/opencv/gstmotioncells_dynamic_test.c b/tests/examples/opencv/gstmotioncells_dynamic_test.c new file mode 100644 index 0000000000..83834065be --- /dev/null +++ b/tests/examples/opencv/gstmotioncells_dynamic_test.c @@ -0,0 +1,241 @@ +/* GStreamer + * Copyright (C) 2011 Robert Jobbagy + * + * + * gst_motioncells_dynamic_test(): a test tool that can change properties dynamically + * + * + * This program is free software: you can redistribute it and/or modify + * it under the terms of the GNU General Public License as published by + * the Free Software Foundation, either version 3 of the License, or + * (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU General Public License for more details. + * + * You should have received a copy of the GNU General Public License + * along with this program. If not, see . + * + */ +#include +#include +#include +#include +#include +#include +#include +#include +#include "gstmotioncells_dynamic_test.h" +#include "gst_element_print_properties.h" + +const guint c2w = 21; // column 2 width +const guint c3w = 19; // column 3 width +const guint c4w = 23; // column 4 width + +void +setProperty (GstElement * mcells, char *property, char *prop_value, GType type, + GValue * value) +{ + + switch (type) { + case G_TYPE_STRING: + { + g_object_set (G_OBJECT (mcells), property, prop_value, NULL); + break; + } + case G_TYPE_BOOLEAN: + { + gboolean flag = (g_strcmp0 (prop_value, "true") == 0) ? 
+      gboolean flag = (g_strcmp0 (prop_value, "true") == 0) ? TRUE : FALSE;
+      g_object_set (G_OBJECT (mcells), property, flag, NULL);
+      break;
+    }
+    case G_TYPE_ULONG:
+    {
+      unsigned long ulongval = strtoul (prop_value, NULL, 0);
+      g_object_set (G_OBJECT (mcells), property, ulongval, NULL);
+      break;
+    }
+    case G_TYPE_LONG:
+    {
+      long longval = atol (prop_value);
+      g_object_set (G_OBJECT (mcells), property, longval, NULL);
+      break;
+    }
+    case G_TYPE_UINT:
+    {
+      unsigned int uintval = atoi (prop_value);
+      g_object_set (G_OBJECT (mcells), property, uintval, NULL);
+      break;
+    }
+    case G_TYPE_INT:
+    {
+      int intval = atoi (prop_value);
+      g_object_set (G_OBJECT (mcells), property, intval, NULL);
+      break;
+    }
+    case G_TYPE_UINT64:
+    {
+      guint64 guint64val = g_ascii_strtoull (prop_value, NULL, 0);
+      g_object_set (G_OBJECT (mcells), property, guint64val, NULL);
+      break;
+    }
+    case G_TYPE_INT64:
+    {
+      gint64 gint64val = g_ascii_strtoll (prop_value, NULL, 0);
+      g_object_set (G_OBJECT (mcells), property, gint64val, NULL);
+      break;
+    }
+    case G_TYPE_FLOAT:
+    {
+      float floatval = atof (prop_value);
+      g_object_set (G_OBJECT (mcells), property, floatval, NULL);
+      break;
+    }
+    case G_TYPE_DOUBLE:
+    {
+      double doubleval = strtod (prop_value, NULL);
+      g_object_set (G_OBJECT (mcells), property, doubleval, NULL);
+      break;
+    }
+    default:
+      fprintf (stderr, "Unsupported property type\n");
+      break;
+  }
+}
+
+// gst-launch v4l2src ! videoscale ! videorate ! capsfilter "caps=video/x-raw-yuv,width=320,height=240,framerate=10/1" ! ffmpegcolorspace ! motioncells ! ffmpegcolorspace ! xvimagesink
+int
+main (int argc, char *argv[])
+{
+  GstElement *pipeline, *source, *videor, *videos, *decodebin, *capsf,
+      *colorsp0, *colorsp1, *mcells, *sink;
+  GstCaps *caps;
+  gchar property[20];
+  gchar prop_value[100];
+  GParamSpec **property_specs;
+  guint num_properties, i;
+  GValue value = { 0, };
+  gboolean found_property = FALSE;
+  int ret;
+
+  // Initialisation //
+  gst_init (&argc, &argv);
+  fprintf (stderr, "Usage: %s test or rtsp rtsp://your/cam/address\n", argv[0]);
+  // Create gstreamer elements //
+  pipeline = gst_pipeline_new ("motioncells-pipeline");
+  if (argc == 2 && (g_strcmp0 (argv[1], "test") == 0))
+    source = gst_element_factory_make ("videotestsrc", "vidsrc");
+  else if (argc == 3 && (g_strcmp0 (argv[1], "rtsp") == 0))
+    source = gst_element_factory_make ("rtspsrc", "rtspsrc0");
+  else if (argc == 1)
+    source = gst_element_factory_make ("v4l2src", "v4l2");
+  else {
+    fprintf (stderr, "Usage: %s test or rtsp rtsp://your/cam/address\n",
+        argv[0]);
+    exit (-1);
+  }
+
+  videor = gst_element_factory_make ("videorate", "videor");
+  videos = gst_element_factory_make ("videoscale", "videos");
+  capsf = gst_element_factory_make ("capsfilter", "capsf");
+  if (argc == 3 && (g_strcmp0 (argv[1], "rtsp") == 0))
+    decodebin = gst_element_factory_make ("decodebin", "decode");
+  else
+    decodebin = NULL;
+  colorsp0 = gst_element_factory_make ("ffmpegcolorspace", "colorspace0");
+  mcells = gst_element_factory_make ("motioncells", "mcells");
+  colorsp1 = gst_element_factory_make ("ffmpegcolorspace", "colorspace1");
+  sink = gst_element_factory_make ("xvimagesink", "xv-image-sink");
+  if (!pipeline || !source || !videor || !videos || !capsf || !colorsp0
+      || !mcells || !colorsp1 || !sink) {
+    g_printerr ("One element could not be created. Exiting.\n");
+    return -1;
+  }
+  if (argc == 3 && (g_strcmp0 (argv[1], "rtsp") == 0) && !decodebin) {
+    g_printerr ("Decodebin could not be created. Exiting.\n");
+    return -1;
+  }
+  if ((g_strcmp0 (argv[1], "rtsp") == 0)) {
+    g_object_set (G_OBJECT (source), "location", argv[2], NULL);
+    g_object_set (G_OBJECT (source), "latency", 1000, NULL);
+  } else if ((g_strcmp0 (argv[1], "test") == 0))
+    g_object_set (G_OBJECT (source), "pattern", 18, NULL);
+
+  caps =
+      gst_caps_from_string
+      ("video/x-raw-yuv,width=320,height=240,framerate=10/1");
+  g_object_set (G_OBJECT (capsf), "caps", caps, NULL);
+  //g_object_set (G_OBJECT (sink), "sync",FALSE,NULL);
+
+  if (argc > 1) {
+    if (g_strcmp0 (argv[1], "test") == 0) {
+      gst_bin_add_many (GST_BIN (pipeline),
+          source, videor, videos, capsf, colorsp0, mcells, colorsp1, sink,
+          NULL);
+
+      gst_element_link_many (source, videor, videos, capsf, colorsp0, mcells,
+          colorsp1, sink, NULL);
+    } else if (g_strcmp0 (argv[1], "rtsp") == 0) {
+      gst_bin_add_many (GST_BIN (pipeline),
+          source, videor, videos, capsf, decodebin, colorsp0, mcells, colorsp1,
+          sink, NULL);
+
+      gst_element_link_many (source, videor, videos, capsf, decodebin,
+          colorsp0, mcells, colorsp1, sink, NULL);
+    }
+  } else {                      //default
+    gst_bin_add_many (GST_BIN (pipeline),
+        source, videor, videos, capsf, colorsp0, mcells, colorsp1, sink, NULL);
+
+    gst_element_link_many (source, videor, videos, capsf, colorsp0, mcells,
+        colorsp1, sink, NULL);
+  }
+
+  g_print ("Now playing\n");
+  gst_element_set_state (pipeline, GST_STATE_PLAYING);
+  g_print ("Running...\n");
+  g_print ("You can use these properties : \n");
+  gst_element_print_properties (mcells);
+  g_print ("change property here: example some_property property_value \n");
+  g_print ("Quit with 'q' \n");
+  //get all properties
+  property_specs = g_object_class_list_properties (G_OBJECT_GET_CLASS (mcells),
+      &num_properties);
+  while (TRUE) {
+    found_property = FALSE;
+    i = 0;
+
+    ret = scanf ("%19s %99s", property, prop_value);
+
+    if (ret < 1)
+      g_printerr ("Error parsing command.\n");
+
+    if ((g_strcmp0 (property, "q") == 0) || (g_strcmp0 (prop_value, "q") == 0))
+      break;
+    printf ("property: %s -> value: %s \n", property, prop_value);
+    for (i = 0; i < num_properties; i++) {
+      GParamSpec *param = property_specs[i];
+      g_value_init (&value, param->value_type);
+      g_object_get_property (G_OBJECT (mcells), param->name, &value);
+      //fprintf(stderr,"property: %s and param name: %s and property value: %s \n",property,param->name,prop_value);
+      if ((g_strcmp0 (property, param->name) == 0) && !found_property &&
+          (g_strcmp0 (prop_value, "") != 0)
+          && (g_strcmp0 (prop_value, "\"") != 0)
+          && (g_strcmp0 (prop_value, "\'") != 0)) {
+        GType type;
+        found_property = TRUE;
+        type = param->value_type;
+        setProperty (mcells, property, prop_value, type, &value);
+      }
+      g_value_unset (&value);
+      if (found_property)
+        break;
+    }
+  }
+
+  gst_element_set_state (pipeline, GST_STATE_NULL);
+  gst_object_unref (pipeline);
+  return 0;
+}
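To illustrate the stdin protocol implemented by the loop above (illustrative only, not part of the patch): each input line is a property name followed by a value, and entering q quits. The property names below are assumed motioncells properties; the authoritative list is the table printed at startup by gst_element_print_properties().

    ./gstmotioncells_dynamic_test test
    sensitivity 0.5
    display true
    q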
diff --git a/tests/examples/opencv/gstmotioncells_dynamic_test.h b/tests/examples/opencv/gstmotioncells_dynamic_test.h
new file mode 100644
index 0000000000..6760c66669
--- /dev/null
+++ b/tests/examples/opencv/gstmotioncells_dynamic_test.h
@@ -0,0 +1,30 @@
+/* GStreamer
+ * Copyright (C) 2011 Robert Jobbagy
+ *
+ *
+ * gst_motioncells_dynamic_test(): a test tool that can change element properties dynamically
+ *
+ *
+ * This program is free software: you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation, either version 3 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License
+ * along with this program.  If not, see <http://www.gnu.org/licenses/>.
+ *
+ */
+
+#ifndef GST_MOTIONCELLS_DYNAMIC_TEST_H
+#define GST_MOTIONCELLS_DYNAMIC_TEST_H
+
+extern void setProperty (GstElement * mcells, char *property, char *prop_value,
+    GType type, GValue * value);
+
+
+#endif
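The header above exposes setProperty() for reuse from other code. A minimal sketch of a direct call follows; it is illustrative only, the property name "sensitivity" is assumed rather than taken from the patch, and the trailing GValue argument is accepted but not currently used by the implementation:

    #include <gst/gst.h>
    #include "gstmotioncells_dynamic_test.h"

    static void
    apply_sensitivity (GstElement * mcells)
    {
      GValue value = { 0, };

      /* "0.5" is parsed as a float and applied via g_object_set () */
      setProperty (mcells, "sensitivity", "0.5", G_TYPE_FLOAT, &value);
    }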