dshowdecwrapper: Port to 1.x

https://bugzilla.gnome.org/show_bug.cgi?id=740945
Author: Matthew Bader, 2014-12-04 15:52:44 -05:00 (committed by Sebastian Dröge)
parent 89eb1e9a03
commit 54a7bdc0be
6 changed files with 350 additions and 228 deletions


@ -0,0 +1,58 @@
CMAKE_MINIMUM_REQUIRED (VERSION 2.8)
PROJECT(dshowdecwrapper)
SET(GST_INSTALL_BASE "C:\\gstreamer\\1.0\\x86" CACHE PATH "Path to the GStreamer install base")
SET(SDK_INSTALL_BASE "C:\\Program Files\\Microsoft SDKs\\Windows\\v7.0" CACHE PATH "Path to the Windows SDK root")
SET_PROPERTY(
DIRECTORY
APPEND PROPERTY COMPILE_DEFINITIONS
HAVE_CONFIG_H
)
INCLUDE_DIRECTORIES(
${GST_INSTALL_BASE}/include
${GST_INSTALL_BASE}/include/gstreamer-1.0
${GST_INSTALL_BASE}/include/glib-2.0
${GST_INSTALL_BASE}/lib/glib-2.0/include
${SDK_INSTALL_BASE}/Samples/multimedia/directshow/baseclasses
${PROJECT_SOURCE_DIR}/../../win32/common
)
LINK_DIRECTORIES(
${GST_INSTALL_BASE}/lib
${SDK_INSTALL_BASE}/Samples/multimedia/directshow/baseclasses/Release_MBCS
${SDK_INSTALL_BASE}/Samples/multimedia/directshow/baseclasses/Debug_MBCS
)
ADD_LIBRARY(libgstdshowdecwrapper SHARED
gstdshowaudiodec.cpp
gstdshowaudiodec.h
gstdshowvideodec.cpp
gstdshowvideodec.h
gstdshowdecwrapper.cpp
gstdshowdecwrapper.h
gstdshowfakesrc.cpp
gstdshowfakesrc.h
gstdshowutil.cpp
gstdshowutil.h
)
TARGET_LINK_LIBRARIES(libgstdshowdecwrapper
gstreamer-1.0
gstaudio-1.0
gstvideo-1.0
gstbase-1.0
glib-2.0
gobject-2.0
debug strmbasd
optimized strmbase
rpcrt4
uuid
winmm
dmoguids
wmcodecdspuuid
msdmo
)


@ -51,12 +51,13 @@
#include <mmreg.h>
#include <dmoreg.h>
#include <wmcodecdsp.h>
#include <gst/audio/audio.h>
GST_DEBUG_CATEGORY_STATIC (dshowaudiodec_debug);
#define GST_CAT_DEFAULT dshowaudiodec_debug
GST_BOILERPLATE (GstDshowAudioDec, gst_dshowaudiodec, GstElement,
GST_TYPE_ELEMENT);
#define gst_dshowaudiodec_parent_class parent_class
G_DEFINE_TYPE(GstDshowAudioDec, gst_dshowaudiodec, GST_TYPE_ELEMENT)
static void gst_dshowaudiodec_finalize (GObject * object);
static GstStateChangeReturn gst_dshowaudiodec_change_state
@ -64,8 +65,8 @@ static GstStateChangeReturn gst_dshowaudiodec_change_state
/* sink pad overrides */
static gboolean gst_dshowaudiodec_sink_setcaps (GstPad * pad, GstCaps * caps);
static GstFlowReturn gst_dshowaudiodec_chain (GstPad * pad, GstBuffer * buffer);
static gboolean gst_dshowaudiodec_sink_event (GstPad * pad, GstEvent * event);
static GstFlowReturn gst_dshowaudiodec_chain (GstPad * pad, GstObject *parent, GstBuffer * buffer);
static gboolean gst_dshowaudiodec_sink_event (GstPad * pad, GstObject *parent, GstEvent * event);
/* utils */
static gboolean gst_dshowaudiodec_create_graph_and_filters (GstDshowAudioDec *
@ -158,7 +159,7 @@ HRESULT AudioFakeSink::DoRenderSample(IMediaSample *pMediaSample)
GstBuffer *out_buf = NULL;
gboolean in_seg = FALSE;
GstClockTime buf_start, buf_stop;
gint64 clip_start = 0, clip_stop = 0;
guint64 clip_start = 0, clip_stop = 0;
guint start_offset = 0, stop_offset;
GstClockTime duration;
@ -199,20 +200,21 @@ HRESULT AudioFakeSink::DoRenderSample(IMediaSample *pMediaSample)
* GstBuffer for output, and clip if required */
/* allocate a new buffer for raw audio */
mDec->last_ret = gst_pad_alloc_buffer (mDec->srcpad,
GST_BUFFER_OFFSET_NONE,
size,
GST_PAD_CAPS (mDec->srcpad), &out_buf);
out_buf = gst_buffer_new_and_alloc(size);
if (!out_buf) {
GST_WARNING_OBJECT (mDec, "cannot allocate a new GstBuffer");
goto done;
}
/* set buffer properties */
GST_BUFFER_TIMESTAMP (out_buf) = buf_start;
GST_BUFFER_DURATION (out_buf) = duration;
memcpy (GST_BUFFER_DATA (out_buf), pBuffer,
MIN ((unsigned int)size, GST_BUFFER_SIZE (out_buf)));
if (gst_buffer_fill(out_buf, 0, pBuffer, size) != size) {
gst_buffer_unref (out_buf);
GST_WARNING_OBJECT (mDec, "unable to fill output buffer");
goto done;
}
/* we have to remove some heading samples */
if ((GstClockTime) clip_start > buf_start) {
@ -231,11 +233,11 @@ HRESULT AudioFakeSink::DoRenderSample(IMediaSample *pMediaSample)
/* truncating */
if ((start_offset != 0) || (stop_offset != (size_t) size)) {
GstBuffer *subbuf = gst_buffer_create_sub (out_buf, start_offset,
stop_offset - start_offset);
GstBuffer *subbuf = gst_buffer_copy_region (out_buf, GST_BUFFER_COPY_ALL,
start_offset, stop_offset - start_offset);
if (subbuf) {
gst_buffer_set_caps (subbuf, GST_PAD_CAPS (mDec->srcpad));
gst_buffer_unref (out_buf);
out_buf = subbuf;
}
@ -305,6 +307,21 @@ HRESULT AudioFakeSink::CheckMediaType(const CMediaType *pmt)
return S_FALSE;
}
int AudioFakeSink::GetBufferSize()
{
IMemAllocator *allocator = NULL;
if (m_pInputPin) {
allocator = m_pInputPin->Allocator();
if(allocator) {
ALLOCATOR_PROPERTIES props;
allocator->GetProperties(&props);
return props.cbBuffer;
}
}
return 0;
}
static void
gst_dshowaudiodec_base_init (gpointer klass)
{
@ -312,36 +329,34 @@ gst_dshowaudiodec_base_init (gpointer klass)
GstPadTemplate *src, *sink;
GstCaps *srccaps, *sinkcaps;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstElementDetails details;
const AudioCodecEntry *tmp;
gpointer qdata;
gchar *longname, *description;
qdata = g_type_get_qdata (G_OBJECT_CLASS_TYPE (klass), DSHOW_CODEC_QDATA);
/* element details */
tmp = audiodec_class->entry = (AudioCodecEntry *) qdata;
details.longname = g_strdup_printf ("DirectShow %s Decoder Wrapper",
longname = g_strdup_printf ("DirectShow %s Decoder Wrapper",
tmp->element_longname);
details.klass = g_strdup ("Codec/Decoder/Audio");
details.description = g_strdup_printf ("DirectShow %s Decoder Wrapper",
description = g_strdup_printf ("DirectShow %s Decoder Wrapper",
tmp->element_longname);
details.author = "Sebastien Moutte <sebastien@moutte.net>";
gst_element_class_set_details (element_class, &details);
g_free (details.longname);
g_free (details.klass);
g_free (details.description);
gst_element_class_set_metadata(element_class, longname, "Codec/Decoder/Audio", description,
"Sebastien Moutte <sebastien@moutte.net>");
g_free (longname);
g_free (description);
sinkcaps = gst_caps_from_string (tmp->sinkcaps);
srccaps = gst_caps_from_string (
"audio/x-raw-int,"
"width = (int)[1, 32],"
"depth = (int)[1, 32],"
"audio/x-raw,"
"format = (string)" GST_AUDIO_FORMATS_ALL ","
"rate = (int)[1, MAX],"
"channels = (int)[1, MAX],"
"signed = (boolean)true,"
"endianness = (int)" G_STRINGIFY(G_LITTLE_ENDIAN));
"layout = (string)interleaved");
sink = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, sinkcaps);
src = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, srccaps);
@ -370,7 +385,7 @@ gst_dshowaudiodec_com_thread (GstDshowAudioDec * adec)
{
HRESULT res;
g_mutex_lock (adec->com_init_lock);
g_mutex_lock (&adec->com_init_lock);
/* Initialize COM with a MTA for this process. This thread will
* be the first one to enter the apartment and the last one to leave
@ -387,24 +402,23 @@ gst_dshowaudiodec_com_thread (GstDshowAudioDec * adec)
adec->comInitialized = TRUE;
/* Signal other threads waiting on this condition that COM was initialized */
g_cond_signal (adec->com_initialized);
g_cond_signal (&adec->com_initialized);
g_mutex_unlock (adec->com_init_lock);
g_mutex_unlock (&adec->com_init_lock);
/* Wait until the uninitialize condition is met to leave the COM apartment */
g_mutex_lock (adec->com_deinit_lock);
g_cond_wait (adec->com_uninitialize, adec->com_deinit_lock);
g_mutex_lock (&adec->com_deinit_lock);
g_cond_wait (&adec->com_uninitialize, &adec->com_deinit_lock);
CoUninitialize ();
GST_INFO_OBJECT (adec, "COM uninitialized successfully");
adec->comInitialized = FALSE;
g_cond_signal (adec->com_uninitialized);
g_mutex_unlock (adec->com_deinit_lock);
g_cond_signal (&adec->com_uninitialized);
g_mutex_unlock (&adec->com_deinit_lock);
}
static void
gst_dshowaudiodec_init (GstDshowAudioDec * adec,
GstDshowAudioDecClass * adec_class)
gst_dshowaudiodec_init (GstDshowAudioDec * adec)
{
GstElementClass *element_class = GST_ELEMENT_GET_CLASS (adec);
@ -413,7 +427,6 @@ gst_dshowaudiodec_init (GstDshowAudioDec * adec,
gst_pad_new_from_template (gst_element_class_get_pad_template
(element_class, "sink"), "sink");
gst_pad_set_setcaps_function (adec->sinkpad, gst_dshowaudiodec_sink_setcaps);
gst_pad_set_event_function (adec->sinkpad, gst_dshowaudiodec_sink_event);
gst_pad_set_chain_function (adec->sinkpad, gst_dshowaudiodec_chain);
gst_element_add_pad (GST_ELEMENT (adec), adec->sinkpad);
@ -443,21 +456,21 @@ gst_dshowaudiodec_init (GstDshowAudioDec * adec,
adec->last_ret = GST_FLOW_OK;
adec->com_init_lock = g_mutex_new();
adec->com_deinit_lock = g_mutex_new();
adec->com_initialized = g_cond_new();
adec->com_uninitialize = g_cond_new();
adec->com_uninitialized = g_cond_new();
g_mutex_init(&adec->com_init_lock);
g_mutex_init(&adec->com_deinit_lock);
g_cond_init(&adec->com_initialized);
g_cond_init(&adec->com_uninitialize);
g_cond_init(&adec->com_uninitialized);
g_mutex_lock (adec->com_init_lock);
g_mutex_lock (&adec->com_init_lock);
/* create the COM initialization thread */
g_thread_create ((GThreadFunc)gst_dshowaudiodec_com_thread,
adec, FALSE, NULL);
g_thread_new ("COM init thread", (GThreadFunc)gst_dshowaudiodec_com_thread,
adec);
/* wait until the COM thread signals that COM has been initialized */
g_cond_wait (adec->com_initialized, adec->com_init_lock);
g_mutex_unlock (adec->com_init_lock);
g_cond_wait (&adec->com_initialized, &adec->com_init_lock);
g_mutex_unlock (&adec->com_init_lock);
}
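For context: the 0.10 code allocated its COM synchronisation primitives with g_mutex_new()/g_cond_new() and spawned the worker with g_thread_create(); the port above switches to the GLib 2.32 style of statically allocated GMutex/GCond plus g_thread_new(). A condensed, standalone sketch of the same startup handshake, with purely illustrative names (not the element's actual fields):

#include <glib.h>

typedef struct {
  GMutex init_lock;        /* embedded value, not a GMutex* as in 0.10 */
  GCond  initialized;
  gboolean ready;
} ComCtx;

static gpointer
com_worker (gpointer data)
{
  ComCtx *ctx = (ComCtx *) data;

  g_mutex_lock (&ctx->init_lock);
  /* CoInitializeEx() would run here in the real element */
  ctx->ready = TRUE;
  g_cond_signal (&ctx->initialized);
  g_mutex_unlock (&ctx->init_lock);
  return NULL;
}

static void
com_start (ComCtx *ctx)
{
  g_mutex_init (&ctx->init_lock);
  g_cond_init (&ctx->initialized);
  ctx->ready = FALSE;

  g_mutex_lock (&ctx->init_lock);
  /* thread reference is deliberately ignored in this sketch */
  g_thread_new ("com-init", com_worker, ctx);
  while (!ctx->ready)      /* tolerate spurious wakeups */
    g_cond_wait (&ctx->initialized, &ctx->init_lock);
  g_mutex_unlock (&ctx->init_lock);
}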
static void
@ -477,17 +490,17 @@ gst_dshowaudiodec_finalize (GObject * object)
/* signal the COM thread that it should uninitialize COM */
if (adec->comInitialized) {
g_mutex_lock (adec->com_deinit_lock);
g_cond_signal (adec->com_uninitialize);
g_cond_wait (adec->com_uninitialized, adec->com_deinit_lock);
g_mutex_unlock (adec->com_deinit_lock);
g_mutex_lock (&adec->com_deinit_lock);
g_cond_signal (&adec->com_uninitialize);
g_cond_wait (&adec->com_uninitialized, &adec->com_deinit_lock);
g_mutex_unlock (&adec->com_deinit_lock);
}
g_mutex_free (adec->com_init_lock);
g_mutex_free (adec->com_deinit_lock);
g_cond_free (adec->com_initialized);
g_cond_free (adec->com_uninitialize);
g_cond_free (adec->com_uninitialized);
g_mutex_clear (&adec->com_init_lock);
g_mutex_clear (&adec->com_deinit_lock);
g_cond_clear (&adec->com_initialized);
g_cond_clear (&adec->com_uninitialize);
g_cond_clear (&adec->com_uninitialized);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
@ -529,7 +542,7 @@ gst_dshowaudiodec_change_state (GstElement * element, GstStateChange transition)
break;
}
return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
return GST_ELEMENT_CLASS(parent_class)->change_state (element, transition);
}
static gboolean
@ -571,9 +584,10 @@ end:
}
static GstFlowReturn
gst_dshowaudiodec_chain (GstPad * pad, GstBuffer * buffer)
gst_dshowaudiodec_chain (GstPad *pad, GstObject *parent, GstBuffer *buffer)
{
GstDshowAudioDec *adec = (GstDshowAudioDec *) gst_pad_get_parent (pad);
GstMapInfo map;
bool discont = FALSE;
if (!adec->setup) {
@ -583,7 +597,7 @@ gst_dshowaudiodec_chain (GstPad * pad, GstBuffer * buffer)
goto beach;
}
if (GST_FLOW_IS_FATAL (adec->last_ret)) {
if (adec->last_ret != GST_FLOW_OK) {
GST_DEBUG_OBJECT (adec, "last decoding iteration generated a fatal error "
"%s", gst_flow_get_name (adec->last_ret));
goto beach;
@ -591,7 +605,7 @@ gst_dshowaudiodec_chain (GstPad * pad, GstBuffer * buffer)
GST_CAT_DEBUG_OBJECT (dshowaudiodec_debug, adec, "chain (size %d)=> pts %"
GST_TIME_FORMAT " stop %" GST_TIME_FORMAT,
GST_BUFFER_SIZE (buffer), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
gst_buffer_get_size(buffer), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer) +
GST_BUFFER_DURATION (buffer)));
@ -605,10 +619,12 @@ gst_dshowaudiodec_chain (GstPad * pad, GstBuffer * buffer)
}
/* push the buffer to the directshow decoder */
gst_buffer_map(buffer, &map, GST_MAP_READ);
adec->fakesrc->GetOutputPin()->PushBuffer (
GST_BUFFER_DATA (buffer), GST_BUFFER_TIMESTAMP (buffer),
map.data, GST_BUFFER_TIMESTAMP (buffer),
GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer),
GST_BUFFER_SIZE (buffer), (bool)discont);
map.size, (bool)discont);
gst_buffer_unmap(buffer, &map);
beach:
gst_buffer_unref (buffer);
@ -617,51 +633,45 @@ beach:
}
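A 1.0 chain function takes the extra GstObject *parent argument and reads buffer memory through gst_buffer_map()/gst_buffer_unmap() rather than GST_BUFFER_DATA/GST_BUFFER_SIZE, which is what the rewritten chain above does. A minimal sketch of that shape, with hypothetical names and none of the wrapper's actual decoding logic:

#include <gst/gst.h>

static GstFlowReturn
my_sink_chain (GstPad *pad, GstObject *parent, GstBuffer *buffer)
{
  GstMapInfo map;

  if (!gst_buffer_map (buffer, &map, GST_MAP_READ)) {
    gst_buffer_unref (buffer);
    return GST_FLOW_ERROR;
  }

  /* map.data / map.size replace GST_BUFFER_DATA / GST_BUFFER_SIZE */
  GST_LOG_OBJECT (parent, "chain: %" G_GSIZE_FORMAT " bytes, pts %" GST_TIME_FORMAT,
      map.size, GST_TIME_ARGS (GST_BUFFER_PTS (buffer)));

  /* hand map.data / map.size to the decoder here */

  gst_buffer_unmap (buffer, &map);
  gst_buffer_unref (buffer);
  return GST_FLOW_OK;
}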
static gboolean
gst_dshowaudiodec_sink_event (GstPad * pad, GstEvent * event)
gst_dshowaudiodec_sink_event (GstPad * pad, GstObject *parent, GstEvent * event)
{
gboolean ret = TRUE;
GstDshowAudioDec *adec = (GstDshowAudioDec *) gst_pad_get_parent (pad);
GstDshowAudioDec *adec = (GstDshowAudioDec *) parent;
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_FLUSH_STOP:{
gst_dshowaudiodec_flush (adec);
ret = gst_pad_event_default (pad, event);
case GST_EVENT_CAPS:{
GstCaps *caps;
gst_event_parse_caps(event, &caps);
ret = gst_dshowaudiodec_sink_setcaps(pad, caps);
break;
}
case GST_EVENT_NEWSEGMENT:
{
GstFormat format;
gdouble rate;
gint64 start, stop, time;
gboolean update;
gst_event_parse_new_segment (event, &update, &rate, &format, &start,
&stop, &time);
case GST_EVENT_FLUSH_STOP:{
gst_dshowaudiodec_flush (adec);
ret = gst_pad_event_default (pad, parent, event);
break;
}
case GST_EVENT_SEGMENT:{
const GstSegment *segment;
gst_event_parse_segment (event, &segment);
GST_CAT_DEBUG_OBJECT (dshowaudiodec_debug, adec,
"received new segment from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
if (update) {
GST_CAT_DEBUG_OBJECT (dshowaudiodec_debug, adec,
"closing current segment flushing..");
gst_dshowaudiodec_flush (adec);
}
GST_TIME_ARGS (segment->start), GST_TIME_ARGS (segment->stop));
/* save the new segment in our local current segment */
gst_segment_set_newsegment (adec->segment, update, rate, format, start,
stop, time);
gst_segment_copy_into(segment, adec->segment);
ret = gst_pad_event_default (pad, event);
ret = gst_pad_event_default (pad, parent, event);
break;
}
default:
ret = gst_pad_event_default (pad, event);
ret = gst_pad_event_default (pad, parent, event);
break;
}
gst_object_unref (adec);
return ret;
}
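1.0 drops the per-pad setcaps function; caps and segment updates now arrive as GST_EVENT_CAPS and GST_EVENT_SEGMENT in the event handler, which is why the port wires setcaps through the CAPS branch above. A bare-bones sketch of that dispatch (names are illustrative; each branch forwards the event with gst_pad_event_default() so it is not leaked):

#include <gst/gst.h>

static gboolean
my_sink_event (GstPad *pad, GstObject *parent, GstEvent *event)
{
  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CAPS: {
      GstCaps *caps;
      gst_event_parse_caps (event, &caps);
      /* (re)configure the decoder from 'caps' here */
      return gst_pad_event_default (pad, parent, event);
    }
    case GST_EVENT_SEGMENT: {
      const GstSegment *segment;
      gst_event_parse_segment (event, &segment);
      /* store 'segment' for later timestamp clipping */
      return gst_pad_event_default (pad, parent, event);
    }
    default:
      return gst_pad_event_default (pad, parent, event);
  }
}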
@ -757,7 +767,7 @@ dshowaudiodec_set_input_format (GstDshowAudioDec *adec, GstCaps *caps)
else
{
size = sizeof (WAVEFORMATEX) +
(adec->codec_data ? GST_BUFFER_SIZE (adec->codec_data) : 0);
(adec->codec_data ? gst_buffer_get_size(adec->codec_data) : 0);
if (adec->layer == 3) {
MPEGLAYER3WAVEFORMAT *mp3format;
@ -783,11 +793,12 @@ dshowaudiodec_set_input_format (GstDshowAudioDec *adec, GstCaps *caps)
}
else {
format = (WAVEFORMATEX *)g_malloc0 (size);
if (adec->codec_data) { /* Codec data is appended after our header */
memcpy (((guchar *) format) + sizeof (WAVEFORMATEX),
GST_BUFFER_DATA (adec->codec_data),
GST_BUFFER_SIZE (adec->codec_data));
format->cbSize = GST_BUFFER_SIZE (adec->codec_data);
gsize codec_size = gst_buffer_get_size(adec->codec_data);
gst_buffer_extract(adec->codec_data, 0, ((guchar *) format) + sizeof (WAVEFORMATEX),
codec_size);
format->cbSize = codec_size;
}
}
@ -855,14 +866,15 @@ gst_dshowaudiodec_setup_graph (GstDshowAudioDec * adec, GstCaps *caps)
GstDshowAudioDecClass *klass =
(GstDshowAudioDecClass *) G_OBJECT_GET_CLASS (adec);
HRESULT hres;
GstCaps *outcaps;
GstCaps *outcaps = NULL;
AM_MEDIA_TYPE *output_mediatype = NULL;
AM_MEDIA_TYPE *input_mediatype = NULL;
CComPtr<IPin> output_pin;
CComPtr<IPin> input_pin;
CComPtr<IPin> output_pin = NULL;
CComPtr<IPin> input_pin = NULL;
const AudioCodecEntry *codec_entry = klass->entry;
CComQIPtr<IBaseFilter> srcfilter;
CComQIPtr<IBaseFilter> sinkfilter;
GstAudioInfo audio_info;
input_mediatype = dshowaudiodec_set_input_format (adec, caps);
@ -901,22 +913,18 @@ gst_dshowaudiodec_setup_graph (GstDshowAudioDec * adec, GstCaps *caps)
adec->fakesink->SetMediaType(output_mediatype);
outcaps = gst_caps_new_simple ("audio/x-raw-int",
"width", G_TYPE_INT, adec->depth,
"depth", G_TYPE_INT, adec->depth,
"rate", G_TYPE_INT, adec->rate,
"channels", G_TYPE_INT, adec->channels,
"signed", G_TYPE_BOOLEAN, TRUE,
"endianness", G_TYPE_INT, G_LITTLE_ENDIAN,
NULL);
gst_audio_info_init(&audio_info);
gst_audio_info_set_format(&audio_info,
gst_audio_format_build_integer(TRUE, G_BYTE_ORDER, adec->depth, adec->depth),
adec->rate, adec->channels, NULL);
outcaps = gst_audio_info_to_caps(&audio_info);
if (!gst_pad_set_caps (adec->srcpad, outcaps)) {
gst_caps_unref (outcaps);
GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
("Failed to negotiate output"), (NULL));
goto end;
}
gst_caps_unref (outcaps);
/* connect the decoder to our fake sink */
output_pin = gst_dshow_get_pin_from_filter (adec->decfilter, PINDIR_OUTPUT);
@ -951,6 +959,8 @@ gst_dshowaudiodec_setup_graph (GstDshowAudioDec * adec, GstCaps *caps)
ret = TRUE;
adec->setup = TRUE;
end:
if (outcaps)
gst_caps_unref(outcaps);
if (input_mediatype)
dshowadec_free_mediatype (input_mediatype);
if (output_mediatype)


@ -113,11 +113,11 @@ struct _GstDshowAudioDec
GstClockTime timestamp;
gboolean comInitialized;
GMutex *com_init_lock;
GMutex *com_deinit_lock;
GCond *com_initialized;
GCond *com_uninitialize;
GCond *com_uninitialized;
GMutex com_init_lock;
GMutex com_deinit_lock;
GCond com_initialized;
GCond com_uninitialize;
GCond com_uninitialized;
};
struct _GstDshowAudioDecClass
@ -149,6 +149,7 @@ public:
m_MediaType.Set (*pmt);
return S_OK;
}
int GetBufferSize();
protected:
HRESULT m_hres;


@ -50,8 +50,7 @@
#include <objbase.h>
#include <dshow.h>
#include <Rpc.h>
#include <dshow/gstdshowinterface.h>
#include <glib.h>
#pragma warning( disable : 4090 4024)


@ -52,12 +52,13 @@
#include <wmcodecdsp.h>
#include "gstdshowvideodec.h"
#include <gst/video/video.h>
GST_DEBUG_CATEGORY_STATIC (dshowvideodec_debug);
#define GST_CAT_DEFAULT dshowvideodec_debug
GST_BOILERPLATE (GstDshowVideoDec, gst_dshowvideodec, GstElement,
GST_TYPE_ELEMENT);
#define gst_dshowvideodec_parent_class parent_class
G_DEFINE_TYPE(GstDshowVideoDec, gst_dshowvideodec, GST_TYPE_ELEMENT)
static void gst_dshowvideodec_finalize (GObject * object);
static GstStateChangeReturn gst_dshowvideodec_change_state
@ -65,8 +66,8 @@ static GstStateChangeReturn gst_dshowvideodec_change_state
/* sink pad overrides */
static gboolean gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps);
static gboolean gst_dshowvideodec_sink_event (GstPad * pad, GstEvent * event);
static GstFlowReturn gst_dshowvideodec_chain (GstPad * pad, GstBuffer * buffer);
static gboolean gst_dshowvideodec_sink_event (GstPad * pad, GstObject * parent, GstEvent * event);
static GstFlowReturn gst_dshowvideodec_chain (GstPad * pad, GstObject * parent, GstBuffer * buffer);
/* src pad overrides */
static GstCaps *gst_dshowvideodec_src_getcaps (GstPad * pad);
@ -139,7 +140,6 @@ static PreferredFilter preferred_mpeg1_filters[] = {
{&CLSID_MPEG_VIDEO_DECODER}, {0}
};
/* video codecs array */
static const VideoCodecEntry video_dec_codecs[] = {
{"dshowvdec_wmv1", "Windows Media Video 7",
@ -147,7 +147,7 @@ static const VideoCodecEntry video_dec_codecs[] = {
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WMVV1,
"video/x-wmv, wmvversion = (int) 1",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
"video/x-raw-yuv, format=(fourcc)YUY2",
GST_VIDEO_CAPS_MAKE("YUY2"),
preferred_wmv_filters},
{"dshowvdec_wmv2", "Windows Media Video 8",
@ -155,7 +155,7 @@ static const VideoCodecEntry video_dec_codecs[] = {
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WMVV2,
"video/x-wmv, wmvversion = (int) 2",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
"video/x-raw-yuv, format=(fourcc)YUY2",
GST_VIDEO_CAPS_MAKE("YUY2"),
preferred_wmv_filters},
{"dshowvdec_wmv3", "Windows Media Video 9",
@ -163,7 +163,7 @@ static const VideoCodecEntry video_dec_codecs[] = {
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WMVV3,
"video/x-wmv, wmvversion = (int) 3, " "format = (string) WMV3",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
"video/x-raw-yuv, format=(fourcc)YUY2",
GST_VIDEO_CAPS_MAKE("YUY2"),
preferred_wmv_filters},
{"dshowvdec_wmvp", "Windows Media Video 9 Image",
@ -171,7 +171,7 @@ static const VideoCodecEntry video_dec_codecs[] = {
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WMVP,
"video/x-wmv, wmvversion = (int) 3, " "format = (string) { WMVP, MSS1 }",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
"video/x-raw-yuv, format=(fourcc)YUY2",
GST_VIDEO_CAPS_MAKE("YUY2"),
preferred_wmv_filters},
{"dshowvdec_wmva", "Windows Media Video 9 Advanced",
@ -179,7 +179,7 @@ static const VideoCodecEntry video_dec_codecs[] = {
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WMVA,
"video/x-wmv, wmvversion = (int) 3, " "format = (string) WMVA",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
"video/x-raw-yuv, format=(fourcc)YUY2",
GST_VIDEO_CAPS_MAKE("YUY2"),
preferred_wmv_filters},
{"dshowvdec_wvc1", "Windows Media VC1 video",
@ -187,7 +187,7 @@ static const VideoCodecEntry video_dec_codecs[] = {
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WVC1,
"video/x-wmv, wmvversion = (int) 3, " "format = (string) WVC1",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
"video/x-raw-yuv, format=(fourcc)YUY2",
GST_VIDEO_CAPS_MAKE("YUY2"),
preferred_wmv_filters},
{"dshowvdec_cinepak", "Cinepack",
@ -195,7 +195,7 @@ static const VideoCodecEntry video_dec_codecs[] = {
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_CVID,
"video/x-cinepak",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_RGB32,
"video/x-raw-rgb, bpp=(int)32, depth=(int)24, "
"video/x-raw, format=(string)RGB, bpp=(int)32, depth=(int)24, "
"endianness=(int)4321, red_mask=(int)65280, "
"green_mask=(int)16711680, blue_mask=(int)-16777216",
preferred_cinepack_filters},
@ -205,7 +205,7 @@ static const VideoCodecEntry video_dec_codecs[] = {
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MP4S,
"video/x-msmpeg, msmpegversion=(int)41",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
"video/x-raw-yuv, format=(fourcc)YUY2",
GST_VIDEO_CAPS_MAKE("YUY2"),
preferred_mp4s_filters},
{"dshowvdec_msmpeg42", "Microsoft ISO MPEG-4 version 2",
@ -213,7 +213,7 @@ static const VideoCodecEntry video_dec_codecs[] = {
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MP42,
"video/x-msmpeg, msmpegversion=(int)42",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
"video/x-raw-yuv, format=(fourcc)YUY2",
GST_VIDEO_CAPS_MAKE("YUY2"),
preferred_mpeg4_filters},
{"dshowvdec_msmpeg43", "Microsoft ISO MPEG-4 version 3",
@ -221,7 +221,7 @@ static const VideoCodecEntry video_dec_codecs[] = {
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MP43,
"video/x-msmpeg, msmpegversion=(int)43",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
"video/x-raw-yuv, format=(fourcc)YUY2",
GST_VIDEO_CAPS_MAKE("YUY2"),
preferred_mp43_filters},
{"dshowvdec_msmpeg4", "Microsoft ISO MPEG-4 version 1.1",
@ -229,7 +229,7 @@ static const VideoCodecEntry video_dec_codecs[] = {
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_M4S2,
"video/x-msmpeg, msmpegversion=(int)4",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
"video/x-raw-yuv, format=(fourcc)YUY2",
GST_VIDEO_CAPS_MAKE("YUY2"),
preferred_mp4s_filters},
{"dshowvdec_mpeg1",
@ -239,7 +239,7 @@ static const VideoCodecEntry video_dec_codecs[] = {
"video/mpeg, mpegversion= (int) 1, "
"parsed= (boolean) true, " "systemstream= (boolean) false",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
"video/x-raw-yuv, format=(fourcc)YUY2",
GST_VIDEO_CAPS_MAKE("YUY2"),
preferred_mpeg1_filters},
{"dshowvdec_mpeg4", "MPEG-4 Video",
@ -247,7 +247,7 @@ static const VideoCodecEntry video_dec_codecs[] = {
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MPG4,
"video/mpeg, msmpegversion=(int)4",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
"video/x-raw-yuv, format=(fourcc)YUY2",
GST_VIDEO_CAPS_MAKE("YUY2"),
preferred_mpeg4_filters},
/* The rest of these have no preferred filter; windows doesn't come
@ -257,38 +257,39 @@ static const VideoCodecEntry video_dec_codecs[] = {
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_XVID,
"video/x-xvid",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
"video/x-raw-yuv, format=(fourcc)YUY2"},
GST_VIDEO_CAPS_MAKE("YUY2")},
{"dshowvdec_divx5", "DIVX 5.0 Video",
GST_MAKE_FOURCC ('D', 'X', '5', '0'),
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_DX50,
"video/x-divx, divxversion=(int)5",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
"video/x-raw-yuv, format=(fourcc)YUY2"},
GST_VIDEO_CAPS_MAKE("YUY2")},
{"dshowvdec_divx4", "DIVX 4.0 Video",
GST_MAKE_FOURCC ('D', 'I', 'V', 'X'),
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_DIVX,
"video/x-divx, divxversion=(int)4",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
"video/x-raw-yuv, format=(fourcc)YUY2"},
GST_VIDEO_CAPS_MAKE("YUY2")},
{"dshowvdec_divx3", "DIVX 3.0 Video",
GST_MAKE_FOURCC ('D', 'I', 'V', '3'),
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MP43,
"video/x-divx, divxversion=(int)3",
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
"video/x-raw-yuv, format=(fourcc)YUY2"}
GST_VIDEO_CAPS_MAKE("YUY2")}
};
HRESULT VideoFakeSink::DoRenderSample(IMediaSample *pMediaSample)
{
gboolean in_seg = FALSE;
gint64 clip_start = 0, clip_stop = 0;
guint64 clip_start = 0, clip_stop = 0;
GstDshowVideoDecClass *klass =
(GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (mDec);
GstBuffer *buf = NULL;
GstClockTime start, stop;
GstMapInfo map;
if(pMediaSample)
{
@ -315,10 +316,7 @@ HRESULT VideoFakeSink::DoRenderSample(IMediaSample *pMediaSample)
/* buffer is in our segment, allocate a new out buffer and clip its
* timestamps */
mDec->last_ret = gst_pad_alloc_buffer (mDec->srcpad,
GST_BUFFER_OFFSET_NONE,
size,
GST_PAD_CAPS (mDec->srcpad), &buf);
gst_buffer_pool_acquire_buffer(mDec->buffer_pool, &buf, NULL);
if (!buf) {
GST_WARNING_OBJECT (mDec,
"cannot allocate a new GstBuffer");
@ -329,6 +327,7 @@ HRESULT VideoFakeSink::DoRenderSample(IMediaSample *pMediaSample)
GST_BUFFER_TIMESTAMP (buf) = clip_start;
GST_BUFFER_DURATION (buf) = clip_stop - clip_start;
gst_buffer_map(buf, &map, GST_MAP_WRITE);
if (strstr (klass->entry->srccaps, "rgb")) {
/* For RGB the DirectShow decoder will return a bottom-up BITMAP
* There is probably a way to get top-bottom video frames from
@ -338,12 +337,13 @@ HRESULT VideoFakeSink::DoRenderSample(IMediaSample *pMediaSample)
guint stride = mDec->width * 4;
for (; line < mDec->height; line++) {
memcpy (GST_BUFFER_DATA (buf) + (line * stride),
memcpy (map.data + (line * stride),
pBuffer + (size - ((line + 1) * (stride))), stride);
}
} else {
memcpy (GST_BUFFER_DATA (buf), pBuffer, MIN ((unsigned int)size, GST_BUFFER_SIZE (buf)));
memcpy (map.data, pBuffer, MIN ((unsigned int)size, map.size));
}
gst_buffer_unmap(buf, &map);
GST_LOG_OBJECT (mDec,
"push_buffer (size %d)=> pts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT
@ -377,25 +377,25 @@ gst_dshowvideodec_base_init (gpointer klass)
GstPadTemplate *src, *sink;
GstCaps *srccaps, *sinkcaps;
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstElementDetails details;
const VideoCodecEntry *tmp;
gpointer qdata;
gchar *longname, *description;
qdata = g_type_get_qdata (G_OBJECT_CLASS_TYPE (klass), DSHOW_CODEC_QDATA);
/* element details */
tmp = videodec_class->entry = (VideoCodecEntry *) qdata;
details.longname = g_strdup_printf ("DirectShow %s Decoder Wrapper",
longname = g_strdup_printf ("DirectShow %s Decoder Wrapper",
tmp->element_longname);
details.klass = g_strdup ("Codec/Decoder/Video");
details.description = g_strdup_printf ("DirectShow %s Decoder Wrapper",
description = g_strdup_printf ("DirectShow %s Decoder Wrapper",
tmp->element_longname);
details.author = "Sebastien Moutte <sebastien@moutte.net>";
gst_element_class_set_details (element_class, &details);
g_free (details.longname);
g_free (details.klass);
g_free (details.description);
gst_element_class_set_metadata(element_class, longname, "Codec/Decoder/Video", description,
"Sebastien Moutte <sebastien@moutte.net>");
g_free (longname);
g_free (description);
sinkcaps = gst_caps_from_string (tmp->sinkcaps);
gst_caps_set_simple (sinkcaps,
@ -431,7 +431,7 @@ gst_dshowvideodec_com_thread (GstDshowVideoDec * vdec)
{
HRESULT res;
g_mutex_lock (vdec->com_init_lock);
g_mutex_lock (&vdec->com_init_lock);
/* Initialize COM with a MTA for this process. This thread will
* be the first one to enter the apartment and the last one to leave
@ -448,24 +448,23 @@ gst_dshowvideodec_com_thread (GstDshowVideoDec * vdec)
vdec->comInitialized = TRUE;
/* Signal other threads waiting on this condition that COM was initialized */
g_cond_signal (vdec->com_initialized);
g_cond_signal (&vdec->com_initialized);
g_mutex_unlock (vdec->com_init_lock);
g_mutex_unlock (&vdec->com_init_lock);
/* Wait until the uninitialize condition is met to leave the COM apartment */
g_mutex_lock (vdec->com_deinit_lock);
g_cond_wait (vdec->com_uninitialize, vdec->com_deinit_lock);
g_mutex_lock (&vdec->com_deinit_lock);
g_cond_wait (&vdec->com_uninitialize, &vdec->com_deinit_lock);
CoUninitialize ();
GST_INFO_OBJECT (vdec, "COM uninitialized successfully");
vdec->comInitialized = FALSE;
g_cond_signal (vdec->com_uninitialized);
g_mutex_unlock (vdec->com_deinit_lock);
g_cond_signal (&vdec->com_uninitialized);
g_mutex_unlock (&vdec->com_deinit_lock);
}
static void
gst_dshowvideodec_init (GstDshowVideoDec * vdec,
GstDshowVideoDecClass * vdec_class)
gst_dshowvideodec_init (GstDshowVideoDec * vdec)
{
GstElementClass *element_class = GST_ELEMENT_GET_CLASS (vdec);
@ -474,7 +473,6 @@ gst_dshowvideodec_init (GstDshowVideoDec * vdec,
gst_pad_new_from_template (gst_element_class_get_pad_template
(element_class, "sink"), "sink");
gst_pad_set_setcaps_function (vdec->sinkpad, gst_dshowvideodec_sink_setcaps);
gst_pad_set_event_function (vdec->sinkpad, gst_dshowvideodec_sink_event);
gst_pad_set_chain_function (vdec->sinkpad, gst_dshowvideodec_chain);
gst_element_add_pad (GST_ELEMENT (vdec), vdec->sinkpad);
@ -499,22 +497,23 @@ gst_dshowvideodec_init (GstDshowVideoDec * vdec,
vdec->segment = gst_segment_new ();
vdec->setup = FALSE;
vdec->buffer_pool = NULL;
vdec->com_init_lock = g_mutex_new();
vdec->com_deinit_lock = g_mutex_new();
vdec->com_initialized = g_cond_new();
vdec->com_uninitialize = g_cond_new();
vdec->com_uninitialized = g_cond_new();
g_mutex_init (&vdec->com_init_lock);
g_mutex_init (&vdec->com_deinit_lock);
g_cond_init (&vdec->com_initialized);
g_cond_init (&vdec->com_uninitialize);
g_cond_init (&vdec->com_uninitialized);
g_mutex_lock (vdec->com_init_lock);
g_mutex_lock (&vdec->com_init_lock);
/* create the COM initialization thread */
g_thread_create ((GThreadFunc)gst_dshowvideodec_com_thread,
vdec, FALSE, NULL);
g_thread_new ("COM Init Thread", (GThreadFunc)gst_dshowvideodec_com_thread,
vdec);
/* wait until the COM thread signals that COM has been initialized */
g_cond_wait (vdec->com_initialized, vdec->com_init_lock);
g_mutex_unlock (vdec->com_init_lock);
g_cond_wait (&vdec->com_initialized, &vdec->com_init_lock);
g_mutex_unlock (&vdec->com_init_lock);
}
static void
@ -527,19 +526,24 @@ gst_dshowvideodec_finalize (GObject * object)
vdec->segment = NULL;
}
/* signal the COM thread that it should uninitialize COM */
if (vdec->comInitialized) {
g_mutex_lock (vdec->com_deinit_lock);
g_cond_signal (vdec->com_uninitialize);
g_cond_wait (vdec->com_uninitialized, vdec->com_deinit_lock);
g_mutex_unlock (vdec->com_deinit_lock);
if(vdec->buffer_pool) {
gst_object_unref(vdec->buffer_pool);
vdec->buffer_pool = NULL;
}
g_mutex_free (vdec->com_init_lock);
g_mutex_free (vdec->com_deinit_lock);
g_cond_free (vdec->com_initialized);
g_cond_free (vdec->com_uninitialize);
g_cond_free (vdec->com_uninitialized);
/* signal the COM thread that it should uninitialize COM */
if (vdec->comInitialized) {
g_mutex_lock (&vdec->com_deinit_lock);
g_cond_signal (&vdec->com_uninitialize);
g_cond_wait (&vdec->com_uninitialized, &vdec->com_deinit_lock);
g_mutex_unlock (&vdec->com_deinit_lock);
}
g_mutex_clear (&vdec->com_init_lock);
g_mutex_clear (&vdec->com_deinit_lock);
g_cond_clear (&vdec->com_initialized);
g_cond_clear (&vdec->com_uninitialize);
g_cond_clear (&vdec->com_uninitialized);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
@ -583,9 +587,10 @@ gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps)
GstDshowVideoDecClass *klass =
(GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (vdec);
GstBuffer *extradata = NULL;
gsize extra_size;
const GValue *v = NULL;
guint size = 0;
GstCaps *caps_out;
GstCaps *caps_out = NULL;
AM_MEDIA_TYPE output_mediatype, input_mediatype;
VIDEOINFOHEADER *input_vheader = NULL, *output_vheader = NULL;
CComPtr<IPin> output_pin;
@ -593,6 +598,11 @@ gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps)
IBaseFilter *srcfilter = NULL;
IBaseFilter *sinkfilter = NULL;
const GValue *fps, *par;
GstQuery *query = NULL;
GstBufferPool *pool = NULL;
GstStructure *pool_config = NULL;
guint pool_size, pool_min, pool_max;
GstVideoInfo video_info;
/* read data */
if (!gst_structure_get_int (s, "width", &vdec->width) ||
@ -622,8 +632,10 @@ gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps)
vdec->par_n = vdec->par_d = 1;
}
if ((v = gst_structure_get_value (s, "codec_data")))
if ((v = gst_structure_get_value (s, "codec_data"))) {
extradata = gst_value_get_buffer (v);
extra_size = gst_buffer_get_size(extradata);
}
/* define the input type format */
memset (&input_mediatype, 0, sizeof (AM_MEDIA_TYPE));
@ -634,33 +646,31 @@ gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps)
if (strstr (klass->entry->sinkcaps, "video/mpeg, mpegversion= (int) 1")) {
size =
sizeof (MPEG1VIDEOINFO) + (extradata ? GST_BUFFER_SIZE (extradata) -
1 : 0);
sizeof (MPEG1VIDEOINFO) + (extradata ? extra_size - 1 : 0);
input_vheader = (VIDEOINFOHEADER *)g_malloc0 (size);
input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
if (extradata) {
MPEG1VIDEOINFO *mpeg_info = (MPEG1VIDEOINFO *) input_vheader;
memcpy (mpeg_info->bSequenceHeader,
GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata));
mpeg_info->cbSequenceHeader = GST_BUFFER_SIZE (extradata);
gst_buffer_extract(extradata, 0, mpeg_info->bSequenceHeader, extra_size);
mpeg_info->cbSequenceHeader = extra_size;
}
input_mediatype.formattype = FORMAT_MPEGVideo;
} else {
size =
sizeof (VIDEOINFOHEADER) +
(extradata ? GST_BUFFER_SIZE (extradata) : 0);
sizeof (VIDEOINFOHEADER) + (extradata ? extra_size : 0);
input_vheader = (VIDEOINFOHEADER *)g_malloc0 (size);
input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
if (extradata) { /* Codec data is appended after our header */
memcpy (((guchar *) input_vheader) + sizeof (VIDEOINFOHEADER),
GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata));
input_vheader->bmiHeader.biSize += GST_BUFFER_SIZE (extradata);
gst_buffer_extract(extradata, 0,
((guchar *) input_vheader) + sizeof (VIDEOINFOHEADER), extra_size);
input_vheader->bmiHeader.biSize += extra_size;
}
input_mediatype.formattype = FORMAT_VideoInfo;
}
input_vheader->rcSource.top = input_vheader->rcSource.left = 0;
input_vheader->rcSource.right = vdec->width;
input_vheader->rcSource.bottom = vdec->height;
@ -777,12 +787,52 @@ gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps)
"pixel-aspect-ratio", GST_TYPE_FRACTION, vdec->par_n, vdec->par_d, NULL);
if (!gst_pad_set_caps (vdec->srcpad, caps_out)) {
gst_caps_unref (caps_out);
GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
("Failed to negotiate output"), (NULL));
goto end;
}
gst_caps_unref (caps_out);
/* request or create a buffer pool */
if (vdec->buffer_pool) {
gst_object_unref (vdec->buffer_pool);
}
query = gst_query_new_allocation(caps_out, TRUE);
gst_pad_peer_query(vdec->srcpad, query);
if (gst_query_get_n_allocation_pools (query) > 0) {
gst_query_parse_nth_allocation_pool (query, 0, &pool, &pool_size, &pool_min,
&pool_max);
}
else {
pool = NULL;
pool_size = output_mediatype.lSampleSize;
pool_min = 1;
pool_max = 0;
}
if (pool == NULL) {
pool = gst_video_buffer_pool_new ();
}
if (!pool) {
GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
("Could not create buffer bool"), (NULL));
goto end;
}
pool_config = gst_buffer_pool_get_config (pool);
gst_buffer_pool_config_set_params (pool_config, caps_out, pool_size,
pool_min, pool_max);
gst_buffer_pool_set_config (pool, pool_config);
if (!gst_buffer_pool_set_active (pool, TRUE)) {
GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
("Failed set buffer pool active"), (NULL));
goto end;
}
vdec->buffer_pool = pool;
hres = vdec->mediafilter->Run (-1);
if (hres != S_OK) {
@ -793,6 +843,8 @@ gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps)
ret = TRUE;
end:
if (caps_out)
gst_caps_unref (caps_out);
gst_object_unref (vdec);
if (input_vheader)
g_free (input_vheader);
@ -800,50 +852,47 @@ end:
srcfilter->Release();
if (sinkfilter)
sinkfilter->Release();
if (query)
gst_query_unref(query);
return ret;
}
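The allocation block added above replaces 0.10's gst_pad_alloc_buffer() with 1.0 buffer-pool negotiation: send an ALLOCATION query to the downstream peer, fall back to a private GstVideoBufferPool, configure it, and activate it. A trimmed sketch of that sequence (function name and the default size are placeholders, not the wrapper's API):

#include <gst/video/video.h>

static GstBufferPool *
negotiate_pool (GstPad *srcpad, GstCaps *caps, guint default_size)
{
  GstQuery *query = gst_query_new_allocation (caps, TRUE);
  GstBufferPool *pool = NULL;
  GstStructure *config;
  guint size = default_size, min = 1, max = 0;

  /* ask downstream whether it already offers a pool */
  gst_pad_peer_query (srcpad, query);
  if (gst_query_get_n_allocation_pools (query) > 0)
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
  gst_query_unref (query);

  if (pool == NULL)
    pool = gst_video_buffer_pool_new ();   /* private fallback pool */

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, caps, size, min, max);
  gst_buffer_pool_set_config (pool, config);

  if (!gst_buffer_pool_set_active (pool, TRUE)) {
    gst_object_unref (pool);
    return NULL;
  }
  /* buffers are then taken with gst_buffer_pool_acquire_buffer() */
  return pool;
}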
static gboolean
gst_dshowvideodec_sink_event (GstPad * pad, GstEvent * event)
gst_dshowvideodec_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
gboolean ret = TRUE;
GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_CAPS:
GstCaps *caps;
gst_event_parse_caps(event, &caps);
ret = gst_dshowvideodec_sink_setcaps(pad, caps);
break;
case GST_EVENT_FLUSH_STOP:
gst_dshowvideodec_flush (vdec);
ret = gst_pad_event_default (pad, event);
ret = gst_pad_event_default (pad, parent, event);
break;
case GST_EVENT_NEWSEGMENT:
case GST_EVENT_SEGMENT:
{
GstFormat format;
gdouble rate;
gint64 start, stop, time;
gboolean update;
const GstSegment *segment;
gst_event_parse_new_segment (event, &update, &rate, &format, &start,
&stop, &time);
gst_event_parse_segment (event, &segment);
/* save the new segment in our local current segment */
gst_segment_set_newsegment (vdec->segment, update, rate, format, start,
stop, time);
gst_segment_copy_into(segment, vdec->segment);
GST_CAT_DEBUG_OBJECT (dshowvideodec_debug, vdec,
"new segment received => start=%" GST_TIME_FORMAT " stop=%"
GST_TIME_FORMAT, GST_TIME_ARGS (vdec->segment->start),
GST_TIME_ARGS (vdec->segment->stop));
if (update) {
GST_CAT_DEBUG_OBJECT (dshowvideodec_debug, vdec,
"closing current segment flushing..");
gst_dshowvideodec_flush (vdec);
}
ret = gst_pad_event_default (pad, event);
ret = gst_pad_event_default (pad, parent, event);
break;
}
default:
ret = gst_pad_event_default (pad, event);
ret = gst_pad_event_default (pad, parent, event);
break;
}
@ -853,11 +902,12 @@ gst_dshowvideodec_sink_event (GstPad * pad, GstEvent * event)
}
static GstFlowReturn
gst_dshowvideodec_chain (GstPad * pad, GstBuffer * buffer)
gst_dshowvideodec_chain (GstPad * pad, GstObject *parent, GstBuffer * buffer)
{
GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
bool discont = FALSE;
GstClockTime stop;
GstMapInfo map;
if (!vdec->setup) {
/* we are not setup */
@ -866,7 +916,7 @@ gst_dshowvideodec_chain (GstPad * pad, GstBuffer * buffer)
goto beach;
}
if (GST_FLOW_IS_FATAL (vdec->last_ret)) {
if (vdec->last_ret != GST_FLOW_OK) {
GST_DEBUG_OBJECT (vdec, "last decoding iteration generated a fatal error "
"%s", gst_flow_get_name (vdec->last_ret));
goto beach;
@ -882,7 +932,7 @@ gst_dshowvideodec_chain (GstPad * pad, GstBuffer * buffer)
GST_CAT_LOG_OBJECT (dshowvideodec_debug, vdec,
"chain (size %d)=> pts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT,
GST_BUFFER_SIZE (buffer), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
gst_buffer_get_size (buffer), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
GST_TIME_ARGS (stop));
/* if the incoming buffer has discont flag set => flush decoder data */
@ -894,10 +944,12 @@ gst_dshowvideodec_chain (GstPad * pad, GstBuffer * buffer)
discont = TRUE;
}
gst_buffer_map(buffer, &map, GST_MAP_READ);
/* push the buffer to the directshow decoder */
vdec->fakesrc->GetOutputPin()->PushBuffer(
GST_BUFFER_DATA (buffer), GST_BUFFER_TIMESTAMP (buffer), stop,
GST_BUFFER_SIZE (buffer), discont);
map.data, GST_BUFFER_TIMESTAMP (buffer), stop,
map.size, discont);
gst_buffer_unmap(buffer, &map);
beach:
gst_buffer_unref (buffer);


@ -113,11 +113,13 @@ struct _GstDshowVideoDec
gboolean setup;
gboolean comInitialized;
GMutex *com_init_lock;
GMutex *com_deinit_lock;
GCond *com_initialized;
GCond *com_uninitialize;
GCond *com_uninitialized;
GMutex com_init_lock;
GMutex com_deinit_lock;
GCond com_initialized;
GCond com_uninitialize;
GCond com_uninitialized;
GstBufferPool *buffer_pool;
};
struct _GstDshowVideoDecClass