dshowsrcwrapper: Port to 1.0

https://bugzilla.gnome.org/show_bug.cgi?id=732283
This commit is contained in:
Jerome Laheurte 2014-09-17 12:24:39 +02:00 committed by Sebastian Dröge
parent b041deb2fa
commit 4f60ecdd98
8 changed files with 330 additions and 466 deletions

View file

@ -0,0 +1,42 @@
== Dependencies ==
* CMake (tested with 2.8)
* Microsoft SDK
(http://www.microsoft.com/en-us/download/details.aspx?id=8279). Don't
use any version after 7.1; later SDKs don't include the samples. You can
uncheck the .NET stuff.
* Visual Studio, in theory any version will do; tested with 2013
Express
(http://www.visualstudio.com/en-us/products/visual-studio-express-vs.aspx).
== Build ==
First build the SDK "baseclasses" sample; they should have been
installed in <SDK>/Samples/multimedia/directshow/baseclasses. Just
open the SLN and build both Debug_MBCS and Release_MBCS.
Install the GStreamer development files (for version 1.4.1):
http://gstreamer.freedesktop.org/data/pkg/windows/1.4.1/. I use the
32-bit version even on a 64-bit system because CMake 2.8 does not seem
to be able to generate a 64-bit configuration yet. It's probably
doable with CMake 3.x.
Then go to sys/dshowsrcwrapper and use CMake to generate the project
file for your chosen version of Visual Studio. There are two influential
variables:
* GST_INSTALL_BASE: your GStreamer installation directory
(default C:\gstreamer\1.0\x86)
* SDK_INSTALL_BASE: the Windows SDK installation path (default
C:\Program Files\Microsoft SDKs\Windows\v7.1, matching the
CMakeLists.txt default).
mkdir build
cd build
cmake -G "Visual Studio 12" ..
Open the SLN and build the project. Copy the DLL to the GStreamer
plugins directory.

View file

@ -0,0 +1,54 @@
# CMake build for the dshowsrcwrapper GStreamer plugin (DirectShow
# audio/video capture sources for Windows). The Windows SDK DirectShow
# "baseclasses" sample must be built first (Debug_MBCS and Release_MBCS).
CMAKE_MINIMUM_REQUIRED (VERSION 2.8)
PROJECT(dshowsrcwrapper)
# User-overridable install locations; pass -DGST_INSTALL_BASE=... /
# -DSDK_INSTALL_BASE=... on the cmake command line to change them.
SET(GST_INSTALL_BASE "C:\\gstreamer\\1.0\\x86" CACHE PATH "Path to the GStreamer install base")
SET(SDK_INSTALL_BASE "C:\\Program Files\\Microsoft SDKs\\Windows\\v7.1" CACHE PATH "Path to the Windows SDK root")
# The sources are compiled with HAVE_CONFIG_H; config.h is expected to
# come from ../../win32/common (added to the include path below).
SET_PROPERTY(
DIRECTORY
APPEND PROPERTY COMPILE_DEFINITIONS
HAVE_CONFIG_H
)
# GStreamer/GLib headers plus the DirectShow baseclasses sample headers.
INCLUDE_DIRECTORIES(
${GST_INSTALL_BASE}/include
${GST_INSTALL_BASE}/include/gstreamer-1.0
${GST_INSTALL_BASE}/include/glib-2.0
${GST_INSTALL_BASE}/lib/glib-2.0/include
${SDK_INSTALL_BASE}/Samples/multimedia/directshow/baseclasses
${PROJECT_SOURCE_DIR}/../../win32/common
)
# Both baseclasses output directories are listed; the debug/optimized
# keywords in TARGET_LINK_LIBRARIES below pick strmbasd vs strmbase
# per build configuration.
LINK_DIRECTORIES(
${GST_INSTALL_BASE}/lib
${SDK_INSTALL_BASE}/Samples/multimedia/directshow/baseclasses/Release_MBCS
${SDK_INSTALL_BASE}/Samples/multimedia/directshow/baseclasses/Debug_MBCS
)
# NOTE(review): the target carries an explicit "lib" prefix — presumably so
# the DLL is named libgstdshowsrcwrapper.dll as GStreamer expects for
# plugins on Windows; verify if the plugin scanner does not pick it up.
ADD_LIBRARY(libgstdshowsrcwrapper SHARED
gstdshow.cpp
gstdshow.h
gstdshowfakesink.cpp
gstdshowfakesink.h
gstdshowaudiosrc.cpp
gstdshowaudiosrc.h
gstdshowvideosrc.cpp
gstdshowvideosrc.h
gstdshowsrcwrapper.cpp
)
TARGET_LINK_LIBRARIES(libgstdshowsrcwrapper
gstreamer-1.0
gstaudio-1.0
gstvideo-1.0
gstbase-1.0
glib-2.0
gobject-2.0
# DirectShow baseclasses: debug config links strmbasd, release strmbase.
debug strmbasd
optimized strmbase
rpcrt4
uuid
winmm
)

View file

@ -19,6 +19,8 @@
* Boston, MA 02110-1301, USA.
*/
#include <gst/video/video-format.h>
#include "gstdshow.h"
#include "gstdshowfakesink.h"
@ -424,16 +426,16 @@ gst_dshow_new_video_caps (GstVideoFormat video_format, const gchar * name,
/* raw video format */
switch (video_format) {
case GST_VIDEO_FORMAT_BGR:
video_caps = gst_caps_from_string (GST_VIDEO_CAPS_BGR);
video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("BGR"));
break;
case GST_VIDEO_FORMAT_I420:
video_caps = gst_caps_from_string (GST_VIDEO_CAPS_YUV ("I420"));
video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("I420"));
break;
case GST_VIDEO_FORMAT_YUY2:
video_caps = gst_caps_from_string (GST_VIDEO_CAPS_YUV ("YUY2"));
video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("YUY2"));
break;
case GST_VIDEO_FORMAT_UYVY:
video_caps = gst_caps_from_string (GST_VIDEO_CAPS_YUV ("UYVY"));
video_caps = gst_caps_from_string (GST_VIDEO_CAPS_MAKE ("UYVY"));
break;
default:
break;
@ -444,7 +446,7 @@ gst_dshow_new_video_caps (GstVideoFormat video_format, const gchar * name,
if (g_ascii_strncasecmp (name, "video/x-dv, systemstream=FALSE", 31) == 0) {
video_caps = gst_caps_new_simple ("video/x-dv",
"systemstream", G_TYPE_BOOLEAN, FALSE,
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'v', 's', 'd'),
"format", G_TYPE_STRING, "dvsd",
NULL);
} else if (g_ascii_strncasecmp (name, "video/x-dv, systemstream=TRUE", 31) == 0) {
video_caps = gst_caps_new_simple ("video/x-dv",

View file

@ -31,23 +31,18 @@ GST_DEBUG_CATEGORY_STATIC (dshowaudiosrc_debug);
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("audio/x-raw-int, "
"endianness = (int) { " G_STRINGIFY (G_BYTE_ORDER) " }, "
"signed = (boolean) { TRUE, FALSE }, "
"width = (int) 16, "
"depth = (int) 16, "
"rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]; "
"audio/x-raw-int, "
"signed = (boolean) { TRUE, FALSE }, "
"width = (int) 8, "
"depth = (int) 8, "
"rate = (int) [ 1, MAX ], " "channels = (int) [ 1, 2 ]")
GST_STATIC_CAPS ("audio/x-raw, "
"format = (string){ "
GST_AUDIO_NE (S16) ", "
GST_AUDIO_NE (U16) ", "
GST_AUDIO_NE (S8) ", "
GST_AUDIO_NE (U8)
" }, "
"rate = " GST_AUDIO_RATE_RANGE ", "
"channels = (int) [ 1, 2 ]")
);
static void gst_dshowaudiosrc_init_interfaces (GType type);
GST_BOILERPLATE_FULL (GstDshowAudioSrc, gst_dshowaudiosrc, GstAudioSrc,
GST_TYPE_AUDIO_SRC, gst_dshowaudiosrc_init_interfaces);
G_DEFINE_TYPE(GstDshowAudioSrc, gst_dshowaudiosrc, GST_TYPE_AUDIO_SRC);
enum
{
@ -56,32 +51,23 @@ enum
PROP_DEVICE_NAME
};
static void gst_dshowaudiosrc_probe_interface_init (GstPropertyProbeInterface *
iface);
static const GList *gst_dshowaudiosrc_probe_get_properties (GstPropertyProbe *
probe);
static GValueArray *gst_dshowaudiosrc_probe_get_values (GstPropertyProbe *
probe, guint prop_id, const GParamSpec * pspec);
static GValueArray *gst_dshowaudiosrc_get_device_name_values (GstDshowAudioSrc *
src);
static void gst_dshowaudiosrc_dispose (GObject * gobject);
static void gst_dshowaudiosrc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_dshowaudiosrc_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static GstCaps *gst_dshowaudiosrc_get_caps (GstBaseSrc * src);
static GstCaps *gst_dshowaudiosrc_get_caps (GstBaseSrc * src, GstCaps * filter);
static GstStateChangeReturn gst_dshowaudiosrc_change_state (GstElement *
element, GstStateChange transition);
static gboolean gst_dshowaudiosrc_open (GstAudioSrc * asrc);
static gboolean gst_dshowaudiosrc_prepare (GstAudioSrc * asrc,
GstRingBufferSpec * spec);
GstAudioRingBufferSpec * spec);
static gboolean gst_dshowaudiosrc_unprepare (GstAudioSrc * asrc);
static gboolean gst_dshowaudiosrc_close (GstAudioSrc * asrc);
static guint gst_dshowaudiosrc_read (GstAudioSrc * asrc, gpointer data,
guint length);
guint length, GstClockTime *timestamp);
static guint gst_dshowaudiosrc_delay (GstAudioSrc * asrc);
static void gst_dshowaudiosrc_reset (GstAudioSrc * asrc);
@ -91,42 +77,6 @@ static GstCaps *gst_dshowaudiosrc_getcaps_from_streamcaps (GstDshowAudioSrc *
static gboolean gst_dshowaudiosrc_push_buffer (guint8 * buffer, guint size,
gpointer src_object, GstClockTime duration);
static void
gst_dshowaudiosrc_init_interfaces (GType type)
{
static const GInterfaceInfo dshowaudiosrc_info = {
(GInterfaceInitFunc) gst_dshowaudiosrc_probe_interface_init,
NULL,
NULL,
};
g_type_add_interface_static (type,
GST_TYPE_PROPERTY_PROBE, &dshowaudiosrc_info);
}
static void
gst_dshowaudiosrc_probe_interface_init (GstPropertyProbeInterface * iface)
{
iface->get_properties = gst_dshowaudiosrc_probe_get_properties;
/* iface->needs_probe = gst_dshowaudiosrc_probe_needs_probe;
iface->probe_property = gst_dshowaudiosrc_probe_probe_property;*/
iface->get_values = gst_dshowaudiosrc_probe_get_values;
}
static void
gst_dshowaudiosrc_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_template));
gst_element_class_set_static_metadata (element_class,
"Directshow audio capture source", "Source/Audio",
"Receive data from a directshow audio capture graph",
"Sebastien Moutte <sebastien@moutte.net>");
}
static void
gst_dshowaudiosrc_class_init (GstDshowAudioSrcClass * klass)
{
@ -146,11 +96,11 @@ gst_dshowaudiosrc_class_init (GstDshowAudioSrcClass * klass)
gobject_class->get_property =
GST_DEBUG_FUNCPTR (gst_dshowaudiosrc_get_property);
gstbasesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_dshowaudiosrc_get_caps);
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_dshowaudiosrc_change_state);
gstbasesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_dshowaudiosrc_get_caps);
gstaudiosrc_class->open = GST_DEBUG_FUNCPTR (gst_dshowaudiosrc_open);
gstaudiosrc_class->prepare = GST_DEBUG_FUNCPTR (gst_dshowaudiosrc_prepare);
gstaudiosrc_class->unprepare =
@ -172,12 +122,20 @@ gst_dshowaudiosrc_class_init (GstDshowAudioSrcClass * klass)
"Human-readable name of the sound device", NULL,
static_cast < GParamFlags > (G_PARAM_READWRITE)));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&src_template));
gst_element_class_set_static_metadata (gstelement_class,
"Directshow audio capture source", "Source/Audio",
"Receive data from a directshow audio capture graph",
"Sebastien Moutte <sebastien@moutte.net>");
GST_DEBUG_CATEGORY_INIT (dshowaudiosrc_debug, "dshowaudiosrc", 0,
"Directshow audio source");
}
static void
gst_dshowaudiosrc_init (GstDshowAudioSrc * src, GstDshowAudioSrcClass * klass)
gst_dshowaudiosrc_init (GstDshowAudioSrc * src)
{
src->device = NULL;
src->device_name = NULL;
@ -189,7 +147,7 @@ gst_dshowaudiosrc_init (GstDshowAudioSrc * src, GstDshowAudioSrcClass * klass)
src->pins_mediatypes = NULL;
src->gbarray = g_byte_array_new ();
src->gbarray_lock = g_mutex_new ();
g_mutex_init(&src->gbarray_lock);
src->is_running = FALSE;
@ -226,10 +184,7 @@ gst_dshowaudiosrc_dispose (GObject * gobject)
src->gbarray = NULL;
}
if (src->gbarray_lock) {
g_mutex_free (src->gbarray_lock);
src->gbarray_lock = NULL;
}
g_mutex_clear(&src->gbarray_lock);
/* clean dshow */
if (src->audio_cap_filter)
@ -237,113 +192,10 @@ gst_dshowaudiosrc_dispose (GObject * gobject)
CoUninitialize ();
G_OBJECT_CLASS (parent_class)->dispose (gobject);
G_OBJECT_CLASS (gst_dshowaudiosrc_parent_class)->dispose (gobject);
}
static const GList *
gst_dshowaudiosrc_probe_get_properties (GstPropertyProbe * probe)
{
GObjectClass *klass = G_OBJECT_GET_CLASS (probe);
static GList *props = NULL;
if (!props) {
GParamSpec *pspec;
pspec = g_object_class_find_property (klass, "device-name");
props = g_list_append (props, pspec);
}
return props;
}
static GValueArray *
gst_dshowaudiosrc_get_device_name_values (GstDshowAudioSrc * src)
{
GValueArray *array = g_value_array_new (0);
ICreateDevEnum *devices_enum = NULL;
IEnumMoniker *moniker_enum = NULL;
IMoniker *moniker = NULL;
HRESULT hres = S_FALSE;
ULONG fetched;
hres = CoCreateInstance (CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
IID_ICreateDevEnum, (LPVOID *) & devices_enum);
if (hres != S_OK) {
GST_ERROR
("Can't create an instance of the system device enumerator (error=0x%x)",
hres);
array = NULL;
goto clean;
}
hres = devices_enum->CreateClassEnumerator (CLSID_AudioInputDeviceCategory,
&moniker_enum, 0);
if (hres != S_OK || !moniker_enum) {
GST_ERROR ("Can't get enumeration of audio devices (error=0x%x)", hres);
array = NULL;
goto clean;
}
moniker_enum->Reset ();
while (hres = moniker_enum->Next (1, &moniker, &fetched), hres == S_OK) {
IPropertyBag *property_bag = NULL;
hres = moniker->BindToStorage (NULL, NULL, IID_IPropertyBag,
(LPVOID *) & property_bag);
if (SUCCEEDED (hres) && property_bag) {
VARIANT varFriendlyName;
VariantInit (&varFriendlyName);
hres = property_bag->Read (L"FriendlyName", &varFriendlyName, NULL);
if (hres == S_OK && varFriendlyName.bstrVal) {
gchar *friendly_name =
g_utf16_to_utf8 ((const gunichar2 *) varFriendlyName.bstrVal,
wcslen (varFriendlyName.bstrVal), NULL, NULL, NULL);
GValue value = { 0 };
g_value_init (&value, G_TYPE_STRING);
g_value_set_string (&value, friendly_name);
g_value_array_append (array, &value);
g_value_unset (&value);
g_free (friendly_name);
SysFreeString (varFriendlyName.bstrVal);
}
property_bag->Release ();
}
moniker->Release ();
}
clean:
if (moniker_enum)
moniker_enum->Release ();
if (devices_enum)
devices_enum->Release ();
return array;
}
static GValueArray *
gst_dshowaudiosrc_probe_get_values (GstPropertyProbe * probe,
guint prop_id, const GParamSpec * pspec)
{
GstDshowAudioSrc *src = GST_DSHOWAUDIOSRC (probe);
GValueArray *array = NULL;
switch (prop_id) {
case PROP_DEVICE_NAME:
array = gst_dshowaudiosrc_get_device_name_values (src);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (probe, prop_id, pspec);
break;
}
return array;
}
static void
gst_dshowaudiosrc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
@ -387,7 +239,7 @@ gst_dshowaudiosrc_get_property (GObject * object, guint prop_id,
}
static GstCaps *
gst_dshowaudiosrc_get_caps (GstBaseSrc * basesrc)
gst_dshowaudiosrc_get_caps (GstBaseSrc * basesrc, GstCaps * filter)
{
HRESULT hres = S_OK;
IBindCtx *lpbc = NULL;
@ -474,7 +326,15 @@ gst_dshowaudiosrc_get_caps (GstBaseSrc * basesrc)
}
if (src->caps) {
return gst_caps_ref (src->caps);
GstCaps *caps;
if (filter) {
caps = gst_caps_intersect_full (filter, src->caps, GST_CAPS_INTERSECT_FIRST);
} else {
caps = gst_caps_ref (src->caps);
}
return caps;
}
return NULL;
@ -492,14 +352,14 @@ gst_dshowaudiosrc_change_state (GstElement * element, GstStateChange transition)
case GST_STATE_CHANGE_READY_TO_PAUSED:
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
if (src->media_filter)
if (src->media_filter) {
src->is_running = TRUE;
hres = src->media_filter->Run (0);
}
if (hres != S_OK) {
GST_ERROR ("Can't RUN the directshow capture graph (error=0x%x)", hres);
src->is_running = FALSE;
return GST_STATE_CHANGE_FAILURE;
} else {
src->is_running = TRUE;
}
break;
case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
@ -521,7 +381,7 @@ gst_dshowaudiosrc_change_state (GstElement * element, GstStateChange transition)
break;
}
return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
return GST_ELEMENT_CLASS(gst_dshowaudiosrc_parent_class)->change_state(element, transition);
}
static gboolean
@ -587,12 +447,25 @@ error:
}
static gboolean
gst_dshowaudiosrc_prepare (GstAudioSrc * asrc, GstRingBufferSpec * spec)
gst_dshowaudiosrc_prepare (GstAudioSrc * asrc, GstAudioRingBufferSpec * spec)
{
HRESULT hres;
IPin *input_pin = NULL;
GstDshowAudioSrc *src = GST_DSHOWAUDIOSRC (asrc);
/* In 1.0, prepare() seems to be called in the PLAYING state. Most
of the time you can't do much on a running graph. */
gboolean was_running = src->is_running;
if (was_running) {
HRESULT hres = src->media_filter->Stop ();
if (hres != S_OK) {
GST_ERROR("Can't STOP the directshow capture graph for preparing (error=0x%x)", hres);
return FALSE;
}
src->is_running = FALSE;
}
/* search the negociated caps in our caps list to get its index and the corresponding mediatype */
if (gst_caps_is_subset (spec->caps, src->caps)) {
guint i = 0;
@ -626,7 +499,7 @@ gst_dshowaudiosrc_prepare (GstAudioSrc * asrc, GstRingBufferSpec * spec)
goto error;
}
spec->segsize = (gint) (spec->bytes_per_sample * spec->rate * spec->latency_time /
spec->segsize = (gint) (spec->info.bpf * spec->info.rate * spec->latency_time /
GST_MSECOND);
spec->segtotal = (gint) ((gfloat) spec->buffer_time /
(gfloat) spec->latency_time + 0.5);
@ -634,7 +507,7 @@ gst_dshowaudiosrc_prepare (GstAudioSrc * asrc, GstRingBufferSpec * spec)
spec->segsize))
{
GST_WARNING ("Could not change capture latency");
spec->segsize = spec->rate * spec->channels;
spec->segsize = spec->info.rate * spec->info.channels;
spec->segtotal = 2;
};
GST_INFO ("Configuring with segsize:%d segtotal:%d", spec->segsize, spec->segtotal);
@ -653,9 +526,20 @@ gst_dshowaudiosrc_prepare (GstAudioSrc * asrc, GstRingBufferSpec * spec)
}
}
if (was_running) {
HRESULT hres = src->media_filter->Run (0);
if (hres != S_OK) {
GST_ERROR("Can't RUN the directshow capture graph after prepare (error=0x%x)", hres);
return FALSE;
}
src->is_running = TRUE;
}
return TRUE;
error:
/* Don't restart the graph, we're out anyway. */
return FALSE;
}
@ -711,7 +595,7 @@ gst_dshowaudiosrc_close (GstAudioSrc * asrc)
}
static guint
gst_dshowaudiosrc_read (GstAudioSrc * asrc, gpointer data, guint length)
gst_dshowaudiosrc_read (GstAudioSrc * asrc, gpointer data, guint length, GstClockTime *timestamp)
{
GstDshowAudioSrc *src = GST_DSHOWAUDIOSRC (asrc);
guint ret = 0;
@ -722,15 +606,15 @@ gst_dshowaudiosrc_read (GstAudioSrc * asrc, gpointer data, guint length)
if (src->gbarray) {
test:
if (src->gbarray->len >= length) {
g_mutex_lock (src->gbarray_lock);
g_mutex_lock (&src->gbarray_lock);
memcpy (data, src->gbarray->data + (src->gbarray->len - length), length);
g_byte_array_remove_range (src->gbarray, src->gbarray->len - length,
length);
ret = length;
g_mutex_unlock (src->gbarray_lock);
g_mutex_unlock (&src->gbarray_lock);
} else {
if (src->is_running) {
Sleep (GST_BASE_AUDIO_SRC(src)->ringbuffer->spec.latency_time /
Sleep (GST_AUDIO_BASE_SRC(src)->ringbuffer->spec.latency_time /
GST_MSECOND / 10);
goto test;
}
@ -747,11 +631,11 @@ gst_dshowaudiosrc_delay (GstAudioSrc * asrc)
guint ret = 0;
if (src->gbarray) {
g_mutex_lock (src->gbarray_lock);
g_mutex_lock (&src->gbarray_lock);
if (src->gbarray->len) {
ret = src->gbarray->len / 4;
}
g_mutex_unlock (src->gbarray_lock);
g_mutex_unlock (&src->gbarray_lock);
}
return ret;
@ -762,11 +646,11 @@ gst_dshowaudiosrc_reset (GstAudioSrc * asrc)
{
GstDshowAudioSrc *src = GST_DSHOWAUDIOSRC (asrc);
g_mutex_lock (src->gbarray_lock);
g_mutex_lock (&src->gbarray_lock);
GST_DEBUG ("byte array size= %d", src->gbarray->len);
if (src->gbarray->len > 0)
g_byte_array_remove_range (src->gbarray, 0, src->gbarray->len);
g_mutex_unlock (src->gbarray_lock);
g_mutex_unlock (&src->gbarray_lock);
}
static GstCaps *
@ -804,15 +688,29 @@ gst_dshowaudiosrc_getcaps_from_streamcaps (GstDshowAudioSrc * src, IPin * pin,
if (gst_dshow_check_mediatype (pin_mediatype->mediatype, MEDIASUBTYPE_PCM,
FORMAT_WaveFormatEx)) {
GstAudioFormat format = GST_AUDIO_FORMAT_UNKNOWN;
WAVEFORMATEX *wavformat =
(WAVEFORMATEX *) pin_mediatype->mediatype->pbFormat;
mediacaps =
gst_caps_new_simple ("audio/x-raw-int", "width", G_TYPE_INT,
wavformat->wBitsPerSample, "depth", G_TYPE_INT,
wavformat->wBitsPerSample, "endianness", G_TYPE_INT, G_BYTE_ORDER,
"signed", G_TYPE_BOOLEAN, TRUE, "channels", G_TYPE_INT,
wavformat->nChannels, "rate", G_TYPE_INT, wavformat->nSamplesPerSec,
NULL);
switch (wavformat->wFormatTag) {
case WAVE_FORMAT_PCM:
format = gst_audio_format_build_integer (TRUE, G_BYTE_ORDER, wavformat->wBitsPerSample, wavformat->wBitsPerSample);
break;
default:
break;
}
if (format != GST_AUDIO_FORMAT_UNKNOWN) {
GstAudioInfo info;
gst_audio_info_init(&info);
gst_audio_info_set_format(&info,
format,
wavformat->nSamplesPerSec,
wavformat->nChannels,
NULL);
mediacaps = gst_audio_info_to_caps(&info);
}
if (mediacaps) {
src->pins_mediatypes =
@ -847,9 +745,9 @@ gst_dshowaudiosrc_push_buffer (guint8 * buffer, guint size, gpointer src_object,
return FALSE;
}
g_mutex_lock (src->gbarray_lock);
g_mutex_lock (&src->gbarray_lock);
g_byte_array_prepend (src->gbarray, buffer, size);
g_mutex_unlock (src->gbarray_lock);
g_mutex_unlock (&src->gbarray_lock);
return TRUE;
}

View file

@ -25,7 +25,6 @@
#include <gst/gst.h>
#include <gst/audio/gstaudiosrc.h>
#include <gst/interfaces/propertyprobe.h>
#include "gstdshow.h"
#include "gstdshowfakesink.h"
@ -67,7 +66,7 @@ struct _GstDshowAudioSrc
/* bytes array */
GByteArray *gbarray;
GMutex *gbarray_lock;
GMutex gbarray_lock;
gboolean is_running;
};

View file

@ -25,7 +25,7 @@ CDshowFakeSink::CDshowFakeSink ():
m_hres (S_OK),
m_callback (NULL),
m_data (NULL),
CBaseRenderer (CLSID_DshowFakeSink, _T("DshowFakeSink"), NULL, &m_hres)
CBaseRenderer (CLSID_DshowFakeSink, TEXT("DshowFakeSink"), NULL, &m_hres)
{
}

View file

@ -34,22 +34,24 @@ GST_DEBUG_CATEGORY_STATIC (dshowvideosrc_debug);
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_BGR ";"
GST_VIDEO_CAPS_YUV ("{ I420 }") ";"
GST_VIDEO_CAPS_YUV ("{ YUY2 }") ";"
GST_VIDEO_CAPS_YUV ("{ UYVY }") ";"
"video/x-dv,"
"systemstream = (boolean) FALSE,"
"width = (int) [ 1, MAX ],"
"height = (int) [ 1, MAX ],"
"framerate = (fraction) [ 0, MAX ],"
"format = (fourcc) dvsd;" "video/x-dv," "systemstream = (boolean) TRUE")
GST_STATIC_CAPS ("video/x-raw, format=(string) { "
GST_VIDEO_NE(BGR) ", "
GST_VIDEO_NE(I420) ", "
GST_VIDEO_NE(YUY2) ", "
GST_VIDEO_NE(UYVY) " }, "
"width = " GST_VIDEO_SIZE_RANGE ", "
"height = " GST_VIDEO_SIZE_RANGE ", "
"framerate = " GST_VIDEO_FPS_RANGE "; "
"video/x-dv, "
"format= (string) DVSD, "
"width = " GST_VIDEO_SIZE_RANGE ", "
"height = " GST_VIDEO_SIZE_RANGE ", "
"framerate = " GST_VIDEO_FPS_RANGE ", "
"systemstream = (boolean) { TRUE, FALSE }")
);
static void gst_dshowvideosrc_init_interfaces (GType type);
GST_BOILERPLATE_FULL (GstDshowVideoSrc, gst_dshowvideosrc, GstPushSrc,
GST_TYPE_PUSH_SRC, gst_dshowvideosrc_init_interfaces);
G_DEFINE_TYPE (GstDshowVideoSrc, gst_dshowvideosrc, GST_TYPE_PUSH_SRC)
enum
{
@ -58,26 +60,12 @@ enum
PROP_DEVICE_NAME
};
static void gst_dshowvideosrc_probe_interface_init (GstPropertyProbeInterface *
iface);
static const GList *gst_dshowvideosrc_probe_get_properties (GstPropertyProbe *
probe);
static GValueArray *gst_dshowvideosrc_probe_get_values (GstPropertyProbe *
probe, guint prop_id, const GParamSpec * pspec);
static GValueArray *gst_dshowvideosrc_get_device_name_values (GstDshowVideoSrc *
src);
static gboolean gst_dshowvideosrc_probe_needs_probe (GstPropertyProbe * probe,
guint prop_id, const GParamSpec * pspec);
static void gst_dshowvideosrc_probe_probe_property (GstPropertyProbe * probe,
guint prop_id, const GParamSpec * pspec);
static void gst_dshowvideosrc_dispose (GObject * gobject);
static void gst_dshowvideosrc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_dshowvideosrc_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static GstCaps *gst_dshowvideosrc_get_caps (GstBaseSrc * src);
static GstStateChangeReturn gst_dshowvideosrc_change_state (GstElement *
element, GstStateChange transition);
@ -87,8 +75,8 @@ static gboolean gst_dshowvideosrc_stop (GstBaseSrc * bsrc);
static gboolean gst_dshowvideosrc_unlock (GstBaseSrc * bsrc);
static gboolean gst_dshowvideosrc_unlock_stop (GstBaseSrc * bsrc);
static gboolean gst_dshowvideosrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps);
static GstCaps *gst_dshowvideosrc_get_caps (GstBaseSrc * bsrc);
static void gst_dshowvideosrc_src_fixate (GstBaseSrc * bsrc, GstCaps * caps);
static GstCaps *gst_dshowvideosrc_get_caps (GstBaseSrc * bsrc, GstCaps * filter);
static GstCaps *gst_dshowvideosrc_src_fixate (GstBaseSrc * bsrc, GstCaps * caps);
static GstFlowReturn gst_dshowvideosrc_create (GstPushSrc * psrc,
GstBuffer ** buf);
@ -100,42 +88,6 @@ static GstCaps *gst_dshowvideosrc_getcaps_from_enum_mediatypes (GstDshowVideoSrc
static gboolean gst_dshowvideosrc_push_buffer (guint8 * buffer, guint size,
gpointer src_object, GstClockTime duration);
static void
gst_dshowvideosrc_init_interfaces (GType type)
{
static const GInterfaceInfo dshowvideosrc_info = {
(GInterfaceInitFunc) gst_dshowvideosrc_probe_interface_init,
NULL,
NULL,
};
g_type_add_interface_static (type,
GST_TYPE_PROPERTY_PROBE, &dshowvideosrc_info);
}
static void
gst_dshowvideosrc_probe_interface_init (GstPropertyProbeInterface * iface)
{
iface->get_properties = gst_dshowvideosrc_probe_get_properties;
iface->needs_probe = gst_dshowvideosrc_probe_needs_probe;
iface->probe_property = gst_dshowvideosrc_probe_probe_property;
iface->get_values = gst_dshowvideosrc_probe_get_values;
}
static void
gst_dshowvideosrc_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_template));
gst_element_class_set_static_metadata (element_class,
"DirectShow video capture source", "Source/Video",
"Receive data from a directshow video capture graph",
"Sebastien Moutte <sebastien@moutte.net>");
}
static void
gst_dshowvideosrc_class_init (GstDshowVideoSrcClass * klass)
{
@ -158,8 +110,8 @@ gst_dshowvideosrc_class_init (GstDshowVideoSrcClass * klass)
gstelement_class->change_state =
GST_DEBUG_FUNCPTR (gst_dshowvideosrc_change_state);
gstbasesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_dshowvideosrc_get_caps);
gstbasesrc_class->set_caps = GST_DEBUG_FUNCPTR (gst_dshowvideosrc_set_caps);
gstbasesrc_class->get_caps = GST_DEBUG_FUNCPTR (gst_dshowvideosrc_get_caps);
gstbasesrc_class->fixate = GST_DEBUG_FUNCPTR (gst_dshowvideosrc_src_fixate);
gstbasesrc_class->start = GST_DEBUG_FUNCPTR (gst_dshowvideosrc_start);
gstbasesrc_class->stop = GST_DEBUG_FUNCPTR (gst_dshowvideosrc_stop);
@ -181,13 +133,21 @@ gst_dshowvideosrc_class_init (GstDshowVideoSrcClass * klass)
"Human-readable name of the sound device", NULL,
static_cast < GParamFlags > (G_PARAM_READWRITE)));
gst_element_class_add_pad_template (gstelement_class,
gst_static_pad_template_get (&src_template));
gst_element_class_set_static_metadata (gstelement_class,
"DirectShow video capture source", "Source/Video",
"Receive data from a directshow video capture graph",
"Sebastien Moutte <sebastien@moutte.net>");
GST_DEBUG_CATEGORY_INIT (dshowvideosrc_debug, "dshowvideosrc", 0,
"Directshow video source");
}
static void
gst_dshowvideosrc_init (GstDshowVideoSrc * src, GstDshowVideoSrcClass * klass)
gst_dshowvideosrc_init (GstDshowVideoSrc * src)
{
src->device = NULL;
src->device_name = NULL;
@ -198,6 +158,7 @@ gst_dshowvideosrc_init (GstDshowVideoSrc * src, GstDshowVideoSrcClass * klass)
src->caps = NULL;
src->pins_mediatypes = NULL;
src->is_rgb = FALSE;
src->is_running = FALSE;
/*added for analog input*/
src->graph_builder = NULL;
@ -205,8 +166,8 @@ gst_dshowvideosrc_init (GstDshowVideoSrc * src, GstDshowVideoSrcClass * klass)
src->pVC = NULL;
src->pVSC = NULL;
src->buffer_cond = g_cond_new ();
src->buffer_mutex = g_mutex_new ();
g_cond_init(&src->buffer_cond);
g_mutex_init(&src->buffer_mutex);
src->buffer = NULL;
src->stop_requested = FALSE;
@ -215,7 +176,7 @@ gst_dshowvideosrc_init (GstDshowVideoSrc * src, GstDshowVideoSrcClass * klass)
gst_base_src_set_live (GST_BASE_SRC (src), TRUE);
}
static void
static GstCaps *
gst_dshowvideosrc_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
{
/* If there is no desired video size, set default video size to device preffered video size */
@ -247,6 +208,10 @@ gst_dshowvideosrc_src_fixate (GstBaseSrc * bsrc, GstCaps * caps)
pin_mediatype->defaultFPS, 1);
}
}
caps = GST_BASE_SRC_CLASS (gst_dshowvideosrc_parent_class)->fixate(bsrc, caps);
return caps;
}
static void
@ -280,156 +245,12 @@ gst_dshowvideosrc_dispose (GObject * gobject)
src->video_cap_filter = NULL;
}
if (src->buffer_mutex) {
g_mutex_free (src->buffer_mutex);
src->buffer_mutex = NULL;
}
if (src->buffer_cond) {
g_cond_free (src->buffer_cond);
src->buffer_cond = NULL;
}
if (src->buffer) {
gst_buffer_unref (src->buffer);
src->buffer = NULL;
}
g_cond_clear(&src->buffer_cond);
g_mutex_clear(&src->buffer_mutex);
CoUninitialize ();
G_OBJECT_CLASS (parent_class)->dispose (gobject);
}
static gboolean
gst_dshowvideosrc_probe_needs_probe (GstPropertyProbe * probe,
guint prop_id, const GParamSpec * pspec)
{
static gboolean init = FALSE;
gboolean ret = FALSE;
if (!init) {
ret = TRUE;
init = TRUE;
}
return ret;
}
static void
gst_dshowvideosrc_probe_probe_property (GstPropertyProbe * probe,
guint prop_id, const GParamSpec * pspec)
{
GObjectClass *klass = G_OBJECT_GET_CLASS (probe);
switch (prop_id) {
case PROP_DEVICE_NAME:
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (probe, prop_id, pspec);
break;
}
}
static const GList *
gst_dshowvideosrc_probe_get_properties (GstPropertyProbe * probe)
{
GObjectClass *klass = G_OBJECT_GET_CLASS (probe);
static GList *props = NULL;
if (!props) {
GParamSpec *pspec;
pspec = g_object_class_find_property (klass, "device-name");
props = g_list_append (props, pspec);
}
return props;
}
static GValueArray *
gst_dshowvideosrc_get_device_name_values (GstDshowVideoSrc * src)
{
GValueArray *array = g_value_array_new (0);
ICreateDevEnum *devices_enum = NULL;
IEnumMoniker *moniker_enum = NULL;
IMoniker *moniker = NULL;
HRESULT hres = S_FALSE;
ULONG fetched;
hres = CoCreateInstance (CLSID_SystemDeviceEnum, NULL, CLSCTX_INPROC_SERVER,
IID_ICreateDevEnum, (LPVOID *) & devices_enum);
if (hres != S_OK) {
GST_ERROR ("Can't create system device enumerator (error=0x%x)", hres);
array = NULL;
goto clean;
}
hres = devices_enum->CreateClassEnumerator (CLSID_VideoInputDeviceCategory,
&moniker_enum, 0);
if (hres != S_OK || !moniker_enum) {
GST_ERROR ("Can't get enumeration of video devices (error=0x%x)", hres);
array = NULL;
goto clean;
}
moniker_enum->Reset ();
while (hres = moniker_enum->Next (1, &moniker, &fetched), hres == S_OK) {
IPropertyBag *property_bag = NULL;
hres =
moniker->BindToStorage (NULL, NULL, IID_IPropertyBag,
(LPVOID *) & property_bag);
if (SUCCEEDED (hres) && property_bag) {
VARIANT varFriendlyName;
VariantInit (&varFriendlyName);
hres = property_bag->Read (L"FriendlyName", &varFriendlyName, NULL);
if (hres == S_OK && varFriendlyName.bstrVal) {
gchar *friendly_name =
g_utf16_to_utf8 ((const gunichar2 *) varFriendlyName.bstrVal,
wcslen (varFriendlyName.bstrVal), NULL, NULL, NULL);
GValue value = { 0 };
g_value_init (&value, G_TYPE_STRING);
g_value_set_string (&value, friendly_name);
g_value_array_append (array, &value);
g_value_unset (&value);
g_free (friendly_name);
SysFreeString (varFriendlyName.bstrVal);
}
property_bag->Release ();
}
moniker->Release ();
}
clean:
if (moniker_enum)
moniker_enum->Release ();
if (devices_enum)
devices_enum->Release ();
return array;
}
static GValueArray *
gst_dshowvideosrc_probe_get_values (GstPropertyProbe * probe,
guint prop_id, const GParamSpec * pspec)
{
GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (probe);
GValueArray *array = NULL;
switch (prop_id) {
case PROP_DEVICE_NAME:
array = gst_dshowvideosrc_get_device_name_values (src);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (probe, prop_id, pspec);
break;
}
return array;
G_OBJECT_CLASS (gst_dshowvideosrc_parent_class)->dispose (gobject);
}
static void
@ -475,7 +296,7 @@ gst_dshowvideosrc_get_property (GObject * object, guint prop_id,
}
static GstCaps *
gst_dshowvideosrc_get_caps (GstBaseSrc * basesrc)
gst_dshowvideosrc_get_caps (GstBaseSrc * basesrc, GstCaps * filter)
{
HRESULT hres = S_OK;
IBindCtx *lpbc = NULL;
@ -569,7 +390,15 @@ gst_dshowvideosrc_get_caps (GstBaseSrc * basesrc)
}
if (src->caps) {
return gst_caps_ref (src->caps);
GstCaps *caps;
if (filter) {
caps = gst_caps_intersect_full (filter, src->caps, GST_CAPS_INTERSECT_FIRST);
} else {
caps = gst_caps_ref (src->caps);
}
return caps;
}
return NULL;
@ -587,10 +416,15 @@ gst_dshowvideosrc_change_state (GstElement * element, GstStateChange transition)
case GST_STATE_CHANGE_READY_TO_PAUSED:
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
if (src->media_filter)
if (src->media_filter) {
/* Setting this to TRUE because set_caps may be invoked before
Run() returns. */
src->is_running = TRUE;
hres = src->media_filter->Run (0);
}
if (hres != S_OK) {
GST_ERROR ("Can't RUN the directshow capture graph (error=0x%x)", hres);
src->is_running = FALSE;
return GST_STATE_CHANGE_FAILURE;
}
break;
@ -601,6 +435,7 @@ gst_dshowvideosrc_change_state (GstElement * element, GstStateChange transition)
GST_ERROR ("Can't STOP the directshow capture graph (error=%d)", hres);
return GST_STATE_CHANGE_FAILURE;
}
src->is_running = FALSE;
break;
case GST_STATE_CHANGE_PAUSED_TO_READY:
break;
@ -608,7 +443,7 @@ gst_dshowvideosrc_change_state (GstElement * element, GstStateChange transition)
break;
}
return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
return GST_ELEMENT_CLASS(gst_dshowvideosrc_parent_class)->change_state(element, transition);
}
static gboolean
@ -690,7 +525,7 @@ gst_dshowvideosrc_start (GstBaseSrc * bsrc)
&MEDIATYPE_Video, src->video_cap_filter,
IID_IAMStreamConfig, (LPVOID *)&src->pVSC);
if (hres != S_OK) {
// this means we can't set frame rate (non-DV only)
/* this means we can't set frame rate (non-DV only) */
GST_ERROR ("Error %x: Cannot find VCapture:IAMStreamConfig", hres);
goto error;
}
@ -742,6 +577,17 @@ gst_dshowvideosrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);
GstStructure *s = gst_caps_get_structure (caps, 0);
/* Same remark as in gstdshowaudiosrc. */
gboolean was_running = src->is_running;
if (was_running) {
HRESULT hres = src->media_filter->Stop ();
if (hres != S_OK) {
GST_ERROR ("Can't STOP the directshow capture graph (error=0x%x)", hres);
return FALSE;
}
src->is_running = FALSE;
}
/* search the negociated caps in our caps list to get its index and the corresponding mediatype */
if (gst_caps_is_subset (caps, src->caps)) {
guint i = 0;
@ -824,20 +670,30 @@ gst_dshowvideosrc_set_caps (GstBaseSrc * bsrc, GstCaps * caps)
gst_structure_get_int (s, "width", &src->width);
gst_structure_get_int (s, "height", &src->height);
src->is_rgb = FALSE;
caps_string = gst_caps_to_string (caps);
if (caps_string) {
if (strstr (caps_string, "video/x-raw-rgb")) {
src->is_rgb = TRUE;
} else {
src->is_rgb = FALSE;
}
g_free (caps_string);
}
GstVideoInfo info;
gst_video_info_from_caps(&info, caps);
switch (GST_VIDEO_INFO_FORMAT(&info)) {
case GST_VIDEO_FORMAT_RGB:
case GST_VIDEO_FORMAT_BGR:
src->is_rgb = TRUE;
break;
default:
src->is_rgb = FALSE;
break;
}
}
}
}
if (was_running) {
HRESULT hres = src->media_filter->Run (0);
if (hres != S_OK) {
GST_ERROR ("Can't RUN the directshow capture graph (error=0x%x)", hres);
return FALSE;
}
src->is_running = TRUE;
}
return TRUE;
error:
@ -924,10 +780,10 @@ gst_dshowvideosrc_unlock (GstBaseSrc * bsrc)
{
GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (bsrc);
g_mutex_lock (src->buffer_mutex);
g_mutex_lock (&src->buffer_mutex);
src->stop_requested = TRUE;
g_cond_signal (src->buffer_cond);
g_mutex_unlock (src->buffer_mutex);
g_cond_signal (&src->buffer_cond);
g_mutex_unlock (&src->buffer_mutex);
return TRUE;
}
@ -947,12 +803,12 @@ gst_dshowvideosrc_create (GstPushSrc * psrc, GstBuffer ** buf)
{
GstDshowVideoSrc *src = GST_DSHOWVIDEOSRC (psrc);
g_mutex_lock (src->buffer_mutex);
g_mutex_lock (&src->buffer_mutex);
while (src->buffer == NULL && !src->stop_requested)
g_cond_wait (src->buffer_cond, src->buffer_mutex);
g_cond_wait (&src->buffer_cond, &src->buffer_mutex);
*buf = src->buffer;
src->buffer = NULL;
g_mutex_unlock (src->buffer_mutex);
g_mutex_unlock (&src->buffer_mutex);
if (src->stop_requested) {
if (*buf != NULL) {
@ -976,7 +832,6 @@ gst_dshowvideosrc_getcaps_from_streamcaps (GstDshowVideoSrc * src, IPin * pin)
HRESULT hres = S_OK;
int icount = 0;
int isize = 0;
VIDEO_STREAM_CONFIG_CAPS vscc;
int i = 0;
IAMStreamConfig *streamcaps = NULL;
@ -988,7 +843,7 @@ gst_dshowvideosrc_getcaps_from_streamcaps (GstDshowVideoSrc * src, IPin * pin)
streamcaps->GetNumberOfCapabilities (&icount, &isize);
if (isize != sizeof (vscc)) {
if (isize != sizeof (VIDEO_STREAM_CONFIG_CAPS)) {
streamcaps->Release ();
return NULL;
}
@ -1068,10 +923,18 @@ gst_dshowvideosrc_getcaps_from_enum_mediatypes (GstDshowVideoSrc * src, IPin * p
GstCaps *mediacaps = NULL;
GstVideoFormat video_format = gst_dshow_guid_to_gst_video_format (pin_mediatype->mediatype);
if (video_format != GST_VIDEO_FORMAT_UNKNOWN)
mediacaps = gst_video_format_new_caps (video_format,
pin_mediatype->defaultWidth, pin_mediatype->defaultHeight,
pin_mediatype->defaultFPS, 1, 1, 1);
if (video_format != GST_VIDEO_FORMAT_UNKNOWN) {
GstVideoInfo info;
gst_video_info_init(&info);
gst_video_info_set_format(&info, video_format, pin_mediatype->defaultWidth, pin_mediatype->defaultHeight);
info.fps_n = pin_mediatype->defaultFPS;
info.fps_d = 1;
info.par_n = 1;
info.par_d = 1;
info.interlace_mode = GST_VIDEO_INTERLACE_MODE_PROGRESSIVE; /* XXX is this correct ? */
mediacaps = gst_video_info_to_caps(&info);
}
if (mediacaps) {
src->pins_mediatypes =
@ -1102,6 +965,7 @@ gst_dshowvideosrc_push_buffer (guint8 * buffer, guint size, gpointer src_object,
IPin *pPin = NULL;
HRESULT hres = S_FALSE;
AM_MEDIA_TYPE *pMediaType = NULL;
GstMapInfo info;
if (!buffer || size == 0 || !src) {
return FALSE;
@ -1110,7 +974,7 @@ gst_dshowvideosrc_push_buffer (guint8 * buffer, guint size, gpointer src_object,
/* create a new buffer assign to it the clock time as timestamp */
buf = gst_buffer_new_and_alloc (size);
GST_BUFFER_SIZE (buf) = size;
gst_buffer_set_size(buf, size);
GstClock *clock = gst_element_get_clock (GST_ELEMENT (src));
GST_BUFFER_TIMESTAMP (buf) =
@ -1119,6 +983,12 @@ gst_dshowvideosrc_push_buffer (guint8 * buffer, guint size, gpointer src_object,
GST_BUFFER_DURATION (buf) = duration;
if (!gst_buffer_map(buf, &info, GST_MAP_WRITE)) {
gst_buffer_unref(buf);
GST_ERROR("Failed to map buffer");
return FALSE;
}
if (src->is_rgb) {
/* FOR RGB directshow decoder will return bottom-up BITMAP
* There is probably a way to get top-bottom video frames from
@ -1128,26 +998,25 @@ gst_dshowvideosrc_push_buffer (guint8 * buffer, guint size, gpointer src_object,
gint stride = size / src->height;
for (; line < src->height; line++) {
memcpy (GST_BUFFER_DATA (buf) + (line * stride),
memcpy (info.data + (line * stride),
buffer + (size - ((line + 1) * (stride))), stride);
}
} else {
memcpy (GST_BUFFER_DATA (buf), buffer, size);
memcpy (info.data, buffer, size);
}
gst_buffer_unmap(buf, &info);
GST_DEBUG ("push_buffer => pts %" GST_TIME_FORMAT "duration %"
GST_TIME_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
GST_TIME_ARGS (duration));
/* the negotiate() method already set caps on the source pad */
gst_buffer_set_caps (buf, GST_PAD_CAPS (GST_BASE_SRC_PAD (src)));
g_mutex_lock (src->buffer_mutex);
g_mutex_lock (&src->buffer_mutex);
if (src->buffer != NULL)
gst_buffer_unref (src->buffer);
src->buffer = buf;
g_cond_signal (src->buffer_cond);
g_mutex_unlock (src->buffer_mutex);
g_cond_signal (&src->buffer_cond);
g_mutex_unlock (&src->buffer_mutex);
return TRUE;
}

View file

@ -25,7 +25,6 @@
#include <glib.h>
#include <gst/gst.h>
#include <gst/base/gstpushsrc.h>
#include <gst/interfaces/propertyprobe.h>
#include "gstdshow.h"
#include "gstdshowfakesink.h"
@ -80,12 +79,13 @@ struct _GstDshowVideoSrc
IAMStreamConfig *pVSC; // for video cap
/* the last buffer from DirectShow */
GCond *buffer_cond;
GMutex *buffer_mutex;
GCond buffer_cond;
GMutex buffer_mutex;
GstBuffer *buffer;
gboolean stop_requested;
gboolean is_rgb;
gboolean is_running;
gint width;
gint height;
};