directshow: Remove white spaces

Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/1744>
Authored by Seungha Yang on 2022-02-19 00:12:55 +09:00; committed by GStreamer Marge Bot
parent 717009f8f5
commit d94d338bd5
16 changed files with 146 additions and 146 deletions

@ -99,8 +99,8 @@ static PreferredFilter preferred_mp3_filters[] = {
};
/* MPEG 1/2: use the MPEG Audio Decoder filter */
static const GUID CLSID_WINDOWS_MPEG_AUDIO_DECODER =
{0x4A2286E0, 0x7BEF, 0x11CE,
static const GUID CLSID_WINDOWS_MPEG_AUDIO_DECODER =
{0x4A2286E0, 0x7BEF, 0x11CE,
{0x9B, 0xD9, 0x00, 0x00, 0xE2, 0x02, 0x59, 0x9C}};
static PreferredFilter preferred_mpegaudio_filters[] = {
{&CLSID_WINDOWS_MPEG_AUDIO_DECODER},
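
For reference, the brace initializer above follows the standard Windows GUID layout (Data1/Data2/Data3/Data4). A minimal illustration, not part of this commit, with the struct redeclared only so the sketch stands alone (real code gets GUID from <guiddef.h>):

/* Illustration: how the registry-style string
 * {4A2286E0-7BEF-11CE-9BD9-0000E202599C} maps onto the initializer above. */
typedef struct {
  unsigned long  Data1;      /* 0x4A2286E0 */
  unsigned short Data2;      /* 0x7BEF */
  unsigned short Data3;      /* 0x11CE */
  unsigned char  Data4[8];   /* 9B D9 00 00 E2 02 59 9C */
} MY_GUID;                   /* stand-in for the Windows GUID type */

static const MY_GUID my_clsid_mpeg_audio_decoder =
  { 0x4A2286E0, 0x7BEF, 0x11CE,
    { 0x9B, 0xD9, 0x00, 0x00, 0xE2, 0x02, 0x59, 0x9C } };
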
@ -171,7 +171,7 @@ HRESULT AudioFakeSink::DoRenderSample(IMediaSample *pMediaSample)
pMediaSample->GetPointer(&pBuffer);
pMediaSample->GetTime(&lStart, &lStop);
if (!GST_CLOCK_TIME_IS_VALID (mDec->timestamp)) {
// Convert REFERENCE_TIME to GST_CLOCK_TIME
mDec->timestamp = (GstClockTime)lStart * 100;
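
The `* 100` above converts DirectShow's REFERENCE_TIME (100-nanosecond ticks) to GstClockTime (nanoseconds). A minimal stand-alone sketch of that conversion, not part of this patch:

#include <gst/gst.h>

/* REFERENCE_TIME counts 100 ns units; GstClockTime counts nanoseconds.
 * Hypothetical helper mirroring the conversion done in DoRenderSample(). */
static GstClockTime
reference_time_to_gst_time (gint64 reference_time)  /* REFERENCE_TIME is a LONGLONG */
{
  return (GstClockTime) reference_time * 100;
}
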
@ -205,7 +205,7 @@ HRESULT AudioFakeSink::DoRenderSample(IMediaSample *pMediaSample)
GST_WARNING_OBJECT (mDec, "cannot allocate a new GstBuffer");
goto done;
}
/* set buffer properties */
GST_BUFFER_TIMESTAMP (out_buf) = buf_start;
GST_BUFFER_DURATION (out_buf) = duration;
@ -233,8 +233,8 @@ HRESULT AudioFakeSink::DoRenderSample(IMediaSample *pMediaSample)
/* truncating */
if ((start_offset != 0) || (stop_offset != (size_t) size)) {
GstBuffer *subbuf = gst_buffer_copy_region (out_buf, GST_BUFFER_COPY_ALL,
GstBuffer *subbuf = gst_buffer_copy_region (out_buf, GST_BUFFER_COPY_ALL,
start_offset, stop_offset - start_offset);
if (subbuf) {
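
gst_buffer_copy_region() is what performs the truncation here: it returns a new buffer covering a byte range of the parent. A small stand-alone sketch (illustration only) of clipping a buffer to an offset/size window:

#include <gst/gst.h>

/* Sketch: keep bytes [start_offset, stop_offset) of a buffer. */
static GstBuffer *
clip_buffer (GstBuffer * buf, gsize start_offset, gsize stop_offset)
{
  return gst_buffer_copy_region (buf, GST_BUFFER_COPY_ALL,
      start_offset, stop_offset - start_offset);
}
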
@ -268,7 +268,7 @@ HRESULT AudioFakeSink::CheckMediaType(const CMediaType *pmt)
{
if(pmt != NULL)
{
/* The Vista MP3 decoder (and possibly others?) outputs an
/* The Vista MP3 decoder (and possibly others?) outputs an
* AM_MEDIA_TYPE with the wrong cbFormat. So, rather than using
* CMediaType.operator==, we implement a sufficient check ourselves.
* I think this is a bug in the MP3 decoder.
@ -279,7 +279,7 @@ HRESULT AudioFakeSink::CheckMediaType(const CMediaType *pmt)
{
/* Types are the same at the top-level. Now, we need to compare
* the format blocks.
* We special case WAVEFORMATEX to not check that
* We special case WAVEFORMATEX to not check that
* pmt->cbFormat == m_MediaType.cbFormat, though the actual format
* blocks must still be the same.
*/
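
The check described in the comment amounts to: compare the top-level GUIDs with IsEqualGUID(), then compare the format blocks byte for byte, ignoring a mismatching cbFormat for WAVEFORMATEX types. A rough sketch under those assumptions (not the actual element code):

#include <dshow.h>
#include <string.h>

static gboolean
media_types_roughly_equal (const AM_MEDIA_TYPE * a, const AM_MEDIA_TYPE * b)
{
  if (!IsEqualGUID (a->majortype, b->majortype) ||
      !IsEqualGUID (a->subtype, b->subtype) ||
      !IsEqualGUID (a->formattype, b->formattype))
    return FALSE;

  if (IsEqualGUID (a->formattype, FORMAT_WaveFormatEx)) {
    /* Ignore cbFormat mismatches (the Vista MP3 decoder quirk); compare
     * only the common WAVEFORMATEX prefix. */
    return memcmp (a->pbFormat, b->pbFormat, sizeof (WAVEFORMATEX)) == 0;
  }

  return a->cbFormat == b->cbFormat &&
      memcmp (a->pbFormat, b->pbFormat, a->cbFormat) == 0;
}
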
@ -343,9 +343,9 @@ gst_dshowaudiodec_base_init (gpointer klass)
description = g_strdup_printf ("DirectShow %s Decoder Wrapper",
tmp->element_longname);
gst_element_class_set_metadata(element_class, longname, "Codec/Decoder/Audio", description,
gst_element_class_set_metadata(element_class, longname, "Codec/Decoder/Audio", description,
"Sebastien Moutte <sebastien@moutte.net>");
g_free (longname);
g_free (description);
@ -471,7 +471,7 @@ gst_dshowaudiodec_init (GstDshowAudioDec * adec)
g_mutex_lock (&adec->com_init_lock);
/* create the COM initialization thread */
g_thread_new ("COM init thread", (GThreadFunc)gst_dshowaudiodec_com_thread,
g_thread_new ("COM init thread", (GThreadFunc)gst_dshowaudiodec_com_thread,
adec);
/* wait until the COM thread signals that COM has been initialized */
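
The pattern here keeps COM initialized on a dedicated thread for the element's lifetime: the helper thread calls CoInitializeEx(), signals the element waiting in init(), then parks until shutdown before calling CoUninitialize(). A simplified, hypothetical sketch of such a thread body (the struct and names are invented for illustration):

#include <glib.h>
#include <objbase.h>

typedef struct {
  GMutex lock;
  GCond cond;
  gboolean com_initialized;
  gboolean shutdown;
} ComThreadCtx;                        /* invented for this sketch */

static gpointer
com_init_thread_func (gpointer data)
{
  ComThreadCtx *ctx = (ComThreadCtx *) data;

  CoInitializeEx (NULL, COINIT_MULTITHREADED);

  g_mutex_lock (&ctx->lock);
  ctx->com_initialized = TRUE;
  g_cond_signal (&ctx->cond);          /* wake the element waiting in init() */
  while (!ctx->shutdown)               /* park until the element is disposed */
    g_cond_wait (&ctx->cond, &ctx->lock);
  g_mutex_unlock (&ctx->lock);

  CoUninitialize ();
  return NULL;
}
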
@ -738,7 +738,7 @@ dshowaudiodec_set_input_format (GstDshowAudioDec *adec, GstCaps *caps)
mpeg1_format->fwHeadMode = ACM_MPEG_STEREO;
else
mpeg1_format->fwHeadMode = ACM_MPEG_SINGLECHANNEL;
mpeg1_format->fwHeadModeExt = 0;
mpeg1_format->wHeadEmphasis = 0;
mpeg1_format->fwHeadFlags = 0;
@ -769,8 +769,8 @@ dshowaudiodec_set_input_format (GstDshowAudioDec *adec, GstCaps *caps)
mpeg1_format->wfx.nSamplesPerSec = adec->rate;
mpeg1_format->dwHeadBitrate = 128000; /* This doesn't seem to matter */
mpeg1_format->wfx.nAvgBytesPerSec = mpeg1_format->dwHeadBitrate / 8;
}
else
}
else
{
size = sizeof (WAVEFORMATEX) +
(adec->codec_data ? gst_buffer_get_size(adec->codec_data) : 0);
@ -778,7 +778,7 @@ dshowaudiodec_set_input_format (GstDshowAudioDec *adec, GstCaps *caps)
if (adec->layer == 3) {
MPEGLAYER3WAVEFORMAT *mp3format;
/* The WinXP mp3 decoder doesn't actually check the size of this structure,
/* The WinXP mp3 decoder doesn't actually check the size of this structure,
* but requires that this be allocated and filled out (or we get obscure
* random crashes)
*/
@ -802,7 +802,7 @@ dshowaudiodec_set_input_format (GstDshowAudioDec *adec, GstCaps *caps)
if (adec->codec_data) { /* Codec data is appended after our header */
gsize codec_size = gst_buffer_get_size(adec->codec_data);
gst_buffer_extract(adec->codec_data, 0, ((guchar *) format) + sizeof (WAVEFORMATEX),
gst_buffer_extract(adec->codec_data, 0, ((guchar *) format) + sizeof (WAVEFORMATEX),
codec_size);
format->cbSize = codec_size;
}
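
The layout being built here is a WAVEFORMATEX header immediately followed by the codec data, with cbSize recording how many extra bytes follow the fixed header. A stand-alone sketch of that packing, assuming the usual Windows WAVEFORMATEX definition:

#include <gst/gst.h>
#include <windows.h>
#include <mmreg.h>

/* Sketch: allocate WAVEFORMATEX + codec data and append the codec bytes. */
static WAVEFORMATEX *
pack_waveformatex_with_codec_data (GstBuffer * codec_data)
{
  gsize extra = codec_data ? gst_buffer_get_size (codec_data) : 0;
  WAVEFORMATEX *format =
      (WAVEFORMATEX *) g_malloc0 (sizeof (WAVEFORMATEX) + extra);

  if (codec_data) {
    gst_buffer_extract (codec_data, 0,
        ((guint8 *) format) + sizeof (WAVEFORMATEX), extra);
    format->cbSize = (WORD) extra;     /* bytes appended after the fixed header */
  }
  return format;
}
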
@ -853,7 +853,7 @@ dshowaudiodec_set_output_format (GstDshowAudioDec *adec)
mediatype->formattype = FORMAT_WaveFormatEx;
mediatype->cbFormat = sizeof (WAVEFORMATEX);
mediatype->pbFormat = (BYTE *)format;
return mediatype;
}
@ -919,7 +919,7 @@ gst_dshowaudiodec_setup_graph (GstDshowAudioDec * adec, GstCaps *caps)
adec->fakesink->SetMediaType(output_mediatype);
gst_audio_info_init(&audio_info);
gst_audio_info_set_format(&audio_info,
gst_audio_info_set_format(&audio_info,
gst_audio_format_build_integer(TRUE, G_BYTE_ORDER, adec->depth, adec->depth),
adec->rate, adec->channels, NULL);
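
gst_audio_format_build_integer() assembles the GstAudioFormat enum from sign/endianness/width/depth, and gst_audio_info_set_format() fills a GstAudioInfo that can then be turned into caps. A small usage sketch for signed 16-bit native-endian PCM (sample values chosen for illustration):

#include <gst/audio/audio.h>

static GstCaps *
make_pcm_caps (void)
{
  GstAudioInfo info;
  GstAudioFormat fmt =
      gst_audio_format_build_integer (TRUE, G_BYTE_ORDER, 16, 16);

  gst_audio_info_init (&info);
  gst_audio_info_set_format (&info, fmt, 48000, 2, NULL);
  return gst_audio_info_to_caps (&info);
}
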
@ -998,7 +998,7 @@ gst_dshowaudiodec_get_filter_settings (GstDshowAudioDec * adec)
AM_MEDIA_TYPE *mediatype = NULL;
enum_mediatypes->Reset();
while (!ret && enum_mediatypes->Next(1, &mediatype, &fetched) == S_OK)
while (!ret && enum_mediatypes->Next(1, &mediatype, &fetched) == S_OK)
{
if (IsEqualGUID (mediatype->subtype, MEDIASUBTYPE_PCM) &&
IsEqualGUID (mediatype->formattype, FORMAT_WaveFormatEx))
@ -1178,7 +1178,7 @@ dshow_adec_register (GstPlugin * plugin)
outsubtype,
audio_dec_codecs[i].preferred_filters);
if (filter)
if (filter)
{
GST_DEBUG ("Registering %s", audio_dec_codecs[i].element_name);

@ -79,7 +79,7 @@ struct _GstDshowAudioDec
/* element pads */
GstPad *sinkpad;
GstPad *srcpad;
GstFlowReturn last_ret;
/* filters interfaces*/
@ -87,8 +87,8 @@ struct _GstDshowAudioDec
AudioFakeSink *fakesink;
IBaseFilterPtr decfilter;
/* graph manager interfaces */
/* graph manager interfaces */
IMediaFilterPtr mediafilter;
IFilterGraphPtr filtergraph;
@ -103,7 +103,7 @@ struct _GstDshowAudioDec
gint rate;
gint layer;
GstBuffer *codec_data;
/* current segment */
GstSegment * segment;
@ -126,23 +126,23 @@ struct _GstDshowAudioDecClass
gboolean dshow_adec_register (GstPlugin * plugin);
const GUID CLSID_AudioFakeSink =
{ 0x3867f537, 0x3e3d, 0x44da,
const GUID CLSID_AudioFakeSink =
{ 0x3867f537, 0x3e3d, 0x44da,
{ 0xbb, 0xf2, 0x02, 0x48, 0x7b, 0xb0, 0xbc, 0xc4} };
class AudioFakeSink : public CBaseRenderer
{
public:
AudioFakeSink(GstDshowAudioDec *dec) :
AudioFakeSink(GstDshowAudioDec *dec) :
m_hres(S_OK),
CBaseRenderer(CLSID_AudioFakeSink, _T("AudioFakeSink"), NULL, &m_hres),
mDec(dec)
mDec(dec)
{};
virtual ~AudioFakeSink() {};
HRESULT DoRenderSample(IMediaSample *pMediaSample);
HRESULT CheckMediaType(const CMediaType *pmt);
HRESULT SetMediaType (AM_MEDIA_TYPE *pmt)
HRESULT SetMediaType (AM_MEDIA_TYPE *pmt)
{
m_MediaType.Set (*pmt);
return S_OK;

@ -25,7 +25,7 @@
GST_DEBUG_CATEGORY_EXTERN (dshowdec_debug);
#define GST_CAT_DEFAULT dshowdec_debug
const GUID CLSID_DecodeFakeSrc =
const GUID CLSID_DecodeFakeSrc =
{ 0x039527db, 0x6b48, 0x45a7, { 0xab, 0xcf, 0x21, 0xab, 0xc5, 0x44, 0xbb, 0xb6} };
static CCritSec g_pCriticSec;
@ -40,14 +40,14 @@ FakeOutputPin::~FakeOutputPin()
{
}
HRESULT FakeOutputPin::GetMediaType(int iPosition,
HRESULT FakeOutputPin::GetMediaType(int iPosition,
CMediaType *pMediaType)
{
if(iPosition == 0) {
*pMediaType = m_MediaType;
return S_OK;
}
return VFW_S_NO_MORE_ITEMS;
}
#if 0
@ -79,7 +79,7 @@ HRESULT FakeOutputPin::CheckMediaType(const CMediaType *pmt)
return S_FALSE;
}
HRESULT FakeOutputPin::DecideBufferSize (IMemAllocator *pAlloc,
HRESULT FakeOutputPin::DecideBufferSize (IMemAllocator *pAlloc,
ALLOCATOR_PROPERTIES *ppropInputRequest)
{
ALLOCATOR_PROPERTIES properties;
@ -98,12 +98,12 @@ STDMETHODIMP FakeOutputPin::SetMediaType (AM_MEDIA_TYPE *pmt)
return S_OK;
}
STDMETHODIMP FakeOutputPin::PushBuffer(byte *buffer,
__int64 start, __int64 stop,
STDMETHODIMP FakeOutputPin::PushBuffer(byte *buffer,
__int64 start, __int64 stop,
unsigned int size, bool discont)
{
IMediaSample *pSample = NULL;
if (start != -1) {
start /= 100;
stop /= 100;
@ -120,10 +120,10 @@ STDMETHODIMP FakeOutputPin::PushBuffer(byte *buffer,
pSample->SetActualDataLength(size);
}
pSample->SetDiscontinuity(discont);
pSample->SetSyncPoint(TRUE);
pSample->SetPreroll(FALSE);
if (start != -1)
pSample->SetTime(&start, &stop);
@ -176,4 +176,4 @@ CBasePin *FakeSrc::GetPin(int n)
FakeOutputPin *FakeSrc::GetOutputPin()
{
return m_pOutputPin;
}
}

@ -36,7 +36,7 @@ public:
/* methods */
FakeOutputPin (CBaseFilter *pFilter, CCritSec *sec);
~FakeOutputPin ();
virtual HRESULT CheckMediaType(const CMediaType *pmt);
HRESULT GetMediaType(int iPosition, CMediaType *pMediaType);
virtual HRESULT DecideBufferSize (IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *ppropInputRequest);
@ -62,4 +62,4 @@ public:
FakeOutputPin *GetOutputPin();
};
#endif // _DSHOWDECWRAPPER_FAKESRC_H_
#endif // _DSHOWDECWRAPPER_FAKESRC_H_

@ -27,12 +27,12 @@
_COM_SMARTPTR_TYPEDEF(IDMOWrapperFilter, __uuidof(IDMOWrapperFilter));
IPin *
IPin *
gst_dshow_get_pin_from_filter (IBaseFilter *filter, PIN_DIRECTION pindir)
{
IEnumPinsPtr enumpins;
IPinPtr pin;
HRESULT hres;
HRESULT hres;
hres = filter->EnumPins (&enumpins);
if (FAILED(hres)) {
@ -52,9 +52,9 @@ gst_dshow_get_pin_from_filter (IBaseFilter *filter, PIN_DIRECTION pindir)
return NULL;
}
IBaseFilter *
gst_dshow_find_filter(CLSID input_majortype, CLSID input_subtype,
CLSID output_majortype, CLSID output_subtype,
IBaseFilter *
gst_dshow_find_filter(CLSID input_majortype, CLSID input_subtype,
CLSID output_majortype, CLSID output_subtype,
PreferredFilter *preferred_filters)
{
HRESULT hres;
@ -69,20 +69,20 @@ gst_dshow_find_filter(CLSID input_majortype, CLSID input_subtype,
/* First, see if any of our preferred filters is available.
* If not, we fall back to the highest-ranked installed filter */
if (preferred_filters) {
while (preferred_filters->filter_guid)
while (preferred_filters->filter_guid)
{
/* If the filter is a DMO, we need to do this a bit differently */
if (preferred_filters->dmo_category)
if (preferred_filters->dmo_category)
{
IDMOWrapperFilterPtr wrapper;
hres = CoCreateInstance (CLSID_DMOWrapperFilter, NULL,
hres = CoCreateInstance (CLSID_DMOWrapperFilter, NULL,
CLSCTX_INPROC,
IID_IBaseFilter, (void **)&filter);
if (SUCCEEDED(hres)) {
hres = filter->QueryInterface (&wrapper);
if (SUCCEEDED(hres)) {
hres = wrapper->Init (*preferred_filters->filter_guid,
hres = wrapper->Init (*preferred_filters->filter_guid,
*preferred_filters->dmo_category);
if (SUCCEEDED(hres))
return filter;
@ -90,9 +90,9 @@ gst_dshow_find_filter(CLSID input_majortype, CLSID input_subtype,
filter->Release();
}
}
else
else
{
hres = CoCreateInstance (*preferred_filters->filter_guid,
hres = CoCreateInstance (*preferred_filters->filter_guid,
NULL, CLSCTX_INPROC,
IID_IBaseFilter, (void **)&filter);
if (SUCCEEDED(hres))
@ -104,28 +104,28 @@ gst_dshow_find_filter(CLSID input_majortype, CLSID input_subtype,
}
}
hres = CoCreateInstance(CLSID_FilterMapper2, NULL, CLSCTX_INPROC,
hres = CoCreateInstance(CLSID_FilterMapper2, NULL, CLSCTX_INPROC,
IID_IFilterMapper2, (void **) &mapper);
if (FAILED(hres))
return NULL;
inTypes[0] = input_majortype;
inTypes[1] = input_subtype;
outTypes[0] = output_majortype;
outTypes[1] = output_subtype;
hres = mapper->EnumMatchingFilters (&enum_moniker, 0,
FALSE, MERIT_DO_NOT_USE+1,
TRUE, 1, inTypes, NULL, NULL, FALSE,
hres = mapper->EnumMatchingFilters (&enum_moniker, 0,
FALSE, MERIT_DO_NOT_USE+1,
TRUE, 1, inTypes, NULL, NULL, FALSE,
TRUE, 1, outTypes, NULL, NULL);
if (FAILED(hres))
return NULL;
enum_moniker->Reset ();
while(enum_moniker->Next (1, &moniker, &fetched) == S_OK)
{
hres = moniker->BindToObject(NULL, NULL,
hres = moniker->BindToObject(NULL, NULL,
IID_IBaseFilter, (void**)&filter);
if(SUCCEEDED(hres)) {
return filter;

@ -54,9 +54,9 @@ typedef struct {
IPin *gst_dshow_get_pin_from_filter (IBaseFilter *filter, PIN_DIRECTION pindir);
/* find and return a filter according to the input and output types */
IBaseFilter *
gst_dshow_find_filter(CLSID input_majortype, CLSID input_subtype,
CLSID output_majortype, CLSID output_subtype,
IBaseFilter *
gst_dshow_find_filter(CLSID input_majortype, CLSID input_subtype,
CLSID output_majortype, CLSID output_subtype,
PreferredFilter *preferred_filters);
#define DSHOW_CODEC_QDATA g_quark_from_string ("dshow-codec")

@ -114,8 +114,8 @@ static PreferredFilter preferred_wmv_filters[] = {
{&CLSID_CWMVDecMediaObject, &DMOCATEGORY_VIDEO_DECODER}, {0}
};
static const GUID CLSID_AVI_DECOMPRESSOR =
{0xCF49D4E0, 0x1115, 0x11CE,
static const GUID CLSID_AVI_DECOMPRESSOR =
{0xCF49D4E0, 0x1115, 0x11CE,
{0xB0, 0x3A, 0x00, 0x20, 0xAF, 0x0B, 0xA7, 0x70}};
static PreferredFilter preferred_cinepack_filters[] = {
{&CLSID_AVI_DECOMPRESSOR}, {0}
@ -132,8 +132,8 @@ static PreferredFilter preferred_mp4s_filters[] = {
static PreferredFilter preferred_mp43_filters[] = {
{&CLSID_CMpeg43DecMediaObject, &DMOCATEGORY_VIDEO_DECODER}, {0}};
static const GUID CLSID_MPEG_VIDEO_DECODER =
{0xFEB50740, 0x7BEF, 0x11CE,
static const GUID CLSID_MPEG_VIDEO_DECODER =
{0xFEB50740, 0x7BEF, 0x11CE,
{0x9B, 0xD9, 0x00, 0x00, 0xE2, 0x02, 0x59, 0x9C}};
static PreferredFilter preferred_mpeg1_filters[] = {
{&CLSID_MPEG_VIDEO_DECODER}, {0}
@ -240,7 +240,7 @@ static const VideoCodecEntry video_dec_codecs[] = {
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
GST_VIDEO_CAPS_MAKE("YUY2"),
preferred_mpeg1_filters},
{"dshowvdec_mpeg4", "MPEG-4 Video",
GST_MAKE_FOURCC ('M', 'P', 'G', '4'),
GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MPG4,
@ -328,7 +328,7 @@ HRESULT VideoFakeSink::DoRenderSample(IMediaSample *pMediaSample)
gst_buffer_map(buf, &map, GST_MAP_WRITE);
if (strstr (klass->entry->srccaps, "rgb")) {
/* FOR RGB directshow decoder will return bottom-up BITMAP
/* FOR RGB directshow decoder will return bottom-up BITMAP
* There is probably a way to get top-bottom video frames from
* the decoder...
*/
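
A bottom-up DIB stores its rows last-line-first, so copying it into a top-down GStreamer frame means walking the source rows in reverse. A generic sketch of that vertical flip (stride and height assumed known from the video info):

#include <string.h>

/* Sketch: copy a bottom-up source image into a top-down destination. */
static void
copy_bottom_up (const unsigned char *src, unsigned char *dst,
    int stride, int height)
{
  for (int row = 0; row < height; row++) {
    const unsigned char *src_row = src + (height - 1 - row) * stride;
    memcpy (dst + row * stride, src_row, stride);
  }
}
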
@ -390,7 +390,7 @@ gst_dshowvideodec_base_init (gpointer klass)
description = g_strdup_printf ("DirectShow %s Decoder Wrapper",
tmp->element_longname);
gst_element_class_set_metadata(element_class, longname, "Codec/Decoder/Video", description,
gst_element_class_set_metadata(element_class, longname, "Codec/Decoder/Video", description,
"Sebastien Moutte <sebastien@moutte.net>");
g_free (longname);
@ -787,7 +787,7 @@ gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps)
"framerate", GST_TYPE_FRACTION, vdec->fps_n, vdec->fps_d, NULL);
}
gst_caps_set_simple (caps_out,
gst_caps_set_simple (caps_out,
"pixel-aspect-ratio", GST_TYPE_FRACTION, vdec->par_n, vdec->par_d, NULL);
if (!gst_pad_set_caps (vdec->srcpad, caps_out)) {
@ -990,7 +990,7 @@ gst_dshowvideodec_src_getcaps (GstPad * pad)
enum_mediatypes->Reset();
while (hres =
enum_mediatypes->Next(1, &mediatype, &fetched),
hres == S_OK)
hres == S_OK)
{
VIDEOINFOHEADER *video_info;
GstCaps *mediacaps = NULL;
@ -1084,7 +1084,7 @@ gst_dshowvideodec_get_filter_output_format (GstDshowVideoDec * vdec,
enum_mediatypes->Reset();
while (hres =
enum_mediatypes->Next(1, &mediatype, &fetched),
hres == S_OK)
hres == S_OK)
{
if (IsEqualGUID (mediatype->subtype, subtype) &&
IsEqualGUID (mediatype->formattype, FORMAT_VideoInfo))

@ -86,7 +86,7 @@ struct _GstDshowVideoDec
/* caps of our src pad */
GstCaps *srccaps;
GstFlowReturn last_ret;
/* list of dshow mediatypes corresponding to the caps list */
@ -130,23 +130,23 @@ struct _GstDshowVideoDecClass
gboolean dshow_vdec_register (GstPlugin * plugin);
const GUID CLSID_VideoFakeSink =
const GUID CLSID_VideoFakeSink =
{ 0xff8f0c8e, 0x64f9, 0x4471,
{ 0x96, 0x0e, 0xd2, 0xd3, 0x18, 0x87, 0x78, 0x9a} };
class VideoFakeSink : public CBaseRenderer
{
public:
VideoFakeSink(GstDshowVideoDec *dec) :
VideoFakeSink(GstDshowVideoDec *dec) :
m_hres(S_OK),
CBaseRenderer(CLSID_VideoFakeSink, _T("VideoFakeSink"), NULL, &m_hres),
mDec(dec)
mDec(dec)
{};
virtual ~VideoFakeSink() {};
HRESULT DoRenderSample(IMediaSample *pMediaSample);
HRESULT CheckMediaType(const CMediaType *pmt);
HRESULT SetMediaType (AM_MEDIA_TYPE *pmt)
HRESULT SetMediaType (AM_MEDIA_TYPE *pmt)
{
m_MediaType.Set (*pmt);
return S_OK;

@ -23,7 +23,7 @@ GST_DEBUG_CATEGORY_EXTERN (dshowvideosink_debug);
#define GST_CAT_DEFAULT dshowvideosink_debug
// {A0A5CF33-BD0C-4158-9A56-3011DEE3AF6B}
const GUID CLSID_VideoFakeSrc =
const GUID CLSID_VideoFakeSrc =
{ 0xa0a5cf33, 0xbd0c, 0x4158, { 0x9a, 0x56, 0x30, 0x11, 0xde, 0xe3, 0xaf, 0x6b } };
/* output pin*/
@ -43,7 +43,7 @@ HRESULT VideoFakeSrcPin::GetMediaType(int iPosition, CMediaType *pMediaType)
*pMediaType = m_MediaType;
return S_OK;
}
return VFW_S_NO_MORE_ITEMS;
}
@ -73,7 +73,7 @@ HRESULT VideoFakeSrcPin::CheckMediaType(const CMediaType *pmt)
(newvh->bmiHeader.biWidth >= curvh->bmiHeader.biWidth))
{
GST_DEBUG ("CheckMediaType has same media type, width %d (%d image)", newvh->bmiHeader.biWidth, curvh->bmiHeader.biWidth);
/* OK, compatible! */
return S_OK;
}
@ -81,7 +81,7 @@ HRESULT VideoFakeSrcPin::CheckMediaType(const CMediaType *pmt)
GST_WARNING ("Looked similar, but aren't...");
}
}
}
GST_WARNING ("Different media types, FAILING!");
return S_FALSE;
@ -90,8 +90,8 @@ HRESULT VideoFakeSrcPin::CheckMediaType(const CMediaType *pmt)
HRESULT VideoFakeSrcPin::DecideBufferSize (IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *ppropInputRequest)
{
ALLOCATOR_PROPERTIES properties;
GST_DEBUG ("Required allocator properties: %d, %d, %d, %d",
ppropInputRequest->cbAlign, ppropInputRequest->cbBuffer,
GST_DEBUG ("Required allocator properties: %d, %d, %d, %d",
ppropInputRequest->cbAlign, ppropInputRequest->cbBuffer,
ppropInputRequest->cbPrefix, ppropInputRequest->cBuffers);
ppropInputRequest->cbBuffer = m_SampleSize;
@ -99,8 +99,8 @@ HRESULT VideoFakeSrcPin::DecideBufferSize (IMemAllocator *pAlloc, ALLOCATOR_PROP
/* First set the buffer descriptions we're interested in */
HRESULT hres = pAlloc->SetProperties(ppropInputRequest, &properties);
GST_DEBUG ("Actual Allocator properties: %d, %d, %d, %d",
properties.cbAlign, properties.cbBuffer,
GST_DEBUG ("Actual Allocator properties: %d, %d, %d, %d",
properties.cbAlign, properties.cbBuffer,
properties.cbPrefix, properties.cBuffers);
return S_OK;
@ -147,7 +147,7 @@ STDMETHODIMP VideoFakeSrcPin::CopyToDestinationBuffer (byte *srcbuf, byte *dstbu
* most of the time */
if ((fourcc == GST_MAKE_FOURCC ('Y', 'U', 'Y', '2')) ||
(fourcc == GST_MAKE_FOURCC ('Y', 'U', 'Y', 'V')) ||
(fourcc == GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y')))
(fourcc == GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y')))
{
/* Nice and simple */
int srcstride = GST_ROUND_UP_4 (vh->rcSource.right * 2);
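
For packed YUY2/UYVY data the copy reduces to a row-by-row memcpy between buffers whose strides differ (DirectShow rounds its stride up to a 4-byte boundary, as GST_ROUND_UP_4 does above). A small generic sketch:

#include <string.h>

/* Sketch: copy `height` rows of `row_bytes` bytes between different strides. */
static void
copy_rows (const unsigned char *src, int src_stride,
    unsigned char *dst, int dst_stride, int row_bytes, int height)
{
  for (int row = 0; row < height; row++)
    memcpy (dst + row * dst_stride, src + row * src_stride, row_bytes);
}
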
@ -271,7 +271,7 @@ GstFlowReturn VideoFakeSrcPin::PushBuffer(GstBuffer *buffer)
Sleep(100);
}
if (FAILED (hres))
if (FAILED (hres))
{
StopUsingOutputPin();
GST_WARNING ("Could not get sample for delivery to sink: %x", hres);
@ -285,9 +285,9 @@ GstFlowReturn VideoFakeSrcPin::PushBuffer(GstBuffer *buffer)
if(sample_buffer)
{
/* Copy to the destination stride.
* This is not just a simple memcpy because of the different strides.
* TODO: optimise for the same-stride case and avoid the copy entirely.
/* Copy to the destination stride.
* This is not just a simple memcpy because of the different strides.
* TODO: optimise for the same-stride case and avoid the copy entirely.
*/
CopyToDestinationBuffer (data, sample_buffer);
}
@ -295,10 +295,10 @@ GstFlowReturn VideoFakeSrcPin::PushBuffer(GstBuffer *buffer)
pSample->SetDiscontinuity(FALSE); /* Decoded frame; unimportant */
pSample->SetSyncPoint(TRUE); /* Decoded frame; always a valid syncpoint */
pSample->SetPreroll(FALSE); /* For non-displayed frames.
pSample->SetPreroll(FALSE); /* For non-displayed frames.
Not used in GStreamer */
/* Disable synchronising on this sample. We instead let GStreamer handle
/* Disable synchronising on this sample. We instead let GStreamer handle
* this at a higher level, inside BaseSink. */
pSample->SetTime(NULL, NULL);

@ -48,7 +48,7 @@ static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
"video/x-raw,"
"width = (int) [ 1, MAX ],"
"height = (int) [ 1, MAX ],"
"framerate = (fraction) [ 0, MAX ],"
"framerate = (fraction) [ 0, MAX ],"
"format = {(string)YUY2, (string)UYVY, (string)YV12 }")
);
@ -94,9 +94,9 @@ static void gst_dshowvideosink_set_window_for_renderer (GstDshowVideoSink *sink)
/* COM initialization/uninitialization thread */
static void gst_dshowvideosink_com_thread (GstDshowVideoSink * sink);
/* TODO: event, preroll, buffer_alloc?
* buffer_alloc won't generally be all that useful because the renderers require a
* different stride to GStreamer's implicit values.
/* TODO: event, preroll, buffer_alloc?
* buffer_alloc won't generally be all that useful because the renderers require a
* different stride to GStreamer's implicit values.
*/
static void
@ -233,7 +233,7 @@ gst_dshowvideosink_class_init (GstDshowVideoSinkClass * klass)
(GParamFlags)G_PARAM_READWRITE));
g_object_class_install_property (G_OBJECT_CLASS (klass),
PROP_RENDERER, g_param_spec_string ("renderer", "Renderer",
PROP_RENDERER, g_param_spec_string ("renderer", "Renderer",
"Force usage of specific DirectShow renderer (EVR, VMR9 or VMR7)",
NULL, (GParamFlags)G_PARAM_READWRITE));
}
@ -436,7 +436,7 @@ dump_all_pin_media_types (IBaseFilter *filter)
{
IEnumPins *enumpins = NULL;
IPin *pin = NULL;
HRESULT hres;
HRESULT hres;
hres = filter->EnumPins (&enumpins);
if (FAILED(hres)) {
@ -468,13 +468,13 @@ dump_all_pin_media_types (IBaseFilter *filter)
enumpins->Release();
}
gboolean
gboolean
gst_dshow_get_pin_from_filter (IBaseFilter *filter, PIN_DIRECTION pindir, IPin **pin)
{
gboolean ret = FALSE;
IEnumPins *enumpins = NULL;
IPin *pintmp = NULL;
HRESULT hres;
HRESULT hres;
*pin = NULL;
hres = filter->EnumPins (&enumpins);
@ -498,7 +498,7 @@ gst_dshow_get_pin_from_filter (IBaseFilter *filter, PIN_DIRECTION pindir, IPin *
return ret;
}
static void
static void
gst_dshowvideosink_handle_event (GstDshowVideoSink *sink)
{
if (sink->filter_media_event) {
@ -545,7 +545,7 @@ LRESULT APIENTRY WndProcHook (HWND hWnd, UINT message, WPARAM wParam, LPARAM lPa
}
/* WndProc for our default window, if the application didn't supply one */
LRESULT APIENTRY
LRESULT APIENTRY
WndProc (HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
{
GstDshowVideoSink *sink = (GstDshowVideoSink *)GetWindowLongPtr (hWnd, GWLP_USERDATA);
@ -618,7 +618,7 @@ gst_dshowvideosink_window_thread (GstDshowVideoSink * sink)
exstyle = 0;
}
else {
/* By default, create a normal top-level window, the size
/* By default, create a normal top-level window, the size
* of the video.
*/
RECT rect;
@ -759,7 +759,7 @@ static void gst_dshowvideosink_set_window_for_renderer (GstDshowVideoSink *sink)
}
sink->is_new_window = FALSE;
/* This tells the renderer where the window is located, needed to
/* This tells the renderer where the window is located, needed to
* start drawing in the right place. */
sink->renderersupport->MoveWindow();
GST_INFO_OBJECT (sink, "Set renderer window to %x", sink->window_id);
@ -813,7 +813,7 @@ gst_dshowvideosink_connect_graph (GstDshowVideoSink *sink)
srcpin = sink->fakesrc->GetOutputPin();
gst_dshow_get_pin_from_filter (sink->renderersupport->GetFilter(), PINDIR_INPUT,
gst_dshow_get_pin_from_filter (sink->renderersupport->GetFilter(), PINDIR_INPUT,
&sinkpin);
if (!sinkpin) {
GST_WARNING_OBJECT (sink, "Cannot get input pin from Renderer");
@ -861,7 +861,7 @@ gst_dshowvideosink_start_graph (GstDshowVideoSink *sink)
ret = GST_STATE_CHANGE_FAILURE;
goto done;
}
GST_DEBUG_OBJECT (sink, "DirectShow graph is now running");
ret = GST_STATE_CHANGE_SUCCESS;
@ -931,7 +931,7 @@ gst_dshowvideosink_stop_graph (GstDshowVideoSink *sink)
sink->filter_graph->Disconnect(sink->fakesrc->GetOutputPin());
gst_dshow_get_pin_from_filter (sink->renderersupport->GetFilter(), PINDIR_INPUT,
gst_dshow_get_pin_from_filter (sink->renderersupport->GetFilter(), PINDIR_INPUT,
&sinkpin);
sink->filter_graph->Disconnect(sinkpin);
sinkpin->Release();
@ -1016,7 +1016,7 @@ private:
HWND video_window;
public:
EVRSupport (GstDshowVideoSink *sink) :
EVRSupport (GstDshowVideoSink *sink) :
sink(sink),
filter(NULL),
service(NULL),
@ -1066,7 +1066,7 @@ public:
IID_IBaseFilter, (LPVOID *) &filter);
GST_DEBUG_OBJECT (sink, "cocreateinstance returned %d", hres);
if (FAILED (hres)) {
GST_ERROR_OBJECT (sink,
GST_ERROR_OBJECT (sink,
"Can't create an instance of renderer (error=%x)",
hres);
return FALSE;
@ -1079,7 +1079,7 @@ public:
return FALSE;
}
hres = service->GetService (MR_VIDEO_RENDER_SERVICE,
hres = service->GetService (MR_VIDEO_RENDER_SERVICE,
IID_IMFVideoDisplayControl, (void **) &control);
if (FAILED (hres)) {
GST_WARNING_OBJECT (sink, "EVR control service missing: %x", hres);
@ -1152,8 +1152,8 @@ private:
HWND video_window;
public:
VMR9Support (GstDshowVideoSink *sink) :
sink(sink),
VMR9Support (GstDshowVideoSink *sink) :
sink(sink),
filter(NULL),
control(NULL),
config(NULL)
@ -1183,7 +1183,7 @@ public:
hres = CoCreateInstance (CLSID_VideoMixingRenderer9, NULL, CLSCTX_INPROC,
IID_IBaseFilter, (LPVOID *) &filter);
if (FAILED (hres)) {
GST_ERROR_OBJECT (sink,
GST_ERROR_OBJECT (sink,
"Can't create an instance of renderer (error=%x)",
hres);
return FALSE;
@ -1205,7 +1205,7 @@ public:
GST_DEBUG_OBJECT (sink, "Set VMR9 (%p) to windowless mode!", filter);
}
/* We can't QI to this until _after_ we've been set to windowless mode.
/* We can't QI to this until _after_ we've been set to windowless mode.
* Apparently this is against the rules in COM, but that's how it is... */
hres = filter->QueryInterface (
IID_IVMRWindowlessControl9, (void **) &control);
@ -1281,8 +1281,8 @@ private:
HWND video_window;
public:
VMR7Support (GstDshowVideoSink *sink) :
sink(sink),
VMR7Support (GstDshowVideoSink *sink) :
sink(sink),
filter(NULL),
control(NULL),
config(NULL)
@ -1312,7 +1312,7 @@ public:
hres = CoCreateInstance (CLSID_VideoMixingRenderer, NULL, CLSCTX_INPROC,
IID_IBaseFilter, (LPVOID *) &filter);
if (FAILED (hres)) {
GST_ERROR_OBJECT (sink,
GST_ERROR_OBJECT (sink,
"Can't create an instance of renderer (error=%x)",
hres);
return FALSE;
@ -1398,8 +1398,8 @@ public:
}
};
static gboolean
gst_dshowvideosink_create_renderer (GstDshowVideoSink *sink)
static gboolean
gst_dshowvideosink_create_renderer (GstDshowVideoSink *sink)
{
GST_DEBUG_OBJECT (sink, "Trying to create renderer '%s'", "EVR");
@ -1458,7 +1458,7 @@ gst_dshowvideosink_build_filtergraph (GstDshowVideoSink *sink)
{
HRESULT hres;
/* Build our DirectShow FilterGraph, looking like:
/* Build our DirectShow FilterGraph, looking like:
*
* [ fakesrc ] -> [ sink filter ]
*
@ -1470,7 +1470,7 @@ gst_dshowvideosink_build_filtergraph (GstDshowVideoSink *sink)
hres = CoCreateInstance (CLSID_FilterGraph, NULL, CLSCTX_INPROC,
IID_IFilterGraph, (LPVOID *) & sink->filter_graph);
if (FAILED (hres)) {
GST_ERROR_OBJECT (sink,
GST_ERROR_OBJECT (sink,
"Can't create an instance of the dshow graph manager (error=%x)", hres);
goto error;
}
@ -1503,7 +1503,7 @@ gst_dshowvideosink_build_filtergraph (GstDshowVideoSink *sink)
sink->filter_graph->AddFilter (sink->renderersupport->GetFilter(),
L"renderer");
if (FAILED (hres)) {
GST_ERROR_OBJECT (sink,
GST_ERROR_OBJECT (sink,
"Can't add renderer to the graph (error=%x)", hres);
goto error;
}
@ -1547,7 +1547,7 @@ gst_dshowvideosink_set_caps (GstBaseSink * bsink, GstCaps * caps)
if (sink->connected) {
IPin *sinkpin;
sink->filter_graph->Disconnect(sink->fakesrc->GetOutputPin());
gst_dshow_get_pin_from_filter (sink->renderersupport->GetFilter(), PINDIR_INPUT,
gst_dshow_get_pin_from_filter (sink->renderersupport->GetFilter(), PINDIR_INPUT,
&sinkpin);
sink->filter_graph->Disconnect(sinkpin);
sinkpin->Release();
@ -1560,7 +1560,7 @@ gst_dshowvideosink_set_caps (GstBaseSink * bsink, GstCaps * caps)
GST_DEBUG_OBJECT (sink, "Configuring output pin media type");
/* Now we have an AM_MEDIA_TYPE describing what we're going to send.
* We set this on our DirectShow fakesrc's output pin.
* We set this on our DirectShow fakesrc's output pin.
*/
sink->fakesrc->GetOutputPin()->SetMediaType (&sink->mediatype);
GST_DEBUG_OBJECT (sink, "Configured output pin media type");
@ -1590,8 +1590,8 @@ gst_dshowvideosink_stop (GstBaseSink * bsink)
GST_WARNING_OBJECT (sink, "Cannot destroy filter graph; it doesn't exist");
return TRUE;
}
/* If we created a new window, send the close message and wait until
/* If we created a new window, send the close message and wait until
* it's closed in the window thread */
if (sink->is_new_window) {
SendMessage (sink->window_id, WM_CLOSE, NULL, NULL);
@ -1714,9 +1714,9 @@ video_media_type_to_caps (AM_MEDIA_TYPE *mediatype)
{
VIDEOINFOHEADER *vh = (VIDEOINFOHEADER *)mediatype->pbFormat;
/* TODO: Set PAR here. Based on difference between source and target RECTs?
/* TODO: Set PAR here. Based on difference between source and target RECTs?
* Do we want framerate? Based on AvgTimePerFrame? */
gst_caps_set_simple (caps,
gst_caps_set_simple (caps,
"width", G_TYPE_INT, vh->bmiHeader.biWidth,
"height", G_TYPE_INT, vh->bmiHeader.biHeight,
NULL);
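
Regarding the TODO above: AvgTimePerFrame in VIDEOINFOHEADER is a frame duration in 100-ns units, so a framerate fraction could be derived as 10,000,000 / AvgTimePerFrame. A hedged sketch of how that might be turned into a GStreamer fraction (not what the element currently does):

#include <gst/gst.h>

static void
avg_time_per_frame_to_fps (gint64 avg_time_per_frame, gint * fps_n, gint * fps_d)
{
  if (avg_time_per_frame > 0) {
    /* 10,000,000 ticks of 100 ns make one second */
    gst_util_double_to_fraction (10000000.0 / (gdouble) avg_time_per_frame,
        fps_n, fps_d);
  } else {
    *fps_n = 0;                        /* unknown or variable frame rate */
    *fps_d = 1;
  }
}
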
@ -1841,7 +1841,7 @@ gst_caps_to_directshow_media_type (GstDshowVideoSink * sink, GstCaps *caps,
if (sink->keep_aspect_ratio) {
par_n = GST_VIDEO_INFO_PAR_N (&info);
par_d = GST_VIDEO_INFO_PAR_D (&info);
/* To handle non-square pixels, we set the target rectangle to a
/* To handle non-square pixels, we set the target rectangle to a
* different size than the source rectangle.
* There might be a better way, but this seems to work. */
vi->rcTarget.bottom = height;
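
Keeping the aspect ratio with non-square pixels here means scaling the target rectangle by the pixel-aspect-ratio while the source rectangle stays at the coded size. A small sketch of that arithmetic (illustration only):

#include <gst/gst.h>

/* Sketch: display width for a coded width and pixel-aspect-ratio par_n/par_d,
 * e.g. 720 coded pixels with 16:15 PAR -> 768 display pixels. */
static int
display_width_for (int coded_width, int par_n, int par_d)
{
  return (int) gst_util_uint64_scale_int (coded_width, par_n, par_d);
}
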

@ -92,7 +92,7 @@ struct _GstDshowVideoSink
/* The video window set through GstXOverlay */
HWND window_id;
/* If we created the window, it needs to be closed in ::stop() */
gboolean is_new_window;

@ -69,11 +69,11 @@ void gst_dshow_free_mediatype (AM_MEDIA_TYPE * pmt);
GstCapturePinMediaType *gst_dshow_new_pin_mediatype (IPin * pin);
/* create a new capture media type from enum mediatype */
GstCapturePinMediaType * gst_dshow_new_pin_mediatype_from_enum_mediatypes (IPin * pin,
GstCapturePinMediaType * gst_dshow_new_pin_mediatype_from_enum_mediatypes (IPin * pin,
IEnumMediaTypes *enum_mediatypes);
/* create a new capture media type from streamcaps */
GstCapturePinMediaType *gst_dshow_new_pin_mediatype_from_streamcaps (IPin * pin,
GstCapturePinMediaType *gst_dshow_new_pin_mediatype_from_streamcaps (IPin * pin,
gint id, IAMStreamConfig * streamcaps);
/* free the memory of all mediatypes of the input list if pin mediatype */
@ -92,7 +92,7 @@ gboolean gst_dshow_find_filter (CLSID input_majortype, CLSID input_subtype,
CLSID output_majortype, CLSID output_subtype,
gchar * prefered_filter_name, IBaseFilter ** filter);
/* get the dshow device path from device friendly name.
/* get the dshow device path from device friendly name.
If friendly name is not set, it will return the first available device */
gchar *gst_dshow_getdevice_from_devicename (const GUID * device_category,
gchar ** device_name, gint * device_index);

@ -1,7 +1,7 @@
/* GStreamer
* Copyright (C) 2007 Sebastien Moutte <sebastien@moutte.net>
*
* gstdshowaudiosrc.h:
* gstdshowaudiosrc.h:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public

@ -1,7 +1,7 @@
/* GStreamer
* Copyright (C) 2007 Sebastien Moutte <sebastien@moutte.net>
*
* gstdshowsrcwrapper.c:
* gstdshowsrcwrapper.c:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public

@ -504,7 +504,7 @@ gst_dshowvideosrc_start (GstBaseSrc * bsrc)
}
/*
The filter graph now is created via the IGraphBuilder Interface
The filter graph now is created via the IGraphBuilder Interface
Code added to build upstream filters, needed for USB Analog TV Tuners / DVD Maker, based on AMCap code.
by Fabrice Costa <fabricio.costa@moldeointeractive.com.ar>
*/
@ -520,18 +520,18 @@ gst_dshowvideosrc_start (GstBaseSrc * bsrc)
/*graph builder is derived from IFilterGraph so we can assign it to the old src->filter_graph*/
src->filter_graph = (IFilterGraph*) src->graph_builder;
}
/*adding capture graph builder to correctly create upstream filters, Analog TV, TV Tuner */
hres = CoCreateInstance(CLSID_CaptureGraphBuilder2, NULL,
CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2,
CLSCTX_INPROC_SERVER, IID_ICaptureGraphBuilder2,
(LPVOID *) & src->capture_builder);
if ( hres != S_OK || !src->capture_builder ) {
if ( hres != S_OK || !src->capture_builder ) {
GST_ERROR
("Can't create an instance of the dshow capture graph builder manager (error=0x%x)",
hres);
goto error;
} else {
} else {
src->capture_builder->SetFiltergraph(src->graph_builder);
}
@ -557,16 +557,16 @@ gst_dshowvideosrc_start (GstBaseSrc * bsrc)
/* Finding interfaces really creates the upstream filters */
hres = src->capture_builder->FindInterface(&PIN_CATEGORY_CAPTURE,
&MEDIATYPE_Interleaved, src->video_cap_filter,
&MEDIATYPE_Interleaved, src->video_cap_filter,
IID_IAMVideoCompression, (LPVOID *)&src->pVC);
if(hres != S_OK)
{
hres = src->capture_builder->FindInterface(&PIN_CATEGORY_CAPTURE,
&MEDIATYPE_Video, src->video_cap_filter,
&MEDIATYPE_Video, src->video_cap_filter,
IID_IAMVideoCompression, (LPVOID *)&src->pVC);
}
hres = src->capture_builder->FindInterface(&PIN_CATEGORY_CAPTURE,
&MEDIATYPE_Interleaved,
src->video_cap_filter, IID_IAMStreamConfig, (LPVOID *)&src->pVSC);
@ -944,7 +944,7 @@ gst_dshowvideosrc_getcaps_from_streamcaps (IPin * pin, GList ** pins_mediatypes)
if (pin_mediatype) {
GstCaps *mediacaps = NULL;
GstVideoFormat video_format =
GstVideoFormat video_format =
gst_dshow_guid_to_gst_video_format (pin_mediatype->mediatype);
if (video_format != GST_VIDEO_FORMAT_UNKNOWN) {

@ -1,7 +1,7 @@
/* GStreamer
* Copyright (C) 2007 Sebastien Moutte <sebastien@moutte.net>
*
* gstdshowvideosrc.h:
* gstdshowvideosrc.h:
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public