mediafoundation: Fix various GCC warnings
Part-of: <https://gitlab.freedesktop.org/gstreamer/gstreamer/-/merge_requests/8406>
parent 0268ee9c52
commit 66b92465cd
13 changed files with 57 additions and 72 deletions
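Most of the changes below silence GCC's -Wsign-compare: Media Foundation reports values through unsigned types (UINT32, ULONG, DWORD), while GStreamer accessors such as GST_AUDIO_INFO_CHANNELS() return a signed gint. The fix is either to cast the known-non-negative signed side to the unsigned type, or to give the loop counter the same signedness as its bound (gint when the bound is a gint like GST_VIDEO_INFO_COMP_HEIGHT(), guint when it is a guint like GST_VIDEO_INFO_N_PLANES()). A minimal sketch of the comparison pattern, using a hypothetical helper rather than the plugin's own code:

#include <glib.h>

/* Hypothetical helper illustrating the cast pattern applied throughout this
 * commit: `value` arrives from Media Foundation as an unsigned 32-bit
 * integer, while the GStreamer accessor returns a signed gint.  Comparing
 * them directly triggers -Wsign-compare; casting the signed side (known to
 * be non-negative) to the unsigned type silences the warning without
 * changing behaviour. */
static gboolean
value_matches_channels (guint32 value, gint channels)
{
  if (value != (guint32) channels)
    return FALSE;

  return TRUE;
}

int
main (void)
{
  g_print ("match: %d\n", value_matches_channels (2, 2));
  return 0;
}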
@@ -252,14 +252,14 @@ gst_mf_aac_enc_get_output_type (GstMFAudioEncoder * encoder,
     if (!gst_mf_result (hr))
       continue;
 
-    if (value != GST_AUDIO_INFO_CHANNELS (info))
+    if (value != (UINT32) GST_AUDIO_INFO_CHANNELS (info))
       continue;
 
     hr = type->GetUINT32 (MF_MT_AUDIO_SAMPLES_PER_SECOND, &value);
     if (!gst_mf_result (hr))
       continue;
 
-    if (value != GST_AUDIO_INFO_RATE (info))
+    if (value != (UINT32) GST_AUDIO_INFO_RATE (info))
       continue;
 
     hr = type->GetUINT32 (MF_MT_AUDIO_AVG_BYTES_PER_SECOND, &value);

@@ -278,7 +278,8 @@ gst_mf_aac_enc_get_output_type (GstMFAudioEncoder * encoder,
     return FALSE;
   }
 
-  GST_DEBUG_OBJECT (self, "have %d candidate output", filtered_types.size ());
+  GST_DEBUG_OBJECT (self, "have %d candidate output",
+      (guint) filtered_types.size ());
 
   /* 2. Find the best matching bitrate */
   bitrate = self->bitrate;

@@ -400,14 +401,14 @@ gst_mf_aac_enc_get_input_type (GstMFAudioEncoder * encoder, GstAudioInfo * info,
     if (!gst_mf_result (hr))
       continue;
 
-    if (value != GST_AUDIO_INFO_CHANNELS (info))
+    if (value != (UINT32) GST_AUDIO_INFO_CHANNELS (info))
       continue;
 
     hr = type->GetUINT32 (MF_MT_AUDIO_SAMPLES_PER_SECOND, &value);
     if (!gst_mf_result (hr))
       continue;
 
-    if (value != GST_AUDIO_INFO_RATE (info))
+    if (value != (UINT32) GST_AUDIO_INFO_RATE (info))
       continue;
 
     filtered_types.push_back (type);

@@ -421,7 +422,7 @@ gst_mf_aac_enc_get_input_type (GstMFAudioEncoder * encoder, GstAudioInfo * info,
   }
 
   GST_DEBUG_OBJECT (self, "Total %d input types are available",
-      filtered_types.size ());
+      (guint) filtered_types.size ());
 
   /* Just select the first one */
   target_input = *filtered_types.begin ();

@@ -461,7 +462,7 @@ gst_mf_aac_enc_set_src_caps (GstMFAudioEncoder * encoder, GstAudioInfo * info)
   hr = output_type->GetBlobSize (MF_MT_USER_DATA, &blob_size);
   if (!gst_mf_result (hr) || blob_size <= config_data_offset) {
     GST_ERROR_OBJECT (self,
-        "Couldn't get size of MF_MT_USER_DATA, size %d, %d", blob_size);
+        "Couldn't get size of MF_MT_USER_DATA, size %d", blob_size);
     return FALSE;
   }
 
@@ -1311,9 +1311,9 @@ gst_mf_capture_dshow_on_buffer (double sample_time, BYTE * data, LONG len,
     gst_clear_caps (&caps);
   }
 
-  if (len < GST_VIDEO_INFO_SIZE (&self->info)) {
+  if (len < (LONG) GST_VIDEO_INFO_SIZE (&self->info)) {
     GST_ERROR_OBJECT (self, "Too small size %d < %d",
-        (gint) len, GST_VIDEO_INFO_SIZE (&self->info));
+        (gint) len, (guint) GST_VIDEO_INFO_SIZE (&self->info));
     goto error;
   }
 

@@ -1346,7 +1346,7 @@ gst_mf_capture_dshow_on_buffer (double sample_time, BYTE * data, LONG len,
     src = data + src_stride * (height - 1);
     dst = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&frame, 0);
 
-    for (guint i = 0; i < height; i++) {
+    for (gint i = 0; i < height; i++) {
       memcpy (dst, src, width);
       src -= src_stride;
       dst += dst_stride;

@@ -1365,7 +1365,7 @@ gst_mf_capture_dshow_on_buffer (double sample_time, BYTE * data, LONG len,
       width = GST_VIDEO_INFO_COMP_WIDTH (&self->info, i)
           * GST_VIDEO_INFO_COMP_PSTRIDE (&self->info, i);
 
-      for (guint j = 0; j < GST_VIDEO_INFO_COMP_HEIGHT (&self->info, i); j++) {
+      for (gint j = 0; j < GST_VIDEO_INFO_COMP_HEIGHT (&self->info, i); j++) {
         memcpy (dst, src, width);
         src += src_stride;
         dst += dst_stride;
@@ -252,14 +252,14 @@ gst_mf_mp3_enc_get_output_type (GstMFAudioEncoder * encoder,
     if (!gst_mf_result (hr))
       continue;
 
-    if (value != GST_AUDIO_INFO_CHANNELS (info))
+    if (value != (UINT32) GST_AUDIO_INFO_CHANNELS (info))
      continue;
 
     hr = type->GetUINT32 (MF_MT_AUDIO_SAMPLES_PER_SECOND, &value);
     if (!gst_mf_result (hr))
       continue;
 
-    if (value != GST_AUDIO_INFO_RATE (info))
+    if (value != (UINT32) GST_AUDIO_INFO_RATE (info))
       continue;
 
     hr = type->GetUINT32 (MF_MT_AUDIO_AVG_BYTES_PER_SECOND, &value);

@@ -278,7 +278,8 @@ gst_mf_mp3_enc_get_output_type (GstMFAudioEncoder * encoder,
     return FALSE;
   }
 
-  GST_DEBUG_OBJECT (self, "have %d candidate output", filtered_types.size ());
+  GST_DEBUG_OBJECT (self, "have %d candidate output",
+      (guint) filtered_types.size ());
 
   /* 2. Find the best matching bitrate */
   bitrate = self->bitrate;

@@ -384,14 +385,14 @@ gst_mf_mp3_enc_get_input_type (GstMFAudioEncoder * encoder, GstAudioInfo * info,
     if (!gst_mf_result (hr))
       continue;
 
-    if (value != GST_AUDIO_INFO_CHANNELS (info))
+    if (value != (UINT32) GST_AUDIO_INFO_CHANNELS (info))
       continue;
 
     hr = type->GetUINT32 (MF_MT_AUDIO_SAMPLES_PER_SECOND, &value);
     if (!gst_mf_result (hr))
       continue;
 
-    if (value != GST_AUDIO_INFO_RATE (info))
+    if (value != (UINT32) GST_AUDIO_INFO_RATE (info))
       continue;
 
     filtered_types.push_back (type);

@@ -405,7 +406,7 @@ gst_mf_mp3_enc_get_input_type (GstMFAudioEncoder * encoder, GstAudioInfo * info,
   }
 
   GST_DEBUG_OBJECT (self, "Total %d input types are available",
-      filtered_types.size ());
+      (guint) filtered_types.size ());
 
   /* Just select the first one */
   target_input = *filtered_types.begin ();
@@ -332,6 +332,7 @@ gst_mf_source_object_get_running_time (GstMFSourceObject * object)
   return timestamp;
 }
 
+#if GST_MF_WINAPI_APP && GST_MF_WINAPI_DESKTOP
 static gboolean
 gst_mf_source_object_use_winrt_api (void)
 {

@@ -365,6 +366,7 @@ gst_mf_source_object_use_winrt_api (void)
 
   return ret;
 }
+#endif
 
 GstMFSourceObject *
 gst_mf_source_object_new (GstMFSourceType type, gint device_index,
@@ -443,17 +443,6 @@ gst_mf_source_reader_start (GstMFSourceObject * object)
   return TRUE;
 }
 
-static GstMFSourceReaderSample *
-gst_mf_source_reader_sample_new (IMFSample * sample, GstClockTime timestamp)
-{
-  GstMFSourceReaderSample *reader_sample = g_new0 (GstMFSourceReaderSample, 1);
-
-  reader_sample->sample = sample;
-  reader_sample->clock_time = timestamp;
-
-  return reader_sample;
-}
-
 static gboolean
 gst_mf_source_reader_stop (GstMFSourceObject * object)
 {

@@ -483,7 +472,7 @@ gst_mf_source_reader_read_sample (GstMFSourceReader * self)
 
   if ((stream_flags & MF_SOURCE_READERF_ERROR) == MF_SOURCE_READERF_ERROR) {
     GST_ERROR_OBJECT (self, "Error while reading sample, sample flags 0x%x",
-        stream_flags);
+        (guint) stream_flags);
     return GST_FLOW_ERROR;
   }
 

@@ -572,7 +561,6 @@ gst_mf_source_reader_fill (GstMFSourceObject * object, GstBuffer * buffer)
   ComPtr < IMFMediaBuffer > media_buffer;
   GstVideoFrame frame;
   BYTE *data;
-  gint i, j;
   HRESULT hr;
   GstClockTime timestamp = GST_CLOCK_TIME_NONE;
   GstClockTime duration = GST_CLOCK_TIME_NONE;

@@ -614,13 +602,13 @@ gst_mf_source_reader_fill (GstMFSourceObject * object, GstBuffer * buffer)
     src = data + src_stride * (height - 1);
     dst = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&frame, 0);
 
-    for (j = 0; j < height; j++) {
+    for (gint j = 0; j < height; j++) {
       memcpy (dst, src, width);
       src -= src_stride;
       dst += dst_stride;
     }
   } else {
-    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->info); i++) {
+    for (guint i = 0; i < GST_VIDEO_INFO_N_PLANES (&self->info); i++) {
       guint8 *src, *dst;
       gint src_stride, dst_stride;
       gint width;

@@ -633,7 +621,7 @@ gst_mf_source_reader_fill (GstMFSourceObject * object, GstBuffer * buffer)
       width = GST_VIDEO_INFO_COMP_WIDTH (&self->info, i)
           * GST_VIDEO_INFO_COMP_PSTRIDE (&self->info, i);
 
-      for (j = 0; j < GST_VIDEO_INFO_COMP_HEIGHT (&self->info, i); j++) {
+      for (gint j = 0; j < GST_VIDEO_INFO_COMP_HEIGHT (&self->info, i); j++) {
         memcpy (dst, src, width);
         src += src_stride;
         dst += dst_stride;

@@ -826,7 +814,7 @@ gst_mf_source_reader_thread_func (GstMFSourceReader * self)
   } else if (object->device_name) {
     match = g_ascii_strcasecmp (activate->name, object->device_name) == 0;
   } else if (object->device_index >= 0) {
-    match = activate->index == object->device_index;
+    match = activate->index == (guint) object->device_index;
   } else {
     /* pick the first entry */
     match = TRUE;
@@ -104,7 +104,7 @@ public:
   STDMETHODIMP_ (ULONG)
   AddRef (void)
   {
-    GST_TRACE ("%p, %d", this, ref_count_);
+    GST_TRACE ("%p, %u", this, (guint) ref_count_);
     return InterlockedIncrement (&ref_count_);
   }
 

@@ -113,7 +113,7 @@ public:
   {
     ULONG ref_count;
 
-    GST_TRACE ("%p, %d", this, ref_count_);
+    GST_TRACE ("%p, %u", this, (guint) ref_count_);
     ref_count = InterlockedDecrement (&ref_count_);
 
     if (ref_count == 0) {

@@ -186,7 +186,7 @@ private:
     g_weak_ref_init (&client_, nullptr);
   }
 
-  ~GstMFTransformAsyncCallback ()
+  virtual ~GstMFTransformAsyncCallback ()
   {
     g_weak_ref_clear (&client_);
   }

@@ -641,7 +641,7 @@ gst_mf_transform_process_output (GstMFTransform * self)
       GST_DEBUG_OBJECT (self, "Ignore error on flushing");
       ret = GST_FLOW_FLUSHING;
     } else {
-      GST_ERROR_OBJECT (self, "ProcessOutput error, hr 0x%x", hr);
+      GST_ERROR_OBJECT (self, "ProcessOutput error, hr 0x%x", (guint) hr);
       ret = GST_FLOW_ERROR;
     }
   }
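The added `virtual` on ~GstMFTransformAsyncCallback above (and on ~IGstMFVideoBuffer in the header further down) most likely addresses GCC's -Wnon-virtual-dtor / -Wdelete-non-virtual-dtor diagnostics for classes that expose virtual member functions. A minimal illustration with made-up class names, not the plugin's actual types:

#include <cstdio>

/* Made-up classes showing why a class with virtual member functions wants a
 * virtual destructor: destroying a derived object through a base pointer
 * with a non-virtual destructor is undefined behaviour and makes GCC emit
 * -Wdelete-non-virtual-dtor. */
struct Callback
{
  virtual void Invoke () = 0;
  virtual ~Callback () { }              /* virtual: derived dtor runs too */
};

struct AsyncCallback : public Callback
{
  void Invoke () override { std::printf ("invoked\n"); }
  ~AsyncCallback () override { std::printf ("cleaned up\n"); }
};

int
main (void)
{
  Callback *cb = new AsyncCallback ();
  cb->Invoke ();
  delete cb;                            /* both destructors run, no warning */
  return 0;
}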
@@ -94,8 +94,7 @@ static struct
 GstVideoFormat
 gst_mf_video_subtype_to_video_format (const GUID * subtype)
 {
-  gint i;
-  for (i = 0; i < G_N_ELEMENTS (raw_video_format_map); i++) {
+  for (guint i = 0; i < G_N_ELEMENTS (raw_video_format_map); i++) {
     if (IsEqualGUID (raw_video_format_map[i].mf_format, *subtype))
       return raw_video_format_map[i].format;
   }

@@ -106,8 +105,7 @@ gst_mf_video_subtype_to_video_format (const GUID * subtype)
 const GUID *
 gst_mf_video_subtype_from_video_format (GstVideoFormat format)
 {
-  gint i;
-  for (i = 0; i < G_N_ELEMENTS (raw_video_format_map); i++) {
+  for (guint i = 0; i < G_N_ELEMENTS (raw_video_format_map); i++) {
     if (raw_video_format_map[i].format == format)
       return &raw_video_format_map[i].mf_format;
   }

@@ -120,7 +118,7 @@ gst_mf_media_type_to_video_caps (IMFMediaType * media_type)
 {
   HRESULT hr;
   GstCaps *caps = nullptr;
-  gint i;
+  guint i;
   guint32 width = 0;
   guint32 height = 0;
   guint32 num, den;

@@ -156,7 +154,7 @@ gst_mf_media_type_to_video_caps (IMFMediaType * media_type)
 
   if (!caps) {
     GST_WARNING ("Unknown format %" GST_FOURCC_FORMAT,
-        GST_FOURCC_ARGS (subtype.Data1));
+        GST_FOURCC_ARGS ((int) subtype.Data1));
     return nullptr;
   }
 

@@ -568,7 +566,7 @@ gst_mf_media_type_release (IMFMediaType * media_type)
 gboolean
 gst_mf_update_video_info_with_stride (GstVideoInfo * info, gint stride)
 {
-  guint width, height, cr_h;
+  guint height, cr_h;
 
   g_return_val_if_fail (info != nullptr, FALSE);
   g_return_val_if_fail (stride > 0, FALSE);

@@ -578,7 +576,6 @@ gst_mf_update_video_info_with_stride (GstVideoInfo * info, gint stride)
   if (GST_VIDEO_INFO_FORMAT (info) == GST_VIDEO_FORMAT_ENCODED)
     return TRUE;
 
-  width = GST_VIDEO_INFO_WIDTH (info);
   height = GST_VIDEO_INFO_HEIGHT (info);
 
   /* copied from video-info */

@@ -925,9 +922,9 @@ gst_mf_attribute_value_to_string (const GUID & guid, const PROPVARIANT & var)
 
   switch (var.vt) {
     case VT_UI4:
-      return g_strdup_printf ("%d", var.ulVal);
+      return g_strdup_printf ("%d", (guint) var.ulVal);
     case VT_UI8:
-      return g_strdup_printf ("%" G_GUINT64_FORMAT, var.uhVal);
+      return g_strdup_printf ("%" G_GUINT64_FORMAT, var.uhVal.QuadPart);
     case VT_R8:
       return g_strdup_printf ("%f", var.dblVal);
     case VT_CLSID:
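Several hunks above and below change printf-style arguments rather than comparisons: GCC checks GST_TRACE/GST_ERROR/g_strdup_printf format strings under -Wformat, so ULONG/DWORD/HRESULT values are cast to guint for %d/%u/%x, and the VT_UI8 PROPVARIANT is printed through its 64-bit .QuadPart member so that G_GUINT64_FORMAT receives a plain integer. A small standalone sketch of the same rule (the variables here are illustrative, not taken from the plugin):

#include <glib.h>

int
main (void)
{
  /* Stand-ins for Windows typedefs such as ULONG or DWORD, whose exact
   * definition varies between toolchains; casting to a fixed GLib type
   * keeps the format string and the argument in agreement under -Wformat. */
  unsigned long ref_count = 3;
  guint64 big_value = G_GUINT64_CONSTANT (1) << 40;

  g_print ("ref count %u\n", (guint) ref_count);
  g_print ("big value %" G_GUINT64_FORMAT "\n", big_value);

  return 0;
}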
@@ -192,7 +192,7 @@ IGstMFVideoBuffer::GetUserData (gpointer * user_data)
 STDMETHODIMP_ (ULONG)
 IGstMFVideoBuffer::AddRef (void)
 {
-  GST_TRACE ("%p, %d", this, ref_count_);
+  GST_TRACE ("%p, %d", this, (guint) ref_count_);
   return InterlockedIncrement (&ref_count_);
 }
 

@@ -201,7 +201,7 @@ IGstMFVideoBuffer::Release (void)
 {
   ULONG ref_count;
 
-  GST_TRACE ("%p, %d", this, ref_count_);
+  GST_TRACE ("%p, %d", this, (guint) ref_count_);
   ref_count = InterlockedDecrement (&ref_count_);
 
   if (ref_count == 0) {

@@ -315,7 +315,7 @@ IGstMFVideoBuffer::GetCurrentLength (DWORD * length)
 
   *length = current_len_;
 
-  GST_TRACE ("%p, %d", this, current_len_);
+  GST_TRACE ("%p, %d", this, (guint) current_len_);
 
   return S_OK;
 }

@@ -325,11 +325,11 @@ IGstMFVideoBuffer::SetCurrentLength (DWORD length)
 {
   std::lock_guard<std::mutex> lock(lock_);
 
-  GST_TRACE ("%p %d", this, length);
+  GST_TRACE ("%p %d", this, (guint) length);
 
   if (length > contiguous_len_) {
     GST_LOG ("%p, Requested length %d is larger than contiguous_len %d",
-        this, length, contiguous_len_);
+        this, (guint) length, (guint) contiguous_len_);
     return E_INVALIDARG;
   }
 

@@ -465,7 +465,7 @@ IGstMFVideoBuffer::ContiguousCopyToUnlocked (BYTE * dest_buffer,
     return S_OK;
   }
 
-  for (gint i = 0; i < GST_VIDEO_INFO_N_PLANES (info_); i++) {
+  for (guint i = 0; i < GST_VIDEO_INFO_N_PLANES (info_); i++) {
     BYTE *src, *dst;
     guint src_stride, dst_stride;
     guint width, height;

@@ -480,7 +480,7 @@ IGstMFVideoBuffer::ContiguousCopyToUnlocked (BYTE * dest_buffer,
         * GST_VIDEO_INFO_COMP_PSTRIDE (info_, i);
     height = GST_VIDEO_INFO_COMP_HEIGHT (info_, i);
 
-    for (gint j = 0; j < height; j++) {
+    for (guint j = 0; j < height; j++) {
       memcpy (dst, src, width);
       src += src_stride;
       dst += dst_stride;

@@ -510,7 +510,7 @@ IGstMFVideoBuffer::ContiguousCopyFromUnlocked (const BYTE * src_buffer,
     return S_OK;
   }
 
-  for (gint i = 0; i < GST_VIDEO_INFO_N_PLANES (info_); i++) {
+  for (guint i = 0; i < GST_VIDEO_INFO_N_PLANES (info_); i++) {
     BYTE *dst;
     guint src_stride, dst_stride;
     guint width, height;

@@ -526,7 +526,7 @@ IGstMFVideoBuffer::ContiguousCopyFromUnlocked (const BYTE * src_buffer,
         * GST_VIDEO_INFO_COMP_PSTRIDE (info_, i);
     height = GST_VIDEO_INFO_COMP_HEIGHT (info_, i);
 
-    for (gint j = 0; j < height; j++) {
+    for (guint j = 0; j < height; j++) {
       gint to_copy = 0;
 
       if (offset + width < src_buffer_length)
@@ -28,7 +28,7 @@
 #include <mutex>
 
 #ifndef __cplusplus
-#error IGstMFVideoBuffer interface doesn't provide C API
+#error IGstMFVideoBuffer interface does not provide C API
 #endif
 
 /* Define UUID for QueryInterface() */

@@ -92,7 +92,7 @@ public:
 
 private:
   IGstMFVideoBuffer (void);
-  ~IGstMFVideoBuffer (void);
+  virtual ~IGstMFVideoBuffer (void);
 
   HRESULT Initialize (GstVideoInfo * info);
   HRESULT InitializeWrapped (GstVideoInfo * info,
@@ -360,7 +360,7 @@ gst_mf_video_encoder_init_mft (GstMFVideoEncoder * self)
   hr = MFSetAttributeRatio (out_type.Get (), MF_MT_FRAME_RATE, fps_n, fps_d);
   if (!gst_mf_result (hr)) {
     GST_ERROR_OBJECT (self,
-        "Couldn't set framerate %d/%d, hr: 0x%x", (guint) hr);
+        "Couldn't set framerate %d/%d, hr: 0x%x", fps_n, fps_d, (guint) hr);
     return FALSE;
   }
 

@@ -848,7 +848,7 @@ gst_mf_video_encoder_finish_sample (GstMFVideoEncoder * self,
   /* This would be the first frame */
   if (self->mf_pts_offset == 0) {
     LONGLONG mf_pts_offset = -1;
-    if (sample_timestamp > mf_dts) {
+    if (sample_timestamp > (LONGLONG) mf_dts) {
       mf_pts_offset = sample_timestamp - mf_dts;
       GST_DEBUG_OBJECT (self, "Calculates PTS offset using \"PTS - DTS\": %"
           G_GINT64_FORMAT, mf_pts_offset);

@@ -914,7 +914,7 @@ gst_mf_video_encoder_finish_sample (GstMFVideoEncoder * self,
 
   if (keyframe) {
     GST_DEBUG_OBJECT (self, "Keyframe pts %" GST_TIME_FORMAT,
-        GST_BUFFER_PTS (buffer));
+        GST_TIME_ARGS (GST_BUFFER_PTS (buffer)));
     GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
   } else {
     GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);

@@ -957,7 +957,6 @@ gst_mf_video_encoder_create_input_sample (GstMFVideoEncoder * self,
   ComPtr < IMFMediaBuffer > media_buffer;
   ComPtr < IGstMFVideoBuffer > video_buffer;
   GstVideoInfo *info = &self->input_state->info;
-  gint i, j;
   GstVideoFrame *vframe = nullptr;
   BYTE *data = nullptr;
   gboolean need_copy = self->need_align;

@@ -1000,7 +999,7 @@ gst_mf_video_encoder_create_input_sample (GstMFVideoEncoder * self,
     if (!gst_mf_result (hr))
       goto error;
 
-    for (i = 0; i < GST_VIDEO_INFO_N_PLANES (info); i++) {
+    for (guint i = 0; i < GST_VIDEO_INFO_N_PLANES (info); i++) {
       guint8 *src, *dst;
       gint src_stride, dst_stride;
       gint width;

@@ -1014,7 +1013,7 @@ gst_mf_video_encoder_create_input_sample (GstMFVideoEncoder * self,
       width = GST_VIDEO_INFO_COMP_WIDTH (info, i)
           * GST_VIDEO_INFO_COMP_PSTRIDE (info, i);
 
-      for (j = 0; j < GST_VIDEO_INFO_COMP_HEIGHT (info, i); j++) {
+      for (gint j = 0; j < GST_VIDEO_INFO_COMP_HEIGHT (info, i); j++) {
         memcpy (dst, src, width);
         src += src_stride;
         dst += dst_stride;

@@ -1480,7 +1479,7 @@ gst_mf_video_encoder_enum_internal (GstMFTransform * transform, GUID & subtype,
   HRESULT hr;
   MFT_REGISTER_TYPE_INFO *infos;
   UINT32 info_size;
-  gint i;
+  guint i;
   GstCaps *src_caps = nullptr;
   GstCaps *sink_caps = nullptr;
   GstCaps *d3d11_caps = nullptr;
@@ -365,11 +365,10 @@ gst_mf_video_src_fixate (GstBaseSrc * src, GstCaps * caps)
 {
   GstStructure *structure;
   GstCaps *fixated_caps;
-  gint i;
 
   fixated_caps = gst_caps_make_writable (caps);
 
-  for (i = 0; i < gst_caps_get_size (fixated_caps); ++i) {
+  for (guint i = 0; i < gst_caps_get_size (fixated_caps); ++i) {
     structure = gst_caps_get_structure (fixated_caps, i);
     gst_structure_fixate_field_nearest_int (structure, "width", G_MAXINT);
     gst_structure_fixate_field_nearest_int (structure, "height", G_MAXINT);

@@ -503,7 +502,7 @@ gst_mf_video_src_create (GstPushSrc * pushsrc, GstBuffer ** buffer)
   }
 
   diff = GST_CLOCK_DIFF (GST_BUFFER_PTS (buf), running_time);
-  if (diff > self->latency) {
+  if (diff > (GstClockTimeDiff) self->latency) {
     self->latency = (GstClockTime) diff;
     GST_DEBUG_OBJECT (self, "Updated latency value %" GST_TIME_FORMAT,
         GST_TIME_ARGS (self->latency));
@@ -685,12 +685,10 @@ gst_mf_vp9_enc_set_src_caps (GstMFVideoEncoder * encoder,
 {
   GstMFVP9Enc *self = (GstMFVP9Enc *) encoder;
   GstVideoCodecState *out_state;
-  GstStructure *s;
   GstCaps *out_caps;
   GstTagList *tags;
 
   out_caps = gst_caps_new_empty_simple ("video/x-vp9");
-  s = gst_caps_get_structure (out_caps, 0);
 
   out_state = gst_video_encoder_set_output_state (GST_VIDEO_ENCODER (self),
       out_caps, state);
@@ -210,7 +210,7 @@ plugin_init (GstPlugin * plugin)
 
   hr = MFStartup (MF_VERSION, MFSTARTUP_NOSOCKET);
   if (!gst_mf_result (hr)) {
-    GST_WARNING ("MFStartup failure, hr: 0x%x", hr);
+    GST_WARNING ("MFStartup failure, hr: 0x%x", (guint) hr);
     return TRUE;
   }
 