mfvideoenc: Add support for zero-copy encoding

Add a custom IMFMediaBuffer and IMF2DBuffer implementation in order to
keep track of the lifecycle of the Media Foundation memory object.
With this new implementation, we can pass the raw memory of an upstream
buffer to Media Foundation without a copy.
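
For reference, a minimal sketch of the intended zero-copy flow
(hypothetical caller code; the helper names wrap_frame and frame_free are
illustrative only, the real caller lives in gstmfvideoenc.cpp):

#include <gst/video/video.h>
#include <wrl.h>
#include "gstmfvideobuffer.h"

using Microsoft::WRL::ComPtr;

static void
frame_free (GstVideoFrame * frame)
{
  gst_video_frame_unmap (frame);
  g_free (frame);
}

static HRESULT
wrap_frame (GstVideoFrame * frame, IMFMediaBuffer ** buffer)
{
  ComPtr<IMFMediaBuffer> media_buffer;
  ComPtr<IGstMFVideoBuffer> video_buffer;
  HRESULT hr;

  /* Expose the mapped frame memory as IMFMediaBuffer / IMF2DBuffer
   * without copying it */
  hr = IGstMFVideoBuffer::CreateInstanceWrapped (&frame->info,
      (BYTE *) GST_VIDEO_FRAME_PLANE_DATA (frame, 0),
      GST_VIDEO_INFO_SIZE (&frame->info), &media_buffer);
  if (FAILED (hr))
    return hr;

  hr = media_buffer.As (&video_buffer);
  if (FAILED (hr))
    return hr;

  /* The notify runs once Media Foundation drops its last reference,
   * so the frame stays mapped for as long as the encoder may read it */
  hr = video_buffer->SetUserData ((gpointer) frame,
      (GDestroyNotify) frame_free);
  if (FAILED (hr))
    return hr;

  *buffer = media_buffer.Detach ();
  return S_OK;
}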

Part-of: <https://gitlab.freedesktop.org/gstreamer/gst-plugins-bad/-/merge_requests/1518>
Seungha Yang authored on 2020-08-12 20:31:32 +09:00, committed by GStreamer Merge Bot
parent 16a07d303a
commit d1d2acead1
5 changed files with 803 additions and 44 deletions


@@ -0,0 +1,506 @@
/* GStreamer
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#include "gstmfvideobuffer.h"
#include <string.h>
G_BEGIN_DECLS
GST_DEBUG_CATEGORY_EXTERN (gst_mf_video_buffer_debug);
#define GST_CAT_DEFAULT gst_mf_video_buffer_debug
G_END_DECLS
IGstMFVideoBuffer::IGstMFVideoBuffer ()
: ref_count_ (1)
, current_len_ (0)
, contiguous_len_ (0)
, data_ (nullptr)
, contiguous_data_ (nullptr)
, info_ (nullptr)
, contiguous_info_ (nullptr)
, locked_ (false)
, wrapped_ (false)
, user_data_ (nullptr)
, notify_ (nullptr)
{
}
IGstMFVideoBuffer::~IGstMFVideoBuffer ()
{
if (info_)
gst_video_info_free (info_);
if (contiguous_info_)
gst_video_info_free (contiguous_info_);
g_free (contiguous_data_);
if (!wrapped_)
g_free (data_);
}
HRESULT
IGstMFVideoBuffer::CreateInstance (GstVideoInfo * info,
IMFMediaBuffer ** buffer)
{
HRESULT hr = S_OK;
IGstMFVideoBuffer * self;
if (!info || !buffer)
return E_INVALIDARG;
self = new IGstMFVideoBuffer ();
if (!self)
return E_OUTOFMEMORY;
hr = self->Initialize (info);
if (SUCCEEDED (hr))
hr = self->QueryInterface (IID_PPV_ARGS (buffer));
self->Release ();
return hr;
}
HRESULT
IGstMFVideoBuffer::CreateInstanceWrapped (GstVideoInfo * info,
BYTE * data, DWORD length, IMFMediaBuffer ** buffer)
{
HRESULT hr = S_OK;
IGstMFVideoBuffer * self;
if (!info || !data || length == 0 || !buffer)
return E_INVALIDARG;
self = new IGstMFVideoBuffer ();
if (!self)
return E_OUTOFMEMORY;
hr = self->InitializeWrapped (info, data, length);
if (SUCCEEDED (hr))
hr = self->QueryInterface (IID_PPV_ARGS (buffer));
self->Release ();
return hr;
}
HRESULT
IGstMFVideoBuffer::Initialize (GstVideoInfo * info)
{
if (!info)
return E_INVALIDARG;
info_ = gst_video_info_copy (info);
contiguous_info_ = gst_video_info_new ();
/* check if padding is required */
gst_video_info_set_format (contiguous_info_, GST_VIDEO_INFO_FORMAT (info_),
GST_VIDEO_INFO_WIDTH (info_), GST_VIDEO_INFO_HEIGHT (info_));
contiguous_ = GST_VIDEO_INFO_SIZE (info_) ==
GST_VIDEO_INFO_SIZE (contiguous_info_);
contiguous_len_ = GST_VIDEO_INFO_SIZE (contiguous_info_);
/* NOTE: {Set,Get}CurrentLength do not apply to the
* IMF2DBuffer interface */
current_len_ = contiguous_len_;
data_ = (BYTE *) g_malloc0 (GST_VIDEO_INFO_SIZE (info_));
if (!data_)
return E_OUTOFMEMORY;
return S_OK;
}
HRESULT
IGstMFVideoBuffer::InitializeWrapped (GstVideoInfo * info, BYTE * data,
DWORD length)
{
if (!info || !data || length == 0)
return E_INVALIDARG;
if (length < GST_VIDEO_INFO_SIZE (info))
return E_INVALIDARG;
info_ = gst_video_info_copy (info);
contiguous_info_ = gst_video_info_new ();
/* check if padding is required */
gst_video_info_set_format (contiguous_info_, GST_VIDEO_INFO_FORMAT (info_),
GST_VIDEO_INFO_WIDTH (info_), GST_VIDEO_INFO_HEIGHT (info_));
contiguous_ = GST_VIDEO_INFO_SIZE (info_) ==
GST_VIDEO_INFO_SIZE (contiguous_info_);
contiguous_len_ = GST_VIDEO_INFO_SIZE (contiguous_info_);
current_len_ = contiguous_len_;
data_ = data;
wrapped_ = true;
return S_OK;
}
HRESULT
IGstMFVideoBuffer::SetUserData (gpointer user_data, GDestroyNotify notify)
{
GDestroyNotify old_notify = notify_;
gpointer old_user_data = user_data_;
if (old_notify)
old_notify (old_user_data);
user_data_ = user_data;
notify_ = notify;
return S_OK;
}
HRESULT
IGstMFVideoBuffer::GetUserData (gpointer * user_data)
{
if (!user_data)
return E_INVALIDARG;
*user_data = user_data_;
return S_OK;
}
/* IUnknown interface */
STDMETHODIMP_ (ULONG)
IGstMFVideoBuffer::AddRef (void)
{
GST_TRACE ("%p, %d", this, ref_count_);
return InterlockedIncrement (&ref_count_);
}
STDMETHODIMP_ (ULONG)
IGstMFVideoBuffer::Release (void)
{
ULONG ref_count;
GST_TRACE ("%p, %d", this, ref_count_);
ref_count = InterlockedDecrement (&ref_count_);
if (ref_count == 0) {
GDestroyNotify old_notify = notify_;
gpointer old_user_data = user_data_;
GST_TRACE ("Delete instance %p", this);
delete this;
if (old_notify)
old_notify (old_user_data);
}
return ref_count;
}
STDMETHODIMP
IGstMFVideoBuffer::QueryInterface (REFIID riid, void ** object)
{
if (!object)
return E_POINTER;
if (riid == IID_IUnknown) {
GST_TRACE ("query IUnknown interface %p", this);
*object = static_cast<IUnknown *> (static_cast<IMFMediaBuffer *> (this));
} else if (riid == __uuidof(IMFMediaBuffer)) {
GST_TRACE ("query IMFMediaBuffer interface %p", this);
*object = static_cast<IMFMediaBuffer *> (this);
} else if (riid == __uuidof(IMF2DBuffer)) {
GST_TRACE ("query IMF2DBuffer interface %p", this);
*object = static_cast<IMF2DBuffer *> (this);
} else if (riid == __uuidof(IGstMFVideoBuffer)) {
GST_TRACE ("query IGstMFVideoBuffer interface %p", this);
*object = this;
} else {
*object = NULL;
return E_NOINTERFACE;
}
AddRef();
return S_OK;
}
/* IMFMediaBuffer interface */
STDMETHODIMP
IGstMFVideoBuffer::Lock (BYTE ** buffer, DWORD * max_length,
DWORD * current_length)
{
std::lock_guard<std::mutex> lock(lock_);
GST_TRACE ("%p", this);
if (locked_)
return MF_E_INVALIDREQUEST;
locked_ = true;
if (contiguous_) {
*buffer = data_;
goto done;
}
/* IMFMediaBuffer::Lock method should return contiguous memory */
if (!contiguous_data_)
contiguous_data_ = (BYTE *) g_malloc0 (contiguous_len_);
ContiguousCopyTo (contiguous_data_, contiguous_len_);
*buffer = contiguous_data_;
done:
if (max_length)
*max_length = contiguous_len_;
if (current_length)
*current_length = current_len_;
return S_OK;
}
STDMETHODIMP
IGstMFVideoBuffer::Unlock (void)
{
std::lock_guard<std::mutex> lock(lock_);
GST_TRACE ("%p", this);
if (!locked_)
return MF_E_INVALIDREQUEST;
locked_ = false;
if (contiguous_)
return S_OK;
/* copy back to original data */
ContiguousCopyFrom (contiguous_data_, contiguous_len_);
return S_OK;
}
STDMETHODIMP
IGstMFVideoBuffer::GetCurrentLength (DWORD * length)
{
*length = current_len_;
GST_TRACE ("%p", this);
return S_OK;
}
STDMETHODIMP
IGstMFVideoBuffer::SetCurrentLength (DWORD length)
{
GST_TRACE ("%p %d", this, length);
if (length > contiguous_len_)
return E_INVALIDARG;
current_len_ = length;
return S_OK;
}
STDMETHODIMP
IGstMFVideoBuffer::GetMaxLength (DWORD * length)
{
GST_TRACE ("%p", this);
*length = contiguous_len_;
return S_OK;
}
/* IMF2DBuffer */
STDMETHODIMP
IGstMFVideoBuffer::Lock2D (BYTE ** buffer, LONG * pitch)
{
std::lock_guard<std::mutex> lock(lock_);
GST_TRACE ("%p", this);
if (locked_)
return MF_E_INVALIDREQUEST;
locked_ = true;
*buffer = data_;
*pitch = GST_VIDEO_INFO_PLANE_STRIDE (info_, 0);
return S_OK;
}
STDMETHODIMP
IGstMFVideoBuffer::Unlock2D (void)
{
std::lock_guard<std::mutex> lock(lock_);
GST_TRACE ("%p", this);
if (!locked_)
return MF_E_INVALIDREQUEST;
locked_ = false;
return S_OK;
}
STDMETHODIMP
IGstMFVideoBuffer::GetScanline0AndPitch (BYTE ** buffer, LONG * pitch)
{
std::lock_guard<std::mutex> lock(lock_);
GST_TRACE ("%p", this);
/* Lock2D must be called before */
if (!locked_)
return ERROR_INVALID_FUNCTION;
*buffer = data_;
*pitch = GST_VIDEO_INFO_PLANE_STRIDE (info_, 0);
return S_OK;
}
STDMETHODIMP
IGstMFVideoBuffer::IsContiguousFormat (BOOL * contiguous)
{
GST_TRACE ("%p", this);
*contiguous = contiguous_;
return S_OK;
}
STDMETHODIMP
IGstMFVideoBuffer::GetContiguousLength (DWORD * length)
{
GST_TRACE ("%p", this);
*length = contiguous_len_;
return S_OK;
}
STDMETHODIMP
IGstMFVideoBuffer::ContiguousCopyTo (BYTE * dest_buffer,
DWORD dest_buffer_length)
{
GST_TRACE ("%p", this);
if (!dest_buffer || dest_buffer_length < contiguous_len_)
return E_INVALIDARG;
if (contiguous_) {
memcpy (dest_buffer, data_, current_len_);
return S_OK;
}
for (gint i = 0; i < GST_VIDEO_INFO_N_PLANES (info_); i++) {
BYTE *src, *dst;
guint src_stride, dst_stride;
guint width, height;
src = data_ + GST_VIDEO_INFO_PLANE_OFFSET (info_, i);
dst = dest_buffer + GST_VIDEO_INFO_PLANE_OFFSET (contiguous_info_, i);
src_stride = GST_VIDEO_INFO_PLANE_STRIDE (info_, i);
dst_stride = GST_VIDEO_INFO_PLANE_STRIDE (contiguous_info_, i);
width = GST_VIDEO_INFO_COMP_WIDTH (info_, i)
* GST_VIDEO_INFO_COMP_PSTRIDE (info_, i);
height = GST_VIDEO_INFO_COMP_HEIGHT (info_, i);
for (gint j = 0; j < height; j++) {
memcpy (dst, src, width);
src += src_stride;
dst += dst_stride;
}
}
return S_OK;
}
STDMETHODIMP
IGstMFVideoBuffer::ContiguousCopyFrom (const BYTE * src_buffer,
DWORD src_buffer_length)
{
gint offset;
GST_TRACE ("%p", this);
if (!src_buffer)
return E_INVALIDARG;
/* Nothing to copy */
if (src_buffer_length == 0)
return S_OK;
if (contiguous_) {
memcpy (data_, src_buffer, src_buffer_length);
return S_OK;
}
for (gint i = 0; i < GST_VIDEO_INFO_N_PLANES (info_); i++) {
BYTE *dst;
guint src_stride, dst_stride;
guint width, height;
offset = GST_VIDEO_INFO_PLANE_OFFSET (contiguous_info_, i);
dst = data_ + GST_VIDEO_INFO_PLANE_OFFSET (info_, i);
src_stride = GST_VIDEO_INFO_PLANE_STRIDE (contiguous_info_, i);
dst_stride = GST_VIDEO_INFO_PLANE_STRIDE (info_, i);
width = GST_VIDEO_INFO_COMP_WIDTH (info_, i)
* GST_VIDEO_INFO_COMP_PSTRIDE (info_, i);
height = GST_VIDEO_INFO_COMP_HEIGHT (info_, i);
for (gint j = 0; j < height; j++) {
gint to_copy = 0;
if (offset + width < src_buffer_length)
to_copy = width;
else
to_copy = (gint) src_buffer_length - offset;
if (to_copy <= 0)
return S_OK;
memcpy (dst, src_buffer + offset, to_copy);
offset += src_stride;
dst += dst_stride;
}
}
return S_OK;
}


@@ -0,0 +1,119 @@
/* GStreamer
* Copyright (C) 2020 Seungha Yang <seungha@centricular.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifndef __GST_MF_VIDEO_BUFFER_H__
#define __GST_MF_VIDEO_BUFFER_H__
#include <gst/gst.h>
#include <gst/video/video.h>
#include <windows.h>
#include <mfobjects.h>
#include <mferror.h>
#include <mutex>
#ifndef __cplusplus
#error IGstMFVideoBuffer interface doesn't provide C API
#endif
/* Define UUID for QueryInterface() */
class DECLSPEC_UUID("ce922806-a8a6-4e1e-871f-e0cdd5fc9899")
IGstMFVideoBuffer : public IMFMediaBuffer, public IMF2DBuffer
{
public:
static HRESULT CreateInstance (GstVideoInfo * info,
IMFMediaBuffer ** buffer);
static HRESULT CreateInstanceWrapped (GstVideoInfo * info,
BYTE * data,
DWORD length,
IMFMediaBuffer ** buffer);
/* notify will be called right after this object is destroyed */
HRESULT SetUserData (gpointer user_data,
GDestroyNotify notify);
HRESULT GetUserData (gpointer * user_data);
/* IUnknown interface */
STDMETHODIMP_ (ULONG) AddRef (void);
STDMETHODIMP_ (ULONG) Release (void);
STDMETHODIMP QueryInterface (REFIID riid,
void ** object);
/* IMFMediaBuffer interface
*
* Callers of this interface expect the raw memory returned by Lock() to use
* the default stride and have no padding. If the stored layout uses a
* non-default stride and/or padding, Lock() / Unlock() will therefore cause
* a memory copy.
* Callers should avoid this interface whenever the IMF2DBuffer interface
* is available.
*/
STDMETHODIMP Lock (BYTE ** buffer,
DWORD * max_length,
DWORD * current_length);
STDMETHODIMP Unlock (void);
STDMETHODIMP GetCurrentLength (DWORD * length);
STDMETHODIMP SetCurrentLength (DWORD length);
STDMETHODIMP GetMaxLength (DWORD * length);
/* IMF2DBuffer interface
*
* This interface supports raw memory layouts with a non-default stride.
* More complex layouts (for instance, padding at the bottom) are not
* supported.
*/
STDMETHODIMP Lock2D (BYTE ** buffer,
LONG * pitch);
STDMETHODIMP Unlock2D (void);
STDMETHODIMP GetScanline0AndPitch (BYTE ** buffer,
LONG * pitch);
STDMETHODIMP IsContiguousFormat (BOOL * contiguous);
STDMETHODIMP GetContiguousLength (DWORD * length);
STDMETHODIMP ContiguousCopyTo (BYTE * dest_buffer,
DWORD dest_buffer_length);
STDMETHODIMP ContiguousCopyFrom (const BYTE * src_buffer,
DWORD src_buffer_length);
private:
IGstMFVideoBuffer (void);
~IGstMFVideoBuffer (void);
HRESULT Initialize (GstVideoInfo * info);
HRESULT InitializeWrapped (GstVideoInfo * info,
BYTE * data,
DWORD length);
private:
volatile ULONG ref_count_;
DWORD current_len_;
DWORD contiguous_len_;
BYTE *data_;
BYTE *contiguous_data_;
GstVideoInfo *info_;
GstVideoInfo *contiguous_info_;
BOOL contiguous_;
std::mutex lock_;
bool locked_;
bool wrapped_;
gpointer user_data_;
GDestroyNotify notify_;
};
#endif /* __GST_MF_VIDEO_BUFFER_H__ */
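
As a rough illustration of the two locking paths described in the interface
comments above (hypothetical consumer snippet, assuming media_buffer was
obtained from one of the factory methods):

ComPtr<IMF2DBuffer> buffer_2d;
BYTE *scan0, *packed;
LONG pitch;
DWORD max_len, cur_len;

if (SUCCEEDED (media_buffer.As (&buffer_2d))) {
  /* Preferred path: no copy, the caller honours the stored stride */
  buffer_2d->Lock2D (&scan0, &pitch);
  /* ... read scan0 using the returned pitch ... */
  buffer_2d->Unlock2D ();
} else {
  /* Fallback path: Lock() returns tightly packed, default-stride data and
   * copies internally when the stored layout is strided or padded */
  media_buffer->Lock (&packed, &max_len, &cur_len);
  /* ... read cur_len bytes ... */
  media_buffer->Unlock ();
}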


@@ -25,6 +25,7 @@
#include <gst/gst.h>
#include "gstmfvideoenc.h"
#include <wrl.h>
#include "gstmfvideobuffer.h"
using namespace Microsoft::WRL;
@@ -276,6 +277,99 @@ gst_mf_video_enc_set_format (GstVideoEncoder * enc, GstVideoCodecState * state)
return TRUE;
}
static void
gst_mf_video_buffer_free (GstVideoFrame * frame)
{
if (!frame)
return;
gst_video_frame_unmap (frame);
g_free (frame);
}
static gboolean
gst_mf_video_enc_frame_needs_copy (GstVideoFrame * vframe)
{
/* Single plane data can be used without copy */
if (GST_VIDEO_FRAME_N_PLANES (vframe) == 1)
return FALSE;
switch (GST_VIDEO_FRAME_FORMAT (vframe)) {
case GST_VIDEO_FORMAT_I420:
{
guint8 *data, *other_data;
guint size;
/* Unexpected stride size; Media Foundation doesn't provide an API for
* per-plane stride information */
if (GST_VIDEO_FRAME_PLANE_STRIDE (vframe, 0) !=
2 * GST_VIDEO_FRAME_PLANE_STRIDE (vframe, 1) ||
GST_VIDEO_FRAME_PLANE_STRIDE (vframe, 1) !=
GST_VIDEO_FRAME_PLANE_STRIDE (vframe, 2)) {
return TRUE;
}
size = GST_VIDEO_FRAME_PLANE_STRIDE (vframe, 0) *
GST_VIDEO_FRAME_HEIGHT (vframe);
if (size + GST_VIDEO_FRAME_PLANE_OFFSET (vframe, 0) !=
GST_VIDEO_FRAME_PLANE_OFFSET (vframe, 1))
return TRUE;
data = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (vframe, 0);
other_data = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (vframe, 1);
if (data + size != other_data)
return TRUE;
size = GST_VIDEO_FRAME_PLANE_STRIDE (vframe, 1) *
GST_VIDEO_FRAME_COMP_HEIGHT (vframe, 1);
if (size + GST_VIDEO_FRAME_PLANE_OFFSET (vframe, 1) !=
GST_VIDEO_FRAME_PLANE_OFFSET (vframe, 2))
return TRUE;
data = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (vframe, 1);
other_data = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (vframe, 2);
if (data + size != other_data)
return TRUE;
return FALSE;
}
case GST_VIDEO_FORMAT_NV12:
case GST_VIDEO_FORMAT_P010_10LE:
case GST_VIDEO_FORMAT_P016_LE:
{
guint8 *data, *other_data;
guint size;
/* Unexpected stride size; Media Foundation doesn't provide an API for
* per-plane stride information */
if (GST_VIDEO_FRAME_PLANE_STRIDE (vframe, 0) !=
GST_VIDEO_FRAME_PLANE_STRIDE (vframe, 1)) {
return TRUE;
}
size = GST_VIDEO_FRAME_PLANE_STRIDE (vframe, 0) *
GST_VIDEO_FRAME_HEIGHT (vframe);
/* Unexpected padding */
if (size + GST_VIDEO_FRAME_PLANE_OFFSET (vframe, 0) !=
GST_VIDEO_FRAME_PLANE_OFFSET (vframe, 1))
return TRUE;
data = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (vframe, 0);
other_data = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (vframe, 1);
if (data + size != other_data)
return TRUE;
return FALSE;
}
default:
g_assert_not_reached ();
return TRUE;
}
return TRUE;
}
typedef struct
{
GstClockTime mf_pts;
@@ -289,62 +383,82 @@ gst_mf_video_enc_process_input (GstMFVideoEnc * self,
HRESULT hr;
ComPtr<IMFSample> sample;
ComPtr<IMFMediaBuffer> media_buffer;
ComPtr<IGstMFVideoBuffer> video_buffer;
GstVideoInfo *info = &self->input_state->info;
gint i, j;
BYTE *data;
GstVideoFrame vframe;
gboolean res = FALSE;
GstVideoFrame *vframe = NULL;
gboolean unset_force_keyframe = FALSE;
GstMFVideoEncFrameData *frame_data = NULL;
BYTE *data = NULL;
gboolean need_copy;
if (!gst_video_frame_map (&vframe, info, frame->input_buffer, GST_MAP_READ)) {
vframe = g_new0 (GstVideoFrame, 1);
if (!gst_video_frame_map (vframe, info, frame->input_buffer, GST_MAP_READ)) {
GST_ERROR_OBJECT (self, "Couldn't map input frame");
g_free (vframe);
return FALSE;
}
hr = MFCreateSample (sample.GetAddressOf ());
hr = MFCreateSample (&sample);
if (!gst_mf_result (hr))
goto done;
goto error;
hr = MFCreateMemoryBuffer (GST_VIDEO_INFO_SIZE (info),
media_buffer.GetAddressOf ());
if (!gst_mf_result (hr))
goto done;
hr = media_buffer->Lock (&data, NULL, NULL);
if (!gst_mf_result (hr))
goto done;
for (i = 0; i < GST_VIDEO_INFO_N_PLANES (info); i++) {
guint8 *src, *dst;
gint src_stride, dst_stride;
gint width;
src = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (&vframe, i);
dst = data + GST_VIDEO_INFO_PLANE_OFFSET (info, i);
src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (&vframe, i);
dst_stride = GST_VIDEO_INFO_PLANE_STRIDE (info, i);
width = GST_VIDEO_INFO_COMP_WIDTH (info, i)
* GST_VIDEO_INFO_COMP_PSTRIDE (info, i);
for (j = 0; j < GST_VIDEO_INFO_COMP_HEIGHT (info, i); j++) {
memcpy (dst, src, width);
src += src_stride;
dst += dst_stride;
}
/* Check if we can forward this memory to Media Foundation without copy */
need_copy = gst_mf_video_enc_frame_needs_copy (vframe);
if (need_copy) {
GST_TRACE_OBJECT (self, "Copy input buffer into Media Foundation memory");
hr = MFCreateMemoryBuffer (GST_VIDEO_INFO_SIZE (info), &media_buffer);
} else {
GST_TRACE_OBJECT (self, "Can use input buffer without copy");
hr = IGstMFVideoBuffer::CreateInstanceWrapped (&vframe->info,
(BYTE *) GST_VIDEO_FRAME_PLANE_DATA (vframe, 0),
GST_VIDEO_INFO_SIZE (&vframe->info), &media_buffer);
}
media_buffer->Unlock ();
if (!gst_mf_result (hr))
goto error;
if (!need_copy) {
hr = media_buffer.As (&video_buffer);
if (!gst_mf_result (hr))
goto error;
} else {
hr = media_buffer->Lock (&data, NULL, NULL);
if (!gst_mf_result (hr))
goto error;
for (i = 0; i < GST_VIDEO_INFO_N_PLANES (info); i++) {
guint8 *src, *dst;
gint src_stride, dst_stride;
gint width;
src = (guint8 *) GST_VIDEO_FRAME_PLANE_DATA (vframe, i);
dst = data + GST_VIDEO_INFO_PLANE_OFFSET (info, i);
src_stride = GST_VIDEO_FRAME_PLANE_STRIDE (vframe, i);
dst_stride = GST_VIDEO_INFO_PLANE_STRIDE (info, i);
width = GST_VIDEO_INFO_COMP_WIDTH (info, i)
* GST_VIDEO_INFO_COMP_PSTRIDE (info, i);
for (j = 0; j < GST_VIDEO_INFO_COMP_HEIGHT (info, i); j++) {
memcpy (dst, src, width);
src += src_stride;
dst += dst_stride;
}
}
media_buffer->Unlock ();
}
hr = media_buffer->SetCurrentLength (GST_VIDEO_INFO_SIZE (info));
if (!gst_mf_result (hr))
goto done;
goto error;
hr = sample->AddBuffer (media_buffer.Get ());
if (!gst_mf_result (hr))
goto done;
goto error;
frame_data = g_new0 (GstMFVideoEncFrameData, 1);
frame_data->mf_pts = frame->pts / 100;
@@ -354,12 +468,12 @@ gst_mf_video_enc_process_input (GstMFVideoEnc * self,
hr = sample->SetSampleTime (frame_data->mf_pts);
if (!gst_mf_result (hr))
goto done;
goto error;
hr = sample->SetSampleDuration (
GST_CLOCK_TIME_IS_VALID (frame->duration) ? frame->duration / 100 : 0);
if (!gst_mf_result (hr))
goto done;
goto error;
if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame)) {
if (klass->can_force_keyframe) {
@@ -371,9 +485,22 @@ gst_mf_video_enc_process_input (GstMFVideoEnc * self,
}
}
if (!need_copy) {
/* IGstMFVideoBuffer will hold the GstVideoFrame (+ GstBuffer); it will be
* cleared once it is no longer referenced by Media Foundation internals */
hr = video_buffer->SetUserData ((gpointer) vframe,
(GDestroyNotify) gst_mf_video_buffer_free);
if (!gst_mf_result (hr))
goto error;
} else {
gst_video_frame_unmap (vframe);
g_free (vframe);
vframe = NULL;
}
if (!gst_mf_transform_process_input (self->transform, sample.Get ())) {
GST_ERROR_OBJECT (self, "Failed to process input");
goto done;
goto error;
}
if (unset_force_keyframe) {
@@ -381,12 +508,15 @@ gst_mf_video_enc_process_input (GstMFVideoEnc * self,
&CODECAPI_AVEncVideoForceKeyFrame, FALSE);
}
res = TRUE;
return TRUE;
done:
gst_video_frame_unmap (&vframe);
error:
if (vframe) {
gst_video_frame_unmap (vframe);
g_free (vframe);
}
return res;
return FALSE;
}
static GstVideoCodecFrame *


@@ -12,6 +12,7 @@ mf_sources = [
'gstmfaudioenc.cpp',
'gstmfaacenc.cpp',
'gstmfmp3enc.cpp',
'gstmfvideobuffer.cpp',
]
mf_desktop_sources = [


@@ -40,6 +40,7 @@ GST_DEBUG_CATEGORY (gst_mf_debug);
GST_DEBUG_CATEGORY (gst_mf_utils_debug);
GST_DEBUG_CATEGORY (gst_mf_source_object_debug);
GST_DEBUG_CATEGORY (gst_mf_transform_debug);
GST_DEBUG_CATEGORY (gst_mf_video_buffer_debug);
#define GST_CAT_DEFAULT gst_mf_debug
@@ -68,6 +69,8 @@ plugin_init (GstPlugin * plugin)
"mfsourceobject", 0, "mfsourceobject");
GST_DEBUG_CATEGORY_INIT (gst_mf_transform_debug,
"mftransform", 0, "mftransform");
GST_DEBUG_CATEGORY_INIT (gst_mf_video_buffer_debug,
"mfvideobuffer", 0, "mfvideobuffer");
hr = MFStartup (MF_VERSION, MFSTARTUP_NOSOCKET);
if (!gst_mf_result (hr)) {