msdkenc: Add support for YV12, YUY2, UYVY and BGRA

This is done by converting the input to NV12 with VPP before the actual encoding.

https://bugzilla.gnome.org/show_bug.cgi?id=789847
Sebastian Dröge 2017-11-16 11:32:52 +02:00
parent 0ef2a41d84
commit d3eeb98f0c
3 changed files with 398 additions and 68 deletions
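For context on the conversion path this commit adds: when the negotiated input format is not NV12, the encoder initializes a VPP channel, copies each mapped frame into a VPP input surface, and runs it through MFXVideoVPP_RunFrameVPPAsync to produce the NV12 surface that is then handed to the encoder. Below is a minimal stand-alone sketch of that busy-wait conversion step, assuming an mfxSession with VPP already initialized and pre-allocated system-memory surfaces; the helper name convert_to_nv12 and the explicit MFXVideoCORE_SyncOperation call are illustrative only and not part of the patch (the patch passes the output surface straight to the encoder instead of syncing).

/* Sketch only: `session` must already have VPP configured via
 * MFXVideoVPP_Init, and `in` / `out` must match vpp.In / vpp.Out. */
#include <mfxvideo.h>
#include <glib.h>

static mfxStatus
convert_to_nv12 (mfxSession session, mfxFrameSurface1 * in,
    mfxFrameSurface1 * out)
{
  mfxSyncPoint sync = NULL;
  mfxStatus status;

  for (;;) {
    status = MFXVideoVPP_RunFrameVPPAsync (session, in, out, NULL, &sync);
    if (status != MFX_WRN_DEVICE_BUSY)
      break;
    /* Device busy: wait 1ms and retry, as the Media SDK manual suggests */
    g_usleep (1000);
  }

  /* Illustrative: wait for the conversion to finish so `out` is usable
   * immediately; the actual patch lets the encoder consume it asynchronously. */
  if (status >= MFX_ERR_NONE && sync)
    status = MFXVideoCORE_SyncOperation (session, sync, 10000);

  return status;
}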

@@ -72,7 +72,7 @@ static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
     GST_PAD_SINK,
     GST_PAD_ALWAYS,
     GST_STATIC_CAPS ("video/x-raw, "
-        "format = (string) { NV12 }, "
+        "format = (string) { NV12, I420, YV12, YUY2, UYVY, BGRA }, "
         "framerate = (fraction) [0, MAX], "
         "width = (int) [ 16, MAX ], height = (int) [ 16, MAX ],"
         "interlace-mode = (string) progressive")
@@ -140,6 +140,65 @@ gst_msdkenc_add_extra_param (GstMsdkEnc * thiz, mfxExtBuffer * param)
   }
 }

+static void
+gst_msdkenc_alloc_surfaces (GstMsdkEnc * thiz, GstVideoFormat format,
+    gint width, gint height, guint num_surfaces, mfxFrameSurface1 * surfaces)
+{
+  gsize Y_size = 0, U_size = 0;
+  gsize pitch;
+  gsize size;
+  gint i;
+
+  width = GST_ROUND_UP_32 (width);
+  height = GST_ROUND_UP_32 (height);
+
+  switch (format) {
+    case GST_VIDEO_FORMAT_NV12:
+      Y_size = width * height;
+      pitch = width;
+      size = Y_size + (Y_size >> 1);
+      break;
+    case GST_VIDEO_FORMAT_YV12:
+    case GST_VIDEO_FORMAT_I420:
+      Y_size = width * height;
+      pitch = width;
+      U_size = (width / 2) * (height / 2);
+      size = Y_size + 2 * U_size;
+      break;
+    case GST_VIDEO_FORMAT_YUY2:
+    case GST_VIDEO_FORMAT_UYVY:
+      size = 2 * width * height;
+      pitch = 2 * width;
+      break;
+    case GST_VIDEO_FORMAT_BGRA:
+      size = 4 * width * height;
+      pitch = 4 * width;
+      break;
+    default:
+      g_assert_not_reached ();
+      break;
+  }
+
+  for (i = 0; i < num_surfaces; i++) {
+    mfxFrameSurface1 *surface = &surfaces[i];
+    mfxU8 *data = _aligned_alloc (32, size);
+
+    if (!data) {
+      GST_ERROR_OBJECT (thiz, "Memory allocation failed");
+      return;
+    }
+
+    surface->Data.MemId = (mfxMemId) data;
+    surface->Data.Pitch = pitch;
+    surface->Data.Y = data;
+    if (U_size) {
+      surface->Data.U = data + Y_size;
+      surface->Data.V = data + Y_size + U_size;
+    } else if (Y_size) {
+      surface->Data.UV = data + Y_size;
+    }
+  }
+}
+
 static gboolean
 gst_msdkenc_init_encoder (GstMsdkEnc * thiz)
 {
@@ -147,7 +206,7 @@ gst_msdkenc_init_encoder (GstMsdkEnc * thiz)
   GstVideoInfo *info;
   mfxSession session;
   mfxStatus status;
-  mfxFrameAllocRequest request;
+  mfxFrameAllocRequest request[2];
   guint i;

   if (!thiz->input_state) {
@@ -166,6 +225,104 @@ gst_msdkenc_init_encoder (GstMsdkEnc * thiz)
   }

   GST_OBJECT_LOCK (thiz);

+  session = msdk_context_get_session (thiz->context);
+  thiz->has_vpp = FALSE;
+
+  if (info->finfo->format != GST_VIDEO_FORMAT_NV12) {
+    thiz->vpp_param.IOPattern =
+        MFX_IOPATTERN_IN_SYSTEM_MEMORY | MFX_IOPATTERN_OUT_SYSTEM_MEMORY;
+
+    thiz->vpp_param.vpp.In.Width = GST_ROUND_UP_32 (info->width);
+    thiz->vpp_param.vpp.In.Height = GST_ROUND_UP_32 (info->height);
+    thiz->vpp_param.vpp.In.CropW = info->width;
+    thiz->vpp_param.vpp.In.CropH = info->height;
+    thiz->vpp_param.vpp.In.FrameRateExtN = info->fps_n;
+    thiz->vpp_param.vpp.In.FrameRateExtD = info->fps_d;
+    thiz->vpp_param.vpp.In.AspectRatioW = info->par_n;
+    thiz->vpp_param.vpp.In.AspectRatioH = info->par_d;
+    thiz->vpp_param.vpp.In.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
+
+    switch (info->finfo->format) {
+      case GST_VIDEO_FORMAT_NV12:
+        thiz->vpp_param.vpp.In.FourCC = MFX_FOURCC_NV12;
+        thiz->vpp_param.vpp.In.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
+        break;
+      case GST_VIDEO_FORMAT_YV12:
+      case GST_VIDEO_FORMAT_I420:
+        thiz->vpp_param.vpp.In.FourCC = MFX_FOURCC_YV12;
+        thiz->vpp_param.vpp.In.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
+        break;
+      case GST_VIDEO_FORMAT_YUY2:
+        thiz->vpp_param.vpp.In.FourCC = MFX_FOURCC_YUY2;
+        thiz->vpp_param.vpp.In.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
+        break;
+      case GST_VIDEO_FORMAT_UYVY:
+        thiz->vpp_param.vpp.In.FourCC = MFX_FOURCC_UYVY;
+        thiz->vpp_param.vpp.In.ChromaFormat = MFX_CHROMAFORMAT_YUV422;
+        break;
+      case GST_VIDEO_FORMAT_BGRA:
+        thiz->vpp_param.vpp.In.FourCC = MFX_FOURCC_RGB4;
+        thiz->vpp_param.vpp.In.ChromaFormat = MFX_CHROMAFORMAT_YUV444;
+        break;
+      default:
+        g_assert_not_reached ();
+        break;
+    }
+
+    thiz->vpp_param.vpp.Out = thiz->vpp_param.vpp.In;
+    thiz->vpp_param.vpp.Out.FourCC = MFX_FOURCC_NV12;
+    thiz->vpp_param.vpp.Out.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
+
+    /* validate parameters and allow the Media SDK to make adjustments */
+    status = MFXVideoVPP_Query (session, &thiz->vpp_param, &thiz->vpp_param);
+    if (status < MFX_ERR_NONE) {
+      GST_ERROR_OBJECT (thiz, "Video VPP Query failed (%s)",
+          msdk_status_to_string (status));
+      goto no_vpp;
+    } else if (status > MFX_ERR_NONE) {
+      GST_WARNING_OBJECT (thiz, "Video VPP Query returned: %s",
+          msdk_status_to_string (status));
+    }
+
+    status = MFXVideoVPP_QueryIOSurf (session, &thiz->vpp_param, request);
+    if (status < MFX_ERR_NONE) {
+      GST_ERROR_OBJECT (thiz, "VPP Query IO surfaces failed (%s)",
+          msdk_status_to_string (status));
+      goto no_vpp;
+    } else if (status > MFX_ERR_NONE) {
+      GST_WARNING_OBJECT (thiz, "VPP Query IO surfaces returned: %s",
+          msdk_status_to_string (status));
+    }
+
+    thiz->num_vpp_surfaces = request[0].NumFrameSuggested;
+    thiz->vpp_surfaces = g_new0 (mfxFrameSurface1, thiz->num_vpp_surfaces);
+    for (i = 0; i < thiz->num_vpp_surfaces; i++) {
+      memcpy (&thiz->vpp_surfaces[i].Info, &thiz->vpp_param.vpp.In,
+          sizeof (mfxFrameInfo));
+    }
+
+    status = MFXVideoVPP_Init (session, &thiz->vpp_param);
+    if (status < MFX_ERR_NONE) {
+      GST_ERROR_OBJECT (thiz, "Init failed (%s)",
+          msdk_status_to_string (status));
+      goto no_vpp;
+    } else if (status > MFX_ERR_NONE) {
+      GST_WARNING_OBJECT (thiz, "Init returned: %s",
+          msdk_status_to_string (status));
+    }
+
+    status = MFXVideoVPP_GetVideoParam (session, &thiz->vpp_param);
+    if (status < MFX_ERR_NONE) {
+      GST_ERROR_OBJECT (thiz, "Get VPP Parameters failed (%s)",
+          msdk_status_to_string (status));
+      MFXVideoVPP_Close (session);
+      goto no_vpp;
+    } else if (status > MFX_ERR_NONE) {
+      GST_WARNING_OBJECT (thiz, "Get VPP Parameters returned: %s",
+          msdk_status_to_string (status));
+    }
+
+    thiz->has_vpp = TRUE;
+  }
+
   thiz->param.AsyncDepth = thiz->async_depth;
   thiz->param.IOPattern = MFX_IOPATTERN_IN_SYSTEM_MEMORY;
@@ -196,7 +353,6 @@ gst_msdkenc_init_encoder (GstMsdkEnc * thiz)
   thiz->param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;
   thiz->param.mfx.FrameInfo.FourCC = MFX_FOURCC_NV12;
   thiz->param.mfx.FrameInfo.ChromaFormat = MFX_CHROMAFORMAT_YUV420;
-  thiz->param.mfx.FrameInfo.PicStruct = MFX_PICSTRUCT_PROGRESSIVE;

   /* allow subclass configure further */
   if (klass->configure) {
@@ -209,7 +365,6 @@ gst_msdkenc_init_encoder (GstMsdkEnc * thiz)
     thiz->param.ExtParam = thiz->extra_params;
   }

-  session = msdk_context_get_session (thiz->context);
   /* validate parameters and allow the Media SDK to make adjustments */
   status = MFXVideoENCODE_Query (session, &thiz->param, &thiz->param);
   if (status < MFX_ERR_NONE) {
@@ -221,54 +376,51 @@ gst_msdkenc_init_encoder (GstMsdkEnc * thiz)
         msdk_status_to_string (status));
   }

-  status = MFXVideoENCODE_QueryIOSurf (session, &thiz->param, &request);
+  status = MFXVideoENCODE_QueryIOSurf (session, &thiz->param, request);
   if (status < MFX_ERR_NONE) {
-    GST_ERROR_OBJECT (thiz, "Query IO surfaces failed (%s)",
+    GST_ERROR_OBJECT (thiz, "Encode Query IO surfaces failed (%s)",
         msdk_status_to_string (status));
     goto failed;
   } else if (status > MFX_ERR_NONE) {
-    GST_WARNING_OBJECT (thiz, "Query IO surfaces returned: %s",
+    GST_WARNING_OBJECT (thiz, "Encode Query IO surfaces returned: %s",
         msdk_status_to_string (status));
   }

-  if (request.NumFrameSuggested < thiz->param.AsyncDepth) {
+  /* Maximum of VPP output and encoder input, if using VPP */
+  if (thiz->has_vpp)
+    request[0].NumFrameSuggested =
+        MAX (request[0].NumFrameSuggested, request[1].NumFrameSuggested);
+
+  if (request[0].NumFrameSuggested < thiz->param.AsyncDepth) {
     GST_ERROR_OBJECT (thiz, "Required %d surfaces (%d suggested), async %d",
-        request.NumFrameMin, request.NumFrameSuggested, thiz->param.AsyncDepth);
+        request[0].NumFrameMin, request[0].NumFrameSuggested,
+        thiz->param.AsyncDepth);
     goto failed;
   }

-  thiz->num_surfaces = request.NumFrameSuggested;
+  /* These are VPP output (if any) and encoder input */
+  thiz->num_surfaces = request[0].NumFrameSuggested;
   thiz->surfaces = g_new0 (mfxFrameSurface1, thiz->num_surfaces);
   for (i = 0; i < thiz->num_surfaces; i++) {
     memcpy (&thiz->surfaces[i].Info, &thiz->param.mfx.FrameInfo,
         sizeof (mfxFrameInfo));
   }

-  if (GST_ROUND_UP_32 (info->width) != info->width
-      || GST_ROUND_UP_32 (info->height) != info->height) {
-    guint width = GST_ROUND_UP_32 (info->width);
-    guint height = GST_ROUND_UP_32 (info->height);
-    gsize Y_size = width * height;
-    gsize size = Y_size + (Y_size >> 1);
-    for (i = 0; i < thiz->num_surfaces; i++) {
-      mfxFrameSurface1 *surface = &thiz->surfaces[i];
-      mfxU8 *data = _aligned_alloc (32, size);
-      if (!data) {
-        GST_ERROR_OBJECT (thiz, "Memory allocation failed");
-        goto failed;
-      }
-      surface->Data.MemId = (mfxMemId) data;
-      surface->Data.Pitch = width;
-      surface->Data.Y = data;
-      surface->Data.UV = data + Y_size;
-    }
+  if ((GST_ROUND_UP_32 (info->width) != info->width
+      || GST_ROUND_UP_32 (info->height) != info->height)) {
+    gst_msdkenc_alloc_surfaces (thiz, info->finfo->format, info->width,
+        info->height,
+        thiz->has_vpp ? thiz->num_vpp_surfaces : thiz->num_surfaces,
+        thiz->has_vpp ? thiz->vpp_surfaces : thiz->surfaces);
     GST_DEBUG_OBJECT (thiz,
         "Allocated aligned memory, pixel data will be copied");
   }

+  if (thiz->has_vpp) {
+    gst_msdkenc_alloc_surfaces (thiz, GST_VIDEO_FORMAT_NV12, info->width,
+        info->height, thiz->num_surfaces, thiz->surfaces);
+  }
+
   GST_DEBUG_OBJECT (thiz, "Required %d surfaces (%d suggested), allocated %d",
-      request.NumFrameMin, request.NumFrameSuggested, thiz->num_surfaces);
+      request[0].NumFrameMin, request[0].NumFrameSuggested, thiz->num_surfaces);

   status = MFXVideoENCODE_Init (session, &thiz->param);
   if (status < MFX_ERR_NONE) {
@@ -309,6 +461,7 @@ gst_msdkenc_init_encoder (GstMsdkEnc * thiz)
   return TRUE;

+no_vpp:
 failed:
   GST_OBJECT_UNLOCK (thiz);
   msdk_close_context (thiz->context);
@@ -344,6 +497,16 @@ gst_msdkenc_close_encoder (GstMsdkEnc * thiz)
   g_free (thiz->tasks);
   thiz->tasks = NULL;

+  /* Close VPP before freeing the surfaces. They are shared between encoder
+   * and VPP */
+  if (thiz->has_vpp) {
+    status = MFXVideoVPP_Close (msdk_context_get_session (thiz->context));
+    if (status != MFX_ERR_NONE && status != MFX_ERR_NOT_INITIALIZED) {
+      GST_WARNING_OBJECT (thiz, "VPP close failed (%s)",
+          msdk_status_to_string (status));
+    }
+  }
+
   for (i = 0; i < thiz->num_surfaces; i++) {
     mfxFrameSurface1 *surface = &thiz->surfaces[i];
     if (surface->Data.MemId)
@@ -352,6 +515,16 @@ gst_msdkenc_close_encoder (GstMsdkEnc * thiz)
   g_free (thiz->surfaces);
   thiz->surfaces = NULL;

+  if (thiz->has_vpp) {
+    for (i = 0; i < thiz->num_vpp_surfaces; i++) {
+      mfxFrameSurface1 *surface = &thiz->vpp_surfaces[i];
+      if (surface->Data.MemId)
+        _aligned_free (surface->Data.MemId);
+    }
+    g_free (thiz->vpp_surfaces);
+    thiz->vpp_surfaces = NULL;
+  }
+
   msdk_close_context (thiz->context);
   thiz->context = NULL;
   memset (&thiz->param, 0, sizeof (thiz->param));
@@ -394,6 +567,7 @@ gst_msdkenc_dequeue_frame (GstMsdkEnc * thiz, GstVideoCodecFrame * frame)
     if (fdata->frame != frame)
       continue;
+    if (fdata->vframe.buffer)
       gst_video_frame_unmap (&fdata->vframe);
     gst_video_codec_frame_unref (fdata->frame);
     g_slice_free (FrameData, fdata);
@@ -519,7 +693,7 @@ gst_msdkenc_encode_frame (GstMsdkEnc * thiz, mfxFrameSurface1 * surface,
   if (status != MFX_ERR_NONE && status != MFX_ERR_MORE_DATA) {
     GST_ELEMENT_ERROR (thiz, STREAM, ENCODE, ("Encode frame failed."),
-        ("MSDK encode return code=%d", status));
+        ("MSDK encode error (%s)", msdk_status_to_string (status)));
     gst_msdkenc_dequeue_frame (thiz, input_frame);
     gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (thiz), input_frame);
     return GST_FLOW_ERROR;
@@ -654,6 +828,60 @@ gst_msdkenc_handle_frame (GstVideoEncoder * encoder, GstVideoCodecFrame * frame)
   if (G_UNLIKELY (thiz->context == NULL))
     goto not_inited;

+  if (thiz->has_vpp) {
+    mfxFrameSurface1 *vpp_surface;
+    GstVideoFrame vframe;
+    mfxSession session;
+    mfxSyncPoint vpp_sync_point = NULL;
+    mfxStatus status;
+
+    vpp_surface =
+        msdk_get_free_surface (thiz->vpp_surfaces, thiz->num_vpp_surfaces);
+    if (!vpp_surface)
+      goto invalid_surface;
+
+    surface = msdk_get_free_surface (thiz->surfaces, thiz->num_surfaces);
+    if (!surface)
+      goto invalid_surface;
+
+    if (!gst_video_frame_map (&vframe, info, frame->input_buffer, GST_MAP_READ))
+      goto invalid_frame;
+
+    msdk_frame_to_surface (&vframe, vpp_surface);
+    if (frame->pts != GST_CLOCK_TIME_NONE) {
+      vpp_surface->Data.TimeStamp =
+          gst_util_uint64_scale (frame->pts, 90000, GST_SECOND);
+      surface->Data.TimeStamp =
+          gst_util_uint64_scale (frame->pts, 90000, GST_SECOND);
+    } else {
+      vpp_surface->Data.TimeStamp = MFX_TIMESTAMP_UNKNOWN;
+      surface->Data.TimeStamp = MFX_TIMESTAMP_UNKNOWN;
+    }
+
+    session = msdk_context_get_session (thiz->context);
+    for (;;) {
+      status =
+          MFXVideoVPP_RunFrameVPPAsync (session, vpp_surface, surface, NULL,
+          &vpp_sync_point);
+      if (status != MFX_WRN_DEVICE_BUSY)
+        break;
+      /* If device is busy, wait 1ms and retry, as per MSDK's recommendation */
+      g_usleep (1000);
+    };
+
+    gst_video_frame_unmap (&vframe);
+
+    if (status != MFX_ERR_NONE && status != MFX_ERR_MORE_DATA) {
+      GST_ELEMENT_ERROR (thiz, STREAM, ENCODE, ("Converting frame failed."),
+          ("MSDK VPP error (%s)", msdk_status_to_string (status)));
+      gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (thiz), frame);
+      return GST_FLOW_ERROR;
+    }
+
+    fdata = g_slice_new0 (FrameData);
+    fdata->frame = gst_video_codec_frame_ref (frame);
+
+    thiz->pending_frames = g_list_prepend (thiz->pending_frames, fdata);
+  } else {
     surface = msdk_get_free_surface (thiz->surfaces, thiz->num_surfaces);
     if (!surface)
       goto invalid_surface;
@@ -669,6 +897,7 @@ gst_msdkenc_handle_frame (GstVideoEncoder * encoder, GstVideoCodecFrame * frame)
     } else {
       surface->Data.TimeStamp = MFX_TIMESTAMP_UNKNOWN;
     }
+  }

   return gst_msdkenc_encode_frame (thiz, surface, frame);

@@ -77,6 +77,12 @@ struct _GstMsdkEnc
   MsdkEncTask *tasks;
   guint next_task;

+  gboolean has_vpp;
+  mfxVideoParam vpp_param;
+  guint num_vpp_surfaces;
+  /* Input interfaces, output above */
+  mfxFrameSurface1 *vpp_surfaces;
+
   mfxExtBuffer *extra_params[MAX_EXTRA_PARAMS];
   guint num_extra_params;

@@ -77,21 +77,52 @@ msdk_frame_to_surface (GstVideoFrame * frame, mfxFrameSurface1 * surface)
   guint8 *src, *dst;
   guint sstride, dstride;
   guint width, height;
-  guint i;
+  guint i, p;

   if (!surface->Data.MemId) {
+    switch (frame->info.finfo->format) {
+      case GST_VIDEO_FORMAT_NV12:
         surface->Data.Y = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
         surface->Data.UV = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
         surface->Data.Pitch = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+        break;
+      case GST_VIDEO_FORMAT_YV12:
+      case GST_VIDEO_FORMAT_I420:
+        surface->Data.Y = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+        surface->Data.U = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
+        surface->Data.V = GST_VIDEO_FRAME_COMP_DATA (frame, 2);
+        surface->Data.Pitch = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+        break;
+      case GST_VIDEO_FORMAT_YUY2:
+      case GST_VIDEO_FORMAT_UYVY:
+        surface->Data.Y = GST_VIDEO_FRAME_PLANE_DATA (frame, 0);
+        surface->Data.Pitch = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+        break;
+      case GST_VIDEO_FORMAT_BGRA:
+        surface->Data.R = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+        surface->Data.Pitch = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+        break;
+      default:
+        g_assert_not_reached ();
+        break;
+    }
     return;
   }

-  /* Y Plane */
+  switch (frame->info.finfo->format) {
+    case GST_VIDEO_FORMAT_NV12:
       width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
-  height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
-  src = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
-  sstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
-  dst = surface->Data.Y;
+      for (p = 0; p < 2; p++) {
+        height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, p);
+        src = GST_VIDEO_FRAME_COMP_DATA (frame, p);
+        sstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, p);
+        dst = p == 0 ? surface->Data.Y : surface->Data.UV;
         dstride = surface->Data.Pitch;

         for (i = 0; i < height; i++) {
@@ -99,12 +130,33 @@ msdk_frame_to_surface (GstVideoFrame * frame, mfxFrameSurface1 * surface)
           src += sstride;
           dst += dstride;
         }
-  /* UV Plane */
-  height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 1);
-  src = GST_VIDEO_FRAME_COMP_DATA (frame, 1);
-  sstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 1);
-  dst = surface->Data.UV;
+      }
+      break;
+    case GST_VIDEO_FORMAT_YV12:
+    case GST_VIDEO_FORMAT_I420:
+      for (p = 0; p < 3; p++) {
+        width = GST_VIDEO_FRAME_COMP_WIDTH (frame, p);
+        height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, p);
+        src = GST_VIDEO_FRAME_COMP_DATA (frame, p);
+        sstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, p);
+        switch (p) {
+          case 0:
+            dst = surface->Data.Y;
+            break;
+          case 1:
+            dst = surface->Data.U;
+            break;
+          case 2:
+            dst = surface->Data.V;
+            break;
+          default:
+            g_assert_not_reached ();
+            break;
+        }
+        dstride = surface->Data.Pitch;
+        if (p > 0)
+          dstride = dstride / 2;

         for (i = 0; i < height; i++) {
           memcpy (dst, src, width);
@@ -112,6 +164,49 @@ msdk_frame_to_surface (GstVideoFrame * frame, mfxFrameSurface1 * surface)
           dst += dstride;
         }
       }
+      break;
+    case GST_VIDEO_FORMAT_YUY2:
+    case GST_VIDEO_FORMAT_UYVY:
+      width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
+      height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
+      src = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+      sstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+      dst = surface->Data.Y;
+      dstride = surface->Data.Pitch;
+
+      width *= 2;
+      width = MIN (sstride, width);
+
+      for (i = 0; i < height; i++) {
+        memcpy (dst, src, width);
+        src += sstride;
+        dst += dstride;
+      }
+      break;
+    case GST_VIDEO_FORMAT_BGRA:
+      width = GST_VIDEO_FRAME_COMP_WIDTH (frame, 0);
+      height = GST_VIDEO_FRAME_COMP_HEIGHT (frame, 0);
+      src = GST_VIDEO_FRAME_COMP_DATA (frame, 0);
+      sstride = GST_VIDEO_FRAME_COMP_STRIDE (frame, 0);
+      dst = surface->Data.R;
+      dstride = surface->Data.Pitch;
+
+      width *= 4;
+
+      for (i = 0; i < height; i++) {
+        memcpy (dst, src, width);
+        src += sstride;
+        dst += dstride;
+      }
+      break;
+    default:
+      g_assert_not_reached ();
+      break;
+  }
+}

 const gchar *
 msdk_status_to_string (mfxStatus status)