vdpauvideopostprocess: add support for video/x-raw-yuv input

This commit is contained in:
Carl-Anton Ingmarsson 2009-10-08 20:13:04 +02:00
parent a801e81efb
commit 4609e07c5e
7 changed files with 393 additions and 105 deletions

View file

@ -40,7 +40,7 @@ libgstvdp_@GST_MAJORMINOR@include_HEADERS = \
gstvdpoutputbuffer.h gstvdpoutputbuffer.h
libgstvdp_@GST_MAJORMINOR@_la_CFLAGS = $(GST_CFLAGS) $(VDPAU_CFLAGS) $(X11_CFLAGS) libgstvdp_@GST_MAJORMINOR@_la_CFLAGS = $(GST_CFLAGS) $(VDPAU_CFLAGS) $(X11_CFLAGS)
libgstvdp_@GST_MAJORMINOR@_la_LIBADD = $(GST_LIBS) $(VDPAU_LIBS) $(X11_LIBS) libgstvdp_@GST_MAJORMINOR@_la_LIBADD = $(GST_LIBS) $(VDPAU_LIBS) $(X11_LIBS) -lgstvideo-$(GST_MAJORMINOR)
libgstvdp_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIB_LDFLAGS) $(GST_LT_LDFLAGS) $(GST_ALL_LDFLAGS) libgstvdp_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIB_LDFLAGS) $(GST_LT_LDFLAGS) $(GST_ALL_LDFLAGS)
libgstvdp_@GST_MAJORMINOR@_la_LIBTOOLFLAGS = --tag=disable-static libgstvdp_@GST_MAJORMINOR@_la_LIBTOOLFLAGS = --tag=disable-static

View file

@ -122,6 +122,23 @@ gst_vdp_yuv_to_video_caps (GstCaps * caps, GstVdpDevice * device)
return result; return result;
} }
/**
 * gst_vdp_yuv_to_output_caps:
 * @caps: YUV caps to convert
 *
 * Builds "video/x-vdpau-output" caps from raw YUV caps by renaming every
 * structure and stripping the YUV-specific "format" field.
 *
 * Returns: a newly allocated #GstCaps owned by the caller.
 */
GstCaps *
gst_vdp_yuv_to_output_caps (GstCaps * caps)
{
  GstCaps *output_caps;
  guint idx, n_structures;

  output_caps = gst_caps_copy (caps);
  n_structures = gst_caps_get_size (output_caps);

  for (idx = 0; idx < n_structures; idx++) {
    GstStructure *s = gst_caps_get_structure (output_caps, idx);

    gst_structure_set_name (s, "video/x-vdpau-output");
    gst_structure_remove_field (s, "format");
  }

  return output_caps;
}
GstCaps * GstCaps *
gst_vdp_video_to_output_caps (GstCaps * caps) gst_vdp_video_to_output_caps (GstCaps * caps)
{ {

View file

@ -28,5 +28,6 @@ GstCaps *gst_vdp_video_to_yuv_caps (GstCaps *caps, GstVdpDevice *device);
GstCaps *gst_vdp_yuv_to_video_caps (GstCaps *caps, GstVdpDevice *device); GstCaps *gst_vdp_yuv_to_video_caps (GstCaps *caps, GstVdpDevice *device);
GstCaps *gst_vdp_video_to_output_caps (GstCaps * caps); GstCaps *gst_vdp_video_to_output_caps (GstCaps * caps);
GstCaps *gst_vdp_yuv_to_output_caps (GstCaps *caps);
#endif /* _GST_VDP_UTILS_H_ */ #endif /* _GST_VDP_UTILS_H_ */

View file

@ -130,7 +130,7 @@ gst_vdp_video_buffer_get_caps (gboolean filter, VdpChromaType chroma_type)
} }
structure = gst_structure_new ("video/x-vdpau-video", structure = gst_structure_new ("video/x-vdpau-video",
"chroma-type", G_TYPE_INT, formats[i].chroma_type, "chroma-type", G_TYPE_INT, chroma_types[i],
"width", GST_TYPE_INT_RANGE, 1, 4096, "width", GST_TYPE_INT_RANGE, 1, 4096,
"height", GST_TYPE_INT_RANGE, 1, 4096, NULL); "height", GST_TYPE_INT_RANGE, 1, 4096, NULL);
gst_caps_append_structure (video_caps, structure); gst_caps_append_structure (video_caps, structure);
@ -307,8 +307,45 @@ gst_vdp_video_buffer_calculate_size (GstCaps * caps, guint * size)
} }
gboolean gboolean
gst_vdp_video_buffer_download (GstVdpVideoBuffer * inbuf, GstBuffer * outbuf, gst_vdp_video_buffer_parse_yuv_caps (GstCaps * yuv_caps,
GstCaps * outcaps) VdpChromaType * chroma_type, gint * width, gint * height)
{
GstStructure *structure;
guint32 fourcc;
gint i;
g_return_val_if_fail (GST_IS_CAPS (yuv_caps), FALSE);
g_return_val_if_fail (!gst_caps_is_empty (yuv_caps), FALSE);
g_return_val_if_fail (chroma_type, FALSE);
g_return_val_if_fail (width, FALSE);
g_return_val_if_fail (height, FALSE);
structure = gst_caps_get_structure (yuv_caps, 0);
if (!gst_structure_has_name (structure, "video/x-raw-yuv"))
return FALSE;
if (!gst_structure_get_fourcc (structure, "format", &fourcc) ||
!gst_structure_get_int (structure, "width", width) ||
!gst_structure_get_int (structure, "height", height))
return FALSE;
*chroma_type = -1;
for (i = 0; i < G_N_ELEMENTS (formats); i++) {
if (formats[i].fourcc == fourcc) {
*chroma_type = formats[i].chroma_type;
break;
}
}
if (*chroma_type == -1)
return FALSE;
return TRUE;
}
gboolean
gst_vdp_video_buffer_download (GstVdpVideoBuffer * video_buf,
GstBuffer * outbuf, GstCaps * outcaps)
{ {
GstStructure *structure; GstStructure *structure;
gint width, height; gint width, height;
@ -321,7 +358,7 @@ gst_vdp_video_buffer_download (GstVdpVideoBuffer * inbuf, GstBuffer * outbuf,
VdpVideoSurface surface; VdpVideoSurface surface;
VdpStatus status; VdpStatus status;
g_return_val_if_fail (GST_IS_VDP_VIDEO_BUFFER (inbuf), FALSE); g_return_val_if_fail (GST_IS_VDP_VIDEO_BUFFER (video_buf), FALSE);
g_return_val_if_fail (GST_IS_BUFFER (outbuf), FALSE); g_return_val_if_fail (GST_IS_BUFFER (outbuf), FALSE);
g_return_val_if_fail (GST_IS_CAPS (outcaps), FALSE); g_return_val_if_fail (GST_IS_CAPS (outcaps), FALSE);
@ -416,17 +453,17 @@ gst_vdp_video_buffer_download (GstVdpVideoBuffer * inbuf, GstBuffer * outbuf,
return FALSE; return FALSE;
} }
device = inbuf->device; device = video_buf->device;
surface = inbuf->surface; surface = video_buf->surface;
GST_LOG_OBJECT (inbuf, "Entering vdp_video_surface_get_bits_ycbcr"); GST_LOG_OBJECT (video_buf, "Entering vdp_video_surface_get_bits_ycbcr");
status = status =
device->vdp_video_surface_get_bits_ycbcr (surface, device->vdp_video_surface_get_bits_ycbcr (surface,
VDP_YCBCR_FORMAT_YV12, (void *) data, stride); VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
GST_LOG_OBJECT (inbuf, GST_LOG_OBJECT (video_buf,
"Got status %d from vdp_video_surface_get_bits_ycbcr", status); "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
if (G_UNLIKELY (status != VDP_STATUS_OK)) { if (G_UNLIKELY (status != VDP_STATUS_OK)) {
GST_ERROR_OBJECT (inbuf, GST_ERROR_OBJECT (video_buf,
"Couldn't get data from vdpau, Error returned from vdpau was: %s", "Couldn't get data from vdpau, Error returned from vdpau was: %s",
device->vdp_get_error_string (status)); device->vdp_get_error_string (status));
return FALSE; return FALSE;
@ -434,3 +471,109 @@ gst_vdp_video_buffer_download (GstVdpVideoBuffer * inbuf, GstBuffer * outbuf,
return TRUE; return TRUE;
} }
gboolean
gst_vdp_video_buffer_upload (GstVdpVideoBuffer * video_buf, GstBuffer * src_buf,
guint fourcc, gint width, gint height)
{
guint8 *data[3];
guint32 stride[3];
VdpYCbCrFormat format;
GstVdpDevice *device;
VdpStatus status;
g_return_val_if_fail (GST_IS_VDP_VIDEO_BUFFER (video_buf), FALSE);
g_return_val_if_fail (GST_IS_BUFFER (src_buf), FALSE);
switch (fourcc) {
case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
{
data[0] = GST_BUFFER_DATA (src_buf) +
gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
0, width, height);
data[1] = GST_BUFFER_DATA (src_buf) +
gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
2, width, height);
data[2] = GST_BUFFER_DATA (src_buf) +
gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
1, width, height);
stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
0, width);
stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
2, width);
stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
1, width);
format = VDP_YCBCR_FORMAT_YV12;
break;
}
case GST_MAKE_FOURCC ('I', '4', '2', '0'):
{
data[0] = GST_BUFFER_DATA (src_buf) +
gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
0, width, height);
data[1] = GST_BUFFER_DATA (src_buf) +
gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
2, width, height);
data[2] = GST_BUFFER_DATA (src_buf) +
gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
1, width, height);
stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
0, width);
stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
2, width);
stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
1, width);
format = VDP_YCBCR_FORMAT_YV12;
break;
}
case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
{
data[0] = GST_BUFFER_DATA (src_buf);
data[1] = GST_BUFFER_DATA (src_buf) + width * height;
stride[0] = width;
stride[1] = width;
format = VDP_YCBCR_FORMAT_NV12;
break;
}
case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
{
data[0] = GST_BUFFER_DATA (src_buf);
stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_UYVY,
0, width);
format = VDP_YCBCR_FORMAT_UYVY;
break;
}
case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
{
data[0] = GST_BUFFER_DATA (src_buf);
stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YUY2,
0, width);
format = VDP_YCBCR_FORMAT_YUYV;
break;
}
default:
return FALSE;
}
device = video_buf->device;
status = device->vdp_video_surface_put_bits_ycbcr (video_buf->surface, format,
(void *) data, stride);
if (G_UNLIKELY (status != VDP_STATUS_OK)) {
GST_ERROR_OBJECT (video_buf, "Couldn't push YUV data to VDPAU, "
"Error returned from vdpau was: %s",
device->vdp_get_error_string (status));
return FALSE;
}
return TRUE;
}

View file

@ -99,8 +99,11 @@ GstCaps *gst_vdp_video_buffer_get_caps (gboolean filter, VdpChromaType chroma_ty
GstCaps *gst_vdp_video_buffer_get_allowed_yuv_caps (GstVdpDevice * device); GstCaps *gst_vdp_video_buffer_get_allowed_yuv_caps (GstVdpDevice * device);
GstCaps *gst_vdp_video_buffer_get_allowed_video_caps (GstVdpDevice * device); GstCaps *gst_vdp_video_buffer_get_allowed_video_caps (GstVdpDevice * device);
gboolean gst_vdp_video_buffer_parse_yuv_caps (GstCaps *yuv_caps, VdpChromaType *chroma_type, gint *width, gint *height);
gboolean gst_vdp_video_buffer_calculate_size (GstCaps *caps, guint *size); gboolean gst_vdp_video_buffer_calculate_size (GstCaps *caps, guint *size);
gboolean gst_vdp_video_buffer_download (GstVdpVideoBuffer *inbuf, GstBuffer *outbuf, GstCaps *outcaps); gboolean gst_vdp_video_buffer_download (GstVdpVideoBuffer *inbuf, GstBuffer *outbuf, GstCaps *outcaps);
gboolean gst_vdp_video_buffer_upload (GstVdpVideoBuffer *video_buf, GstBuffer *src_buf, guint fourcc, gint width, gint height);
#define GST_VDP_VIDEO_CAPS \ #define GST_VDP_VIDEO_CAPS \
"video/x-vdpau-video, " \ "video/x-vdpau-video, " \

View file

@ -75,10 +75,6 @@ enum
* *
* describe the real formats here. * describe the real formats here.
*/ */
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VDP_VIDEO_CAPS));
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src", static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC, GST_PAD_SRC,
GST_PAD_ALWAYS, GST_PAD_ALWAYS,
@ -369,10 +365,6 @@ gst_vdp_vpp_create_mixer (GstVdpVideoPostProcess * vpp, GstVdpDevice * device)
#define VDP_NUM_MIXER_PARAMETER 3 #define VDP_NUM_MIXER_PARAMETER 3
#define MAX_NUM_FEATURES 5 #define MAX_NUM_FEATURES 5
GstStructure *structure;
gint chroma_type;
gint width, height;
VdpStatus status; VdpStatus status;
VdpVideoMixerFeature features[5]; VdpVideoMixerFeature features[5];
@ -384,15 +376,9 @@ gst_vdp_vpp_create_mixer (GstVdpVideoPostProcess * vpp, GstVdpDevice * device)
}; };
const void *parameter_values[VDP_NUM_MIXER_PARAMETER]; const void *parameter_values[VDP_NUM_MIXER_PARAMETER];
structure = gst_caps_get_structure (GST_PAD_CAPS (vpp->sinkpad), 0); parameter_values[0] = &vpp->width;
if (!gst_structure_get_int (structure, "chroma-type", &chroma_type) || parameter_values[1] = &vpp->height;
!gst_structure_get_int (structure, "width", &width) || parameter_values[2] = &vpp->chroma_type;
!gst_structure_get_int (structure, "height", &height))
return GST_FLOW_ERROR;
parameter_values[0] = &width;
parameter_values[1] = &height;
parameter_values[2] = &chroma_type;
if (gst_vdp_vpp_is_interlaced (vpp) if (gst_vdp_vpp_is_interlaced (vpp)
&& vpp->method != GST_VDP_DEINTERLACE_METHOD_BOB) { && vpp->method != GST_VDP_DEINTERLACE_METHOD_BOB) {
@ -432,23 +418,53 @@ gst_vdp_vpp_create_mixer (GstVdpVideoPostProcess * vpp, GstVdpDevice * device)
} }
/* Acquire a GstVdpDevice for the element.
 *
 * If downstream negotiates "video/x-vdpau-output" we allocate one buffer
 * from the peer purely to borrow its device; otherwise we open a device
 * ourselves from vpp->display_name.  On success vpp->device is set.
 */
static GstFlowReturn
gst_vdp_vpp_open_device (GstVdpVideoPostProcess * vpp)
{
  GstCaps *allowed;
  GstStructure *s;
  GstFlowReturn flow;

  allowed = gst_pad_get_allowed_caps (vpp->srcpad);
  if (G_UNLIKELY (!allowed))
    return GST_FLOW_NOT_NEGOTIATED;

  if (G_UNLIKELY (gst_caps_is_empty (allowed))) {
    gst_caps_unref (allowed);
    return GST_FLOW_NOT_NEGOTIATED;
  }

  gst_caps_truncate (allowed);
  gst_pad_fixate_caps (vpp->srcpad, allowed);

  s = gst_caps_get_structure (allowed, 0);
  if (gst_structure_has_name (s, "video/x-vdpau-output")) {
    GstVdpOutputBuffer *outbuf;

    /* we allocate a buffer from downstream to get a GstVdpDevice */
    flow = gst_pad_alloc_buffer (vpp->srcpad, GST_BUFFER_OFFSET_NONE, 0,
        allowed, (GstBuffer **) & outbuf);
    if (flow != GST_FLOW_OK)
      goto out;

    vpp->device = g_object_ref (outbuf->device);
    gst_buffer_unref (GST_BUFFER (outbuf));
  } else {
    vpp->device = gst_vdp_get_device (vpp->display_name);
    if (!vpp->device)
      goto device_error;
  }

  flow = GST_FLOW_OK;

out:
  gst_caps_unref (allowed);
  return flow;

device_error:
  GST_ELEMENT_ERROR (vpp, RESOURCE, OPEN_READ,
      ("Couldn't create GstVdpDevice"), (NULL));
  flow = GST_FLOW_ERROR;
  goto out;
}
static gint static gint
@ -502,43 +518,76 @@ gst_vdp_vpp_sink_setcaps (GstPad * pad, GstCaps * caps)
GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad)); GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
GstStructure *structure; GstStructure *structure;
GstCaps *output_caps, *allowed_caps, *src_caps; GstCaps *output_caps, *allowed_caps, *src_caps;
gboolean res; gboolean res = FALSE;
/* extract interlaced flag */ /* extract interlaced flag */
structure = gst_caps_get_structure (caps, 0); structure = gst_caps_get_structure (caps, 0);
gst_structure_get_boolean (structure, "interlaced", &vpp->interlaced); gst_structure_get_boolean (structure, "interlaced", &vpp->interlaced);
/* extract par */
if (gst_structure_has_field_typed (structure, "pixel-aspect-ratio",
GST_TYPE_FRACTION)) {
gst_structure_get_fraction (structure, "pixel-aspect-ratio", &vpp->par_n,
&vpp->par_d);
vpp->got_par = TRUE;
} else
vpp->got_par = FALSE;
if (gst_structure_has_name (structure, "video/x-vdpau-video")) {
if (!gst_structure_get_int (structure, "width", &vpp->width) ||
!gst_structure_get_int (structure, "height", &vpp->height) ||
!gst_structure_get_int (structure, "chroma-type",
(gint *) & vpp->chroma_type))
goto done;
output_caps = gst_vdp_video_to_output_caps (caps);
vpp->native_input = TRUE;
} else {
vpp->native_input = FALSE;
if (!gst_vdp_video_buffer_parse_yuv_caps (caps, &vpp->chroma_type,
&vpp->width, &vpp->height))
goto done;
if (!gst_structure_get_fourcc (structure, "format", &vpp->fourcc))
goto done;
output_caps = gst_vdp_yuv_to_output_caps (caps);
}
allowed_caps = gst_pad_get_allowed_caps (vpp->srcpad); allowed_caps = gst_pad_get_allowed_caps (vpp->srcpad);
structure = gst_caps_get_structure (allowed_caps, 0); GST_DEBUG ("allowed_caps: %" GST_PTR_FORMAT, allowed_caps);
output_caps = gst_vdp_video_to_output_caps (caps); if (G_UNLIKELY (!allowed_caps))
goto allowed_caps_error;
if (G_UNLIKELY (gst_caps_is_empty (allowed_caps))) {
gst_caps_unref (allowed_caps);
goto allowed_caps_error;
}
src_caps = gst_caps_intersect (output_caps, allowed_caps); src_caps = gst_caps_intersect (output_caps, allowed_caps);
gst_caps_truncate (src_caps); gst_caps_unref (output_caps);
gst_caps_unref (allowed_caps);
if (gst_caps_is_empty (src_caps)) if (gst_caps_is_empty (src_caps))
goto invalid_caps; goto done;
gst_caps_truncate (src_caps);
gst_pad_fixate_caps (vpp->srcpad, src_caps);
GST_DEBUG ("output_caps: %" GST_PTR_FORMAT " allowed_caps: %" GST_PTR_FORMAT GST_DEBUG ("output_caps: %" GST_PTR_FORMAT " allowed_caps: %" GST_PTR_FORMAT
" src_caps: %" GST_PTR_FORMAT, output_caps, allowed_caps, src_caps); " src_caps: %" GST_PTR_FORMAT, output_caps, allowed_caps, src_caps);
gst_caps_unref (output_caps);
gst_caps_unref (allowed_caps);
if (gst_vdp_vpp_is_interlaced (vpp)) { if (gst_vdp_vpp_is_interlaced (vpp)) {
gint fps_n, fps_d; gint fps_n, fps_d;
structure = gst_caps_get_structure (src_caps, 0); structure = gst_caps_get_structure (src_caps, 0);
if (!gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)) { if (gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d)) {
gst_caps_unref (src_caps); gst_fraction_double (&fps_n, &fps_d);
goto invalid_caps; gst_structure_set (structure, "framerate", GST_TYPE_FRACTION, fps_n,
fps_d, NULL);
vpp->field_duration = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
} }
gst_fraction_double (&fps_n, &fps_d);
gst_structure_set (structure, "framerate", GST_TYPE_FRACTION, fps_n, fps_d,
NULL);
gst_structure_remove_field (structure, "interlaced"); gst_structure_remove_field (structure, "interlaced");
vpp->field_duration = gst_util_uint64_scale (GST_SECOND, fps_d, fps_n);
} }
res = gst_pad_set_caps (vpp->srcpad, src_caps); res = gst_pad_set_caps (vpp->srcpad, src_caps);
@ -547,9 +596,8 @@ done:
gst_object_unref (vpp); gst_object_unref (vpp);
return res; return res;
invalid_caps: allowed_caps_error:
GST_ERROR_OBJECT (vpp, "invalid caps: %" GST_PTR_FORMAT, caps); gst_caps_unref (output_caps);
res = FALSE;
goto done; goto done;
} }
@ -620,6 +668,42 @@ gst_vdp_vpp_chain (GstPad * pad, GstBuffer * buffer)
gst_vdp_vpp_flush (vpp); gst_vdp_vpp_flush (vpp);
} }
if (!vpp->native_input) {
GstVdpVideoBuffer *video_buf;
if (G_UNLIKELY (!vpp->device)) {
ret = gst_vdp_vpp_open_device (vpp);
if (ret != GST_FLOW_OK)
goto error;
}
video_buf = gst_vdp_video_buffer_new (vpp->device, vpp->chroma_type,
vpp->width, vpp->height);
if (G_UNLIKELY (!video_buf))
goto video_buf_error;
if (!gst_vdp_video_buffer_upload (video_buf, buffer, vpp->fourcc,
vpp->width, vpp->height)) {
gst_buffer_unref (GST_BUFFER (video_buf));
GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
("Couldn't upload YUV data to vdpau"), (NULL));
ret = GST_FLOW_ERROR;
goto error;
}
gst_buffer_copy_metadata (GST_BUFFER (video_buf), buffer,
GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);
gst_buffer_unref (buffer);
buffer = GST_BUFFER (video_buf);
}
if (vpp->mixer == VDP_INVALID_HANDLE) {
ret = gst_vdp_vpp_create_mixer (vpp, vpp->device);
if (ret != GST_FLOW_OK)
goto error;
}
gst_vdp_vpp_add_buffer (vpp, GST_VDP_VIDEO_BUFFER (buffer)); gst_vdp_vpp_add_buffer (vpp, GST_VDP_VIDEO_BUFFER (buffer));
while (gst_vdp_vpp_get_next_picture (vpp, while (gst_vdp_vpp_get_next_picture (vpp,
@ -632,28 +716,23 @@ gst_vdp_vpp_chain (GstPad * pad, GstBuffer * buffer)
GstVideoRectangle src_r = { 0, } GstVideoRectangle src_r = { 0, }
, dest_r = { , dest_r = {
0,}; 0,};
gint par_n, par_d;
VdpRect rect; VdpRect rect;
GstVdpDevice *device; GstVdpDevice *device;
VdpStatus status; VdpStatus status;
ret = ret =
gst_vdp_vpp_alloc_output_buffer (vpp, GST_PAD_CAPS (vpp->srcpad), gst_pad_alloc_buffer_and_set_caps (vpp->srcpad, GST_BUFFER_OFFSET_NONE,
&outbuf); 0, GST_PAD_CAPS (vpp->srcpad), (GstBuffer **) & outbuf);
if (ret != GST_FLOW_OK) if (ret != GST_FLOW_OK)
break; break;
structure = gst_caps_get_structure (GST_BUFFER_CAPS (buffer), 0); src_r.w = vpp->width;
if (!gst_structure_get_int (structure, "width", &src_r.w) || src_r.h = vpp->height;
!gst_structure_get_int (structure, "height", &src_r.h)) if (vpp->got_par) {
goto invalid_caps;
if (gst_structure_get_fraction (structure, "pixel-aspect-ratio", &par_n,
&par_d)) {
gint new_width; gint new_width;
new_width = gst_util_uint64_scale_int (src_r.w, par_n, par_d); new_width = gst_util_uint64_scale_int (src_r.w, vpp->par_n, vpp->par_d);
src_r.x += (src_r.w - new_width) / 2; src_r.x += (src_r.w - new_width) / 2;
src_r.w = new_width; src_r.w = new_width;
} }
@ -724,6 +803,17 @@ done:
gst_object_unref (vpp); gst_object_unref (vpp);
return ret; return ret;
error:
gst_buffer_unref (buffer);
goto done;
video_buf_error:
gst_buffer_unref (GST_BUFFER (buffer));
GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
("Couldn't create GstVdpVideoBuffer"), (NULL));
ret = GST_FLOW_ERROR;
goto done;
} }
static GstCaps * static GstCaps *
@ -733,10 +823,20 @@ gst_vdp_vpp_sink_getcaps (GstPad * pad)
GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad)); GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
GstCaps *caps; GstCaps *caps;
if (vpp->device) if (vpp->device) {
caps = gst_vdp_video_buffer_get_allowed_video_caps (vpp->device); GstCaps *video_caps, *yuv_caps;
else video_caps = gst_vdp_video_buffer_get_allowed_video_caps (vpp->device);
caps = gst_static_pad_template_get_caps (&sink_template); yuv_caps = gst_vdp_video_buffer_get_allowed_yuv_caps (vpp->device);
gst_caps_append (video_caps, yuv_caps);
caps = video_caps;
} else {
GstElementClass *element_class = GST_ELEMENT_GET_CLASS (vpp);
GstPadTemplate *sink_template;
sink_template = gst_element_class_get_pad_template (element_class, "sink");
caps = gst_caps_copy (gst_pad_template_get_caps (sink_template));
}
GST_DEBUG ("returning caps: %" GST_PTR_FORMAT, caps);
gst_object_unref (vpp); gst_object_unref (vpp);
@ -749,41 +849,34 @@ gst_vdp_vpp_sink_bufferalloc (GstPad * pad, guint64 offset, guint size,
{ {
GstVdpVideoPostProcess *vpp = GstVdpVideoPostProcess *vpp =
GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad)); GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
GstVdpOutputBuffer *outbuf;
GstFlowReturn ret = GST_FLOW_ERROR; GstFlowReturn ret = GST_FLOW_ERROR;
GstVdpDevice *device = NULL;
GstStructure *structure; GstStructure *structure;
gint width, height;
gint chroma_type;
if (!vpp->device) {
/* if we haven't got a device yet we must alloc a buffer downstream to get it */
GstCaps *src_caps = gst_pad_get_allowed_caps (vpp->srcpad);
gst_pad_fixate_caps (vpp->srcpad, src_caps);
ret = gst_pad_alloc_buffer (vpp->srcpad, 0, 0, src_caps,
(GstBuffer **) & outbuf);
gst_caps_unref (src_caps);
if (ret != GST_FLOW_OK)
goto error;
device = outbuf->device;
gst_buffer_unref (GST_BUFFER (outbuf));
} else
device = vpp->device;
structure = gst_caps_get_structure (caps, 0); structure = gst_caps_get_structure (caps, 0);
if (gst_structure_has_name (structure, "video/x-vdpau-video")) {
gint width, height;
VdpChromaType chroma_type;
if (!gst_structure_get_int (structure, "width", &width) || if (G_UNLIKELY (!vpp->device)) {
!gst_structure_get_int (structure, "height", &height) || ret = gst_vdp_vpp_open_device (vpp);
!gst_structure_get_int (structure, "chroma-type", &chroma_type)) if (ret != GST_FLOW_OK)
goto error; goto done;
}
*buf = GST_BUFFER (gst_vdp_video_buffer_new (device, if (!gst_structure_get_int (structure, "width", &width) ||
chroma_type, width, height)); !gst_structure_get_int (structure, "height", &height) ||
!gst_structure_get_int (structure, "chroma-type",
(gint *) & chroma_type))
goto invalid_caps;
if (*buf == NULL) *buf =
goto error; GST_BUFFER (gst_vdp_video_buffer_new (vpp->device, chroma_type, width,
height));
if (*buf == NULL)
goto video_buffer_error;
} else
*buf = gst_buffer_new_and_alloc (size);
GST_BUFFER_SIZE (*buf) = size; GST_BUFFER_SIZE (*buf) = size;
GST_BUFFER_OFFSET (*buf) = offset; GST_BUFFER_OFFSET (*buf) = offset;
@ -792,10 +885,20 @@ gst_vdp_vpp_sink_bufferalloc (GstPad * pad, guint64 offset, guint size,
ret = GST_FLOW_OK; ret = GST_FLOW_OK;
error: done:
gst_object_unref (vpp); gst_object_unref (vpp);
return ret; return ret;
invalid_caps:
GST_ELEMENT_ERROR (vpp, STREAM, FAILED, ("Invalid caps"), (NULL));
ret = GST_FLOW_ERROR;
goto done;
video_buffer_error:
GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
("Couldn't create GstVdpVideoBuffer"), (NULL));
ret = GST_FLOW_ERROR;
goto done;
} }
static gboolean static gboolean
@ -986,6 +1089,8 @@ static void
gst_vdp_vpp_base_init (gpointer gclass) gst_vdp_vpp_base_init (gpointer gclass)
{ {
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass); GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
GstCaps *caps;
GstPadTemplate *sink_template;
gst_element_class_set_details_simple (element_class, gst_element_class_set_details_simple (element_class,
"VdpauVideoPostProcess", "VdpauVideoPostProcess",
@ -993,8 +1098,11 @@ gst_vdp_vpp_base_init (gpointer gclass)
"Post process GstVdpVideoBuffers and output GstVdpOutputBuffers", "Post process GstVdpVideoBuffers and output GstVdpOutputBuffers",
"Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>"); "Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");
gst_element_class_add_pad_template (element_class, caps = gst_vdp_video_buffer_get_caps (FALSE, 0);
gst_static_pad_template_get (&sink_template)); sink_template = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
caps);
gst_element_class_add_pad_template (element_class, sink_template);
gst_element_class_add_pad_template (element_class, gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_template)); gst_static_pad_template_get (&src_template));
} }
@ -1053,8 +1161,12 @@ static void
gst_vdp_vpp_init (GstVdpVideoPostProcess * vpp, gst_vdp_vpp_init (GstVdpVideoPostProcess * vpp,
GstVdpVideoPostProcessClass * gclass) GstVdpVideoPostProcessClass * gclass)
{ {
GstPadTemplate *sink_template;
vpp->device = NULL; vpp->device = NULL;
vpp->display_name = NULL;
vpp->force_aspect_ratio = FALSE; vpp->force_aspect_ratio = FALSE;
vpp->mode = GST_VDP_DEINTERLACE_MODE_AUTO; vpp->mode = GST_VDP_DEINTERLACE_MODE_AUTO;
vpp->method = GST_VDP_DEINTERLACE_METHOD_BOB; vpp->method = GST_VDP_DEINTERLACE_METHOD_BOB;
@ -1067,7 +1179,9 @@ gst_vdp_vpp_init (GstVdpVideoPostProcess * vpp,
gst_element_add_pad (GST_ELEMENT (vpp), vpp->srcpad); gst_element_add_pad (GST_ELEMENT (vpp), vpp->srcpad);
/* SINK PAD */ /* SINK PAD */
vpp->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink"); sink_template =
gst_element_class_get_pad_template (GST_ELEMENT_CLASS (gclass), "sink");
vpp->sinkpad = gst_pad_new_from_template (sink_template, "sink");
gst_element_add_pad (GST_ELEMENT (vpp), vpp->sinkpad); gst_element_add_pad (GST_ELEMENT (vpp), vpp->sinkpad);
gst_pad_set_getcaps_function (vpp->sinkpad, gst_vdp_vpp_sink_getcaps); gst_pad_set_getcaps_function (vpp->sinkpad, gst_vdp_vpp_sink_getcaps);

View file

@ -23,7 +23,7 @@
#include <gst/gst.h> #include <gst/gst.h>
#include <gst/vdpau/gstvdpdevice.h> #include <gst/vdpau/gstvdpdevice.h>
#include <gst/vdpau/gstvdpvideobuffer.h> #include "gstvdpvideobuffer.h"
G_BEGIN_DECLS G_BEGIN_DECLS
@ -65,7 +65,15 @@ struct _GstVdpVideoPostProcess
GstElement element; GstElement element;
GstPad *sinkpad, *srcpad; GstPad *sinkpad, *srcpad;
gboolean native_input;
VdpChromaType chroma_type;
gint width, height;
guint32 fourcc;
gboolean got_par;
gint par_n, par_d;
gboolean interlaced; gboolean interlaced;
GstClockTime field_duration; GstClockTime field_duration;
@ -82,6 +90,8 @@ struct _GstVdpVideoPostProcess
GstVdpDeinterlaceModes mode; GstVdpDeinterlaceModes mode;
GstVdpDeinterlaceMethods method; GstVdpDeinterlaceMethods method;
/* properties */
gchar *display_name;
gfloat noise_reduction; gfloat noise_reduction;
gfloat sharpening; gfloat sharpening;
gboolean inverse_telecine; gboolean inverse_telecine;