vdpau: don't put the GstVdpDevice in the caps

Authored by Carl-Anton Ingmarsson on 2009-06-24 11:36:28 +02:00, committed by Jan Schmidt
parent 054840555b
commit 58c8198f4d
6 changed files with 71 additions and 36 deletions
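
The pattern this commit moves to shows up most clearly in the new gst_vdp_mpeg_dec_alloc_buffer hunk below: instead of fishing a "device" field out of the negotiated caps, an element allocates a GstVdpVideoBuffer from downstream and keeps its own reference to the GstVdpDevice that the buffer already carries. A minimal sketch of that pattern, condensed from the hunks in this commit; the helper name and the gstvdpvideobuffer.h include are illustrative assumptions, not code from this diff:

/* Sketch only: take the GstVdpDevice from an allocated buffer rather
 * than from the caps. */
#include <gst/gst.h>
#include "gstvdpvideobuffer.h"  /* assumed header for GST_VDP_VIDEO_BUFFER */

static GstFlowReturn
alloc_vdp_buffer_and_take_device (GstPad * src, GstVdpDevice ** device,
    GstBuffer ** outbuf)
{
  GstFlowReturn ret;

  /* Let downstream hand us a VDPAU video buffer for the negotiated caps. */
  ret = gst_pad_alloc_buffer_and_set_caps (src, 0, 0,
      GST_PAD_CAPS (src), outbuf);
  if (ret != GST_FLOW_OK)
    return ret;

  /* The buffer carries the device; ref it once and cache it. */
  if (*device == NULL)
    *device = g_object_ref (GST_VDP_VIDEO_BUFFER (*outbuf)->device);

  return GST_FLOW_OK;
}

Transform elements such as GstVdpYUVVideo take the same reference in their transform function and drop it again in the new stop() vfunc, as the later hunks show.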

View file

@@ -41,6 +41,7 @@
 #include <string.h>
 #include "mpegutil.h"
+#include "gstvdputils.h"
 #include "gstvdpmpegdec.h"

 GST_DEBUG_CATEGORY_STATIC (gst_vdp_mpeg_dec_debug);
@@ -55,8 +56,7 @@ enum
 enum
 {
-  PROP_0,
-  PROP_DISPLAY
+  PROP_0
 };

 /* the capabilities of the inputs and outputs.
@@ -314,6 +314,38 @@ gst_vdp_mpeg_dec_push_video_buffer (GstVdpMpegDec * mpeg_dec,
   return gst_pad_push (mpeg_dec->src, GST_BUFFER (buffer));
 }

+static GstFlowReturn
+gst_vdp_mpeg_dec_alloc_buffer (GstVdpMpegDec * mpeg_dec, GstBuffer ** outbuf)
+{
+  GstFlowReturn ret;
+
+  ret = gst_pad_alloc_buffer_and_set_caps (mpeg_dec->src, 0, 0,
+      GST_PAD_CAPS (mpeg_dec->src), outbuf);
+  if (ret != GST_FLOW_OK)
+    return ret;
+
+  if (!mpeg_dec->device) {
+    GstVdpDevice *device;
+    VdpStatus status;
+
+    GST_WARNING ("ASDASD");
+    device = mpeg_dec->device =
+        g_object_ref (GST_VDP_VIDEO_BUFFER (*outbuf)->device);
+
+    status = device->vdp_decoder_create (device->device, mpeg_dec->profile,
+        mpeg_dec->width, mpeg_dec->height, 2, &mpeg_dec->decoder);
+    if (status != VDP_STATUS_OK) {
+      GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
+          ("Could not create vdpau decoder"),
+          ("Error returned from vdpau was: %s",
+              device->vdp_get_error_string (status)));
+      ret = GST_FLOW_ERROR;
+    }
+  }
+
+  return ret;
+}
+
 static GstFlowReturn
 gst_vdp_mpeg_dec_decode (GstVdpMpegDec * mpeg_dec,
     GstClockTime timestamp, gint64 size)
@@ -346,26 +378,12 @@ gst_vdp_mpeg_dec_decode (GstVdpMpegDec * mpeg_dec,
     info->backward_reference = VDP_INVALID_HANDLE;
   }

-  if (gst_pad_alloc_buffer_and_set_caps (mpeg_dec->src, 0, 0,
-          GST_PAD_CAPS (mpeg_dec->src), &outbuf) != GST_FLOW_OK) {
+  if (gst_vdp_mpeg_dec_alloc_buffer (mpeg_dec, &outbuf) != GST_FLOW_OK) {
     gst_adapter_clear (mpeg_dec->adapter);
     return GST_FLOW_ERROR;
   }
   device = GST_VDP_VIDEO_BUFFER (outbuf)->device;

-  if (mpeg_dec->decoder == VDP_INVALID_HANDLE) {
-    status = device->vdp_decoder_create (device->device, mpeg_dec->profile,
-        mpeg_dec->width, mpeg_dec->height, 2, &mpeg_dec->decoder);
-    if (status != VDP_STATUS_OK) {
-      GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
-          ("Could not create vdpau decoder"),
-          ("Error returned from vdpau was: %s",
-              device->vdp_get_error_string (status)));
-      gst_buffer_unref (outbuf);
-      return GST_FLOW_ERROR;
-    }
-    mpeg_dec->device = g_object_ref (device);
-  }

   if (info->forward_reference != VDP_INVALID_HANDLE &&
       info->picture_coding_type != I_FRAME)
@@ -1036,10 +1054,6 @@ gst_vdp_mpeg_dec_class_init (GstVdpMpegDecClass * klass)
   gobject_class->finalize = gst_vdp_mpeg_dec_finalize;

-  g_object_class_install_property (gobject_class, PROP_DISPLAY,
-      g_param_spec_string ("display", "Display", "X Display name",
-          NULL, G_PARAM_READWRITE | G_PARAM_CONSTRUCT));
-
   gstelement_class->change_state = gst_vdp_mpeg_dec_change_state;
 }

View file

@@ -81,13 +81,11 @@ error:
 }

 GstCaps *
-gst_vdp_video_to_yuv_caps (GstCaps * caps)
+gst_vdp_video_to_yuv_caps (GstCaps * caps, GstVdpDevice * device)
 {
   GstCaps *new_caps, *allowed_caps, *result;
   gint i;
   GstStructure *structure;
-  const GValue *value;
-  GstVdpDevice *device = NULL;

   new_caps = gst_caps_new_empty ();
@@ -115,7 +113,6 @@ gst_vdp_video_to_yuv_caps (GstCaps * caps)
     gst_structure_set_name (new_struct, "video/x-raw-yuv");
     gst_structure_remove_field (new_struct, "chroma-type");
-    gst_structure_remove_field (new_struct, "device");

     gst_structure_set (new_struct, "format", GST_TYPE_FOURCC,
         GPOINTER_TO_INT (iter->data), NULL);
@@ -126,10 +123,6 @@ gst_vdp_video_to_yuv_caps (GstCaps * caps)
   }

-  structure = gst_caps_get_structure (caps, 0);
-  value = gst_structure_get_value (structure, "device");
-  if (value)
-    device = g_value_get_object (value);

   if (device) {
     allowed_caps = gst_vdp_get_allowed_yuv_caps (device);
     result = gst_caps_intersect (new_caps, allowed_caps);
@@ -211,8 +204,6 @@ gst_vdp_yuv_to_video_caps (GstCaps * caps, GstVdpDevice * device)
         NULL);
     gst_structure_set_name (structure, "video/x-vdpau-video");

-    if (device)
-      gst_structure_set (structure, "device", G_TYPE_OBJECT, device, NULL);
   }

   if (device) {

View file

@@ -76,7 +76,7 @@ static const VdpauFormats formats[N_FORMATS] = {
   },
 };

-GstCaps *gst_vdp_video_to_yuv_caps (GstCaps *caps);
-GstCaps *gst_vdp_yuv_to_video_caps (GstCaps *caps, GstVdpDevice *device);
+GstCaps *gst_vdp_video_to_yuv_caps (GstCaps *caps, GstVdpDevice *device);
+GstCaps *gst_vdp_yuv_to_video_caps (GstCaps *caps, GstVdpDevice *device);

 #endif /* _GST_VDP_UTILS_H_ */

View file

@@ -325,7 +325,7 @@ gst_vdp_video_yuv_transform_caps (GstBaseTransform * trans,
   GstCaps *result;

   if (direction == GST_PAD_SINK)
-    result = gst_vdp_video_to_yuv_caps (caps);
+    result = gst_vdp_video_to_yuv_caps (caps, video_yuv->device);
   else if (direction == GST_PAD_SRC)
     result = gst_vdp_yuv_to_video_caps (caps, video_yuv->device);
@@ -461,6 +461,7 @@ static void
 gst_vdp_video_yuv_class_init (GstVdpVideoYUVClass * klass)
 {
   GObjectClass *gobject_class;
+  GstElementClass *element_class;
   GstBaseTransformClass *trans_class;

   gobject_class = (GObjectClass *) klass;

View file

@@ -83,6 +83,9 @@ gst_vdp_yuv_video_transform (GstBaseTransform * trans, GstBuffer * inbuf,
   VdpVideoSurface surface;

   device = GST_VDP_VIDEO_BUFFER (outbuf)->device;
+  if (!yuv_video->device)
+    yuv_video->device = g_object_ref (device);
+
   surface = GST_VDP_VIDEO_BUFFER (outbuf)->surface;

   switch (yuv_video->format) {
@@ -258,12 +261,13 @@ static GstCaps *
 gst_vdp_yuv_video_transform_caps (GstBaseTransform * trans,
     GstPadDirection direction, GstCaps * caps)
 {
+  GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (trans);
   GstCaps *result;

   if (direction == GST_PAD_SINK) {
-    result = gst_vdp_yuv_to_video_caps (caps, NULL);
+    result = gst_vdp_yuv_to_video_caps (caps, yuv_video->device);
   } else if (direction == GST_PAD_SRC) {
-    result = gst_vdp_video_to_yuv_caps (caps);
+    result = gst_vdp_video_to_yuv_caps (caps, yuv_video->device);
   }

   GST_LOG ("transformed %" GST_PTR_FORMAT " to %" GST_PTR_FORMAT, caps, result);
@@ -271,6 +275,27 @@ gst_vdp_yuv_video_transform_caps (GstBaseTransform * trans,
   return result;
 }

+static gboolean
+gst_vdp_yuv_video_start (GstBaseTransform * trans)
+{
+  GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (trans);
+
+  yuv_video->device = NULL;
+
+  return TRUE;
+}
+
+static gboolean
+gst_vdp_yuv_video_stop (GstBaseTransform * trans)
+{
+  GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (trans);
+
+  if (yuv_video->device)
+    g_object_unref (yuv_video->device);
+
+  return TRUE;
+}
+
 /* GObject vmethod implementations */

 static void
@@ -299,6 +324,8 @@ gst_vdp_yuv_video_class_init (GstVdpYUVVideoClass * klass)
   gobject_class = (GObjectClass *) klass;
   trans_class = (GstBaseTransformClass *) klass;

+  trans_class->start = gst_vdp_yuv_video_start;
+  trans_class->stop = gst_vdp_yuv_video_stop;
   trans_class->transform_caps = gst_vdp_yuv_video_transform_caps;
   trans_class->transform_size = gst_vdp_yuv_video_transform_size;
   trans_class->set_caps = gst_vdp_yuv_video_set_caps;

View file

@@ -41,6 +41,8 @@ typedef struct _GstVdpYUVVideoClass GstVdpYUVVideoClass;
 struct _GstVdpYUVVideo {
   GstBaseTransform trans;

+  GstVdpDevice *device;
+
   guint32 format;
   gint width, height;
 };