vdpau: make GstVdpVideoYUV implement pad_alloc
also change GstVdpMpegDec and GstVdpYUVVideo to make use of this
commit 054840555b
parent ecd81041e0

6 changed files with 189 additions and 221 deletions
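For context, this is the GStreamer 0.10 pad_alloc pattern the change moves to: GstVdpVideoYUV now owns the GstVdpDevice (display property, start/stop) and installs a bufferalloc function on its sink pad so upstream elements can get GstVdpVideoBuffers from it, while GstVdpMpegDec requests its output buffer with gst_pad_alloc_buffer_and_set_caps () and GstVdpYUVVideo drops its own prepare_output_buffer, both taking the device from the buffer that comes back. Below is a minimal sketch of the two sides of that pattern, assuming GStreamer 0.10 core API; the example_* names are illustrative stand-ins, not functions from this plugin, and the real code is in the hunks that follow.

#include <gst/gst.h>

/* Provider side (what gst_vdp_video_yuv_buffer_alloc does below):
 * answer upstream pad_alloc requests with a suitably allocated buffer.
 * example_buffer_alloc is an illustrative stand-in, not plugin code. */
static GstFlowReturn
example_buffer_alloc (GstPad * pad, guint64 offset, guint size,
    GstCaps * caps, GstBuffer ** buf)
{
  GstFlowReturn ret = GST_FLOW_ERROR;

  /* A real implementation parses caps and allocates a device-backed
   * buffer (gst_vdp_video_buffer_new in this commit); a plain
   * allocation stands in here. */
  *buf = gst_buffer_new_and_alloc (size);
  if (*buf != NULL) {
    GST_BUFFER_OFFSET (*buf) = offset;
    gst_buffer_set_caps (*buf, caps);
    ret = GST_FLOW_OK;
  }

  return ret;
}

/* Installed once at init time, as gst_vdp_video_yuv_init does below. */
static void
example_install_bufferalloc (GstPad * sinkpad)
{
  gst_pad_set_bufferalloc_function (sinkpad, example_buffer_alloc);
}

/* Consumer side (what gst_vdp_mpeg_dec_decode switches to below):
 * ask the src pad's peer for the output buffer instead of allocating
 * it locally. */
static GstFlowReturn
example_alloc_output (GstPad * srcpad, GstBuffer ** outbuf)
{
  return gst_pad_alloc_buffer_and_set_caps (srcpad, 0, 0,
      GST_PAD_CAPS (srcpad), outbuf);
}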
@@ -82,10 +82,6 @@ GST_BOILERPLATE_FULL (GstVdpMpegDec, gst_vdp_mpeg_dec,

static void gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info);
static void gst_vdp_mpeg_dec_finalize (GObject * object);
static void gst_vdp_mpeg_dec_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_vdp_mpeg_dec_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);

guint8 *
mpeg_util_find_start_code (guint32 * sync_word, guint8 * cur, guint8 * end)
@@ -172,10 +168,6 @@ gst_vdp_mpeg_dec_set_caps (GstPad * pad, GstCaps * caps)
  gboolean res;

  const GValue *value;
  /* Default to MPEG1 until we find otherwise */
  VdpDecoderProfile profile = VDP_DECODER_PROFILE_MPEG1;
  GstVdpDevice *device;
  VdpStatus status;

  structure = gst_caps_get_structure (caps, 0);
@@ -187,7 +179,6 @@ gst_vdp_mpeg_dec_set_caps (GstPad * pad, GstCaps * caps)
  gst_structure_get_boolean (structure, "interlaced", &interlaced);

  src_caps = gst_caps_new_simple ("video/x-vdpau-video",
      "device", G_TYPE_OBJECT, mpeg_dec->device,
      "chroma-type", G_TYPE_INT, VDP_CHROMA_TYPE_420,
      "width", G_TYPE_INT, width,
      "height", G_TYPE_INT, height,
@@ -212,6 +203,9 @@ gst_vdp_mpeg_dec_set_caps (GstPad * pad, GstCaps * caps)
  /* parse caps to setup decoder */
  gst_structure_get_int (structure, "mpegversion", &mpeg_dec->version);

  /* Default to MPEG1 until we find otherwise */
  mpeg_dec->profile = VDP_DECODER_PROFILE_MPEG1;

  value = gst_structure_get_value (structure, "codec_data");
  if (value) {
    GstBuffer *codec_data, *buf;
@@ -240,10 +234,10 @@ gst_vdp_mpeg_dec_set_caps (GstPad * pad, GstCaps * caps)
    if (mpeg_dec->version != 1) {
      switch (ext.profile) {
        case 5:
          profile = VDP_DECODER_PROFILE_MPEG2_SIMPLE;
          mpeg_dec->profile = VDP_DECODER_PROFILE_MPEG2_SIMPLE;
          break;
        default:
          profile = VDP_DECODER_PROFILE_MPEG2_MAIN;
          mpeg_dec->profile = VDP_DECODER_PROFILE_MPEG2_MAIN;
          break;
      }
    }
@@ -261,23 +255,6 @@ gst_vdp_mpeg_dec_set_caps (GstPad * pad, GstCaps * caps)
    }
  }

  device = mpeg_dec->device;

  if (mpeg_dec->decoder != VDP_INVALID_HANDLE) {
    device->vdp_decoder_destroy (mpeg_dec->decoder);
    mpeg_dec->decoder = VDP_INVALID_HANDLE;
  }

  status = device->vdp_decoder_create (device->device, profile, mpeg_dec->width,
      mpeg_dec->height, 2, &mpeg_dec->decoder);
  if (status != VDP_STATUS_OK) {
    GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
        ("Could not create vdpau decoder"),
        ("Error returned from vdpau was: %s",
            device->vdp_get_error_string (status)));
    res = FALSE;
    goto done;
  }
  res = TRUE;

done:
@@ -343,7 +320,7 @@ gst_vdp_mpeg_dec_decode (GstVdpMpegDec * mpeg_dec,
{
  VdpPictureInfoMPEG1Or2 *info;
  GstBuffer *buffer;
  GstVdpVideoBuffer *outbuf;
  GstBuffer *outbuf;
  VdpVideoSurface surface;
  GstVdpDevice *device;
  VdpBitstreamBuffer vbit[1];
@@ -351,27 +328,6 @@ gst_vdp_mpeg_dec_decode (GstVdpMpegDec * mpeg_dec,

  info = &mpeg_dec->vdp_info;

  buffer = gst_adapter_take_buffer (mpeg_dec->adapter,
      gst_adapter_available (mpeg_dec->adapter));

  outbuf = gst_vdp_video_buffer_new (mpeg_dec->device, VDP_CHROMA_TYPE_420,
      mpeg_dec->width, mpeg_dec->height);
  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
  GST_BUFFER_DURATION (outbuf) = mpeg_dec->duration;
  GST_BUFFER_OFFSET (outbuf) = mpeg_dec->frame_nr;
  GST_BUFFER_SIZE (outbuf) = size;

  if (info->picture_coding_type == I_FRAME)
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);

  if (info->top_field_first)
    GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_TFF);
  else
    GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_TFF);

  if (info->picture_coding_type != B_FRAME) {
    if (info->backward_reference != VDP_INVALID_HANDLE) {
      gst_buffer_ref (mpeg_dec->b_buffer);
@@ -390,19 +346,56 @@ gst_vdp_mpeg_dec_decode (GstVdpMpegDec * mpeg_dec,
    info->backward_reference = VDP_INVALID_HANDLE;
  }

  if (gst_pad_alloc_buffer_and_set_caps (mpeg_dec->src, 0, 0,
          GST_PAD_CAPS (mpeg_dec->src), &outbuf) != GST_FLOW_OK) {
    gst_adapter_clear (mpeg_dec->adapter);
    return GST_FLOW_ERROR;
  }

  device = GST_VDP_VIDEO_BUFFER (outbuf)->device;
  if (mpeg_dec->decoder == VDP_INVALID_HANDLE) {
    status = device->vdp_decoder_create (device->device, mpeg_dec->profile,
        mpeg_dec->width, mpeg_dec->height, 2, &mpeg_dec->decoder);
    if (status != VDP_STATUS_OK) {
      GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
          ("Could not create vdpau decoder"),
          ("Error returned from vdpau was: %s",
              device->vdp_get_error_string (status)));
      gst_buffer_unref (outbuf);
      return GST_FLOW_ERROR;
    }
    mpeg_dec->device = g_object_ref (device);
  }

  if (info->forward_reference != VDP_INVALID_HANDLE &&
      info->picture_coding_type != I_FRAME)
    gst_vdp_video_buffer_add_reference (outbuf,
    gst_vdp_video_buffer_add_reference (GST_VDP_VIDEO_BUFFER (outbuf),
        GST_VDP_VIDEO_BUFFER (mpeg_dec->f_buffer));

  if (info->backward_reference != VDP_INVALID_HANDLE
      && info->picture_coding_type == B_FRAME)
    gst_vdp_video_buffer_add_reference (outbuf,
    gst_vdp_video_buffer_add_reference (GST_VDP_VIDEO_BUFFER (outbuf),
        GST_VDP_VIDEO_BUFFER (mpeg_dec->b_buffer));

  surface = outbuf->surface;
  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
  GST_BUFFER_DURATION (outbuf) = mpeg_dec->duration;
  GST_BUFFER_OFFSET (outbuf) = mpeg_dec->frame_nr;
  GST_BUFFER_SIZE (outbuf) = size;

  device = mpeg_dec->device;
  if (info->picture_coding_type == I_FRAME)
    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
  else
    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);

  if (info->top_field_first)
    GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_TFF);
  else
    GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_TFF);

  buffer = gst_adapter_take_buffer (mpeg_dec->adapter,
      gst_adapter_available (mpeg_dec->adapter));

  surface = GST_VDP_VIDEO_BUFFER (outbuf)->surface;

  vbit[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
  vbit[0].bitstream = GST_BUFFER_DATA (buffer);
@@ -601,12 +594,13 @@ gst_vdp_mpeg_dec_reset (GstVdpMpegDec * mpeg_dec)
{
  gst_vdp_mpeg_dec_flush (mpeg_dec);

  if (mpeg_dec->decoder != VDP_INVALID_HANDLE)
    mpeg_dec->device->vdp_decoder_destroy (mpeg_dec->decoder);
  mpeg_dec->decoder = VDP_INVALID_HANDLE;
  if (mpeg_dec->device)
  if (mpeg_dec->device) {
    if (mpeg_dec->decoder != VDP_INVALID_HANDLE)
      mpeg_dec->device->vdp_decoder_destroy (mpeg_dec->decoder);
    mpeg_dec->decoder = VDP_INVALID_HANDLE;

    g_object_unref (mpeg_dec->device);
    mpeg_dec->device = NULL;
  }

  mpeg_dec->state = GST_VDP_MPEG_DEC_NEED_SEQUENCE;
@@ -998,14 +992,6 @@ gst_vdp_mpeg_dec_change_state (GstElement * element, GstStateChange transition)

  mpeg_dec = GST_VDP_MPEG_DEC (element);

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      mpeg_dec->device = gst_vdp_get_device (mpeg_dec->display_name);
      break;
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
@@ -1049,15 +1035,12 @@ gst_vdp_mpeg_dec_class_init (GstVdpMpegDecClass * klass)
  gstelement_class = (GstElementClass *) klass;

  gobject_class->finalize = gst_vdp_mpeg_dec_finalize;
  gobject_class->set_property = gst_vdp_mpeg_dec_set_property;
  gobject_class->get_property = gst_vdp_mpeg_dec_get_property;

  gstelement_class->change_state =
      GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_change_state);

  g_object_class_install_property (gobject_class, PROP_DISPLAY,
      g_param_spec_string ("display", "Display", "X Display name",
          NULL, G_PARAM_READWRITE | G_PARAM_CONSTRUCT));

  gstelement_class->change_state = gst_vdp_mpeg_dec_change_state;
}

static void
@@ -1099,7 +1082,6 @@ gst_vdp_mpeg_dec_init (GstVdpMpegDec * mpeg_dec, GstVdpMpegDecClass * gclass)
      GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_sink_event));
  gst_element_add_pad (GST_ELEMENT (mpeg_dec), mpeg_dec->sink);

  mpeg_dec->display_name = NULL;
  mpeg_dec->adapter = gst_adapter_new ();

  mpeg_dec->device = NULL;
@@ -1120,36 +1102,3 @@ gst_vdp_mpeg_dec_finalize (GObject * object)
  g_object_unref (mpeg_dec->adapter);
  g_mutex_free (mpeg_dec->mutex);
}

static void
gst_vdp_mpeg_dec_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (object);

  switch (prop_id) {
    case PROP_DISPLAY:
      g_free (mpeg_dec->display_name);
      mpeg_dec->display_name = g_value_dup_string (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_vdp_mpeg_dec_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (object);

  switch (prop_id) {
    case PROP_DISPLAY:
      g_value_set_string (value, mpeg_dec->display_name);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
@@ -52,8 +52,8 @@ struct _GstVdpMpegDec
  GstPad *src;
  GstPad *sink;

  gchar *display_name;
  GstVdpDevice *device;
  VdpDecoderProfile profile;
  VdpDecoder decoder;

  /* stream info */
@@ -41,7 +41,8 @@ enum

enum
{
  PROP_0
  PROP_0,
  PROP_DISPLAY
};

static GstStaticPadTemplate sink_template =
@@ -320,19 +321,82 @@ GstCaps *
gst_vdp_video_yuv_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps)
{
  GstVdpVideoYUV *video_yuv = GST_VDP_VIDEO_YUV (trans);
  GstCaps *result;

  if (direction == GST_PAD_SINK)
    result = gst_vdp_video_to_yuv_caps (caps);

  else if (direction == GST_PAD_SRC)
    result = gst_vdp_yuv_to_video_caps (caps, NULL);
    result = gst_vdp_yuv_to_video_caps (caps, video_yuv->device);

  GST_LOG ("transformed %" GST_PTR_FORMAT " to %" GST_PTR_FORMAT, caps, result);

  return result;
}

static gboolean
gst_vdp_video_yuv_start (GstBaseTransform * trans)
{
  GstVdpVideoYUV *video_yuv = GST_VDP_VIDEO_YUV (trans);

  video_yuv->device = gst_vdp_get_device (video_yuv->display);
  if (!video_yuv->device)
    return FALSE;

  return TRUE;
}

static gboolean
gst_vdp_video_yuv_stop (GstBaseTransform * trans)
{
  GstVdpVideoYUV *video_yuv = GST_VDP_VIDEO_YUV (trans);

  g_object_unref (video_yuv->device);

  return TRUE;
}

static GstFlowReturn
gst_vdp_video_yuv_buffer_alloc (GstPad * pad, guint64 offset,
    guint size, GstCaps * caps, GstBuffer ** buf)
{
  GstVdpVideoYUV *video_yuv = GST_VDP_VIDEO_YUV (gst_pad_get_parent (pad));
  GstFlowReturn ret = GST_FLOW_ERROR;
  GstStructure *structure;
  gint width, height;
  gint chroma_type;

  structure = gst_caps_get_structure (caps, 0);
  if (!structure)
    goto error;

  if (!gst_structure_get_int (structure, "width", &width))
    goto error;
  if (!gst_structure_get_int (structure, "height", &height))
    goto error;

  if (!gst_structure_get_int (structure, "chroma-type", &chroma_type))
    goto error;

  *buf = GST_BUFFER (gst_vdp_video_buffer_new (video_yuv->device,
          chroma_type, width, height));

  if (*buf == NULL)
    goto error;

  GST_BUFFER_SIZE (*buf) = size;
  GST_BUFFER_OFFSET (*buf) = offset;

  gst_buffer_set_caps (*buf, caps);

  ret = GST_FLOW_OK;

error:
  gst_object_unref (video_yuv);
  return ret;
}

/* GObject vmethod implementations */

static void
@@ -352,22 +416,77 @@ gst_vdp_video_yuv_base_init (gpointer klass)
      gst_static_pad_template_get (&src_template));
}

static void
gst_vdp_video_yuv_finalize (GObject * object)
{
  GstVdpVideoYUV *video_yuv = (GstVdpVideoYUV *) object;

  g_free (video_yuv->display);
}

static void
gst_vdp_video_yuv_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVdpVideoYUV *video_yuv = GST_VDP_VIDEO_YUV (object);

  switch (prop_id) {
    case PROP_DISPLAY:
      g_free (video_yuv->display);
      video_yuv->display = g_value_dup_string (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_vdp_video_yuv_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstVdpVideoYUV *video_yuv = GST_VDP_VIDEO_YUV (object);

  switch (prop_id) {
    case PROP_DISPLAY:
      g_value_set_string (value, video_yuv->display);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_vdp_video_yuv_class_init (GstVdpVideoYUVClass * klass)
{
  GObjectClass *gobject_class;
  GstBaseTransformClass *transform_class;
  GstBaseTransformClass *trans_class;

  gobject_class = (GObjectClass *) klass;
  transform_class = (GstBaseTransformClass *) klass;
  trans_class = (GstBaseTransformClass *) klass;

  transform_class->transform_caps = gst_vdp_video_yuv_transform_caps;
  transform_class->transform_size = gst_vdp_video_transform_size;
  transform_class->transform = gst_vdp_video_yuv_transform;
  transform_class->set_caps = gst_vdp_video_yuv_set_caps;
  gobject_class->finalize = gst_vdp_video_yuv_finalize;
  gobject_class->set_property = gst_vdp_video_yuv_set_property;
  gobject_class->get_property = gst_vdp_video_yuv_get_property;

  g_object_class_install_property (gobject_class, PROP_DISPLAY,
      g_param_spec_string ("display", "Display", "X Display name",
          NULL, G_PARAM_READWRITE | G_PARAM_CONSTRUCT));

  trans_class->start = gst_vdp_video_yuv_start;
  trans_class->stop = gst_vdp_video_yuv_stop;
  trans_class->transform_caps = gst_vdp_video_yuv_transform_caps;
  trans_class->transform_size = gst_vdp_video_transform_size;
  trans_class->transform = gst_vdp_video_yuv_transform;
  trans_class->set_caps = gst_vdp_video_yuv_set_caps;
}

static void
gst_vdp_video_yuv_init (GstVdpVideoYUV * video_yuv, GstVdpVideoYUVClass * klass)
{
  video_yuv->display = NULL;

  gst_pad_set_bufferalloc_function (GST_BASE_TRANSFORM_SINK_PAD (video_yuv),
      gst_vdp_video_yuv_buffer_alloc);
}
@@ -40,6 +40,9 @@ typedef struct _GstVdpVideoYUVClass GstVdpVideoYUVClass;

struct _GstVdpVideoYUV {
  GstBaseTransform transform;

  gchar *display;
  GstVdpDevice *device;

  gint width, height;
  guint format;
@@ -41,8 +41,7 @@ enum

enum
{
  PROP_0,
  PROP_DISPLAY
  PROP_0
};

static GstStaticPadTemplate sink_template =
@@ -65,23 +64,6 @@ GST_STATIC_PAD_TEMPLATE (GST_BASE_TRANSFORM_SRC_NAME,
GST_BOILERPLATE_FULL (GstVdpYUVVideo, gst_vdp_yuv_video, GstBaseTransform,
    GST_TYPE_BASE_TRANSFORM, DEBUG_INIT);

static GstFlowReturn
gst_vdp_yuv_video_prepare_output_buffer (GstBaseTransform * trans,
    GstBuffer * input, gint size, GstCaps * caps, GstBuffer ** buf)
{
  GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (trans);

  *buf = GST_BUFFER (gst_vdp_video_buffer_new (yuv_video->device,
          yuv_video->chroma_type, yuv_video->width, yuv_video->height));

  if (*buf == NULL)
    return GST_FLOW_ERROR;

  gst_buffer_set_caps (*buf, caps);

  return GST_FLOW_OK;
}

static gboolean
gst_vdp_yuv_video_transform_size (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps, guint size,
@@ -100,7 +82,7 @@ gst_vdp_yuv_video_transform (GstBaseTransform * trans, GstBuffer * inbuf,
  GstVdpDevice *device;
  VdpVideoSurface surface;

  device = yuv_video->device;
  device = GST_VDP_VIDEO_BUFFER (outbuf)->device;
  surface = GST_VDP_VIDEO_BUFFER (outbuf)->surface;

  switch (yuv_video->format) {
@@ -269,11 +251,6 @@ gst_vdp_yuv_video_set_caps (GstBaseTransform * trans, GstCaps * incaps,
  if (!gst_structure_get_fourcc (structure, "format", &yuv_video->format))
    return FALSE;

  structure = gst_caps_get_structure (outcaps, 0);
  if (!gst_structure_get_int (structure, "chroma-type",
          &yuv_video->chroma_type))
    return FALSE;

  return TRUE;
}
@@ -281,11 +258,10 @@
gst_vdp_yuv_video_transform_caps (GstBaseTransform * trans,
    GstPadDirection direction, GstCaps * caps)
{
  GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (trans);
  GstCaps *result;

  if (direction == GST_PAD_SINK) {
    result = gst_vdp_yuv_to_video_caps (caps, yuv_video->device);
    result = gst_vdp_yuv_to_video_caps (caps, NULL);
  } else if (direction == GST_PAD_SRC) {
    result = gst_vdp_video_to_yuv_caps (caps);
  }
@@ -295,28 +271,6 @@ gst_vdp_yuv_video_transform_caps (GstBaseTransform * trans,
  return result;
}

static gboolean
gst_vdp_yuv_video_start (GstBaseTransform * trans)
{
  GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (trans);

  yuv_video->device = gst_vdp_get_device (yuv_video->display);
  if (!yuv_video->device)
    return FALSE;

  return TRUE;
}

static gboolean
gst_vdp_yuv_video_stop (GstBaseTransform * trans)
{
  GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (trans);

  g_object_unref (yuv_video->device);

  return TRUE;
}

/* GObject vmethod implementations */

static void
@@ -336,47 +290,6 @@ gst_vdp_yuv_video_base_init (gpointer klass)
      gst_static_pad_template_get (&src_template));
}

static void
gst_vdp_yuv_video_finalize (GObject * object)
{
  GstVdpYUVVideo *yuv_video = (GstVdpYUVVideo *) object;

  g_free (yuv_video->display);
}

static void
gst_vdp_yuv_video_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (object);

  switch (prop_id) {
    case PROP_DISPLAY:
      g_free (yuv_video->display);
      yuv_video->display = g_value_dup_string (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_vdp_yuv_video_get_property (GObject * object, guint prop_id,
    GValue * value, GParamSpec * pspec)
{
  GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (object);

  switch (prop_id) {
    case PROP_DISPLAY:
      g_value_set_string (value, yuv_video->display);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

static void
gst_vdp_yuv_video_class_init (GstVdpYUVVideoClass * klass)
{
@@ -386,25 +299,13 @@ gst_vdp_yuv_video_class_init (GstVdpYUVVideoClass * klass)
  gobject_class = (GObjectClass *) klass;
  trans_class = (GstBaseTransformClass *) klass;

  gobject_class->finalize = gst_vdp_yuv_video_finalize;
  gobject_class->set_property = gst_vdp_yuv_video_set_property;
  gobject_class->get_property = gst_vdp_yuv_video_get_property;

  g_object_class_install_property (gobject_class, PROP_DISPLAY,
      g_param_spec_string ("display", "Display", "X Display name",
          NULL, G_PARAM_READWRITE | G_PARAM_CONSTRUCT));

  trans_class->start = gst_vdp_yuv_video_start;
  trans_class->stop = gst_vdp_yuv_video_stop;
  trans_class->transform_caps = gst_vdp_yuv_video_transform_caps;
  trans_class->transform_size = gst_vdp_yuv_video_transform_size;
  trans_class->set_caps = gst_vdp_yuv_video_set_caps;
  trans_class->transform = gst_vdp_yuv_video_transform;
  trans_class->prepare_output_buffer = gst_vdp_yuv_video_prepare_output_buffer;
}

static void
gst_vdp_yuv_video_init (GstVdpYUVVideo * yuv_video, GstVdpYUVVideoClass * klass)
{
  yuv_video->display = NULL;
}
@@ -41,11 +41,7 @@ typedef struct _GstVdpYUVVideoClass GstVdpYUVVideoClass;
struct _GstVdpYUVVideo {
  GstBaseTransform trans;

  gchar *display;
  GstVdpDevice *device;

  guint32 format;
  gint chroma_type;
  gint width, height;
};