vdpau: add new GstVdpauVideoYUV element

GstVdpauDecoder now pushes GstVdpauVideoBuffers instead of doing the
VdpVideoSurface -> YUV conversion itself. To get YUV data you now insert a
GstVdpauVideoYUV element, which takes GstVdpauVideoBuffers and outputs YUV data.
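
For illustration (not part of this commit): with the two elements this plugin
now registers, a downstream application would link the decoder to vdpauvideoyuv
to get raw YUV back. A minimal C sketch, assuming GStreamer 0.10, an MPEG video
elementary stream in test.m2v, and that no extra parser is needed in front of
the decoder (a real pipeline may want one):

/* Sketch only: vdpaumpegdecoder outputs video/vdpau-video buffers;
 * vdpauvideoyuv downloads them into ordinary YUV buffers.
 * The input file name and the fakesink are illustrative assumptions. */
#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *pipeline;
  GstBus *bus;
  GstMessage *msg;

  gst_init (&argc, &argv);

  pipeline = gst_parse_launch ("filesrc location=test.m2v "
      "! vdpaumpegdecoder ! vdpauvideoyuv ! fakesink", NULL);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* block until an error or end-of-stream message arrives */
  bus = gst_element_get_bus (pipeline);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);

  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}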
Carl-Anton Ingmarsson 2009-04-04 20:53:35 +02:00 committed by Jan Schmidt
parent 5e73b7272b
commit 85158b07ea
9 changed files with 721 additions and 306 deletions

Makefile.am

@@ -4,7 +4,9 @@ libgstvdpau_la_SOURCES = \
 	gstvdpaudevice.c \
 	gstvdpaudecoder.c \
 	gstvdpaumpegdecoder.c \
-	mpegutil.c
+	mpegutil.c \
+	gstvdpauvideoyuv.c \
+	gstvdpauvideobuffer.c

 libgstvdpau_la_CFLAGS = $(GST_CFLAGS) $(X11_CFLAGS) -Ivdpau
 libgstvdpau_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \

@@ -16,6 +18,8 @@ noinst_HEADERS = \
 	gstvdpaudevice.h \
 	gstvdpaudecoder.h \
 	gstvdpaumpegdecoder.h \
+	gstvdpauvideoyuv.h \
+	gstvdpauvideobuffer.h \
 	mpegutil.h

gstvdpaudecoder.c

@@ -25,6 +25,7 @@
 #include <gst/gst.h>
 #include <gst/video/video.h>

+#include "gstvdpauvideobuffer.h"
 #include "gstvdpaudecoder.h"

 GST_DEBUG_CATEGORY_STATIC (gst_vdpau_decoder_debug);

@@ -47,9 +48,7 @@ enum
 static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
     GST_PAD_SRC,
     GST_PAD_ALWAYS,
-    GST_STATIC_CAPS ("video/x-raw-yuv, "
-        "framerate = (fraction) [ 0, MAX ], "
-        "width = (int) [ 1, MAX ], " "height = (int) [ 1, MAX ]"));
+    GST_STATIC_CAPS ("video/vdpau-video, " "chroma-type = (int) 0"));

 #define DEBUG_INIT(bla) \
   GST_DEBUG_CATEGORY_INIT (gst_vdpau_decoder_debug, "vdpaudecoder", 0, "vdpaudecoder base class");

@@ -64,100 +63,9 @@ static void gst_vdpau_decoder_get_property (GObject * object, guint prop_id,
     GValue * value, GParamSpec * pspec);

 GstFlowReturn
-gst_vdpau_decoder_push_video_surface (GstVdpauDecoder * dec,
-    VdpVideoSurface surface)
+gst_vdpau_decoder_push_video_buffer (GstVdpauDecoder * dec,
+    GstVdpauVideoBuffer * buffer)
 {
-  GstVdpauDevice *device;
-  GstBuffer *buffer;
-
-  device = dec->device;
-
-  switch (dec->format) {
-    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
-    {
-      gint size;
-      GstFlowReturn result;
-      VdpStatus status;
-      guint8 *data[3];
-      guint32 stride[3];
-
-      size =
-          gst_video_format_get_size (GST_VIDEO_FORMAT_YV12, dec->width,
-          dec->height);
-      result =
-          gst_pad_alloc_buffer_and_set_caps (dec->src, GST_BUFFER_OFFSET_NONE,
-          size, GST_PAD_CAPS (dec->src), &buffer);
-      if (G_UNLIKELY (result != GST_FLOW_OK))
-        return result;
-
-      data[0] = GST_BUFFER_DATA (buffer) +
-          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
-          0, dec->width, dec->height);
-      data[1] = data[0] +
-          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
-          2, dec->width, dec->height);
-      data[2] = data[0] +
-          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
-          1, dec->width, dec->height);
-
-      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
-          0, dec->width);
-      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
-          2, dec->width);
-      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
-          1, dec->width);
-
-      status =
-          device->vdp_video_surface_get_bits_ycbcr (surface,
-          VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
-      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
-        GST_ELEMENT_ERROR (dec, RESOURCE, READ,
-            ("Couldn't get data from vdpau"),
-            ("Error returned from vdpau was: %s",
-                device->vdp_get_error_string (status)));
-        return GST_FLOW_ERROR;
-      }
-      break;
-    }
-    case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
-    {
-      gint size;
-      GstFlowReturn result;
-      VdpStatus status;
-      guint8 *data[2];
-      guint32 stride[2];
-
-      size = dec->width * dec->height + dec->width * dec->height / 2;
-
-      result =
-          gst_pad_alloc_buffer_and_set_caps (dec->src, GST_BUFFER_OFFSET_NONE,
-          size, GST_PAD_CAPS (dec->src), &buffer);
-      if (G_UNLIKELY (result != GST_FLOW_OK))
-        return result;
-
-      data[0] = GST_BUFFER_DATA (buffer);
-      data[1] = data[0] + dec->width * dec->height;
-
-      stride[0] = dec->width;
-      stride[1] = dec->width;
-
-      status =
-          device->vdp_video_surface_get_bits_ycbcr (surface,
-          VDP_YCBCR_FORMAT_NV12, (void *) data, stride);
-      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
-        GST_ELEMENT_ERROR (dec, RESOURCE, READ,
-            ("Couldn't get data from vdpau"),
-            ("Error returned from vdpau was: %s",
-                device->vdp_get_error_string (status)));
-        return GST_FLOW_ERROR;
-      }
-      break;
-    }
-    default:
-      break;
-  }
-
   GST_BUFFER_TIMESTAMP (buffer) =
       gst_util_uint64_scale_int (GST_SECOND * dec->frame_nr,
       dec->framerate_denominator, dec->framerate_numerator);

@@ -167,167 +75,9 @@ gst_vdpau_decoder_push_video_surface (GstVdpauDecoder * dec,
   GST_BUFFER_OFFSET (buffer) = dec->frame_nr;
   dec->frame_nr++;
   GST_BUFFER_OFFSET_END (buffer) = dec->frame_nr;
-  gst_buffer_set_caps (GST_BUFFER (buffer), GST_PAD_CAPS (dec->src));

-  return gst_pad_push (dec->src, buffer);
-}
-
-typedef struct
-{
-  VdpChromaType chroma_type;
-  VdpYCbCrFormat format;
-  guint32 fourcc;
-} VdpauFormats;
-
-static VdpChromaType chroma_types[3] =
-    { VDP_CHROMA_TYPE_420, VDP_CHROMA_TYPE_422, VDP_CHROMA_TYPE_444 };
-
-static VdpauFormats formats[6] = {
-  {
-        VDP_CHROMA_TYPE_420,
-        VDP_YCBCR_FORMAT_NV12,
-        GST_MAKE_FOURCC ('N', 'V', '1', '2')
-      },
-  {
-        VDP_CHROMA_TYPE_422,
-        VDP_YCBCR_FORMAT_UYVY,
-        GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y')
-      },
-  {
-        VDP_CHROMA_TYPE_444,
-        VDP_YCBCR_FORMAT_V8U8Y8A8,
-        GST_MAKE_FOURCC ('A', 'Y', 'U', 'V')
-      },
-  {
-        VDP_CHROMA_TYPE_444,
-        VDP_YCBCR_FORMAT_Y8U8V8A8,
-        GST_MAKE_FOURCC ('A', 'V', 'U', 'Y')
-      },
-  {
-        VDP_CHROMA_TYPE_422,
-        VDP_YCBCR_FORMAT_YUYV,
-        GST_MAKE_FOURCC ('Y', 'U', 'Y', 'V')
-      },
-  {
-        VDP_CHROMA_TYPE_420,
-        VDP_YCBCR_FORMAT_YV12,
-        GST_MAKE_FOURCC ('Y', 'V', '1', '2')
-      }
-};
-
-VdpVideoSurface
-gst_vdpau_decoder_create_video_surface (GstVdpauDecoder * dec)
-{
-  GstVdpauDevice *device;
-  VdpChromaType chroma_type;
-  gint i;
-  VdpStatus status;
-  VdpVideoSurface surface;
-
-  device = dec->device;
-
-  chroma_type = VDP_CHROMA_TYPE_422;
-  for (i = 0; i < 6; i++) {
-    if (formats[i].fourcc == dec->format) {
-      chroma_type = formats[i].chroma_type;
-      break;
-    }
-  }
-
-  status =
-      device->vdp_video_surface_create (device->device, chroma_type,
-      dec->width, dec->height, &surface);
-  if (status != VDP_STATUS_OK) {
-    GST_ELEMENT_ERROR (dec, RESOURCE, READ,
-        ("Couldn't create a VdpVideoSurface"),
-        ("Error returned from vdpau was: %s",
-            device->vdp_get_error_string (status)));
-    return VDP_INVALID_HANDLE;
-  }
-
-  return surface;
-}
-
-static GstCaps *
-gst_vdpau_decoder_get_vdpau_support (GstVdpauDecoder * dec)
-{
-  GstVdpauDevice *device;
-  GstCaps *caps;
-  gint i;
-
-  device = dec->device;
-
-  caps = gst_caps_new_empty ();
-  for (i = 0; i < 3; i++) {
-    VdpStatus status;
-    VdpBool is_supported;
-    guint32 max_w, max_h;
-
-    status =
-        device->vdp_video_surface_query_capabilities (device->device,
-        chroma_types[i], &is_supported, &max_w, &max_h);
-    if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_CHROMA_TYPE) {
-      GST_ELEMENT_ERROR (dec, RESOURCE, READ,
-          ("Could not get query VDPAU video surface capabilites"),
-          ("Error returned from vdpau was: %s",
-              device->vdp_get_error_string (status)));
-      return NULL;
-    }
-    if (is_supported) {
-      gint j;
-
-      for (j = 0; j < 6; j++) {
-        if (formats[j].chroma_type != chroma_types[i])
-          continue;
-
-        status =
-            device->vdp_video_surface_query_ycbcr_capabilities (device->device,
-            formats[j].chroma_type, formats[j].format, &is_supported);
-        if (status != VDP_STATUS_OK
-            && status != VDP_STATUS_INVALID_Y_CB_CR_FORMAT) {
-          GST_ELEMENT_ERROR (dec, RESOURCE, READ,
-              ("Could not query VDPAU YCbCr capabilites"),
-              ("Error returned from vdpau was: %s",
-                  device->vdp_get_error_string (status)));
-          return NULL;
-        }
-        if (is_supported) {
-          GstCaps *format_caps;
-
-          format_caps = gst_caps_new_simple ("video/x-raw-yuv",
-              "format", GST_TYPE_FOURCC, formats[j].fourcc,
-              "width", GST_TYPE_INT_RANGE, 1, max_w,
-              "height", GST_TYPE_INT_RANGE, 1, max_h,
-              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
-          gst_caps_append (caps, format_caps);
-        }
-      }
-    }
-  }
-
-  if (gst_caps_is_empty (caps)) {
-    gst_caps_unref (caps);
-    return NULL;
-  }
-
-  return caps;
-}
-
-static gboolean
-gst_vdpau_decoder_init_vdpau (GstVdpauDecoder * dec)
-{
-  GstCaps *caps;
-
-  dec->device = gst_vdpau_get_device (dec->display_name);
-
-  caps = gst_vdpau_decoder_get_vdpau_support (dec);
-  if (!caps)
-    return FALSE;
-
-  dec->src_caps = caps;
-
-  return TRUE;
+  return gst_pad_push (dec->src, GST_BUFFER (buffer));
 }

 static GstStateChangeReturn

@@ -339,8 +89,7 @@ gst_vdpau_decoder_change_state (GstElement * element, GstStateChange transition)
   switch (transition) {
     case GST_STATE_CHANGE_NULL_TO_READY:
-      if (!gst_vdpau_decoder_init_vdpau (dec))
-        return GST_STATE_CHANGE_FAILURE;
+      dec->device = gst_vdpau_get_device (dec->display_name);
       break;
     case GST_STATE_CHANGE_READY_TO_NULL:
       g_object_unref (dec->device);

@@ -364,7 +113,6 @@ gst_vdpau_decoder_sink_set_caps (GstPad * pad, GstCaps * caps)
   gint width, height;
   gint framerate_numerator, framerate_denominator;
   gint par_numerator, par_denominator;
-  guint32 fourcc_format;
   gboolean res;

   structure = gst_caps_get_structure (caps, 0);

@@ -382,8 +130,9 @@ gst_vdpau_decoder_sink_set_caps (GstPad * pad, GstCaps * caps)
   new_caps = gst_caps_copy_nth (src_caps, 0);
   gst_caps_unref (src_caps);
   structure = gst_caps_get_structure (new_caps, 0);
-  gst_structure_get_fourcc (structure, "format", &fourcc_format);
   gst_structure_set (structure,
+      "device", G_TYPE_OBJECT, dec->device,
+      "chroma-type", G_TYPE_INT, VDP_CHROMA_TYPE_420,
      "width", G_TYPE_INT, width,
      "height", G_TYPE_INT, height,
      "framerate", GST_TYPE_FRACTION, framerate_numerator,

@@ -403,7 +152,6 @@ gst_vdpau_decoder_sink_set_caps (GstPad * pad, GstCaps * caps)
   dec->height = height;
   dec->framerate_numerator = framerate_numerator;
   dec->framerate_denominator = framerate_denominator;
-  dec->format = fourcc_format;

   if (dec_class->set_caps && !dec_class->set_caps (dec, caps))
     return FALSE;

@@ -411,22 +159,6 @@ gst_vdpau_decoder_sink_set_caps (GstPad * pad, GstCaps * caps)
   return TRUE;
 }

-static GstCaps *
-gst_vdpau_decoder_src_getcaps (GstPad * pad)
-{
-  GstVdpauDecoder *dec;
-
-  dec = GST_VDPAU_DECODER (GST_OBJECT_PARENT (pad));
-
-  if (GST_PAD_CAPS (dec->src))
-    return gst_caps_ref (GST_PAD_CAPS (dec->src));
-
-  if (dec->src_caps)
-    return gst_caps_ref (dec->src_caps);
-
-  return gst_caps_copy (gst_pad_get_pad_template_caps (dec->src));
-}
-
 /* GObject vmethod implementations */

 static void

@@ -475,18 +207,15 @@ gst_vdpau_decoder_init (GstVdpauDecoder * dec, GstVdpauDecoderClass * klass)
   dec->display_name = NULL;
   dec->device = NULL;
   dec->silent = FALSE;
-  dec->src_caps = NULL;
   dec->height = 0;
   dec->width = 0;
   dec->framerate_numerator = 0;
   dec->framerate_denominator = 0;
-  dec->format = 0;
   dec->frame_nr = 0;

   dec->src = gst_pad_new_from_static_template (&src_template, "src");
-  gst_pad_set_getcaps_function (dec->src, gst_vdpau_decoder_src_getcaps);
   gst_element_add_pad (GST_ELEMENT (dec), dec->src);

   dec->sink = gst_pad_new_from_template (gst_element_class_get_pad_template

@@ -501,8 +230,6 @@ gst_vdpau_decoder_finalize (GObject * object)
 {
   GstVdpauDecoder *dec = (GstVdpauDecoder *) object;

-  if (dec->src_caps)
-    g_object_unref (dec->src_caps);
-
   if (dec->device)
     g_object_unref (dec->device);

gstvdpaudecoder.h

@@ -47,8 +47,6 @@ struct _GstVdpauDecoder {
   GstPad *src;
   GstPad *sink;

-  GstCaps *src_caps;
-
   gint width, height;
   gint framerate_numerator, framerate_denominator;
   guint32 format;

@@ -66,8 +64,8 @@ struct _GstVdpauDecoderClass {

 GType gst_vdpau_decoder_get_type (void);

-gboolean gst_vdpau_decoder_push_video_surface (GstVdpauDecoder * dec,
-    VdpVideoSurface surface);
+gboolean gst_vdpau_decoder_push_video_buffer (GstVdpauDecoder * dec,
+    GstVdpauVideoBuffer *buffer);

 VdpVideoSurface gst_vdpau_decoder_create_video_surface (GstVdpauDecoder *dec);

 G_END_DECLS

gstvdpaumpegdecoder.c

@@ -38,6 +38,8 @@
 #include <gst/gst.h>
 #include <string.h>

+#include "gstvdpauvideobuffer.h"
+#include "gstvdpauvideoyuv.h"
 #include "mpegutil.h"
 #include "gstvdpaumpegdecoder.h"

@@ -133,19 +135,20 @@ gst_vdpau_mpeg_decoder_decode (GstVdpauMpegDecoder * mpeg_dec)
 {
   GstVdpauDecoder *dec;
   GstBuffer *buffer;
+  GstVdpauVideoBuffer *outbuf;
   VdpVideoSurface surface;
   GstVdpauDevice *device;
   VdpBitstreamBuffer vbit[1];
   VdpStatus status;
-  GstFlowReturn ret;

   dec = GST_VDPAU_DECODER (mpeg_dec);

   buffer = gst_adapter_take_buffer (mpeg_dec->adapter,
       gst_adapter_available (mpeg_dec->adapter));

-  surface =
-      gst_vdpau_decoder_create_video_surface (GST_VDPAU_DECODER (mpeg_dec));
+  outbuf = gst_vdpau_video_buffer_new (dec->device, VDP_CHROMA_TYPE_420,
+      dec->width, dec->height);
+  surface = outbuf->surface;

   device = dec->device;

@@ -165,23 +168,23 @@ gst_vdpau_mpeg_decoder_decode (GstVdpauMpegDecoder * mpeg_dec)
             device->vdp_get_error_string (status)));

     if (mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE)
-      device->vdp_video_surface_destroy (mpeg_dec->vdp_info.forward_reference);
+      gst_buffer_unref (mpeg_dec->f_buffer);

-    device->vdp_video_surface_destroy (surface);
+    gst_buffer_unref (GST_BUFFER (outbuf));

     return GST_FLOW_ERROR;
   }

-  ret =
-      gst_vdpau_decoder_push_video_surface (GST_VDPAU_DECODER (mpeg_dec),
-      surface);
+  gst_buffer_ref (GST_BUFFER (outbuf));

   if (mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE)
-    device->vdp_video_surface_destroy (mpeg_dec->vdp_info.forward_reference);
+    gst_buffer_unref (mpeg_dec->f_buffer);

   mpeg_dec->vdp_info.forward_reference = surface;
+  mpeg_dec->f_buffer = GST_BUFFER (outbuf);

-  return ret;
+  return gst_vdpau_decoder_push_video_buffer (GST_VDPAU_DECODER (mpeg_dec),
+      outbuf);
 }

 static gboolean

@@ -228,8 +231,7 @@ gst_vdpau_mpeg_decoder_parse_picture (GstVdpauMpegDecoder * mpeg_dec,
   if (pic_hdr.pic_type == I_FRAME &&
       mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE) {
-    dec->device->vdp_video_surface_destroy (mpeg_dec->
-        vdp_info.forward_reference);
+    gst_buffer_unref (mpeg_dec->f_buffer);
     mpeg_dec->vdp_info.forward_reference = VDP_INVALID_HANDLE;
   }

@@ -421,11 +423,6 @@ gst_vdpau_mpeg_decoder_finalize (GObject * object)
 {
   GstVdpauMpegDecoder *mpeg_dec = (GstVdpauMpegDecoder *) object;

-#if 0 /* FIXME: can't free the decoder since the device already has been freed */
-  if (mpeg_dec->decoder != VDP_INVALID_HANDLE)
-    dec->device->vdp_decoder_destroy (mpeg_dec->decoder);
-#endif
-
   g_object_unref (mpeg_dec->adapter);
 }

@@ -475,8 +472,12 @@ vdpaumpegdecoder_init (GstPlugin * vdpaumpegdecoder)
   GST_DEBUG_CATEGORY_INIT (gst_vdpau_mpeg_decoder_debug, "vdpaumpegdecoder",
       0, "Template vdpaumpegdecoder");

-  return gst_element_register (vdpaumpegdecoder, "vdpaumpegdecoder",
+  gst_element_register (vdpaumpegdecoder, "vdpaumpegdecoder",
       GST_RANK_NONE, GST_TYPE_VDPAU_MPEG_DECODER);
+  gst_element_register (vdpaumpegdecoder, "vdpauvideoyuv",
+      GST_RANK_NONE, GST_TYPE_VDPAU_VIDEO_YUV);
+
+  return TRUE;
 }

 /* gstreamer looks for this structure to register vdpaumpegdecoders

gstvdpaumpegdecoder.h

@@ -47,10 +47,10 @@ struct _GstVdpauMpegDecoder
   VdpDecoder decoder;
   VdpPictureInfoMPEG1Or2 vdp_info;
+  GstBuffer *f_buffer;

   GstAdapter *adapter;
   gint slices;
 };

 struct _GstVdpauMpegDecoderClass

gstvdpauvideobuffer.c (new file)

@@ -0,0 +1,114 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstvdpauvideobuffer.h"
static GObjectClass *gst_vdpau_video_buffer_parent_class;
static void
gst_vdpau_video_buffer_finalize (GstVdpauVideoBuffer * buffer)
{
GstVdpauDevice *device = buffer->device;
VdpStatus status;
status = device->vdp_video_surface_destroy (buffer->surface);
if (status != VDP_STATUS_OK)
GST_ERROR
("Couldn't destroy the buffers VdpVideoSurface, error returned was: %s",
device->vdp_get_error_string (status));
g_object_unref (buffer->device);
GST_MINI_OBJECT_CLASS (gst_vdpau_video_buffer_parent_class)->finalize
(GST_MINI_OBJECT (buffer));
}
static void
gst_vdpau_video_buffer_init (GstVdpauVideoBuffer * buffer, gpointer g_class)
{
buffer->device = NULL;
buffer->surface = VDP_INVALID_HANDLE;
}
static void
gst_vdpau_video_buffer_class_init (gpointer g_class, gpointer class_data)
{
GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
gst_vdpau_video_buffer_parent_class = g_type_class_peek_parent (g_class);
mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
gst_vdpau_video_buffer_finalize;
}
GType
gst_vdpau_video_buffer_get_type (void)
{
static GType _gst_vdpau_video_buffer_type;
if (G_UNLIKELY (_gst_vdpau_video_buffer_type == 0)) {
static const GTypeInfo info = {
sizeof (GstBufferClass),
NULL,
NULL,
gst_vdpau_video_buffer_class_init,
NULL,
NULL,
sizeof (GstVdpauVideoBuffer),
0,
(GInstanceInitFunc) gst_vdpau_video_buffer_init,
NULL
};
_gst_vdpau_video_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
"GstVdpauVideoBuffer", &info, 0);
}
return _gst_vdpau_video_buffer_type;
}
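
/* Added for clarity (not in the original commit): gst_vdpau_video_buffer_new
 * allocates a VdpVideoSurface of the given chroma type and size on the device
 * and wraps it in this GstBuffer subclass; gst_vdpau_video_buffer_finalize
 * above destroys the surface once the last reference is dropped. */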
GstVdpauVideoBuffer *
gst_vdpau_video_buffer_new (GstVdpauDevice * device, VdpChromaType chroma_type,
gint width, gint height)
{
GstVdpauVideoBuffer *buffer;
VdpStatus status;
VdpVideoSurface surface;
status = device->vdp_video_surface_create (device->device, chroma_type, width,
height, &surface);
if (status != VDP_STATUS_OK) {
GST_ERROR ("Couldn't create a VdpVideoSurface, error returned was: %s",
device->vdp_get_error_string (status));
return NULL;
}
buffer =
(GstVdpauVideoBuffer *) gst_mini_object_new (GST_TYPE_VDPAU_VIDEO_BUFFER);
buffer->device = g_object_ref (device);
buffer->surface = surface;
return buffer;
}

gstvdpauvideobuffer.h (new file)

@@ -0,0 +1,55 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_VDPAU_VIDEO_BUFFER_H_
#define _GST_VDPAU_VIDEO_BUFFER_H_
#include <gst/gst.h>
#include <gst/video/video.h>
#include "gstvdpaudevice.h"
#include "gstvdpauvideobuffer.h"
typedef struct _GstVdpauVideoBuffer GstVdpauVideoBuffer;
#define GST_TYPE_VDPAU_VIDEO_BUFFER (gst_vdpau_video_buffer_get_type())
#define GST_IS_VDPAU_VIDEO_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDPAU_VIDEO_BUFFER))
#define GST_VDPAU_VIDEO_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDPAU_VIDEO_BUFFER, GstVdpauVideoBuffer))
struct _GstVdpauVideoBuffer {
GstBuffer buffer;
GstVdpauDevice *device;
VdpVideoSurface surface;
};
GType gst_vdpau_video_buffer_get_type (void);
GstVdpauVideoBuffer* gst_vdpau_video_buffer_new (GstVdpauDevice * device, VdpChromaType chroma_type, gint width, gint height);
#define GST_VDPAU_VIDEO_CAPS \
"video/vdpau-video, " \
"chroma-type = (int)[0,2], " \
"width = (int)[1,4096], " \
"height = (int)[1,4096]"
#endif

gstvdpauvideoyuv.c (new file)

@@ -0,0 +1,454 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/video/video.h>
#include "gstvdpauvideobuffer.h"
#include "gstvdpauvideoyuv.h"
GST_DEBUG_CATEGORY_STATIC (gst_vdpau_video_yuv_debug);
#define GST_CAT_DEFAULT gst_vdpau_video_yuv_debug
/* Filter signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
{
PROP_0,
PROP_SILENT
};
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VDPAU_VIDEO_CAPS));
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-raw-yuv, "
"framerate = (fraction) [ 0, MAX ], "
"width = (int) [ 1, MAX ], " "height = (int) [ 1, MAX ]"));
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_vdpau_video_yuv_debug, "vdpauvideo_yuv", 0, "vdpauvideo_yuv base class");
GST_BOILERPLATE_FULL (GstVdpauVideoYUV, gst_vdpau_video_yuv, GstElement,
GST_TYPE_ELEMENT, DEBUG_INIT);
static void gst_vdpau_video_yuv_finalize (GObject * object);
static void gst_vdpau_video_yuv_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_vdpau_video_yuv_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
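
/* Added for clarity (not in the original commit): the chain function receives
 * a GstVdpauVideoBuffer, reads its VdpVideoSurface back into system memory
 * with vdp_video_surface_get_bits_ycbcr (as YV12 or NV12, depending on the
 * negotiated format) and pushes the resulting YUV buffer downstream. */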
GstFlowReturn
gst_vdpau_video_yuv_chain (GstPad * pad, GstBuffer * buffer)
{
GstVdpauVideoYUV *video_yuv;
GstVdpauDevice *device;
VdpVideoSurface surface;
GstBuffer *outbuf = NULL;
video_yuv = GST_VDPAU_VIDEO_YUV (GST_OBJECT_PARENT (pad));
device = GST_VDPAU_VIDEO_BUFFER (buffer)->device;
surface = GST_VDPAU_VIDEO_BUFFER (buffer)->surface;
switch (video_yuv->format) {
case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
{
gint size;
GstFlowReturn result;
VdpStatus status;
guint8 *data[3];
guint32 stride[3];
size =
gst_video_format_get_size (GST_VIDEO_FORMAT_YV12, video_yuv->width,
video_yuv->height);
result =
gst_pad_alloc_buffer_and_set_caps (video_yuv->src,
GST_BUFFER_OFFSET_NONE, size, GST_PAD_CAPS (video_yuv->src), &outbuf);
if (G_UNLIKELY (result != GST_FLOW_OK))
return result;
data[0] = GST_BUFFER_DATA (outbuf) +
gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
0, video_yuv->width, video_yuv->height);
data[1] = data[0] +
gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
2, video_yuv->width, video_yuv->height);
data[2] = data[0] +
gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
1, video_yuv->width, video_yuv->height);
stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
0, video_yuv->width);
stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
2, video_yuv->width);
stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
1, video_yuv->width);
status =
device->vdp_video_surface_get_bits_ycbcr (surface,
VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
if (G_UNLIKELY (status != VDP_STATUS_OK)) {
GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
("Couldn't get data from vdpau"),
("Error returned from vdpau was: %s",
device->vdp_get_error_string (status)));
return GST_FLOW_ERROR;
}
break;
}
case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
{
gint size;
GstFlowReturn result;
VdpStatus status;
guint8 *data[2];
guint32 stride[2];
size =
video_yuv->width * video_yuv->height +
video_yuv->width * video_yuv->height / 2;
result =
gst_pad_alloc_buffer_and_set_caps (video_yuv->src,
GST_BUFFER_OFFSET_NONE, size, GST_PAD_CAPS (video_yuv->src), &outbuf);
if (G_UNLIKELY (result != GST_FLOW_OK))
return result;
data[0] = GST_BUFFER_DATA (outbuf);
data[1] = GST_BUFFER_DATA (outbuf) + video_yuv->width * video_yuv->height;
stride[0] = video_yuv->width;
stride[1] = video_yuv->width;
status =
device->vdp_video_surface_get_bits_ycbcr (surface,
VDP_YCBCR_FORMAT_NV12, (void *) data, stride);
if (G_UNLIKELY (status != VDP_STATUS_OK)) {
GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
("Couldn't get data from vdpau"),
("Error returned from vdpau was: %s",
device->vdp_get_error_string (status)));
return GST_FLOW_ERROR;
}
break;
}
default:
break;
}
if (outbuf) {
gst_buffer_copy_metadata (outbuf, buffer, GST_BUFFER_COPY_TIMESTAMPS);
return gst_pad_push (video_yuv->src, outbuf);
}
return GST_FLOW_ERROR;
}
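
/* Added for clarity (not in the original commit): table pairing each VDPAU
 * chroma type / YCbCr format with the matching GStreamer fourcc; it is used
 * below to build the caps the element can offer for a given chroma type. */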
typedef struct
{
VdpChromaType chroma_type;
VdpYCbCrFormat format;
guint32 fourcc;
} VdpauFormats;
static VdpauFormats formats[6] = {
{
VDP_CHROMA_TYPE_420,
VDP_YCBCR_FORMAT_NV12,
GST_MAKE_FOURCC ('N', 'V', '1', '2')
},
{
VDP_CHROMA_TYPE_422,
VDP_YCBCR_FORMAT_UYVY,
GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y')
},
{
VDP_CHROMA_TYPE_444,
VDP_YCBCR_FORMAT_V8U8Y8A8,
GST_MAKE_FOURCC ('A', 'Y', 'U', 'V')
},
{
VDP_CHROMA_TYPE_444,
VDP_YCBCR_FORMAT_Y8U8V8A8,
GST_MAKE_FOURCC ('A', 'V', 'U', 'Y')
},
{
VDP_CHROMA_TYPE_422,
VDP_YCBCR_FORMAT_YUYV,
GST_MAKE_FOURCC ('Y', 'U', 'Y', 'V')
},
{
VDP_CHROMA_TYPE_420,
VDP_YCBCR_FORMAT_YV12,
GST_MAKE_FOURCC ('Y', 'V', '1', '2')
}
};
static GstCaps *
gst_vdpau_video_yuv_get_caps (GstVdpauVideoYUV * video_yuv,
GstVdpauDevice * device, gint chroma_type, gint width, gint height,
gint framerate_numerator, gint framerate_denominator, gint par_numerator,
gint par_denominator)
{
GstCaps *caps;
gint i;
caps = gst_caps_new_empty ();
for (i = 0; i < 6; i++) {
VdpStatus status;
VdpBool is_supported;
if (formats[i].chroma_type != chroma_type)
continue;
status =
device->vdp_video_surface_query_ycbcr_capabilities (device->device,
chroma_type, formats[i].format, &is_supported);
if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_Y_CB_CR_FORMAT) {
GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
("Could not query VDPAU YCbCr capabilites"),
("Error returned from vdpau was: %s",
device->vdp_get_error_string (status)));
return NULL;
}
if (is_supported) {
GstCaps *format_caps;
format_caps = gst_caps_new_simple ("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, formats[i].fourcc,
"width", G_TYPE_INT, width,
"height", G_TYPE_INT, height,
"framerate", GST_TYPE_FRACTION, framerate_numerator,
framerate_denominator, "pixel-aspect-ratio", GST_TYPE_FRACTION,
par_numerator, par_denominator, NULL);
gst_caps_append (caps, format_caps);
}
}
if (gst_caps_is_empty (caps)) {
gst_caps_unref (caps);
return NULL;
}
return caps;
}
static gboolean
gst_vdpau_video_yuv_sink_set_caps (GstPad * pad, GstCaps * caps)
{
GstVdpauVideoYUV *video_yuv = GST_VDPAU_VIDEO_YUV (GST_OBJECT_PARENT (pad));
GstCaps *src_caps, *new_caps;
GstStructure *structure;
const GValue *value;
GstVdpauDevice *device;
gint chroma_type;
gint width, height;
gint framerate_numerator, framerate_denominator;
gint par_numerator, par_denominator;
guint32 fourcc_format;
gboolean res;
structure = gst_caps_get_structure (caps, 0);
value = gst_structure_get_value (structure, "device");
device = g_value_get_object (value);
gst_structure_get_int (structure, "chroma-type", &chroma_type);
gst_structure_get_int (structure, "width", &width);
gst_structure_get_int (structure, "height", &height);
gst_structure_get_fraction (structure, "framerate",
&framerate_numerator, &framerate_denominator);
gst_structure_get_fraction (structure, "pixel-aspect-ratio",
&par_numerator, &par_denominator);
src_caps =
gst_vdpau_video_yuv_get_caps (video_yuv, device, chroma_type, width,
height, framerate_numerator, framerate_denominator, par_numerator,
par_denominator);
if (G_UNLIKELY (!src_caps))
return FALSE;
video_yuv->src_caps = src_caps;
src_caps = gst_pad_get_allowed_caps (video_yuv->src);
if (G_UNLIKELY (!src_caps || !gst_caps_get_size (src_caps)))
return FALSE;
new_caps = gst_caps_copy_nth (src_caps, 0);
gst_caps_unref (src_caps);
if (G_UNLIKELY (!new_caps))
return FALSE;
structure = gst_caps_get_structure (new_caps, 0);
gst_structure_get_fourcc (structure, "format", &fourcc_format);
gst_pad_fixate_caps (video_yuv->src, new_caps);
res = gst_pad_set_caps (video_yuv->src, new_caps);
gst_caps_unref (new_caps);
if (G_UNLIKELY (!res))
return FALSE;
video_yuv->width = width;
video_yuv->height = height;
video_yuv->framerate_numerator = framerate_numerator;
video_yuv->framerate_denominator = framerate_denominator;
video_yuv->format = fourcc_format;
return TRUE;
}
static GstCaps *
gst_vdpau_video_yuv_src_getcaps (GstPad * pad)
{
GstVdpauVideoYUV *video_yuv;
video_yuv = GST_VDPAU_VIDEO_YUV (GST_OBJECT_PARENT (pad));
if (video_yuv->src_caps)
return gst_caps_copy (video_yuv->src_caps);
if (GST_PAD_CAPS (video_yuv->src))
return gst_caps_copy (GST_PAD_CAPS (video_yuv->src));
return gst_caps_copy (gst_pad_get_pad_template_caps (video_yuv->src));
}
/* GObject vmethod implementations */
static void
gst_vdpau_video_yuv_base_init (gpointer klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
gst_element_class_set_details_simple (element_class,
"VdpauVideoYUV",
"Covideo_yuv/Decoder/Video",
"VDPAU video surface to YUV",
"Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&src_template));
}
static void
gst_vdpau_video_yuv_class_init (GstVdpauVideoYUVClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
gobject_class->finalize = gst_vdpau_video_yuv_finalize;
gobject_class->set_property = gst_vdpau_video_yuv_set_property;
gobject_class->get_property = gst_vdpau_video_yuv_get_property;
g_object_class_install_property (gobject_class, PROP_SILENT,
g_param_spec_boolean ("silent", "Silent", "Produce verbose output ?",
FALSE, G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE));
}
static void
gst_vdpau_video_yuv_init (GstVdpauVideoYUV * video_yuv,
GstVdpauVideoYUVClass * klass)
{
video_yuv->silent = FALSE;
video_yuv->src_caps = NULL;
video_yuv->height = 0;
video_yuv->width = 0;
video_yuv->framerate_numerator = 0;
video_yuv->framerate_denominator = 0;
video_yuv->par_numerator = 1;
video_yuv->par_denominator = 1;
video_yuv->src = gst_pad_new_from_static_template (&src_template, "src");
gst_pad_set_getcaps_function (video_yuv->src,
gst_vdpau_video_yuv_src_getcaps);
gst_element_add_pad (GST_ELEMENT (video_yuv), video_yuv->src);
video_yuv->sink = gst_pad_new_from_static_template (&sink_template, "sink");
gst_pad_set_setcaps_function (video_yuv->sink,
gst_vdpau_video_yuv_sink_set_caps);
gst_pad_set_chain_function (video_yuv->sink, gst_vdpau_video_yuv_chain);
gst_element_add_pad (GST_ELEMENT (video_yuv), video_yuv->sink);
}
static void
gst_vdpau_video_yuv_finalize (GObject * object)
{
GstVdpauVideoYUV *video_yuv = (GstVdpauVideoYUV *) object;
if (video_yuv->src_caps)
gst_caps_unref (video_yuv->src_caps);
}
static void
gst_vdpau_video_yuv_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstVdpauVideoYUV *video_yuv = GST_VDPAU_VIDEO_YUV (object);
switch (prop_id) {
case PROP_SILENT:
video_yuv->silent = g_value_get_boolean (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_vdpau_video_yuv_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec)
{
GstVdpauVideoYUV *video_yuv = GST_VDPAU_VIDEO_YUV (object);
switch (prop_id) {
case PROP_SILENT:
g_value_set_boolean (value, video_yuv->silent);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}

gstvdpauvideoyuv.h (new file)

@@ -0,0 +1,62 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_VDPAU_VIDEO_YUV_H__
#define __GST_VDPAU_VIDEO_YUV_H__
#include <gst/gst.h>
#include "gstvdpaudevice.h"
G_BEGIN_DECLS
#define GST_TYPE_VDPAU_VIDEO_YUV (gst_vdpau_video_yuv_get_type())
#define GST_VDPAU_VIDEO_YUV(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VDPAU_VIDEO_YUV,GstVdpauVideoYUV))
#define GST_VDPAU_VIDEO_YUV_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VDPAU_VIDEO_YUV,GstVdpauVideoYUVClass))
#define GST_VDPAU_VIDEO_YUV_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VDPAU_VIDEO_YUV, GstVdpauVideoYUVClass))
#define GST_IS_VDPAU_VIDEO_YUV(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VDPAU_VIDEO_YUV))
#define GST_IS_VDPAU_VIDEO_YUV_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VDPAU_VIDEO_YUV))
typedef struct _GstVdpauVideoYUV GstVdpauVideoYUV;
typedef struct _GstVdpauVideoYUVClass GstVdpauVideoYUVClass;
struct _GstVdpauVideoYUV {
GstElement element;
GstPad *src, *sink;
GstCaps *src_caps;
gint width, height;
gint framerate_numerator, framerate_denominator;
gint par_numerator, par_denominator;
guint format;
gboolean silent;
};
struct _GstVdpauVideoYUVClass {
GstElementClass parent_class;
};
GType gst_vdpau_video_yuv_get_type (void);
G_END_DECLS
#endif /* __GST_VDPAU_VIDEO_YUV_H__ */