vdpau: add new vdpausink and vdpauvideopostprocess elements

vdpausink is, as the name implies, a sink which takes video/x-vdpau-output buffers
and outputs them to the screen.

vdpauvideopostprocess is an element which takes video/x-vdpau-video buffers and
outputs video/x-vdpau-output buffers. The element can also do things like
deinterlacing and other postprocessing but this is not implemented yet.
This commit is contained in:
Carl-Anton Ingmarsson 2009-06-30 23:55:13 +02:00 committed by Jan Schmidt
parent 3b2faded12
commit 3e05ddca61
16 changed files with 2685 additions and 164 deletions

View file

@ -8,11 +8,14 @@ libgstvdpau_la_SOURCES = \
gstvdpvideobuffer.c \
gstvdp.c \
gstvdpyuvvideo.c \
gstvdputils.c
gstvdputils.c \
gstvdpvideopostprocess.c \
gstvdpoutputbuffer.c \
gstvdpsink.c
libgstvdpau_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) $(X11_CFLAGS) $(VDPAU_CFLAGS)
libgstvdpau_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \
$(GST_PLUGINS_BASE_LIBS) $(X11_LIBS) -lgstvideo-$(GST_MAJORMINOR) \
$(GST_PLUGINS_BASE_LIBS) $(X11_LIBS) -lgstvideo-$(GST_MAJORMINOR) -lgstinterfaces-$(GST_MAJORMINOR) \
$(VDPAU_LIBS)
libgstvdpau_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstvdpau_la_LIBTOOLFLAGS = --tag=disable-static
@ -24,6 +27,9 @@ noinst_HEADERS = \
gstvdpvideoyuv.h \
gstvdpvideobuffer.h \
gstvdpyuvvideo.h \
gstvdputils.h
gstvdputils.h \
gstvdpvideopostprocess.h \
gstvdpoutputbuffer.h \
gstvdpsink.h

View file

@ -8,6 +8,8 @@
#include "gstvdpmpegdec.h"
#include "gstvdpvideoyuv.h"
#include "gstvdpyuvvideo.h"
#include "gstvdpvideopostprocess.h"
#include "gstvdpsink.h"
static gboolean
vdpau_init (GstPlugin * vdpau_plugin)

View file

@ -18,7 +18,6 @@
* Boston, MA 02111-1307, USA.
*/
#include <vdpau/vdpau_x11.h>
#include <gst/gst.h>
#include "gstvdpdevice.h"
@ -100,6 +99,25 @@ gst_vdp_device_constructed (GObject * object)
&device->vdp_decoder_query_capabilities},
{VDP_FUNC_ID_DECODER_GET_PARAMETERS,
&device->vdp_decoder_get_parameters},
{VDP_FUNC_ID_VIDEO_MIXER_CREATE, &device->vdp_video_mixer_create},
{VDP_FUNC_ID_VIDEO_MIXER_DESTROY, &device->vdp_video_mixer_destroy},
{VDP_FUNC_ID_VIDEO_MIXER_RENDER, &device->vdp_video_mixer_render},
{VDP_FUNC_ID_OUTPUT_SURFACE_CREATE, &device->vdp_output_surface_create},
{VDP_FUNC_ID_OUTPUT_SURFACE_DESTROY, &device->vdp_output_surface_destroy},
{VDP_FUNC_ID_OUTPUT_SURFACE_QUERY_CAPABILITIES,
&device->vdp_output_surface_query_capabilities},
{VDP_FUNC_ID_PRESENTATION_QUEUE_TARGET_CREATE_X11,
&device->vdp_presentation_queue_target_create_x11},
{VDP_FUNC_ID_PRESENTATION_QUEUE_CREATE,
&device->vdp_presentation_queue_create},
{VDP_FUNC_ID_PRESENTATION_QUEUE_DESTROY,
&device->vdp_presentation_queue_destroy},
{VDP_FUNC_ID_PRESENTATION_QUEUE_DISPLAY,
&device->vdp_presentation_queue_display},
{VDP_FUNC_ID_PRESENTATION_QUEUE_BLOCK_UNTIL_SURFACE_IDLE,
&device->vdp_presentation_queue_block_until_surface_idle},
{VDP_FUNC_ID_PRESENTATION_QUEUE_SET_BACKGROUND_COLOR,
&device->vdp_presentation_queue_set_background_color},
{0, NULL}
};

View file

@ -23,6 +23,7 @@
#include <X11/Xlib.h>
#include <vdpau/vdpau.h>
#include <vdpau/vdpau_x11.h>
#include <glib-object.h>
@ -68,6 +69,21 @@ struct _GstVdpDevice
VdpDecoderRender *vdp_decoder_render;
VdpDecoderQueryCapabilities *vdp_decoder_query_capabilities;
VdpDecoderGetParameters *vdp_decoder_get_parameters;
VdpVideoMixerCreate *vdp_video_mixer_create;
VdpVideoMixerDestroy *vdp_video_mixer_destroy;
VdpVideoMixerRender *vdp_video_mixer_render;
VdpOutputSurfaceCreate *vdp_output_surface_create;
VdpOutputSurfaceDestroy *vdp_output_surface_destroy;
VdpOutputSurfaceQueryCapabilities *vdp_output_surface_query_capabilities;
VdpPresentationQueueTargetCreateX11 *vdp_presentation_queue_target_create_x11;
VdpPresentationQueueCreate *vdp_presentation_queue_create;
VdpPresentationQueueDestroy *vdp_presentation_queue_destroy;
VdpPresentationQueueDisplay *vdp_presentation_queue_display;
VdpPresentationQueueBlockUntilSurfaceIdle *vdp_presentation_queue_block_until_surface_idle;
VdpPresentationQueueSetBackgroundColor *vdp_presentation_queue_set_background_color;
};
GType gst_vdp_device_get_type (void) G_GNUC_CONST;

View file

@ -0,0 +1,217 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstvdpoutputbuffer.h"
GST_DEBUG_CATEGORY_STATIC (gst_vdp_output_buffer_debug);
#define GST_CAT_DEFAULT gst_vdp_output_buffer_debug
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_vdp_output_buffer_debug, "vdpauoutputbuffer", 0, "VDPAU output buffer");
/* gst_vdp_output_buffer_new:
 * Allocates a VdpOutputSurface of the given @rgba_format and size on @device
 * and wraps it in a newly allocated GstVdpOutputBuffer.  The buffer holds a
 * reference on @device for the lifetime of the surface (released in finalize).
 * Returns: the new buffer, or NULL (with an error logged) if the VDPAU
 * surface could not be created. */
GstVdpOutputBuffer *
gst_vdp_output_buffer_new (GstVdpDevice * device, VdpRGBAFormat rgba_format,
gint width, gint height)
{
GstVdpOutputBuffer *buffer;
VdpStatus status;
VdpOutputSurface surface;
/* create the hardware surface first; only allocate the wrapper on success */
status =
device->vdp_output_surface_create (device->device, rgba_format, width,
height, &surface);
if (status != VDP_STATUS_OK) {
GST_ERROR ("Couldn't create a VdpOutputSurface, error returned was: %s",
device->vdp_get_error_string (status));
return NULL;
}
buffer =
(GstVdpOutputBuffer *) gst_mini_object_new (GST_TYPE_VDP_OUTPUT_BUFFER);
/* keep the device alive as long as the surface exists */
buffer->device = g_object_ref (device);
buffer->surface = surface;
return buffer;
}
/* Parent class pointer, set in class_init, used to chain up finalize. */
static GObjectClass *gst_vdp_output_buffer_parent_class;

/* Destroys the wrapped VdpOutputSurface, drops the device reference and
 * chains up to the parent finalize.  Destruction failure is only logged;
 * there is no way to recover at finalize time. */
static void
gst_vdp_output_buffer_finalize (GstVdpOutputBuffer * buffer)
{
GstVdpDevice *device;
VdpStatus status;
device = buffer->device;
status = device->vdp_output_surface_destroy (buffer->surface);
if (status != VDP_STATUS_OK)
GST_ERROR
("Couldn't destroy the buffers VdpOutputSurface, error returned was: %s",
device->vdp_get_error_string (status));
g_object_unref (buffer->device);
GST_MINI_OBJECT_CLASS (gst_vdp_output_buffer_parent_class)->finalize
(GST_MINI_OBJECT (buffer));
}
/* Instance init: mark the buffer as owning no device/surface yet;
 * gst_vdp_output_buffer_new() fills these in after creation. */
static void
gst_vdp_output_buffer_init (GstVdpOutputBuffer * buffer, gpointer g_class)
{
buffer->device = NULL;
buffer->surface = VDP_INVALID_HANDLE;
}
/* Class init: remember the parent class and install our finalize so the
 * VDPAU surface is released when the last ref on a buffer is dropped. */
static void
gst_vdp_output_buffer_class_init (gpointer g_class, gpointer class_data)
{
GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
gst_vdp_output_buffer_parent_class = g_type_class_peek_parent (g_class);
mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
gst_vdp_output_buffer_finalize;
}
/* gst_vdp_output_buffer_get_type:
 * Lazily registers GstVdpOutputBuffer as a GstBuffer subtype.
 * NOTE(review): class_size is sizeof (GstBufferClass) — correct as long as
 * the subclass adds no class members (class_init only overrides finalize). */
GType
gst_vdp_output_buffer_get_type (void)
{
static GType _gst_vdp_output_buffer_type;
if (G_UNLIKELY (_gst_vdp_output_buffer_type == 0)) {
static const GTypeInfo info = {
sizeof (GstBufferClass),
NULL,
NULL,
gst_vdp_output_buffer_class_init,
NULL,
NULL,
sizeof (GstVdpOutputBuffer),
0,
(GInstanceInitFunc) gst_vdp_output_buffer_init,
NULL
};
_gst_vdp_output_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
"GstVdpOutputBuffer", &info, 0);
/* set up the debug category the first time the type is used */
DEBUG_INIT ();
}
return _gst_vdp_output_buffer_type;
}
/* Mapping between a VDPAU RGBA surface format and the video/x-raw-rgb caps
 * describing the same memory layout. */
typedef struct
{
  VdpRGBAFormat format;
  GstStaticCaps caps;
} GstVdpOutputBufferFormats;

/* NOTE(review): "endianness = G_BIG_ENDIAN" is a literal inside the caps
 * string — the caps parser does not expand C macros; confirm whether the
 * numeric value (e.g. (int)4321) is intended. */
GstVdpOutputBufferFormats rgba_formats[] = {
  {VDP_RGBA_FORMAT_A8,
      GST_STATIC_CAPS ("video/x-raw-rgb, "
          "bpp = (int)8, "
          "depth = (int)0, "
          "endianness = G_BIG_ENDIAN, "
          "red_mask = (int)0x00, "
          "green_mask = (int)0x00, "
          "blue_mask = (int)0x00, " "alpha_mask = (int)0xff")},
  {VDP_RGBA_FORMAT_B10G10R10A2,
      GST_STATIC_CAPS ("video/x-raw-rgb, "
          "bpp = (int)32, "
          "depth = (int)30, "
          "endianness = G_BIG_ENDIAN, "
          "red_mask = (int)0x000003fc, "
          "green_mask = (int)0x003ff000, "
          "blue_mask = (int)0xffc00000, " "alpha_mask = (int)0x00000003")},
  {VDP_RGBA_FORMAT_B8G8R8A8,
      GST_STATIC_CAPS ("video/x-raw-rgb, "
          "bpp = (int)32, "
          "depth = (int)24, "
          "endianness = G_BIG_ENDIAN, "
          "red_mask = (int)0x0000ff00, "
          "green_mask = (int)0x00ff0000, "
          "blue_mask = (int)0xff000000, " "alpha_mask = (int)0x000000ff")},
  {VDP_RGBA_FORMAT_R10G10B10A2,
      GST_STATIC_CAPS ("video/x-raw-rgb, "
          "bpp = (int)32, "
          "depth = (int)30, "
          "endianness = G_BIG_ENDIAN, "
          "red_mask = (int)0xffc00000, "
          "green_mask = (int)0x003ff000, "
          "blue_mask = (int)0x000003fc, " "alpha_mask = (int)0x00000003")},
  {VDP_RGBA_FORMAT_R8G8B8A8,
      GST_STATIC_CAPS ("video/x-raw-rgb, "
          "bpp = (int)32, "
          "depth = (int)24, "
          /* FIX: was "endianness = G_BIG_ENDIAN" with no trailing ", " —
           * string concatenation produced "...G_BIG_ENDIANred_mask..." and
           * the caps string could not parse. */
          "endianness = G_BIG_ENDIAN, "
          "red_mask = (int)0xff000000, "
          "green_mask = (int)0x00ff0000, "
          "blue_mask = (int)0x0000ff00, " "alpha_mask = (int)0x000000ff")},
};

int n_rgba_formats = G_N_ELEMENTS (rgba_formats);
/* gst_vdp_output_buffer_get_allowed_caps:
 * Queries @device for each entry in rgba_formats and builds
 * video/x-vdpau-output caps (rgba-format + width/height ranges) for every
 * format the hardware supports.  On an unexpected query error the loop is
 * aborted and whatever caps were collected so far are returned.
 * Returns: newly allocated caps; caller owns the reference. */
GstCaps *
gst_vdp_output_buffer_get_allowed_caps (GstVdpDevice * device)
{
GstCaps *caps;
gint i;
g_return_val_if_fail (GST_IS_VDP_DEVICE (device), NULL);
caps = gst_caps_new_empty ();
for (i = 0; i < n_rgba_formats; i++) {
VdpStatus status;
VdpBool is_supported;
guint max_w, max_h;
status = device->vdp_output_surface_query_capabilities (device->device,
rgba_formats[i].format, &is_supported, &max_w, &max_h);
/* VDP_STATUS_INVALID_RGBA_FORMAT just means "not supported here" */
if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_RGBA_FORMAT) {
GST_ERROR_OBJECT (device,
"Could not get query VDPAU output surface capabilites, "
"Error returned from vdpau was: %s",
device->vdp_get_error_string (status));
goto error;
}
if (is_supported) {
GstCaps *format_caps;
format_caps = gst_caps_new_simple ("video/x-vdpau-output",
"rgba-format", G_TYPE_INT, rgba_formats[i].format,
"width", GST_TYPE_INT_RANGE, 1, max_w,
"height", GST_TYPE_INT_RANGE, 1, max_h, NULL);
gst_caps_append (caps, format_caps);
}
}
error:
return caps;
}

View file

@ -0,0 +1,53 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_VDP_OUTPUT_BUFFER_H_
#define _GST_VDP_OUTPUT_BUFFER_H_
#include <gst/gst.h>
#include "gstvdpdevice.h"
typedef struct _GstVdpOutputBuffer GstVdpOutputBuffer;
#define GST_TYPE_VDP_OUTPUT_BUFFER (gst_vdp_output_buffer_get_type())
#define GST_IS_VDP_OUTPUT_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_OUTPUT_BUFFER))
#define GST_VDP_OUTPUT_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_OUTPUT_BUFFER, GstVdpOutputBuffer))
/* A GstBuffer subclass wrapping a VDPAU output surface.  The buffer holds a
 * ref on @device; @surface is destroyed when the buffer is finalized. */
struct _GstVdpOutputBuffer {
GstBuffer buffer;
GstVdpDevice *device;
VdpOutputSurface surface;
};
GType gst_vdp_output_buffer_get_type (void);
GstVdpOutputBuffer* gst_vdp_output_buffer_new (GstVdpDevice * device, VdpRGBAFormat rgba_format, gint width, gint height);
GstCaps *gst_vdp_output_buffer_get_allowed_caps (GstVdpDevice *device);
/* Template caps for x-vdpau-output pads.  NOTE(review): rgba-format range
 * [0,4] presumably matches the five VdpRGBAFormat values — confirm against
 * the VDPAU headers if new formats are added. */
#define GST_VDP_OUTPUT_CAPS \
"video/x-vdpau-output, " \
"rgba-format = (int)[0,4], " \
"width = (int)[1,8192], " \
"height = (int)[1,8192]"
#endif

1476
sys/vdpau/gstvdpsink.c Normal file

File diff suppressed because it is too large Load diff

135
sys/vdpau/gstvdpsink.h Normal file
View file

@ -0,0 +1,135 @@
/* GStreamer
* Copyright (C) <2005> Julien Moutte <julien@moutte.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_VDP_SINK_H__
#define __GST_VDP_SINK_H__
#include <gst/video/gstvideosink.h>
#include <X11/Xlib.h>
#include <X11/Xutil.h>
#include <string.h>
#include <math.h>
#include "gstvdpdevice.h"
G_BEGIN_DECLS
#define GST_TYPE_VDP_SINK \
(gst_vdp_sink_get_type())
#define GST_VDP_SINK(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_VDP_SINK, VdpSink))
#define GST_VDP_SINK_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_VDP_SINK, VdpSinkClass))
#define GST_IS_VDP_SINK(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_VDP_SINK))
#define GST_IS_VDP_SINK_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_VDP_SINK))
typedef struct _GstXContext GstXContext;
typedef struct _GstVdpWindow GstVdpWindow;
typedef struct _VdpSink VdpSink;
typedef struct _VdpSinkClass VdpSinkClass;
/*
* GstVdpWindow:
* @win: the Window ID of this X11 window
* @target: the VdpPresentationQueueTarget of this window
* @queue: the VdpPresentationQueue of this window
* @width: the width in pixels of Window @win
* @height: the height in pixels of Window @win
* @internal: used to remember if Window @win was created internally or passed
* through the #GstXOverlay interface
*
* Structure used to store information about a Window.
*/
struct _GstVdpWindow {
Window win;
VdpPresentationQueueTarget target;
VdpPresentationQueue queue;
gint width, height;
gboolean internal;
};
/**
* VdpSink:
* @display_name: the name of the Display we want to render to
* @device: the GstVdpDevice associated with the display_name
* @window: the #GstVdpWindow we are rendering to
* @cur_image: a reference to the last #GstBuffer that was put to @window. It
* is used when Expose events are received to redraw the latest video frame
* @event_thread: a thread listening for events on @window and handling them
* @running: used to inform @event_thread if it should run/shutdown
* @fps_n: the framerate fraction numerator
* @fps_d: the framerate fraction denominator
* @x_lock: used to protect X calls as we are not using the XLib in threaded
* mode
* @flow_lock: used to protect data flow routines from external calls such as
* events from @event_thread or methods from the #GstXOverlay interface
* @par: used to override calculated pixel aspect ratio from @xcontext
* @synchronous: used to store if XSynchronous should be used or not (for
* debugging purpose only)
* @handle_events: used to know if we should handle select XEvents or not
*
* The #VdpSink data structure.
*/
struct _VdpSink {
/* Our element stuff */
GstVideoSink videosink;
char *display_name;
GstVdpDevice *device;
GstCaps *caps;
GstVdpWindow *window;
GstBuffer *cur_image;
GThread *event_thread;
gboolean running;
/* Framerate numerator and denominator */
gint fps_n;
gint fps_d;
GMutex *x_lock;
GMutex *flow_lock;
/* object-set pixel aspect ratio */
GValue *par;
gboolean synchronous;
gboolean handle_events;
gboolean handle_expose;
/* stream metadata */
gchar *media_title;
};
struct _VdpSinkClass {
GstVideoSinkClass parent_class;
};
GType gst_vdp_sink_get_type(void);
G_END_DECLS
#endif /* __GST_VDP_SINK_H__ */

View file

@ -18,68 +18,10 @@
* Boston, MA 02111-1307, USA.
*/
#include "gstvdpvideobuffer.h"
#include "gstvdputils.h"
static GstCaps *
gst_vdp_get_allowed_yuv_caps (GstVdpDevice * device)
{
GstCaps *caps;
gint i;
caps = gst_caps_new_empty ();
for (i = 0; i < N_CHROMA_TYPES; i++) {
VdpStatus status;
VdpBool is_supported;
guint32 max_w, max_h;
status =
device->vdp_video_surface_query_capabilities (device->device,
chroma_types[i], &is_supported, &max_w, &max_h);
if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_CHROMA_TYPE) {
GST_ERROR_OBJECT (device,
"Could not get query VDPAU video surface capabilites, "
"Error returned from vdpau was: %s",
device->vdp_get_error_string (status));
goto error;
}
if (is_supported) {
gint j;
for (j = 0; j < N_FORMATS; j++) {
if (formats[j].chroma_type != chroma_types[i])
continue;
status =
device->vdp_video_surface_query_ycbcr_capabilities (device->device,
formats[j].chroma_type, formats[j].format, &is_supported);
if (status != VDP_STATUS_OK
&& status != VDP_STATUS_INVALID_Y_CB_CR_FORMAT) {
GST_ERROR_OBJECT (device, "Could not query VDPAU YCbCr capabilites, "
"Error returned from vdpau was: %s",
device->vdp_get_error_string (status));
goto error;
}
if (is_supported) {
GstCaps *format_caps;
format_caps = gst_caps_new_simple ("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, formats[j].fourcc,
"width", GST_TYPE_INT_RANGE, 1, max_w,
"height", GST_TYPE_INT_RANGE, 1, max_h, NULL);
gst_caps_append (caps, format_caps);
}
}
}
}
error:
return caps;
}
GstCaps *
gst_vdp_video_to_yuv_caps (GstCaps * caps, GstVdpDevice * device)
{
@ -124,7 +66,7 @@ gst_vdp_video_to_yuv_caps (GstCaps * caps, GstVdpDevice * device)
structure = gst_caps_get_structure (caps, 0);
if (device) {
allowed_caps = gst_vdp_get_allowed_yuv_caps (device);
allowed_caps = gst_vdp_video_buffer_get_allowed_yuv_caps (device);
result = gst_caps_intersect (new_caps, allowed_caps);
gst_caps_unref (new_caps);
@ -135,46 +77,6 @@ gst_vdp_video_to_yuv_caps (GstCaps * caps, GstVdpDevice * device)
return result;
}
static GstCaps *
gst_vdp_get_allowed_video_caps (GstVdpDevice * device)
{
GstCaps *caps;
gint i;
caps = gst_caps_new_empty ();
for (i = 0; i < N_CHROMA_TYPES; i++) {
VdpStatus status;
VdpBool is_supported;
guint32 max_w, max_h;
status =
device->vdp_video_surface_query_capabilities (device->device,
chroma_types[i], &is_supported, &max_w, &max_h);
if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_CHROMA_TYPE) {
GST_ERROR_OBJECT (device,
"Could not get query VDPAU video surface capabilites, "
"Error returned from vdpau was: %s",
device->vdp_get_error_string (status));
goto error;
}
if (is_supported) {
GstCaps *format_caps;
format_caps = gst_caps_new_simple ("video/x-vdpau-video",
"chroma-type", G_TYPE_INT, chroma_types[i],
"width", GST_TYPE_INT_RANGE, 1, max_w,
"height", GST_TYPE_INT_RANGE, 1, max_h, NULL);
gst_caps_append (caps, format_caps);
}
}
error:
return caps;
}
GstCaps *
gst_vdp_yuv_to_video_caps (GstCaps * caps, GstVdpDevice * device)
{
@ -209,7 +111,7 @@ gst_vdp_yuv_to_video_caps (GstCaps * caps, GstVdpDevice * device)
if (device) {
GstCaps *allowed_caps;
allowed_caps = gst_vdp_get_allowed_video_caps (device);
allowed_caps = gst_vdp_video_buffer_get_allowed_video_caps (device);
result = gst_caps_intersect (new_caps, allowed_caps);
gst_caps_unref (new_caps);
@ -219,3 +121,20 @@ gst_vdp_yuv_to_video_caps (GstCaps * caps, GstVdpDevice * device)
return result;
}
/* gst_vdp_video_to_output_caps:
 * Converts video/x-vdpau-video caps into the matching video/x-vdpau-output
 * caps: each structure is renamed and the video-only "chroma-type" field is
 * dropped; width/height/framerate fields are kept as-is.
 * Returns: a newly allocated caps copy; caller owns the reference. */
GstCaps *
gst_vdp_video_to_output_caps (GstCaps * caps)
{
GstCaps *result;
gint i;
result = gst_caps_copy (caps);
for (i = 0; i < gst_caps_get_size (result); i++) {
GstStructure *structure = gst_caps_get_structure (result, i);
gst_structure_set_name (structure, "video/x-vdpau-output");
gst_structure_remove_field (structure, "chroma-type");
}
return result;
}

View file

@ -25,58 +25,9 @@
#include "gstvdpdevice.h"
typedef struct
{
VdpChromaType chroma_type;
VdpYCbCrFormat format;
guint32 fourcc;
} VdpauFormats;
#define N_CHROMA_TYPES 3
#define N_FORMATS 7
static const VdpChromaType chroma_types[N_CHROMA_TYPES] =
{ VDP_CHROMA_TYPE_420, VDP_CHROMA_TYPE_422, VDP_CHROMA_TYPE_444 };
static const VdpauFormats formats[N_FORMATS] = {
{
VDP_CHROMA_TYPE_420,
VDP_YCBCR_FORMAT_YV12,
GST_MAKE_FOURCC ('I', '4', '2', '0')
},
{
VDP_CHROMA_TYPE_420,
VDP_YCBCR_FORMAT_YV12,
GST_MAKE_FOURCC ('Y', 'V', '1', '2')
},
{
VDP_CHROMA_TYPE_420,
VDP_YCBCR_FORMAT_NV12,
GST_MAKE_FOURCC ('N', 'V', '1', '2')
},
{
VDP_CHROMA_TYPE_422,
VDP_YCBCR_FORMAT_UYVY,
GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y')
},
{
VDP_CHROMA_TYPE_444,
VDP_YCBCR_FORMAT_V8U8Y8A8,
GST_MAKE_FOURCC ('A', 'Y', 'U', 'V')
},
{
VDP_CHROMA_TYPE_444,
VDP_YCBCR_FORMAT_Y8U8V8A8,
GST_MAKE_FOURCC ('A', 'V', 'U', 'Y')
},
{
VDP_CHROMA_TYPE_422,
VDP_YCBCR_FORMAT_YUYV,
GST_MAKE_FOURCC ('Y', 'U', 'Y', '2')
},
};
GstCaps *gst_vdp_video_to_yuv_caps (GstCaps *caps, GstVdpDevice *device);
GstCaps *gst_vdp_yuv_to_video_caps (GstCaps *caps, GstVdpDevice *device);
GstCaps *gst_vdp_video_to_output_caps (GstCaps * caps);
#endif /* _GST_VDP_UTILS_H_ */

View file

@ -136,3 +136,103 @@ gst_vdp_video_buffer_get_type (void)
}
return _gst_vdp_video_buffer_type;
}
/* gst_vdp_video_buffer_get_allowed_yuv_caps:
 * Builds video/x-raw-yuv caps for every fourcc in the formats[] table whose
 * chroma type and YCbCr format the hardware on @device supports.  On an
 * unexpected VDPAU error the loop aborts and the caps collected so far are
 * returned.  Returns: newly allocated caps; caller owns the reference. */
GstCaps *
gst_vdp_video_buffer_get_allowed_yuv_caps (GstVdpDevice * device)
{
GstCaps *caps;
gint i;
caps = gst_caps_new_empty ();
for (i = 0; i < N_CHROMA_TYPES; i++) {
VdpStatus status;
VdpBool is_supported;
guint32 max_w, max_h;
/* first: is this chroma type supported at all, and up to what size? */
status =
device->vdp_video_surface_query_capabilities (device->device,
chroma_types[i], &is_supported, &max_w, &max_h);
if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_CHROMA_TYPE) {
GST_ERROR_OBJECT (device,
"Could not get query VDPAU video surface capabilites, "
"Error returned from vdpau was: %s",
device->vdp_get_error_string (status));
goto error;
}
if (is_supported) {
gint j;
/* second: which YCbCr packings of this chroma type can be up/downloaded */
for (j = 0; j < N_FORMATS; j++) {
if (formats[j].chroma_type != chroma_types[i])
continue;
status =
device->vdp_video_surface_query_ycbcr_capabilities (device->device,
formats[j].chroma_type, formats[j].format, &is_supported);
if (status != VDP_STATUS_OK
&& status != VDP_STATUS_INVALID_Y_CB_CR_FORMAT) {
GST_ERROR_OBJECT (device, "Could not query VDPAU YCbCr capabilites, "
"Error returned from vdpau was: %s",
device->vdp_get_error_string (status));
goto error;
}
if (is_supported) {
GstCaps *format_caps;
format_caps = gst_caps_new_simple ("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, formats[j].fourcc,
"width", GST_TYPE_INT_RANGE, 1, max_w,
"height", GST_TYPE_INT_RANGE, 1, max_h, NULL);
gst_caps_append (caps, format_caps);
}
}
}
}
error:
return caps;
}
/* gst_vdp_video_buffer_get_allowed_video_caps:
 * Builds video/x-vdpau-video caps (chroma-type + width/height ranges) for
 * every chroma type @device supports.  On an unexpected VDPAU error the loop
 * aborts and the caps collected so far are returned.
 * Returns: newly allocated caps; caller owns the reference. */
GstCaps *
gst_vdp_video_buffer_get_allowed_video_caps (GstVdpDevice * device)
{
GstCaps *caps;
gint i;
caps = gst_caps_new_empty ();
for (i = 0; i < N_CHROMA_TYPES; i++) {
VdpStatus status;
VdpBool is_supported;
guint32 max_w, max_h;
status =
device->vdp_video_surface_query_capabilities (device->device,
chroma_types[i], &is_supported, &max_w, &max_h);
/* VDP_STATUS_INVALID_CHROMA_TYPE just means "not supported here" */
if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_CHROMA_TYPE) {
GST_ERROR_OBJECT (device,
"Could not get query VDPAU video surface capabilites, "
"Error returned from vdpau was: %s",
device->vdp_get_error_string (status));
goto error;
}
if (is_supported) {
GstCaps *format_caps;
format_caps = gst_caps_new_simple ("video/x-vdpau-video",
"chroma-type", G_TYPE_INT, chroma_types[i],
"width", GST_TYPE_INT_RANGE, 1, max_w,
"height", GST_TYPE_INT_RANGE, 1, max_h, NULL);
gst_caps_append (caps, format_caps);
}
}
error:
return caps;
}

View file

@ -44,12 +44,65 @@ struct _GstVdpVideoBuffer {
GSList *refs;
};
typedef struct
{
VdpChromaType chroma_type;
VdpYCbCrFormat format;
guint32 fourcc;
} GstVdpVideoBufferFormats;
#define N_CHROMA_TYPES 3
#define N_FORMATS 7
static const VdpChromaType chroma_types[N_CHROMA_TYPES] =
{ VDP_CHROMA_TYPE_420, VDP_CHROMA_TYPE_422, VDP_CHROMA_TYPE_444 };
static const GstVdpVideoBufferFormats formats[N_FORMATS] = {
{
VDP_CHROMA_TYPE_420,
VDP_YCBCR_FORMAT_YV12,
GST_MAKE_FOURCC ('I', '4', '2', '0')
},
{
VDP_CHROMA_TYPE_420,
VDP_YCBCR_FORMAT_YV12,
GST_MAKE_FOURCC ('Y', 'V', '1', '2')
},
{
VDP_CHROMA_TYPE_420,
VDP_YCBCR_FORMAT_NV12,
GST_MAKE_FOURCC ('N', 'V', '1', '2')
},
{
VDP_CHROMA_TYPE_422,
VDP_YCBCR_FORMAT_UYVY,
GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y')
},
{
VDP_CHROMA_TYPE_444,
VDP_YCBCR_FORMAT_V8U8Y8A8,
GST_MAKE_FOURCC ('A', 'Y', 'U', 'V')
},
{
VDP_CHROMA_TYPE_444,
VDP_YCBCR_FORMAT_Y8U8V8A8,
GST_MAKE_FOURCC ('A', 'V', 'U', 'Y')
},
{
VDP_CHROMA_TYPE_422,
VDP_YCBCR_FORMAT_YUYV,
GST_MAKE_FOURCC ('Y', 'U', 'Y', '2')
},
};
GType gst_vdp_video_buffer_get_type (void);
GstVdpVideoBuffer* gst_vdp_video_buffer_new (GstVdpDevice * device, VdpChromaType chroma_type, gint width, gint height);
void gst_vdp_video_buffer_add_reference (GstVdpVideoBuffer *buffer, GstVdpVideoBuffer *buf);
GstCaps *gst_vdp_video_buffer_get_allowed_yuv_caps (GstVdpDevice * device);
GstCaps *gst_vdp_video_buffer_get_allowed_video_caps (GstVdpDevice * device);
#define GST_VDP_VIDEO_CAPS \
"video/x-vdpau-video, " \
"chroma-type = (int)[0,2], " \

View file

@ -0,0 +1,512 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/**
* SECTION:element-vdpauvideopostprocess
*
* FIXME:Describe vdpaumpegdec here.
*
* <refsect2>
* <title>Example launch line</title>
* |[
* gst-launch -v -m fakesrc ! vdpauvideopostprocess ! fakesink silent=TRUE
* ]|
* </refsect2>
*/
/*
* TODO:
* + add support for postprocessing eg. deinterlace
* + mixing videos. (This should perhaps be done in a separate element based on
* VdpOutputSurface)
*/
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
#include <gst/gst.h>
#include <gst/video/gstvideosink.h>
#include "gstvdputils.h"
#include "gstvdpvideobuffer.h"
#include "gstvdpoutputbuffer.h"
#include "gstvdpvideopostprocess.h"
GST_DEBUG_CATEGORY_STATIC (gst_vdp_vpp_debug);
#define GST_CAT_DEFAULT gst_vdp_vpp_debug
/* Filter signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum
{
PROP_0,
PROP_FORCE_ASPECT_RATIO
};
/* the capabilities of the inputs and outputs.
*
* describe the real formats here.
*/
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VDP_VIDEO_CAPS));
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VDP_OUTPUT_CAPS));
#define DEBUG_INIT(bla) \
GST_DEBUG_CATEGORY_INIT (gst_vdp_vpp_debug, "vdpauvideopostprocess", 0, "VDPAU video surface to output surface");
GST_BOILERPLATE_FULL (GstVdpVideoPostProcess, gst_vdp_vpp,
GstElement, GST_TYPE_ELEMENT, DEBUG_INIT);
static void gst_vdp_vpp_finalize (GObject * object);
/* Sink-pad setcaps: derive the matching x-vdpau-output caps from the
 * incoming x-vdpau-video caps, intersect with what downstream allows,
 * truncate to one structure and set that on the src pad. */
static gboolean
gst_vdp_vpp_set_caps (GstPad * pad, GstCaps * caps)
{
GstVdpVideoPostProcess *vpp =
GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
GstCaps *output_caps, *allowed_caps, *src_caps;
gboolean res;
output_caps = gst_vdp_video_to_output_caps (caps);
allowed_caps = gst_pad_get_allowed_caps (vpp->srcpad);
src_caps = gst_caps_intersect (output_caps, allowed_caps);
/* keep only the first (preferred) structure */
gst_caps_truncate (src_caps);
GST_DEBUG ("output_caps: %" GST_PTR_FORMAT " allowed_caps: %" GST_PTR_FORMAT
" src_caps: %" GST_PTR_FORMAT, output_caps, allowed_caps, src_caps);
gst_caps_unref (output_caps);
gst_caps_unref (allowed_caps);
/* NOTE(review): src_caps is not unreffed here — presumably ownership is
 * taken by gst_pad_set_caps; confirm against the GStreamer 0.10 API. */
res = gst_pad_set_caps (vpp->srcpad, src_caps);
gst_object_unref (vpp);
return res;
}
/* Flush any queued/in-flight state on discont or FLUSH_STOP.
 * Currently a stub — nothing is buffered yet (no deinterlacing support). */
static void
gst_vdp_vpp_flush (GstVdpVideoPostProcess * vpp)
{
/* TODO: Write this */
}
/* Reset per-stream state; the mixer and device are created lazily on the
 * first output-buffer allocation (see gst_vdp_vpp_alloc_output_buffer). */
static void
gst_vdp_vpp_start (GstVdpVideoPostProcess * vpp)
{
vpp->mixer = VDP_INVALID_HANDLE;
vpp->device = NULL;
}
/* Tear down the VDPAU video mixer and release the device reference that
 * gst_vdp_vpp_alloc_output_buffer() took. */
static void
gst_vdp_vpp_stop (GstVdpVideoPostProcess * vpp)
{
  if (vpp->mixer != VDP_INVALID_HANDLE)
    vpp->device->vdp_video_mixer_destroy (vpp->mixer);
  /* FIX: condition was inverted (`if (!vpp->device)`), so the device ref was
   * only "released" when the pointer was NULL — leaking the device whenever
   * one had actually been acquired. */
  if (vpp->device)
    g_object_unref (vpp->device);
}
/* Allocates an output buffer downstream and, on the first successful
 * allocation, lazily creates the VDPAU video mixer sized from the sink-pad
 * caps (width/height/chroma-type).  The device is taken from the allocated
 * buffer, which is how the element discovers which GPU downstream uses.
 * Returns GST_FLOW_ERROR (and unrefs *outbuf) if caps parsing or mixer
 * creation fails. */
static GstFlowReturn
gst_vdp_vpp_alloc_output_buffer (GstVdpVideoPostProcess * vpp, GstCaps * caps,
GstVdpOutputBuffer ** outbuf)
{
GstFlowReturn ret;
ret = gst_pad_alloc_buffer_and_set_caps (vpp->srcpad, 0, 0,
caps, (GstBuffer **) outbuf);
if (ret != GST_FLOW_OK)
return ret;
if (!vpp->device) {
#define VDP_NUM_MIXER_PARAMETER 3
#define MAX_NUM_FEATURES 5
GstStructure *structure;
gint chroma_type;
gint width, height;
VdpStatus status;
GstVdpDevice *device;
/* NOTE(review): array uses the literal 5 instead of MAX_NUM_FEATURES;
 * no features are enabled yet (n_features == 0). */
VdpVideoMixerFeature features[5];
guint n_features = 0;
VdpVideoMixerParameter parameters[VDP_NUM_MIXER_PARAMETER] = {
VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_WIDTH,
VDP_VIDEO_MIXER_PARAMETER_VIDEO_SURFACE_HEIGHT,
VDP_VIDEO_MIXER_PARAMETER_CHROMA_TYPE
};
const void *parameter_values[VDP_NUM_MIXER_PARAMETER];
/* mixer geometry comes from the negotiated sink caps */
structure = gst_caps_get_structure (GST_PAD_CAPS (vpp->sinkpad), 0);
if (!gst_structure_get_int (structure, "chroma-type", &chroma_type) ||
!gst_structure_get_int (structure, "width", &width) ||
!gst_structure_get_int (structure, "height", &height))
goto error;
parameter_values[0] = &width;
parameter_values[1] = &height;
parameter_values[2] = &chroma_type;
/* adopt the device of the downstream-allocated buffer */
device = vpp->device = g_object_ref ((*outbuf)->device);
status =
device->vdp_video_mixer_create (device->device, n_features, features,
VDP_NUM_MIXER_PARAMETER, parameters, parameter_values, &vpp->mixer);
if (status != VDP_STATUS_OK) {
GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
("Could not create vdpau video mixer"),
("Error returned from vdpau was: %s",
device->vdp_get_error_string (status)));
goto error;
}
}
return ret;
error:
gst_buffer_unref (GST_BUFFER (*outbuf));
return GST_FLOW_ERROR;
}
/* Sink-pad chain: renders one x-vdpau-video surface into a freshly
 * allocated x-vdpau-output surface via the video mixer and pushes it
 * downstream.  Takes ownership of @buffer (always unreffed in `done`);
 * ownership of outbuf transfers to gst_pad_push on success. */
static GstFlowReturn
gst_vdp_vpp_chain (GstPad * pad, GstBuffer * buffer)
{
GstVdpVideoPostProcess *vpp =
GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
GstFlowReturn ret;
GstVdpOutputBuffer *outbuf;
GstStructure *structure;
GstVideoRectangle src_r, dest_r;
VdpRect rect;
GstVdpDevice *device;
VdpStatus status;
if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {
GST_DEBUG_OBJECT (vpp, "Received discont buffer");
gst_vdp_vpp_flush (vpp);
}
/* this also lazily creates the mixer on the first buffer */
ret =
gst_vdp_vpp_alloc_output_buffer (vpp, GST_PAD_CAPS (vpp->srcpad),
&outbuf);
if (ret != GST_FLOW_OK)
goto done;
/* source rectangle from the input caps */
structure = gst_caps_get_structure (GST_BUFFER_CAPS (buffer), 0);
if (!gst_structure_get_int (structure, "width", &src_r.w) ||
!gst_structure_get_int (structure, "height", &src_r.h))
goto invalid_caps;
/* destination rectangle from the output caps */
structure = gst_caps_get_structure (GST_BUFFER_CAPS (outbuf), 0);
if (!gst_structure_get_int (structure, "width", &dest_r.w) ||
!gst_structure_get_int (structure, "height", &dest_r.h))
goto invalid_caps;
if (vpp->force_aspect_ratio) {
GstVideoRectangle res_r;
/* letterbox: center the scaled source inside the destination */
gst_video_sink_center_rect (src_r, dest_r, &res_r, TRUE);
rect.x0 = res_r.x;
rect.x1 = res_r.w + res_r.x;
rect.y0 = res_r.y;
rect.y1 = res_r.h + res_r.y;
} else {
rect.x0 = 0;
rect.x1 = dest_r.w;
rect.y0 = 0;
rect.y1 = dest_r.h;
}
device = vpp->device;
/* progressive-frame render; no past/future surfaces (no deinterlacing) */
status = device->vdp_video_mixer_render (vpp->mixer, VDP_INVALID_HANDLE, NULL,
VDP_VIDEO_MIXER_PICTURE_STRUCTURE_FRAME, 0, NULL,
GST_VDP_VIDEO_BUFFER (buffer)->surface, 0, NULL, NULL, outbuf->surface,
NULL, &rect, 0, NULL);
if (status != VDP_STATUS_OK) {
GST_ELEMENT_ERROR (vpp, RESOURCE, READ,
("Could not post process frame"),
("Error returned from vdpau was: %s",
device->vdp_get_error_string (status)));
ret = GST_FLOW_ERROR;
goto done;
}
gst_buffer_copy_metadata (GST_BUFFER (outbuf), buffer,
GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);
ret = gst_pad_push (vpp->srcpad, GST_BUFFER (outbuf));
done:
gst_buffer_unref (buffer);
gst_object_unref (vpp);
return ret;
invalid_caps:
gst_buffer_unref (GST_BUFFER (outbuf));
ret = GST_FLOW_ERROR;
goto done;
}
/* Sink-pad getcaps: once a device is known, report exactly the surface
 * formats the hardware accepts; before that, offer the static template. */
static GstCaps *
gst_vdp_vpp_sink_getcaps (GstPad * pad)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  GstCaps *result;

  result = (vpp->device != NULL)
      ? gst_vdp_video_buffer_get_allowed_video_caps (vpp->device)
      : gst_static_pad_template_get_caps (&sink_template);

  gst_object_unref (vpp);

  return result;
}
/* Bufferalloc for the sink pad: hand upstream a GstVdpVideoBuffer.
 * If we don't own a device yet, we first allocate a throwaway buffer
 * downstream purely to learn which GstVdpDevice downstream uses.
 *
 * Fixes vs. previous revision:
 *  - take our own ref on the device before releasing the downstream
 *    buffer that owned it (the bare pointer could dangle otherwise);
 *  - reset ret to GST_FLOW_ERROR after the downstream alloc so a
 *    later caps-parse failure doesn't return GST_FLOW_OK with *buf
 *    left unset. */
static GstFlowReturn
gst_vdp_vpp_sink_bufferalloc (GstPad * pad, guint64 offset, guint size,
    GstCaps * caps, GstBuffer ** buf)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  GstVdpOutputBuffer *outbuf;
  GstFlowReturn ret = GST_FLOW_ERROR;
  GstVdpDevice *device = NULL;
  GstStructure *structure;
  gint width, height;
  gint chroma_type;

  if (!vpp->device) {
    /* if we haven't got a device yet we must alloc a buffer downstream to get it */
    GstCaps *src_caps = gst_pad_get_allowed_caps (vpp->srcpad);
    gst_pad_fixate_caps (vpp->srcpad, src_caps);
    ret = gst_pad_alloc_buffer (vpp->srcpad, 0, 0, src_caps,
        (GstBuffer **) & outbuf);
    gst_caps_unref (src_caps);
    if (ret != GST_FLOW_OK)
      goto error;

    /* the only ref we know of is held by outbuf, which we drop right
     * away; keep our own ref while we still use the device */
    device = g_object_ref (outbuf->device);
    gst_buffer_unref (GST_BUFFER (outbuf));
  } else
    device = g_object_ref (vpp->device);

  /* from here on any failure is an error, even though the downstream
   * alloc above may have set ret to GST_FLOW_OK */
  ret = GST_FLOW_ERROR;

  structure = gst_caps_get_structure (caps, 0);
  if (!gst_structure_get_int (structure, "width", &width) ||
      !gst_structure_get_int (structure, "height", &height) ||
      !gst_structure_get_int (structure, "chroma-type", &chroma_type))
    goto error;

  *buf = GST_BUFFER (gst_vdp_video_buffer_new (device,
          chroma_type, width, height));

  if (*buf == NULL)
    goto error;

  GST_BUFFER_SIZE (*buf) = size;
  GST_BUFFER_OFFSET (*buf) = offset;

  gst_buffer_set_caps (*buf, caps);

  ret = GST_FLOW_OK;

error:
  if (device)
    g_object_unref (device);
  gst_object_unref (vpp);
  return ret;
}
/* Sink pad event handler: on FLUSH_STOP we drop our internal state via
 * gst_vdp_vpp_flush() and forward the event; everything else goes to
 * the default handler. */
static gboolean
gst_vdp_vpp_sink_event (GstPad * pad, GstEvent * event)
{
  GstVdpVideoPostProcess *vpp =
      GST_VDP_VIDEO_POST_PROCESS (gst_pad_get_parent (pad));
  gboolean res;

  if (GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
    GST_DEBUG_OBJECT (vpp, "flush stop");
    gst_vdp_vpp_flush (vpp);
    res = gst_pad_push_event (vpp->srcpad, event);
  } else {
    res = gst_pad_event_default (pad, event);
  }

  gst_object_unref (vpp);

  return res;
}
/* State change handler: start the element (device acquisition etc.) on
 * the way up before chaining to the parent, stop it on the way down
 * after chaining. */
static GstStateChangeReturn
gst_vdp_vpp_change_state (GstElement * element, GstStateChange transition)
{
  GstVdpVideoPostProcess *vpp = GST_VDP_VIDEO_POST_PROCESS (element);
  GstStateChangeReturn ret;

  /* upward transitions are handled before chaining up */
  if (transition == GST_STATE_CHANGE_READY_TO_PAUSED)
    gst_vdp_vpp_start (vpp);

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  /* downward transitions are handled after chaining up */
  if (transition == GST_STATE_CHANGE_PAUSED_TO_READY)
    gst_vdp_vpp_stop (vpp);

  return ret;
}
/* GObject vmethod implementations */
/* GObject get_property: only PROP_FORCE_ASPECT_RATIO is readable. */
static void
gst_vdp_vpp_get_property (GObject * object, guint property_id, GValue * value,
    GParamSpec * pspec)
{
  GstVdpVideoPostProcess *vpp = GST_VDP_VIDEO_POST_PROCESS (object);

  if (property_id == PROP_FORCE_ASPECT_RATIO)
    g_value_set_boolean (value, vpp->force_aspect_ratio);
  else
    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
}
/* GObject vmethod implementations */
/* GObject set_property: only PROP_FORCE_ASPECT_RATIO is writable. */
static void
gst_vdp_vpp_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  GstVdpVideoPostProcess *vpp = GST_VDP_VIDEO_POST_PROCESS (object);

  if (property_id == PROP_FORCE_ASPECT_RATIO)
    vpp->force_aspect_ratio = g_value_get_boolean (value);
  else
    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
}
/* GType vmethod implementations */
/* Register the element details and pad templates.
 * Fix: the long name said "VDPAU Mpeg Decoder" and the classification
 * contained "Decoder" — both copy-pasted from the mpeg decoder; this
 * element is a post-processor, not a decoder. */
static void
gst_vdp_vpp_base_init (gpointer gclass)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);

  gst_element_class_set_details_simple (element_class,
      "VDPAU Video Post Processor",
      "Filter/Converter/Video",
      "Post process GstVdpVideoBuffers and output GstVdpOutputBuffers",
      "Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");

  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&sink_template));
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&src_template));
}
/* initialize the GstVdpVideoPostProcess class */
/* Class init: hook up property accessors, finalize and state change,
 * and install the "force-aspect-ratio" property.
 * Fix: the property blurb was built from two adjacent string literals
 * with no space between them, producing "…to themaximum size…". */
static void
gst_vdp_vpp_class_init (GstVdpVideoPostProcessClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;

  gobject_class->get_property = gst_vdp_vpp_get_property;
  gobject_class->set_property = gst_vdp_vpp_set_property;
  gobject_class->finalize = gst_vdp_vpp_finalize;

  g_object_class_install_property (gobject_class, PROP_FORCE_ASPECT_RATIO,
      g_param_spec_boolean ("force-aspect-ratio", "Force aspect ratio",
          "When enabled, the plugin will only scale up the input surface to the "
          "maximum size where the aspect ratio can be preserved", FALSE,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));

  gstelement_class->change_state = gst_vdp_vpp_change_state;
}
/* Instance init: create the static src and sink pads and install the
 * sink pad functions.
 * Consistency fix: wrap all pad functions in GST_DEBUG_FUNCPTR (the
 * chain and event functions already were; getcaps, setcaps and
 * bufferalloc were not), so all of them show up in debug output. */
static void
gst_vdp_vpp_init (GstVdpVideoPostProcess * vpp,
    GstVdpVideoPostProcessClass * gclass)
{
  vpp->force_aspect_ratio = FALSE;

  /* SRC PAD */
  vpp->srcpad = gst_pad_new_from_static_template (&src_template, "src");
  gst_element_add_pad (GST_ELEMENT (vpp), vpp->srcpad);

  /* SINK PAD */
  vpp->sinkpad = gst_pad_new_from_static_template (&sink_template, "sink");
  gst_element_add_pad (GST_ELEMENT (vpp), vpp->sinkpad);

  gst_pad_set_getcaps_function (vpp->sinkpad,
      GST_DEBUG_FUNCPTR (gst_vdp_vpp_sink_getcaps));
  gst_pad_set_setcaps_function (vpp->sinkpad,
      GST_DEBUG_FUNCPTR (gst_vdp_vpp_set_caps));
  gst_pad_set_chain_function (vpp->sinkpad,
      GST_DEBUG_FUNCPTR (gst_vdp_vpp_chain));
  gst_pad_set_event_function (vpp->sinkpad,
      GST_DEBUG_FUNCPTR (gst_vdp_vpp_sink_event));
  gst_pad_set_bufferalloc_function (vpp->sinkpad,
      GST_DEBUG_FUNCPTR (gst_vdp_vpp_sink_bufferalloc));
}
/* GObject finalize.
 * Fix: the previous revision had an empty body and never chained up to
 * the parent class finalize, breaking the GObject finalization chain
 * (parent_class is in scope; see change_state above). */
static void
gst_vdp_vpp_finalize (GObject * object)
{
  G_OBJECT_CLASS (parent_class)->finalize (object);
}

View file

@ -0,0 +1,60 @@
/*
* GStreamer
* Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_VDP_VIDEO_POST_PROCESS_H__
#define __GST_VDP_VIDEO_POST_PROCESS_H__
#include <gst/gst.h>
#include "gstvdpdevice.h"
G_BEGIN_DECLS
#define GST_TYPE_VDP_VIDEO_POST_PROCESS (gst_vdp_vpp_get_type())
#define GST_VDP_VIDEO_POST_PROCESS(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VDP_VIDEO_POST_PROCESS,GstVdpVideoPostProcess))
#define GST_VDP_VIDEO_POST_PROCESS_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VDP_VIDEO_POST_PROCESS,GstVdpVideoPostProcessClass))
#define GST_IS_VDP_VIDEO_POST_PROCESS(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VDP_VIDEO_POST_PROCESS))
#define GST_IS_VDP_VIDEO_POST_PROCESS_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VDP_VIDEO_POST_PROCESS))
typedef struct _GstVdpVideoPostProcess GstVdpVideoPostProcess;
typedef struct _GstVdpVideoPostProcessClass GstVdpVideoPostProcessClass;
/* Instance struct for the vdpauvideopostprocess element: takes
 * GstVdpVideoBuffers on the sink pad and pushes GstVdpOutputBuffers,
 * rendered through a VdpVideoMixer, on the src pad. */
struct _GstVdpVideoPostProcess
{
  GstElement element;
  GstPad *sinkpad, *srcpad;     /* static sink and src pads */
  GstVdpDevice *device;         /* VDPAU device; NULL until acquired — see sink_bufferalloc */
  VdpVideoMixer mixer;          /* mixer used by the chain function for rendering */
  gboolean force_aspect_ratio;  /* backs the "force-aspect-ratio" property */
};
/* Class struct: no class-level additions beyond GstElementClass. */
struct _GstVdpVideoPostProcessClass
{
  GstElementClass element_class;
};
GType gst_vdp_vpp_get_type (void);
G_END_DECLS
#endif /* __GST_VDP_VIDEO_POST_PROCESS_H__ */

View file

@ -236,7 +236,9 @@ gst_vdp_video_yuv_transform (GstBaseTransform * trans, GstBuffer * inbuf,
break;
}
gst_buffer_copy_metadata (outbuf, inbuf, GST_BUFFER_COPY_TIMESTAMPS);
gst_buffer_copy_metadata (outbuf, inbuf,
GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);
GST_LOG_OBJECT (video_yuv, "Pushing buffer with ts %" GST_TIME_FORMAT,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
@ -322,7 +324,7 @@ gst_vdp_video_yuv_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps)
{
GstVdpVideoYUV *video_yuv = GST_VDP_VIDEO_YUV (trans);
GstCaps *result;
GstCaps *result = NULL;
if (direction == GST_PAD_SINK)
result = gst_vdp_video_to_yuv_caps (caps, video_yuv->device);
@ -406,7 +408,7 @@ gst_vdp_video_yuv_base_init (gpointer klass)
gst_element_class_set_details_simple (element_class,
"VdpauVideoYUV",
"Covideo_yuv/Decoder/Video",
"Filter/Converter/Decoder/Video",
"VDPAU video surface to YUV",
"Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");

View file

@ -233,7 +233,8 @@ gst_vdp_yuv_video_transform (GstBaseTransform * trans, GstBuffer * inbuf,
break;
}
gst_buffer_copy_metadata (outbuf, inbuf, GST_BUFFER_COPY_TIMESTAMPS);
gst_buffer_copy_metadata (outbuf, inbuf,
GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS);
return GST_FLOW_OK;
}
@ -262,7 +263,7 @@ gst_vdp_yuv_video_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps)
{
GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (trans);
GstCaps *result;
GstCaps *result = NULL;
if (direction == GST_PAD_SINK) {
result = gst_vdp_yuv_to_video_caps (caps, yuv_video->device);
@ -305,7 +306,7 @@ gst_vdp_yuv_video_base_init (gpointer klass)
gst_element_class_set_details_simple (element_class,
"VdpauYUVVideo",
"Coyuv_video/Decoder/Video",
"Filter/Converter/Decoder/Video",
"VDPAU video surface to YUV",
"Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");