plugins: move up interfaces (cosmetics).

Move the GstImplementsInterface and GstVideoContext support functions up
so as to keep a clear separation between the plugin element and its
interface hooks.
Gwenole Beauchesne 2013-03-20 18:04:39 +01:00
parent 13c5d3244b
commit 7fd648b8b0
5 changed files with 150 additions and 189 deletions
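
For reference, here is a minimal sketch of the pattern being reorganized: the GstImplementsInterface and GstVideoContext (GStreamer 0.10 API) hooks end up sitting directly above the type registration done with G_DEFINE_TYPE_WITH_CODE(). This is not code from the commit: the GstVaapiFoo element, its GST_TYPE_ELEMENT parent, the include list and the gst_vaapifoo_* names are placeholder assumptions for illustration; only gst_vaapi_set_display() and the registration macros mirror what the diffs below show.

/* Sketch only: GstVaapiFoo, GST_TYPE_ELEMENT and the include list are
 * assumptions for illustration, not part of this commit. */
#include <gst/gst.h>
#include <gst/video/videocontext.h>
#include <gst/vaapi/gstvaapidisplay.h>
#include "gstvaapipluginutil.h"  /* assumed to declare gst_vaapi_set_display() */

typedef struct _GstVaapiFoo {
    GstElement parent_instance;
    GstVaapiDisplay *display;
} GstVaapiFoo;

typedef struct _GstVaapiFooClass {
    GstElementClass parent_class;
} GstVaapiFooClass;

/* GstImplementsInterface hook: report which interfaces this element handles */
static gboolean
gst_vaapifoo_implements_interface_supported(GstImplementsInterface *iface,
    GType type)
{
    return (type == GST_TYPE_VIDEO_CONTEXT);
}

static void
gst_vaapifoo_implements_iface_init(GstImplementsInterfaceClass *iface)
{
    iface->supported = gst_vaapifoo_implements_interface_supported;
}

/* GstVideoContext hook: receive the shared VA display from the pipeline */
static void
gst_vaapifoo_set_video_context(GstVideoContext *context, const gchar *type,
    const GValue *value)
{
    GstVaapiFoo * const foo = (GstVaapiFoo *) context;

    gst_vaapi_set_display(type, value, &foo->display);
}

static void
gst_vaapifoo_video_context_iface_init(GstVideoContextInterface *iface)
{
    iface->set_context = gst_vaapifoo_set_video_context;
}

/* Same alias as used by the plugins in this commit */
#define GstVideoContextClass GstVideoContextInterface

G_DEFINE_TYPE_WITH_CODE(
    GstVaapiFoo,
    gst_vaapifoo,
    GST_TYPE_ELEMENT,
    G_IMPLEMENT_INTERFACE(GST_TYPE_IMPLEMENTS_INTERFACE,
                          gst_vaapifoo_implements_iface_init);
    G_IMPLEMENT_INTERFACE(GST_TYPE_VIDEO_CONTEXT,
                          gst_vaapifoo_video_context_iface_init))

/* Stubs required by G_DEFINE_TYPE_WITH_CODE() */
static void gst_vaapifoo_class_init(GstVaapiFooClass *klass) { (void) klass; }
static void gst_vaapifoo_init(GstVaapiFoo *foo) { foo->display = NULL; }

Keeping the hooks directly above the registration block is what each of the five hunks below does for its plugin element.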

gst/vaapi/gstvaapidecode.c

@@ -88,11 +88,36 @@ static GstStaticPadTemplate gst_vaapidecode_src_factory =
         GST_PAD_ALWAYS,
         GST_STATIC_CAPS(gst_vaapidecode_src_caps_str));
 
-static void
-gst_vaapidecode_implements_iface_init(GstImplementsInterfaceClass *iface);
+/* GstImplementsInterface interface */
+static gboolean
+gst_vaapidecode_implements_interface_supported(
+    GstImplementsInterface *iface,
+    GType type
+)
+{
+    return (type == GST_TYPE_VIDEO_CONTEXT);
+}
 
 static void
-gst_video_context_interface_init(GstVideoContextInterface *iface);
+gst_vaapidecode_implements_iface_init(GstImplementsInterfaceClass *iface)
+{
+    iface->supported = gst_vaapidecode_implements_interface_supported;
+}
+
+/* GstVideoContext interface */
+static void
+gst_vaapidecode_set_video_context(GstVideoContext *context, const gchar *type,
+    const GValue *value)
+{
+    GstVaapiDecode *decode = GST_VAAPIDECODE (context);
+    gst_vaapi_set_display (type, value, &decode->display);
+}
+
+static void
+gst_video_context_interface_init(GstVideoContextInterface *iface)
+{
+    iface->set_context = gst_vaapidecode_set_video_context;
+}
 
 #define GstVideoContextClass GstVideoContextInterface
 G_DEFINE_TYPE_WITH_CODE(
@@ -417,39 +442,6 @@ gst_vaapidecode_reset_full(GstVaapiDecode *decode, GstCaps *caps, gboolean hard)
     return gst_vaapidecode_create(decode, caps);
 }
 
-/* GstImplementsInterface interface */
-static gboolean
-gst_vaapidecode_implements_interface_supported(
-    GstImplementsInterface *iface,
-    GType type
-)
-{
-    return (type == GST_TYPE_VIDEO_CONTEXT);
-}
-
-static void
-gst_vaapidecode_implements_iface_init(GstImplementsInterfaceClass *iface)
-{
-    iface->supported = gst_vaapidecode_implements_interface_supported;
-}
-
-/* GstVideoContext interface */
-static void
-gst_vaapidecode_set_video_context(GstVideoContext *context, const gchar *type,
-    const GValue *value)
-{
-    GstVaapiDecode *decode = GST_VAAPIDECODE (context);
-    gst_vaapi_set_display (type, value, &decode->display);
-}
-
-static void
-gst_video_context_interface_init(GstVideoContextInterface *iface)
-{
-    iface->set_context = gst_vaapidecode_set_video_context;
-}
-
 static void
 gst_vaapidecode_finalize(GObject *object)
 {

gst/vaapi/gstvaapidownload.c

@@ -99,11 +99,36 @@ struct _GstVaapiDownloadClass {
     GstBaseTransformClass parent_class;
 };
 
-static void
-gst_vaapidownload_implements_iface_init(GstImplementsInterfaceClass *iface);
+/* GstImplementsInterface interface */
+static gboolean
+gst_vaapidownload_implements_interface_supported(
+    GstImplementsInterface *iface,
+    GType type
+)
+{
+    return (type == GST_TYPE_VIDEO_CONTEXT);
+}
 
 static void
-gst_video_context_interface_init(GstVideoContextInterface *iface);
+gst_vaapidownload_implements_iface_init(GstImplementsInterfaceClass *iface)
+{
+    iface->supported = gst_vaapidownload_implements_interface_supported;
+}
+
+/* GstVideoContext interface */
+static void
+gst_vaapidownload_set_video_context(GstVideoContext *context, const gchar *type,
+    const GValue *value)
+{
+    GstVaapiDownload *download = GST_VAAPIDOWNLOAD (context);
+    gst_vaapi_set_display (type, value, &download->display);
+}
+
+static void
+gst_video_context_interface_init(GstVideoContextInterface *iface)
+{
+    iface->set_context = gst_vaapidownload_set_video_context;
+}
 
 #define GstVideoContextClass GstVideoContextInterface
 G_DEFINE_TYPE_WITH_CODE(
@@ -161,39 +186,6 @@ gst_vaapidownload_query(
     GstQuery *query
 );
 
-/* GstImplementsInterface interface */
-static gboolean
-gst_vaapidownload_implements_interface_supported(
-    GstImplementsInterface *iface,
-    GType type
-)
-{
-    return (type == GST_TYPE_VIDEO_CONTEXT);
-}
-
-static void
-gst_vaapidownload_implements_iface_init(GstImplementsInterfaceClass *iface)
-{
-    iface->supported = gst_vaapidownload_implements_interface_supported;
-}
-
-/* GstVideoContext interface */
-static void
-gst_vaapidownload_set_video_context(GstVideoContext *context, const gchar *type,
-    const GValue *value)
-{
-    GstVaapiDownload *download = GST_VAAPIDOWNLOAD (context);
-    gst_vaapi_set_display (type, value, &download->display);
-}
-
-static void
-gst_video_context_interface_init(GstVideoContextInterface *iface)
-{
-    iface->set_context = gst_vaapidownload_set_video_context;
-}
-
 static void
 gst_vaapidownload_destroy(GstVaapiDownload *download)
 {

gst/vaapi/gstvaapipostproc.c

@@ -74,11 +74,40 @@ static GstStaticPadTemplate gst_vaapipostproc_src_factory =
         GST_PAD_ALWAYS,
         GST_STATIC_CAPS(gst_vaapipostproc_src_caps_str));
 
-static void
-gst_vaapipostproc_implements_iface_init(GstImplementsInterfaceClass *iface);
+/* GstImplementsInterface interface */
+static gboolean
+gst_vaapipostproc_implements_interface_supported(
+    GstImplementsInterface *iface,
+    GType type
+)
+{
+    return (type == GST_TYPE_VIDEO_CONTEXT);
+}
 
 static void
-gst_video_context_interface_init(GstVideoContextInterface *iface);
+gst_vaapipostproc_implements_iface_init(GstImplementsInterfaceClass *iface)
+{
+    iface->supported = gst_vaapipostproc_implements_interface_supported;
+}
+
+/* GstVideoContext interface */
+static void
+gst_vaapipostproc_set_video_context(
+    GstVideoContext *context,
+    const gchar *type,
+    const GValue *value
+)
+{
+    GstVaapiPostproc * const postproc = GST_VAAPIPOSTPROC(context);
+    gst_vaapi_set_display(type, value, &postproc->display);
+}
+
+static void
+gst_video_context_interface_init(GstVideoContextInterface *iface)
+{
+    iface->set_context = gst_vaapipostproc_set_video_context;
+}
 
 #define GstVideoContextClass GstVideoContextInterface
 G_DEFINE_TYPE_WITH_CODE(
@@ -161,43 +190,6 @@ get_vaapipostproc_from_pad(GstPad *pad)
     return GST_VAAPIPOSTPROC(gst_pad_get_parent_element(pad));
 }
 
-/* GstImplementsInterface interface */
-static gboolean
-gst_vaapipostproc_implements_interface_supported(
-    GstImplementsInterface *iface,
-    GType type
-)
-{
-    return (type == GST_TYPE_VIDEO_CONTEXT);
-}
-
-static void
-gst_vaapipostproc_implements_iface_init(GstImplementsInterfaceClass *iface)
-{
-    iface->supported = gst_vaapipostproc_implements_interface_supported;
-}
-
-/* GstVideoContext interface */
-static void
-gst_vaapipostproc_set_video_context(
-    GstVideoContext *context,
-    const gchar *type,
-    const GValue *value
-)
-{
-    GstVaapiPostproc * const postproc = GST_VAAPIPOSTPROC(context);
-    gst_vaapi_set_display(type, value, &postproc->display);
-}
-
-static void
-gst_video_context_interface_init(GstVideoContextInterface *iface)
-{
-    iface->set_context = gst_vaapipostproc_set_video_context;
-}
-
 static inline gboolean
 gst_vaapipostproc_ensure_display(GstVaapiPostproc *postproc)
 {

gst/vaapi/gstvaapisink.c

@@ -85,11 +85,37 @@ static GstStaticPadTemplate gst_vaapisink_sink_factory =
         GST_PAD_ALWAYS,
         GST_STATIC_CAPS(gst_vaapisink_sink_caps_str));
 
-static void
-gst_vaapisink_implements_iface_init(GstImplementsInterfaceClass *iface);
+/* GstImplementsInterface interface */
+static gboolean
+gst_vaapisink_implements_interface_supported(
+    GstImplementsInterface *iface,
+    GType type
+)
+{
+    return (type == GST_TYPE_VIDEO_CONTEXT ||
+            type == GST_TYPE_X_OVERLAY);
+}
 
 static void
-gst_vaapisink_video_context_iface_init(GstVideoContextInterface *iface);
+gst_vaapisink_implements_iface_init(GstImplementsInterfaceClass *iface)
+{
+    iface->supported = gst_vaapisink_implements_interface_supported;
+}
+
+/* GstVideoContext interface */
+static void
+gst_vaapisink_set_video_context(GstVideoContext *context, const gchar *type,
+    const GValue *value)
+{
+    GstVaapiSink *sink = GST_VAAPISINK (context);
+    gst_vaapi_set_display (type, value, &sink->display);
+}
+
+static void
+gst_vaapisink_video_context_iface_init(GstVideoContextInterface *iface)
+{
+    iface->set_context = gst_vaapisink_set_video_context;
+}
 
 static void
 gst_vaapisink_xoverlay_iface_init(GstXOverlayClass *iface);
@@ -118,40 +144,6 @@ enum {
 #define DEFAULT_DISPLAY_TYPE GST_VAAPI_DISPLAY_TYPE_ANY
 #define DEFAULT_ROTATION GST_VAAPI_ROTATION_0
 
-/* GstImplementsInterface interface */
-static gboolean
-gst_vaapisink_implements_interface_supported(
-    GstImplementsInterface *iface,
-    GType type
-)
-{
-    return (type == GST_TYPE_VIDEO_CONTEXT ||
-            type == GST_TYPE_X_OVERLAY);
-}
-
-static void
-gst_vaapisink_implements_iface_init(GstImplementsInterfaceClass *iface)
-{
-    iface->supported = gst_vaapisink_implements_interface_supported;
-}
-
-/* GstVideoContext interface */
-static void
-gst_vaapisink_set_video_context(GstVideoContext *context, const gchar *type,
-    const GValue *value)
-{
-    GstVaapiSink *sink = GST_VAAPISINK (context);
-    gst_vaapi_set_display (type, value, &sink->display);
-}
-
-static void
-gst_vaapisink_video_context_iface_init(GstVideoContextInterface *iface)
-{
-    iface->set_context = gst_vaapisink_set_video_context;
-}
-
 /* GstXOverlay interface */
 #if USE_X11

gst/vaapi/gstvaapiupload.c

@@ -75,11 +75,41 @@ static GstStaticPadTemplate gst_vaapiupload_src_factory =
         GST_PAD_ALWAYS,
         GST_STATIC_CAPS(gst_vaapiupload_vaapi_caps_str));
 
-static void
-gst_vaapiupload_implements_iface_init(GstImplementsInterfaceClass *iface);
+/* GstImplementsInterface interface */
+static gboolean
+gst_vaapiupload_implements_interface_supported(
+    GstImplementsInterface *iface,
+    GType type
+)
+{
+    return (type == GST_TYPE_VIDEO_CONTEXT);
+}
 
 static void
-gst_video_context_interface_init(GstVideoContextInterface *iface);
+gst_vaapiupload_implements_iface_init(GstImplementsInterfaceClass *iface)
+{
+    iface->supported = gst_vaapiupload_implements_interface_supported;
+}
+
+/* GstVideoContext interface */
+static void
+gst_vaapiupload_set_video_context(GstVideoContext *context, const gchar *type,
+    const GValue *value)
+{
+    GstVaapiUpload * const upload = GST_VAAPIUPLOAD(context);
+
+    gst_vaapi_set_display(type, value, &upload->display);
+
+    if (upload->uploader)
+        gst_vaapi_uploader_ensure_display(upload->uploader, upload->display);
+}
+
+static void
+gst_video_context_interface_init(GstVideoContextInterface *iface)
+{
+    iface->set_context = gst_vaapiupload_set_video_context;
+}
 
 #define GstVideoContextClass GstVideoContextInterface
 G_DEFINE_TYPE_WITH_CODE(
@@ -149,43 +179,6 @@ gst_vaapiupload_query(
     GstQuery *query
 );
 
-/* GstImplementsInterface interface */
-static gboolean
-gst_vaapiupload_implements_interface_supported(
-    GstImplementsInterface *iface,
-    GType type
-)
-{
-    return (type == GST_TYPE_VIDEO_CONTEXT);
-}
-
-static void
-gst_vaapiupload_implements_iface_init(GstImplementsInterfaceClass *iface)
-{
-    iface->supported = gst_vaapiupload_implements_interface_supported;
-}
-
-/* GstVideoContext interface */
-static void
-gst_vaapiupload_set_video_context(GstVideoContext *context, const gchar *type,
-    const GValue *value)
-{
-    GstVaapiUpload * const upload = GST_VAAPIUPLOAD(context);
-
-    gst_vaapi_set_display(type, value, &upload->display);
-
-    if (upload->uploader)
-        gst_vaapi_uploader_ensure_display(upload->uploader, upload->display);
-}
-
-static void
-gst_video_context_interface_init(GstVideoContextInterface *iface)
-{
-    iface->set_context = gst_vaapiupload_set_video_context;
-}
-
 static void
 gst_vaapiupload_destroy(GstVaapiUpload *upload)
 {