plugins: re-indent common and video context creation utils.

parent c5581298fb
commit b80257389d

4 changed files with 373 additions and 385 deletions
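The change is purely cosmetic: the plugin utility and video context code is moved to the common GStreamer indentation style (2-space indentation, a space before the opening parenthesis of a call, cuddled else). As a minimal illustration, the same two statements in the previous layout and in the new one; this is an excerpt adapted from the first hunk below, not a compilable unit on its own, and the indentation shown for the old layout is approximate.

    /* previous gst-vaapi layout (indentation approximate) */
    display = m->create_display(NULL);
    if (display || display_type != GST_VAAPI_DISPLAY_TYPE_ANY)
        break;

  /* new layout, as applied throughout this commit */
  display = m->create_display (NULL);
  if (display || display_type != GST_VAAPI_DISPLAY_TYPE_ANY)
    break;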
@@ -41,364 +41,354 @@
/* Preferred first */
static const char *display_types[] = {
  "gst-vaapi-display",
  "vaapi-display",
#if USE_WAYLAND
  "wl-display",
  "wl-display-name",
#endif
#if USE_X11
  "x11-display",
  "x11-display-name",
#endif
#if USE_DRM
  "drm-device",
  "drm-device-path",
#endif
  NULL
};

typedef struct
{
  const gchar *type_str;
  GstVaapiDisplayType type;
  GstVaapiDisplay *(*create_display) (const gchar *);
} DisplayMap;

static const DisplayMap g_display_map[] = {
#if USE_WAYLAND
  {"wayland",
      GST_VAAPI_DISPLAY_TYPE_WAYLAND,
      gst_vaapi_display_wayland_new},
#endif
#if USE_GLX
  {"glx",
      GST_VAAPI_DISPLAY_TYPE_GLX,
      gst_vaapi_display_glx_new},
#endif
#if USE_X11
  {"x11",
      GST_VAAPI_DISPLAY_TYPE_X11,
      gst_vaapi_display_x11_new},
#endif
#if USE_DRM
  {"drm",
      GST_VAAPI_DISPLAY_TYPE_DRM,
      gst_vaapi_display_drm_new},
#endif
  {NULL,}
};

static GstVaapiDisplay *
gst_vaapi_create_display (GstVaapiDisplayType display_type)
{
  GstVaapiDisplay *display = NULL;
  const DisplayMap *m;

  for (m = g_display_map; m->type_str != NULL; m++) {
    if (display_type != GST_VAAPI_DISPLAY_TYPE_ANY && display_type != m->type)
      continue;

    display = m->create_display (NULL);
    if (display || display_type != GST_VAAPI_DISPLAY_TYPE_ANY)
      break;
  }
  return display;
}

gboolean
gst_vaapi_ensure_display (gpointer element, GstVaapiDisplayType type)
{
  GstVaapiPluginBase *const plugin = GST_VAAPI_PLUGIN_BASE (element);
  GstVaapiDisplay *display;
  GstVideoContext *context;

  g_return_val_if_fail (GST_IS_VIDEO_CONTEXT (element), FALSE);

  context = GST_VIDEO_CONTEXT (element);
  g_return_val_if_fail (context != NULL, FALSE);

  gst_vaapi_video_context_prepare (context, display_types);

  /* Neighbour found and it updated the display */
  if (plugin->display
      && gst_vaapi_display_type_is_compatible (plugin->display_type, type))
    return TRUE;

  /* If no neighboor, or application not interested, use system default */
  display = gst_vaapi_create_display (type);
  if (!display)
    return FALSE;

  gst_vaapi_video_context_propagate (context, display);
  GST_VAAPI_PLUGIN_BASE_DISPLAY_REPLACE (plugin, display);
  gst_vaapi_display_unref (display);
  return TRUE;
}

void
gst_vaapi_set_display (const gchar * type,
    const GValue * value, GstVaapiDisplay ** display_ptr)
{
  GstVaapiDisplay *display = NULL;

  if (!strcmp (type, "vaapi-display")) {
    g_return_if_fail (G_VALUE_HOLDS_POINTER (value));
    display = gst_vaapi_display_new_with_display (g_value_get_pointer (value));
  } else if (!strcmp (type, "gst-vaapi-display")) {
    g_return_if_fail (G_VALUE_HOLDS_POINTER (value));
    display = gst_vaapi_display_ref (g_value_get_pointer (value));
  }
#if USE_DRM
  else if (!strcmp (type, "drm-device")) {
    gint device;
    g_return_if_fail (G_VALUE_HOLDS_INT (value));
    device = g_value_get_int (value);
    display = gst_vaapi_display_drm_new_with_device (device);
  } else if (!strcmp (type, "drm-device-path")) {
    const gchar *device_path;
    g_return_if_fail (G_VALUE_HOLDS_STRING (value));
    device_path = g_value_get_string (value);
    display = gst_vaapi_display_drm_new (device_path);
  }
#endif
#if USE_X11
  else if (!strcmp (type, "x11-display-name")) {
    g_return_if_fail (G_VALUE_HOLDS_STRING (value));
#if USE_GLX
    display = gst_vaapi_display_glx_new (g_value_get_string (value));
#endif
    if (!display)
      display = gst_vaapi_display_x11_new (g_value_get_string (value));
  } else if (!strcmp (type, "x11-display")) {
    g_return_if_fail (G_VALUE_HOLDS_POINTER (value));
#if USE_GLX
    display =
        gst_vaapi_display_glx_new_with_display (g_value_get_pointer (value));
#endif
    if (!display)
      display =
          gst_vaapi_display_x11_new_with_display (g_value_get_pointer (value));
  }
#endif
#if USE_WAYLAND
  else if (!strcmp (type, "wl-display")) {
    struct wl_display *wl_display;
    g_return_if_fail (G_VALUE_HOLDS_POINTER (value));
    wl_display = g_value_get_pointer (value);
    display = gst_vaapi_display_wayland_new_with_display (wl_display);
  } else if (!strcmp (type, "wl-display-name")) {
    const gchar *display_name;
    g_return_if_fail (G_VALUE_HOLDS_STRING (value));
    display_name = g_value_get_string (value);
    display = gst_vaapi_display_wayland_new (display_name);
  }
#endif

  if (display) {
    gst_vaapi_display_replace (display_ptr, display);
    gst_vaapi_display_unref (display);
  }
}

gboolean
gst_vaapi_reply_to_query (GstQuery * query, GstVaapiDisplay * display)
{
#if GST_CHECK_VERSION(1,1,0)
  const gchar *type = NULL;
  GstContext *context;

  if (GST_QUERY_TYPE (query) != GST_QUERY_CONTEXT)
    return FALSE;

  if (!display)
    return FALSE;

  if (!gst_query_parse_context_type (query, &type))
    return FALSE;

  if (g_strcmp0 (type, GST_VAAPI_DISPLAY_CONTEXT_TYPE_NAME))
    return FALSE;

  context = gst_vaapi_video_context_new_with_display (display, FALSE);
  gst_query_set_context (query, context);
  gst_context_unref (context);

  return TRUE;
#else
  GstVaapiDisplayType display_type;
  const gchar **types;
  const gchar *type;
  gint i;
  gboolean res = FALSE;

  if (GST_QUERY_TYPE (query) != GST_QUERY_CUSTOM)
    return FALSE;

  if (!display)
    return FALSE;

  types = gst_video_context_query_get_supported_types (query);

  if (!types)
    return FALSE;

  display_type = gst_vaapi_display_get_display_type (display);
  for (i = 0; types[i] && !res; i++) {
    type = types[i];

    res = TRUE;
    if (!strcmp (type, "gst-vaapi-display")) {
      gst_video_context_query_set_pointer (query, type, display);
    } else if (!strcmp (type, "vaapi-display")) {
      VADisplay vadpy = gst_vaapi_display_get_display (display);
      gst_video_context_query_set_pointer (query, type, vadpy);
    } else {
      switch (display_type) {
#if USE_DRM
        case GST_VAAPI_DISPLAY_TYPE_DRM:{
          GstVaapiDisplayDRM *const drm_dpy = GST_VAAPI_DISPLAY_DRM (display);
          if (!strcmp (type, "drm-device-path"))
            gst_video_context_query_set_string (query, type,
                gst_vaapi_display_drm_get_device_path (drm_dpy));
#if 0
          /* XXX: gst_video_context_query_set_int() does not exist yet */
          else if (!strcmp (type, "drm-device"))
            gst_video_context_query_set_int (query, type,
                gst_vaapi_display_drm_get_device (drm_dpy));
#endif
          else
            res = FALSE;
          break;
        }
#endif
#if USE_X11
#if USE_GLX
        case GST_VAAPI_DISPLAY_TYPE_GLX:
#endif
        case GST_VAAPI_DISPLAY_TYPE_X11:{
          GstVaapiDisplayX11 *const xvadpy = GST_VAAPI_DISPLAY_X11 (display);
          Display *const x11dpy = gst_vaapi_display_x11_get_display (xvadpy);
          if (!strcmp (type, "x11-display"))
            gst_video_context_query_set_pointer (query, type, x11dpy);
          else if (!strcmp (type, "x11-display-name"))
            gst_video_context_query_set_string (query, type,
                DisplayString (x11dpy));
          else
            res = FALSE;
          break;
        }
#endif
#if USE_WAYLAND
        case GST_VAAPI_DISPLAY_TYPE_WAYLAND:{
          GstVaapiDisplayWayland *const wlvadpy =
              GST_VAAPI_DISPLAY_WAYLAND (display);
          struct wl_display *const wldpy =
              gst_vaapi_display_wayland_get_display (wlvadpy);
          if (!strcmp (type, "wl-display"))
            gst_video_context_query_set_pointer (query, type, wldpy);
          else
            res = FALSE;
          break;
        }
#endif
        default:
          res = FALSE;
          break;
      }
    }
  }
  return res;
#endif /* !GST_CHECK_VERSION(1,1,0) */
}

gboolean
gst_vaapi_append_surface_caps (GstCaps * out_caps, GstCaps * in_caps)
{
  GstStructure *structure;
  const GValue *v_width, *v_height, *v_framerate, *v_par;
  guint i, n_structures;

  structure = gst_caps_get_structure (in_caps, 0);
  v_width = gst_structure_get_value (structure, "width");
  v_height = gst_structure_get_value (structure, "height");
  v_framerate = gst_structure_get_value (structure, "framerate");
  v_par = gst_structure_get_value (structure, "pixel-aspect-ratio");
  if (!v_width || !v_height)
    return FALSE;

  n_structures = gst_caps_get_size (out_caps);
  for (i = 0; i < n_structures; i++) {
    structure = gst_caps_get_structure (out_caps, i);
    gst_structure_set_value (structure, "width", v_width);
    gst_structure_set_value (structure, "height", v_height);
    if (v_framerate)
      gst_structure_set_value (structure, "framerate", v_framerate);
    if (v_par)
      gst_structure_set_value (structure, "pixel-aspect-ratio", v_par);
  }
  return TRUE;
}

gboolean
gst_vaapi_apply_composition (GstVaapiSurface * surface, GstBuffer * buffer)
{
#if GST_CHECK_VERSION(1,0,0)
  GstVideoOverlayCompositionMeta *const cmeta =
      gst_buffer_get_video_overlay_composition_meta (buffer);
  GstVideoOverlayComposition *composition;

  if (!cmeta)
    return TRUE;
  composition = cmeta->overlay;
#else
  GstVideoOverlayComposition *const composition =
      gst_video_buffer_get_overlay_composition (buffer);
#endif
  if (!composition)
    return TRUE;
  return gst_vaapi_surface_set_subpictures_from_composition (surface,
      composition, TRUE);
}

gboolean
gst_caps_set_interlaced (GstCaps * caps, GstVideoInfo * vip)
{
#if GST_CHECK_VERSION(1,0,0)
  GstVideoInterlaceMode mode;
  const gchar *mode_str;

  mode = vip ? GST_VIDEO_INFO_INTERLACE_MODE (vip) :
      GST_VIDEO_INTERLACE_MODE_PROGRESSIVE;
  switch (mode) {
    case GST_VIDEO_INTERLACE_MODE_PROGRESSIVE:
      mode_str = "progressive";
      break;
    case GST_VIDEO_INTERLACE_MODE_INTERLEAVED:
      mode_str = "interleaved";
      break;
    case GST_VIDEO_INTERLACE_MODE_MIXED:
      mode_str = "mixed";
      break;
    default:
      GST_ERROR ("unsupported `interlace-mode' %d", mode);
      return FALSE;
  }

  gst_caps_set_simple (caps, "interlace-mode", G_TYPE_STRING, mode_str, NULL);
#else
  gst_caps_set_simple (caps, "interlaced", G_TYPE_BOOLEAN,
      vip ? GST_VIDEO_INFO_IS_INTERLACED (vip) : FALSE, NULL);
#endif
  return TRUE;
}
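As a usage note, a minimal sketch of how a vaapi element is expected to drive the two entry points above. The MyVaapiElement/my_vaapi_element_* names, the display field and the vfunc wiring are hypothetical; only gst_vaapi_ensure_display() and gst_vaapi_reply_to_query() come from the code above, and the element is assumed to derive from GstVaapiPluginBase and to implement the GstVideoContext interface, as gst_vaapi_ensure_display() requires.

/* Hypothetical element code; only the gst_vaapi_*() calls are real. */
static gboolean
my_vaapi_element_start (GstBaseTransform * trans)
{
  /* Find a display from a neighbour element or from the application,
   * or create a new one as a last resort. */
  return gst_vaapi_ensure_display (trans, GST_VAAPI_DISPLAY_TYPE_ANY);
}

static gboolean
my_vaapi_element_query (GstBaseTransform * trans, GstPadDirection direction,
    GstQuery * query)
{
  MyVaapiElement *const element = MY_VAAPI_ELEMENT (trans);

  /* Hand out our GstVaapiDisplay to whoever asks for the vaapi context
   * (element->display is a hypothetical field holding the display). */
  if (gst_vaapi_reply_to_query (query, element->display))
    return TRUE;

  return GST_BASE_TRANSFORM_CLASS (my_vaapi_element_parent_class)->query (trans,
      direction, query);
}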
@@ -30,27 +30,24 @@
G_GNUC_INTERNAL
gboolean
gst_vaapi_ensure_display (gpointer element, GstVaapiDisplayType type);

G_GNUC_INTERNAL
void
gst_vaapi_set_display (const gchar * type,
    const GValue * value, GstVaapiDisplay ** display_ptr);

G_GNUC_INTERNAL
gboolean
gst_vaapi_reply_to_query (GstQuery * query, GstVaapiDisplay * display);

G_GNUC_INTERNAL
gboolean
gst_vaapi_append_surface_caps (GstCaps * out_caps, GstCaps * in_caps);

G_GNUC_INTERNAL
gboolean
gst_vaapi_apply_composition (GstVaapiSurface * surface, GstBuffer * buffer);

#ifndef G_PRIMITIVE_SWAP
#define G_PRIMITIVE_SWAP(type, a, b) do { \
@@ -73,6 +70,6 @@ gst_vaapi_apply_composition(GstVaapiSurface *surface, GstBuffer *buffer);
G_GNUC_INTERNAL
gboolean
gst_caps_set_interlaced (GstCaps * caps, GstVideoInfo * vip);

#endif /* GST_VAAPI_PLUGIN_UTIL_H */
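For reference, a small sketch of the composition helper declared above in use. The render_with_overlay() wrapper is hypothetical; gst_vaapi_apply_composition() is the real entry point, and gst_buffer_add_video_overlay_composition_meta() is the standard gst-video API for attaching the overlay meta that the helper reads, normally done by an upstream element rather than by the vaapi plugin itself.

/* Hypothetical helper: attach an overlay composition to the buffer, then let
 * gst_vaapi_apply_composition() map it onto the surface as VA subpictures. */
static gboolean
render_with_overlay (GstVaapiSurface * surface, GstBuffer * buffer,
    GstVideoOverlayComposition * composition)
{
  /* Normally done upstream, e.g. by a subtitle overlay element */
  gst_buffer_add_video_overlay_composition_meta (buffer, composition);

  return gst_vaapi_apply_composition (surface, buffer);
}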
@@ -28,154 +28,154 @@
#if GST_CHECK_VERSION(1,1,0)

GST_DEBUG_CATEGORY_STATIC (GST_CAT_CONTEXT);

#define GST_VAAPI_TYPE_DISPLAY \
    gst_vaapi_display_get_type()

GType
gst_vaapi_display_get_type (void)
    G_GNUC_CONST;

G_DEFINE_BOXED_TYPE (GstVaapiDisplay, gst_vaapi_display,
    (GBoxedCopyFunc) gst_vaapi_display_ref,
    (GBoxedFreeFunc) gst_vaapi_display_unref)

GstContext *gst_vaapi_video_context_new_with_display (GstVaapiDisplay *
    display, gboolean persistent)
{
  GstContext *context;
  GstStructure *structure;

  context = gst_context_new (GST_VAAPI_DISPLAY_CONTEXT_TYPE_NAME, persistent);
  structure = gst_context_writable_structure (context);
  gst_structure_set (structure, "display", GST_VAAPI_TYPE_DISPLAY,
      display, NULL);
  return context;
}

gboolean
gst_vaapi_video_context_get_display (GstContext * context,
    GstVaapiDisplay ** display_ptr)
{
  const GstStructure *structure;

  g_return_val_if_fail (GST_IS_CONTEXT (context), FALSE);
  g_return_val_if_fail (g_strcmp0 (gst_context_get_context_type (context),
          GST_VAAPI_DISPLAY_CONTEXT_TYPE_NAME) == 0, FALSE);

  structure = gst_context_get_structure (context);
  return gst_structure_get (structure, "display", GST_VAAPI_TYPE_DISPLAY,
      display_ptr, NULL);
}

static gboolean
context_pad_query (const GValue * item, GValue * value, gpointer user_data)
{
  GstPad *const pad = g_value_get_object (item);
  GstQuery *const query = user_data;

  if (gst_pad_peer_query (pad, query)) {
    g_value_set_boolean (value, TRUE);
    return FALSE;
  }

  GST_CAT_INFO_OBJECT (GST_CAT_CONTEXT, pad, "context pad peer query failed");
  return TRUE;
}

static gboolean
run_context_query (GstElement * element, GstQuery * query)
{
  GstIteratorFoldFunction const func = context_pad_query;
  GstIterator *it;
  GValue res = { 0 };

  g_value_init (&res, G_TYPE_BOOLEAN);
  g_value_set_boolean (&res, FALSE);

  /* Ask downstream neighbour */
  it = gst_element_iterate_src_pads (element);
  while (gst_iterator_fold (it, func, &res, query) == GST_ITERATOR_RESYNC)
    gst_iterator_resync (it);
  gst_iterator_free (it);

  if (g_value_get_boolean (&res))
    return TRUE;

  /* If none, ask upstream neighbour (auto-plugged case) */
  it = gst_element_iterate_sink_pads (element);
  while (gst_iterator_fold (it, func, &res, query) == GST_ITERATOR_RESYNC)
    gst_iterator_resync (it);
  gst_iterator_free (it);

  return g_value_get_boolean (&res);
}

void
gst_vaapi_video_context_prepare (GstElement * element, const gchar ** types)
{
  GstContext *context;
  GstQuery *query;
  GstMessage *msg;

  if (!GST_CAT_CONTEXT)
    GST_DEBUG_CATEGORY_GET (GST_CAT_CONTEXT, "GST_CONTEXT");

  /* 1) Check if the element already has a context of the specific
   * type, i.e. it was previously set via
   * gst_element_set_context(). */
  /* This was already done by the caller of this function:
   * gst_vaapi_ensure_display() */

  /* 2) Query downstream with GST_QUERY_CONTEXT for the context and
     check if downstream already has a context of the specific
     type */
  /* 3) Query upstream with GST_QUERY_CONTEXT for the context and
     check if upstream already has a context of the specific
     type */
  context = NULL;
  query = gst_query_new_context (GST_VAAPI_DISPLAY_CONTEXT_TYPE_NAME);
  if (run_context_query (element, query)) {
    gst_query_parse_context (query, &context);
    GST_CAT_INFO_OBJECT (GST_CAT_CONTEXT, element,
        "found context (%p) in query", context);
    gst_element_set_context (element, context);
  } else {
    /* 4) Post a GST_MESSAGE_NEED_CONTEXT message on the bus with
       the required context types and afterwards check if an
       usable context was set now as in 1). The message could
       be handled by the parent bins of the element and the
       application. */
    GST_CAT_INFO_OBJECT (GST_CAT_CONTEXT, element,
        "posting `need-context' message");
    msg = gst_message_new_need_context (GST_OBJECT_CAST (element),
        GST_VAAPI_DISPLAY_CONTEXT_TYPE_NAME);
    gst_element_post_message (element, msg);

    /* The check of an usable context is done by the caller:
       gst_vaapi_ensure_display() */
  }

  gst_query_unref (query);
}

/* 5) Create a context by itself and post a GST_MESSAGE_HAVE_CONTEXT message
   on the bus. */
void
gst_vaapi_video_context_propagate (GstElement * element,
    GstVaapiDisplay * display)
{
  GstContext *context;
  GstMessage *msg;

  context = gst_vaapi_video_context_new_with_display (display, FALSE);

  GST_CAT_INFO_OBJECT (GST_CAT_CONTEXT, element,
      "posting `have-context' (%p) message with display (%p)",
      context, display);
  msg = gst_message_new_have_context (GST_OBJECT_CAST (element), context);
  gst_element_post_message (GST_ELEMENT_CAST (element), msg);
}

#endif
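A sketch of the application side of step 4 above: answering the `need-context' message with a context that wraps an application-owned GstVaapiDisplay. The sync-bus handler and app_display are hypothetical; gst_vaapi_video_context_new_with_display() and GST_VAAPI_DISPLAY_CONTEXT_TYPE_NAME come from the code above, the rest is standard GstBus/GstContext API.

/* Hypothetical application code, installed with gst_bus_set_sync_handler(). */
static GstBusSyncReply
on_bus_sync_message (GstBus * bus, GstMessage * msg, gpointer user_data)
{
  GstVaapiDisplay *const app_display = user_data;
  const gchar *context_type = NULL;
  GstContext *context;

  if (GST_MESSAGE_TYPE (msg) != GST_MESSAGE_NEED_CONTEXT)
    return GST_BUS_PASS;

  gst_message_parse_context_type (msg, &context_type);
  if (g_strcmp0 (context_type, GST_VAAPI_DISPLAY_CONTEXT_TYPE_NAME) != 0)
    return GST_BUS_PASS;

  /* Hand the requesting element a persistent context with our display */
  context = gst_vaapi_video_context_new_with_display (app_display, TRUE);
  gst_element_set_context (GST_ELEMENT (GST_MESSAGE_SRC (msg)), context);
  gst_context_unref (context);
  return GST_BUS_DROP;
}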
@@ -41,35 +41,36 @@
G_GNUC_INTERNAL
GstContext *
gst_vaapi_video_context_new_with_display (GstVaapiDisplay * display,
    gboolean persistent);

G_GNUC_INTERNAL
gboolean
gst_vaapi_video_context_get_display (GstContext * context,
    GstVaapiDisplay ** display_ptr);

G_GNUC_INTERNAL
void
gst_vaapi_video_context_prepare (GstElement * element, const gchar ** types);

G_GNUC_INTERNAL
void
gst_vaapi_video_context_propagate (GstElement * element,
    GstVaapiDisplay * display);

#else
#include <gst/video/videocontext.h>

static inline void
gst_vaapi_video_context_prepare (GstVideoContext * context,
    const gchar ** types)
{
  gst_video_context_prepare (context, types);
}

static inline void
gst_vaapi_video_context_propagate (GstVideoContext * context,
    GstVaapiDisplay * display)
{
}
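Finally, a sketch of the element-side counterpart of the declarations above: unpacking the display again in the GstElement::set_context() vfunc. The my_vaapi_element_* names and the chaining are hypothetical; gst_vaapi_video_context_get_display() and GST_VAAPI_DISPLAY_CONTEXT_TYPE_NAME are the real entry points declared above.

static void
my_vaapi_element_set_context (GstElement * element, GstContext * context)
{
  GstVaapiDisplay *display = NULL;

  if (g_strcmp0 (gst_context_get_context_type (context),
          GST_VAAPI_DISPLAY_CONTEXT_TYPE_NAME) == 0 &&
      gst_vaapi_video_context_get_display (context, &display)) {
    /* ... replace the element's stored display with the one received ... */
    gst_vaapi_display_unref (display);
  }

  if (GST_ELEMENT_CLASS (my_vaapi_element_parent_class)->set_context)
    GST_ELEMENT_CLASS (my_vaapi_element_parent_class)->set_context (element,
        context);
}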