gldisplay: implement runtime GL api filtering

Needed so that the pipeline/application can limit the choice of GL api
to what it supports
Matthew Waters 2014-11-27 21:05:45 +11:00 committed by Tim-Philipp Müller
parent 892d056a44
commit 4bd62ebb03
26 changed files with 242 additions and 37 deletions
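A rough application-side sketch of the intended usage (illustrative only, not part of this commit; the helper name, the gst/gl/gl.h umbrella include and the exact flow are assumptions): an application that can only handle GLES2 creates its own GstGLDisplay, filters it with the new gst_gl_display_filter_gl_api(), and hands it to the pipeline via a GstContext so that GL elements can only negotiate a GLES2 context.

#include <gst/gst.h>
#include <gst/gl/gl.h>

/* Hypothetical application-side helper: restrict a shared GstGLDisplay to
 * GLES2 before any element creates a GstGLContext from it. */
static void
use_gles2_only (GstElement * pipeline)
{
  GstGLDisplay *display = gst_gl_display_new ();
  GstContext *context;

  /* from now on, contexts created from this display may only use GLES2 */
  gst_gl_display_filter_gl_api (display, GST_GL_API_GLES2);

  context = gst_context_new (GST_GL_DISPLAY_CONTEXT_TYPE, TRUE);
  gst_context_set_gl_display (context, display);
  gst_element_set_context (pipeline, context);

  gst_context_unref (context);
  gst_object_unref (display);
}

A call like use_gles2_only (GST_ELEMENT (pipeline)) before starting the pipeline would then be picked up by the set_context and GST_QUERY_CONTEXT paths touched in the hunks below, each of which re-applies its element's supported_gl_api to the shared display.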

View file

@ -296,6 +296,8 @@ gst_gl_bumper_class_init (GstGLBumperClass * klass)
"Filter/Effect/Video", "Bump mapping filter",
"Cyril Comparon <cyril.comparon@gmail.com>, "
"Julien Isorce <julien.isorce@gmail.com>");
GST_GL_FILTER_CLASS (klass)->supported_gl_api = GST_GL_API_OPENGL;
}
static void

View file

@ -104,6 +104,8 @@ gst_gl_colorscale_class_init (GstGLColorscaleClass * klass)
filter_class->filter_texture = gst_gl_colorscale_filter_texture;
basetransform_class->passthrough_on_same_caps = TRUE;
filter_class->supported_gl_api =
GST_GL_API_OPENGL | GST_GL_API_OPENGL3 | GST_GL_API_GLES2;
}
static void

View file

@ -168,6 +168,8 @@ gst_gl_deinterlace_class_init (GstGLDeinterlaceClass * klass)
gst_gl_deinterlace_filter_texture;
GST_GL_FILTER_CLASS (klass)->onInitFBO = gst_gl_deinterlace_init_shader;
GST_GL_FILTER_CLASS (klass)->onReset = gst_gl_deinterlace_reset;
GST_GL_FILTER_CLASS (klass)->supported_gl_api = GST_GL_API_OPENGL;
}
static void

View file

@ -193,6 +193,8 @@ gst_gl_differencematte_class_init (GstGLDifferenceMatteClass * klass)
"Gstreamer OpenGL DifferenceMatte", "Filter/Effect/Video",
"Saves a background frame and replace it with a pixbuf",
"Filippo Argiolas <filippo.argiolas@gmail.com>");
GST_GL_FILTER_CLASS (klass)->supported_gl_api = GST_GL_API_OPENGL;
}
static void

View file

@ -130,56 +130,91 @@ gst_gl_effects_effect_get_type (void)
static void
gst_gl_effects_set_effect (GstGLEffects * effects, gint effect_type)
{
GstGLFilterClass *filter_class = GST_GL_FILTER_GET_CLASS (effects);
GstGLContext *context = GST_GL_FILTER (effects)->context;
switch (effect_type) {
case GST_GL_EFFECT_IDENTITY:
effects->effect = (GstGLEffectProcessFunc) gst_gl_effects_identity;
filter_class->supported_gl_api =
GST_GL_API_GLES2 | GST_GL_API_OPENGL | GST_GL_API_OPENGL3;
effects->current_effect = effect_type;
break;
case GST_GL_EFFECT_MIRROR:
effects->effect = (GstGLEffectProcessFunc) gst_gl_effects_mirror;
filter_class->supported_gl_api =
GST_GL_API_GLES2 | GST_GL_API_OPENGL | GST_GL_API_OPENGL3;
effects->current_effect = effect_type;
break;
case GST_GL_EFFECT_SQUEEZE:
effects->effect = (GstGLEffectProcessFunc) gst_gl_effects_squeeze;
filter_class->supported_gl_api =
GST_GL_API_GLES2 | GST_GL_API_OPENGL | GST_GL_API_OPENGL3;
effects->current_effect = effect_type;
break;
}
if (context
&& (gst_gl_context_get_gl_api (context) & GST_GL_API_OPENGL) ==
GST_GL_API_NONE) {
GST_ELEMENT_WARNING (effects, RESOURCE, SETTINGS, ("%s",
"cannot change effect type"), ("%s",
"the current OpenGL context does not support the GL API required"));
return;
}
#if GST_GL_HAVE_OPENGL
switch (effect_type) {
case GST_GL_EFFECT_STRETCH:
effects->effect = (GstGLEffectProcessFunc) gst_gl_effects_stretch;
filter_class->supported_gl_api = GST_GL_API_OPENGL;
break;
case GST_GL_EFFECT_TUNNEL:
effects->effect = (GstGLEffectProcessFunc) gst_gl_effects_tunnel;
filter_class->supported_gl_api = GST_GL_API_OPENGL;
break;
case GST_GL_EFFECT_FISHEYE:
effects->effect = (GstGLEffectProcessFunc) gst_gl_effects_fisheye;
filter_class->supported_gl_api = GST_GL_API_OPENGL;
break;
case GST_GL_EFFECT_TWIRL:
effects->effect = (GstGLEffectProcessFunc) gst_gl_effects_twirl;
filter_class->supported_gl_api = GST_GL_API_OPENGL;
break;
case GST_GL_EFFECT_BULGE:
effects->effect = (GstGLEffectProcessFunc) gst_gl_effects_bulge;
filter_class->supported_gl_api = GST_GL_API_OPENGL;
break;
case GST_GL_EFFECT_SQUARE:
effects->effect = (GstGLEffectProcessFunc) gst_gl_effects_square;
filter_class->supported_gl_api = GST_GL_API_OPENGL;
break;
case GST_GL_EFFECT_HEAT:
effects->effect = (GstGLEffectProcessFunc) gst_gl_effects_heat;
filter_class->supported_gl_api = GST_GL_API_OPENGL;
break;
case GST_GL_EFFECT_SEPIA:
effects->effect = (GstGLEffectProcessFunc) gst_gl_effects_sepia;
filter_class->supported_gl_api = GST_GL_API_OPENGL;
break;
case GST_GL_EFFECT_XPRO:
effects->effect = (GstGLEffectProcessFunc) gst_gl_effects_xpro;
filter_class->supported_gl_api = GST_GL_API_OPENGL;
break;
case GST_GL_EFFECT_LUMA_XPRO:
effects->effect = (GstGLEffectProcessFunc) gst_gl_effects_luma_xpro;
filter_class->supported_gl_api = GST_GL_API_OPENGL;
break;
case GST_GL_EFFECT_XRAY:
effects->effect = (GstGLEffectProcessFunc) gst_gl_effects_xray;
filter_class->supported_gl_api = GST_GL_API_OPENGL;
break;
case GST_GL_EFFECT_SIN:
effects->effect = (GstGLEffectProcessFunc) gst_gl_effects_sin;
filter_class->supported_gl_api = GST_GL_API_OPENGL;
break;
case GST_GL_EFFECT_GLOW:
effects->effect = (GstGLEffectProcessFunc) gst_gl_effects_glow;
filter_class->supported_gl_api = GST_GL_API_OPENGL;
break;
#endif
default:
@ -273,6 +308,9 @@ gst_gl_effects_class_init (GstGLEffectsClass * klass)
"Gstreamer OpenGL Effects", "Filter/Effect/Video",
"GL Shading Language effects",
"Filippo Argiolas <filippo.argiolas@gmail.com>");
GST_GL_FILTER_CLASS (klass)->supported_gl_api =
GST_GL_API_OPENGL | GST_GL_API_GLES2 | GST_GL_API_OPENGL3;
}
static void

View file

@ -111,6 +111,9 @@ gst_gl_filter_app_class_init (GstGLFilterAppClass * klass)
"OpenGL application filter", "Filter/Effect",
"Use client callbacks to define the scene",
"Julien Isorce <julien.isorce@gmail.com>");
GST_GL_FILTER_CLASS (klass)->supported_gl_api =
GST_GL_API_OPENGL | GST_GL_API_GLES2 | GST_GL_API_OPENGL3;
}
static void

View file

@ -114,6 +114,8 @@ gst_gl_filterblur_class_init (GstGLFilterBlurClass * klass)
gst_gl_filterblur_reset_resources;
GST_GL_FILTER_CLASS (klass)->onInitFBO = gst_gl_filterblur_init_shader;
GST_GL_FILTER_CLASS (klass)->onReset = gst_gl_filter_filterblur_reset;
GST_GL_FILTER_CLASS (klass)->supported_gl_api = GST_GL_API_OPENGL;
}
static void

View file

@ -179,6 +179,9 @@ gst_gl_filter_cube_class_init (GstGLFilterCubeClass * klass)
gst_element_class_set_metadata (element_class, "OpenGL cube filter",
"Filter/Effect/Video", "Map input texture on the 6 cube faces",
"Julien Isorce <julien.isorce@gmail.com>");
GST_GL_FILTER_CLASS (klass)->supported_gl_api =
GST_GL_API_OPENGL | GST_GL_API_GLES2 | GST_GL_API_OPENGL3;
}
static void

View file

@ -162,6 +162,8 @@ gst_gl_filter_glass_class_init (GstGLFilterGlassClass * klass)
gst_gl_filter_glass_filter_texture;
GST_GL_FILTER_CLASS (klass)->onInitFBO = gst_gl_filter_glass_init_shader;
GST_GL_FILTER_CLASS (klass)->onReset = gst_gl_filter_glass_reset;
GST_GL_FILTER_CLASS (klass)->supported_gl_api = GST_GL_API_OPENGL;
}
static void

View file

@ -119,6 +119,8 @@ gst_gl_filter_laplacian_class_init (GstGLFilterLaplacianClass * klass)
gst_gl_filter_laplacian_filter_texture;
GST_GL_FILTER_CLASS (klass)->onInitFBO = gst_gl_filter_laplacian_init_shader;
GST_GL_FILTER_CLASS (klass)->onReset = gst_gl_filter_laplacian_reset;
GST_GL_FILTER_CLASS (klass)->supported_gl_api = GST_GL_API_OPENGL;
}
static void

View file

@ -138,6 +138,8 @@ gst_gl_filter_reflected_screen_class_init (GstGLFilterReflectedScreenClass *
gst_element_class_set_metadata (element_class,
"OpenGL Reflected Screen filter", "Filter/Effect/Video",
"Reflected Screen Filter", "Pierre POUZOL <pierre.pouzol@hotmail.fr>");
GST_GL_FILTER_CLASS (klass)->supported_gl_api = GST_GL_API_OPENGL;
}
static void

View file

@ -149,6 +149,9 @@ gst_gl_filtershader_class_init (GstGLFilterShaderClass * klass)
gst_gl_filtershader_reset_resources;
GST_GL_FILTER_CLASS (klass)->onInitFBO = gst_gl_filtershader_init_shader;
GST_GL_FILTER_CLASS (klass)->onReset = gst_gl_filter_filtershader_reset;
GST_GL_FILTER_CLASS (klass)->supported_gl_api =
GST_GL_API_OPENGL | GST_GL_API_GLES2 | GST_GL_API_OPENGL3;
}
static void

View file

@ -131,6 +131,8 @@ gst_gl_filtersobel_class_init (GstGLFilterSobelClass * klass)
gst_element_class_set_metadata (element_class,
"Gstreamer OpenGL Sobel", "Filter/Effect/Video", "Sobel edge detection",
"Filippo Argiolas <filippo.argiolas@gmail.com>");
GST_GL_FILTER_CLASS (klass)->supported_gl_api = GST_GL_API_OPENGL;
}
static void

View file

@ -111,6 +111,8 @@ GST_DEBUG_CATEGORY (gst_debug_glimage_sink);
#define USING_GLES2(context) (gst_gl_context_check_gl_version (context, GST_GL_API_GLES2, 2, 0))
#define USING_GLES3(context) (gst_gl_context_check_gl_version (context, GST_GL_API_GLES2, 3, 0))
#define SUPPORTED_GL_APIS GST_GL_API_OPENGL | GST_GL_API_GLES2 | GST_GL_API_OPENGL3
static void gst_glimage_sink_thread_init_redisplay (GstGLImageSink * gl_sink);
static void gst_glimage_sink_cleanup_glthread (GstGLImageSink * gl_sink);
static void gst_glimage_sink_on_close (GstGLImageSink * gl_sink);
@ -493,6 +495,8 @@ _ensure_gl_setup (GstGLImageSink * gl_sink)
&gl_sink->other_context))
return FALSE;
gst_gl_display_filter_gl_api (gl_sink->display, SUPPORTED_GL_APIS);
if (!gl_sink->context) {
GstGLWindow *window;
@ -564,8 +568,12 @@ gst_glimage_sink_query (GstBaseSink * bsink, GstQuery * query)
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONTEXT:
{
return gst_gl_handle_context_query ((GstElement *) glimage_sink, query,
gboolean ret =
gst_gl_handle_context_query ((GstElement *) glimage_sink, query,
&glimage_sink->display, &glimage_sink->other_context);
if (glimage_sink->display)
gst_gl_display_filter_gl_api (glimage_sink->display, SUPPORTED_GL_APIS);
return ret;
}
case GST_QUERY_DRAIN:
{
@ -619,6 +627,9 @@ gst_glimage_sink_set_context (GstElement * element, GstContext * context)
gst_gl_handle_set_context (element, context, &gl_sink->display,
&gl_sink->other_context);
if (gl_sink->display)
gst_gl_display_filter_gl_api (gl_sink->display, SUPPORTED_GL_APIS);
}
static GstStateChangeReturn
@ -638,11 +649,6 @@ gst_glimage_sink_change_state (GstElement * element, GstStateChange transition)
break;
case GST_STATE_CHANGE_READY_TO_PAUSED:
g_atomic_int_set (&glimage_sink->to_quit, 0);
if (!glimage_sink->display) {
if (!gst_gl_ensure_element_data (glimage_sink, &glimage_sink->display,
&glimage_sink->other_context))
return GST_STATE_CHANGE_FAILURE;
}
break;
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
break;

View file

@ -152,6 +152,7 @@ static gboolean
gst_gl_mixer_propose_allocation (GstGLMixer * mix,
GstQuery * decide_query, GstQuery * query)
{
GstGLMixerClass *mix_class = GST_GL_MIXER_GET_CLASS (mix);
GstBufferPool *pool;
GstStructure *config;
GstCaps *caps;
@ -192,6 +193,8 @@ gst_gl_mixer_propose_allocation (GstGLMixer * mix,
if (!gst_gl_ensure_element_data (mix, &mix->display, &mix->other_context))
return FALSE;
gst_gl_display_filter_gl_api (mix->display, mix_class->supported_gl_api);
if (!mix->context) {
mix->context = gst_gl_context_new (mix->display);
if (!gst_gl_context_create (mix->context, mix->other_context, &error))
@ -428,6 +431,7 @@ gst_gl_mixer_sink_query (GstAggregator * agg, GstAggregatorPad * bpad,
{
gboolean ret = FALSE;
GstGLMixer *mix = GST_GL_MIXER (agg);
GstGLMixerClass *mix_class = GST_GL_MIXER_GET_CLASS (mix);
GST_TRACE ("QUERY %" GST_PTR_FORMAT, query);
@ -485,6 +489,9 @@ gst_gl_mixer_sink_query (GstAggregator * agg, GstAggregatorPad * bpad,
{
ret = gst_gl_handle_context_query ((GstElement *) mix, query,
&mix->display, &mix->other_context);
if (mix->display)
gst_gl_display_filter_gl_api (mix->display,
mix_class->supported_gl_api);
break;
}
default:
@ -615,7 +622,7 @@ gst_gl_mixer_class_init (GstGLMixerClass * klass)
g_type_class_ref (GST_TYPE_GL_MIXER_PAD);
klass->set_caps = NULL;
klass->supported_gl_api = GST_GL_API_ANY;
}
static void
@ -661,19 +668,26 @@ static void
gst_gl_mixer_set_context (GstElement * element, GstContext * context)
{
GstGLMixer *mix = GST_GL_MIXER (element);
GstGLMixerClass *mix_class = GST_GL_MIXER_GET_CLASS (mix);
gst_gl_handle_set_context (element, context, &mix->display,
&mix->other_context);
if (mix->display)
gst_gl_display_filter_gl_api (mix->display, mix_class->supported_gl_api);
}
static gboolean
gst_gl_mixer_activate (GstGLMixer * mix, gboolean active)
{
GstGLMixerClass *mix_class = GST_GL_MIXER_GET_CLASS (mix);
gboolean result = TRUE;
if (active) {
if (!gst_gl_ensure_element_data (mix, &mix->display, &mix->other_context))
result = FALSE;
return FALSE;
gst_gl_display_filter_gl_api (mix->display, mix_class->supported_gl_api);
}
return result;
@ -729,12 +743,16 @@ gst_gl_mixer_src_query (GstAggregator * agg, GstQuery * query)
{
gboolean res = FALSE;
GstGLMixer *mix = GST_GL_MIXER (agg);
GstGLMixerClass *mix_class = GST_GL_MIXER_GET_CLASS (mix);
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONTEXT:
{
res = gst_gl_handle_context_query ((GstElement *) mix, query,
&mix->display, &mix->other_context);
if (mix->display)
gst_gl_display_filter_gl_api (mix->display,
mix_class->supported_gl_api);
break;
}
case GST_QUERY_CAPS:
@ -785,6 +803,8 @@ gst_gl_mixer_decide_allocation (GstGLMixer * mix, GstQuery * query)
if (!gst_gl_ensure_element_data (mix, &mix->display, &mix->other_context))
return FALSE;
gst_gl_display_filter_gl_api (mix->display, mixer_class->supported_gl_api);
if (gst_query_find_allocation_meta (query,
GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, &idx)) {
GstGLContext *context;

View file

@ -76,6 +76,7 @@ struct _GstGLMixer
struct _GstGLMixerClass
{
GstVideoAggregatorClass parent_class;
GstGLAPI supported_gl_api;
GstGLMixerSetCaps set_caps;
GstGLMixerReset reset;

View file

@ -134,6 +134,8 @@ gst_gl_mosaic_class_init (GstGLMosaicClass * klass)
GST_GL_MIXER_CLASS (klass)->set_caps = gst_gl_mosaic_init_shader;
GST_GL_MIXER_CLASS (klass)->reset = gst_gl_mosaic_reset;
GST_GL_MIXER_CLASS (klass)->process_textures = gst_gl_mosaic_process_textures;
GST_GL_MIXER_CLASS (klass)->supported_gl_api = GST_GL_API_OPENGL;
}
static void

View file

@ -244,6 +244,9 @@ gst_gl_overlay_class_init (GstGLOverlayClass * klass)
"Overlay GL video texture with a JPEG/PNG image",
"Filippo Argiolas <filippo.argiolas@gmail.com>, "
"Matthew Waters <matthew@centricular.com>");
GST_GL_FILTER_CLASS (klass)->supported_gl_api =
GST_GL_API_OPENGL | GST_GL_API_GLES2 | GST_GL_API_OPENGL3;
}
static void

View file

@ -52,6 +52,7 @@
#endif
#define USE_PEER_BUFFERALLOC
#define SUPPORTED_GL_APIS GST_GL_API_OPENGL
GST_DEBUG_CATEGORY_STATIC (gl_test_src_debug);
#define GST_CAT_DEFAULT gl_test_src_debug
@ -474,7 +475,11 @@ gst_gl_test_src_set_context (GstElement * element, GstContext * context)
{
GstGLTestSrc *src = GST_GL_TEST_SRC (element);
gst_gl_handle_set_context (element, context, &src->display, &src->other_context);
gst_gl_handle_set_context (element, context, &src->display,
&src->other_context);
if (src->display)
gst_gl_display_filter_gl_api (src->display, SUPPORTED_GL_APIS);
}
static gboolean
@ -490,6 +495,8 @@ gst_gl_test_src_query (GstBaseSrc * bsrc, GstQuery * query)
{
res = gst_gl_handle_context_query ((GstElement *) src, query,
&src->display, &src->other_context);
if (src->display)
gst_gl_display_filter_gl_api (src->display, SUPPORTED_GL_APIS);
break;
}
case GST_QUERY_CONVERT:
@ -706,6 +713,8 @@ gst_gl_test_src_start (GstBaseSrc * basesrc)
if (!gst_gl_ensure_element_data (src, &src->display, &src->other_context))
return FALSE;
gst_gl_display_filter_gl_api (src->display, SUPPORTED_GL_APIS);
src->running_time = 0;
src->n_frames = 0;
src->negotiated = FALSE;
@ -766,6 +775,8 @@ gst_gl_test_src_decide_allocation (GstBaseSrc * basesrc, GstQuery * query)
if (!gst_gl_ensure_element_data (src, &src->display, &src->other_context))
return FALSE;
gst_gl_display_filter_gl_api (src->display, SUPPORTED_GL_APIS);
if (gst_query_find_allocation_meta (query,
GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, &idx)) {
GstGLContext *context;

View file

@ -203,6 +203,9 @@ gst_gl_transformation_class_init (GstGLTransformationClass * klass)
gst_element_class_set_metadata (element_class, "OpenGL transformation filter",
"Filter/Effect/Video", "Transform video on the GPU",
"Lubosz Sarnecki <lubosz@gmail.com>");
GST_GL_FILTER_CLASS (klass)->supported_gl_api =
GST_GL_API_OPENGL | GST_GL_API_OPENGL3 | GST_GL_API_GLES2;
}
static void

View file

@ -345,6 +345,9 @@ gst_gl_video_mixer_class_init (GstGLVideoMixerClass * klass)
vagg_class->update_caps = _update_caps;
agg_class->sinkpads_type = GST_TYPE_GL_VIDEO_MIXER_PAD;
GST_GL_MIXER_CLASS (klass)->supported_gl_api =
GST_GL_API_OPENGL | GST_GL_API_OPENGL3 | GST_GL_API_GLES2;
}
static void

View file

@ -330,9 +330,14 @@ gst_gl_context_new_wrapped (GstGLDisplay * display, guintptr handle,
GstGLContext *context;
GstGLWrappedContext *context_wrap = NULL;
GstGLContextClass *context_class;
GstGLAPI display_api;
_init_debug ();
display_api = gst_gl_display_get_gl_api (display);
g_return_val_if_fail ((display_api & available_apis) != GST_GL_API_NONE,
NULL);
context_wrap = g_object_new (GST_GL_TYPE_WRAPPED_CONTEXT, NULL);
if (!context_wrap) {
@ -1164,10 +1169,11 @@ gst_gl_context_create_thread (GstGLContext * context)
GstGLWindowClass *window_class;
GstGLFuncs *gl;
gboolean ret = FALSE;
GstGLAPI compiled_api, user_api, gl_api;
GstGLAPI compiled_api, user_api, gl_api, display_api;
gchar *api_string;
gchar *compiled_api_s;
gchar *user_api_string;
gchar *user_api_s;
gchar *display_api_s;
const gchar *user_choice;
GError **error;
GstGLContext *other_context;
@ -1182,6 +1188,14 @@ gst_gl_context_create_thread (GstGLContext * context)
context_class = GST_GL_CONTEXT_GET_CLASS (context);
window_class = GST_GL_WINDOW_GET_CLASS (context->window);
display_api = gst_gl_display_get_gl_api (context->priv->display);
if (display_api == GST_GL_API_NONE) {
g_set_error (error, GST_GL_CONTEXT_ERROR, GST_GL_CONTEXT_ERROR_WRONG_API,
"Cannot create context with satisfying requested apis "
"(display has no GL api!)");
goto failure;
}
if (window_class->open) {
if (!window_class->open (context->window, error)) {
g_assert (error == NULL || *error != NULL);
@ -1191,20 +1205,22 @@ gst_gl_context_create_thread (GstGLContext * context)
gl = context->gl_vtable;
compiled_api = _compiled_api ();
user_choice = g_getenv ("GST_GL_API");
user_api = gst_gl_api_from_string (user_choice);
user_api_string = gst_gl_api_to_string (user_api);
compiled_api_s = gst_gl_api_to_string (compiled_api);
if ((user_api & compiled_api) == GST_GL_API_NONE) {
user_choice = g_getenv ("GST_GL_API");
user_api = gst_gl_api_from_string (user_choice);
user_api_s = gst_gl_api_to_string (user_api);
display_api_s = gst_gl_api_to_string (display_api);
if ((user_api & compiled_api & display_api) == GST_GL_API_NONE) {
g_set_error (error, GST_GL_CONTEXT_ERROR, GST_GL_CONTEXT_ERROR_WRONG_API,
"Cannot create context with the user requested api (%s). "
"We have support for (%s)", user_api_string, compiled_api_s);
g_free (user_api_string);
"We have support for (%s), display api (%s)", user_api_s,
compiled_api_s, display_api_s);
g_free (user_api_s);
g_free (compiled_api_s);
g_free (display_api_s);
goto failure;
}
@ -1212,18 +1228,21 @@ gst_gl_context_create_thread (GstGLContext * context)
!context_class->choose_format (context, error)) {
g_assert (error == NULL || *error != NULL);
g_free (compiled_api_s);
g_free (user_api_string);
g_free (user_api_s);
g_free (display_api_s);
goto failure;
}
GST_INFO ("Attempting to create opengl context. user chosen api(s) (%s), "
"compiled api support (%s)", user_api_string, compiled_api_s);
"compiled api support (%s) display api (%s)", user_api_s,
compiled_api_s, display_api_s);
if (!context_class->create_context (context, compiled_api & user_api,
other_context, error)) {
if (!context_class->create_context (context,
compiled_api & user_api & display_api, other_context, error)) {
g_assert (error == NULL || *error != NULL);
g_free (compiled_api_s);
g_free (user_api_string);
g_free (user_api_s);
g_free (display_api_s);
goto failure;
}
GST_INFO ("created context");
@ -1233,7 +1252,8 @@ gst_gl_context_create_thread (GstGLContext * context)
GST_GL_CONTEXT_ERROR_RESOURCE_UNAVAILABLE,
"Failed to activate the GL Context");
g_free (compiled_api_s);
g_free (user_api_string);
g_free (user_api_s);
g_free (display_api_s);
goto failure;
}
@ -1243,20 +1263,22 @@ gst_gl_context_create_thread (GstGLContext * context)
api_string = gst_gl_api_to_string (gl_api);
GST_INFO ("available GL APIs: %s", api_string);
if (((compiled_api & gl_api) & user_api) == GST_GL_API_NONE) {
if (((compiled_api & gl_api & display_api) & user_api) == GST_GL_API_NONE) {
g_set_error (error, GST_GL_CONTEXT_ERROR, GST_GL_CONTEXT_ERROR_WRONG_API,
"failed to create context, context "
"could not provide correct api. user (%s), compiled (%s), context (%s)",
user_api_string, compiled_api_s, api_string);
user_api_s, compiled_api_s, api_string);
g_free (api_string);
g_free (compiled_api_s);
g_free (user_api_string);
g_free (user_api_s);
g_free (display_api_s);
goto failure;
}
g_free (api_string);
g_free (compiled_api_s);
g_free (user_api_string);
g_free (user_api_s);
g_free (display_api_s);
gl->GetError = gst_gl_context_get_proc_address (context, "glGetError");
gl->GetString = gst_gl_context_get_proc_address (context, "glGetString");

View file

@ -83,7 +83,7 @@ static guintptr gst_gl_display_default_get_handle (GstGLDisplay * display);
struct _GstGLDisplayPrivate
{
gint dummy;
GstGLAPI gl_api;
};
static void
@ -102,6 +102,7 @@ gst_gl_display_init (GstGLDisplay * display)
display->priv = GST_GL_DISPLAY_GET_PRIVATE (display);
display->type = GST_GL_DISPLAY_TYPE_ANY;
display->priv->gl_api = GST_GL_API_ANY;
GST_TRACE ("init %p", display);
@ -192,6 +193,53 @@ gst_gl_display_default_get_handle (GstGLDisplay * display)
return 0;
}
/**
* gst_gl_display_filter_gl_api:
* @display: a #GstGLDisplay
* @gl_api: a #GstGLAPI to filter with
*
* limit the use of OpenGL to the requested @gl_api. This is intended to allow
* applications and elements to request a specific set of OpenGL APIs based on
* what they support. See gst_gl_context_get_gl_api() for retrieving the
* API supported by a #GstGLContext.
*/
void
gst_gl_display_filter_gl_api (GstGLDisplay * display, GstGLAPI gl_api)
{
gchar *gl_api_s;
g_return_if_fail (GST_IS_GL_DISPLAY (display));
gl_api_s = gst_gl_api_to_string (gl_api);
GST_TRACE_OBJECT (display, "filtering with api %s", gl_api_s);
g_free (gl_api_s);
GST_OBJECT_LOCK (display);
display->priv->gl_api &= gl_api;
GST_OBJECT_UNLOCK (display);
}
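/* Illustrative sketch, not part of this patch (the helper name is
 * hypothetical): successive calls to gst_gl_display_filter_gl_api()
 * intersect with the current mask, so an element supporting OPENGL|GLES2
 * combined with an application supporting GLES2|OPENGL3 leaves GLES2 as the
 * only API a context created from this display may use. */
static void
example_filter_intersection (GstGLDisplay * display)
{
  gst_gl_display_filter_gl_api (display,
      GST_GL_API_OPENGL | GST_GL_API_GLES2);
  gst_gl_display_filter_gl_api (display,
      GST_GL_API_GLES2 | GST_GL_API_OPENGL3);
  /* starting from the default GST_GL_API_ANY, only GLES2 survives both */
  g_assert (gst_gl_display_get_gl_api (display) == GST_GL_API_GLES2);
}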
/**
* gst_gl_display_get_gl_api:
* @display: a #GstGLDisplay
*
* See gst_gl_display_filter_gl_api() for what the returned value represents.
*
* Returns: the #GstGLAPI configured for @display
*/
GstGLAPI
gst_gl_display_get_gl_api (GstGLDisplay * display)
{
GstGLAPI ret;
g_return_val_if_fail (GST_IS_GL_DISPLAY (display), GST_GL_API_NONE);
GST_OBJECT_LOCK (display);
ret = display->priv->gl_api;
GST_OBJECT_UNLOCK (display);
return ret;
}
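/* Hypothetical caller, not part of this patch: a quick pre-flight check of
 * the remaining API mask before wrapping an external desktop-OpenGL context,
 * mirroring the new g_return_val_if_fail() in gst_gl_context_new_wrapped(). */
static gboolean
example_display_allows_opengl (GstGLDisplay * display)
{
  /* GST_GL_API_NONE here means desktop GL has been filtered out entirely */
  return (gst_gl_display_get_gl_api (display) & GST_GL_API_OPENGL)
      != GST_GL_API_NONE;
}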
/**
* gst_gl_display_get_handle_type:
* @display: a #GstGLDisplay

View file

@ -83,6 +83,9 @@ GstGLDisplay *gst_gl_display_new (void);
guintptr gst_gl_display_get_handle (GstGLDisplay * display);
GstGLDisplayType gst_gl_display_get_handle_type (GstGLDisplay * display);
void gst_gl_display_filter_gl_api (GstGLDisplay * display,
GstGLAPI api);
GstGLAPI gst_gl_display_get_gl_api (GstGLDisplay * display);
#define GST_GL_DISPLAY_CONTEXT_TYPE "gst.gl.GLDisplay"
void gst_context_set_gl_display (GstContext * context, GstGLDisplay * display);

View file

@ -151,6 +151,7 @@ gst_gl_filter_class_init (GstGLFilterClass * klass)
"Get OpenGL context",
GST_GL_TYPE_CONTEXT, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
klass->supported_gl_api = GST_GL_API_ANY;
klass->set_caps = NULL;
klass->filter = NULL;
klass->display_init_cb = NULL;
@ -199,18 +200,21 @@ static void
gst_gl_filter_set_context (GstElement * element, GstContext * context)
{
GstGLFilter *filter = GST_GL_FILTER (element);
GstGLFilterClass *filter_class = GST_GL_FILTER_GET_CLASS (filter);
gst_gl_handle_set_context (element, context, &filter->display,
&filter->other_context);
if (filter->display)
gst_gl_display_filter_gl_api (filter->display,
filter_class->supported_gl_api);
}
static gboolean
gst_gl_filter_query (GstBaseTransform * trans, GstPadDirection direction,
GstQuery * query)
{
GstGLFilter *filter;
filter = GST_GL_FILTER (trans);
GstGLFilter *filter = GST_GL_FILTER (trans);
GstGLFilterClass *filter_class = GST_GL_FILTER_GET_CLASS (filter);
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_ALLOCATION:
@ -222,8 +226,12 @@ gst_gl_filter_query (GstBaseTransform * trans, GstPadDirection direction,
}
case GST_QUERY_CONTEXT:
{
return gst_gl_handle_context_query ((GstElement *) filter, query,
gboolean ret = gst_gl_handle_context_query ((GstElement *) filter, query,
&filter->display, &filter->other_context);
if (filter->display)
gst_gl_display_filter_gl_api (filter->display,
filter_class->supported_gl_api);
return ret;
}
default:
break;
@ -315,6 +323,9 @@ gst_gl_filter_start (GstBaseTransform * bt)
&filter->other_context))
return FALSE;
gst_gl_display_filter_gl_api (filter->display,
filter_class->supported_gl_api);
if (filter_class->onStart)
filter_class->onStart (filter);
@ -855,6 +866,7 @@ gst_gl_filter_propose_allocation (GstBaseTransform * trans,
GstQuery * decide_query, GstQuery * query)
{
GstGLFilter *filter = GST_GL_FILTER (trans);
GstGLFilterClass *filter_class = GST_GL_FILTER_GET_CLASS (filter);
GstBufferPool *pool;
GstStructure *config;
GstCaps *caps, *decide_caps;
@ -896,6 +908,9 @@ gst_gl_filter_propose_allocation (GstBaseTransform * trans,
&filter->other_context))
return FALSE;
gst_gl_display_filter_gl_api (filter->display,
filter_class->supported_gl_api);
if (!filter->context) {
filter->context = gst_gl_context_new (filter->display);
if (!gst_gl_context_create (filter->context, filter->other_context, &error))
@ -1025,6 +1040,9 @@ gst_gl_filter_decide_allocation (GstBaseTransform * trans, GstQuery * query)
&filter->other_context))
return FALSE;
gst_gl_display_filter_gl_api (filter->display,
filter_class->supported_gl_api);
if (gst_query_find_allocation_meta (query,
GST_VIDEO_GL_TEXTURE_UPLOAD_META_API_TYPE, &idx)) {
GstGLContext *context;
@ -1294,8 +1312,7 @@ gst_gl_filter_transform (GstBaseTransform * bt, GstBuffer * inbuf,
filter = GST_GL_FILTER (bt);
filter_class = GST_GL_FILTER_GET_CLASS (bt);
if (!gst_gl_ensure_element_data (filter, &filter->display,
&filter->other_context))
if (!filter->display)
return GST_FLOW_NOT_NEGOTIATED;
if (!filter->upload) {

View file

@ -116,6 +116,7 @@ struct _GstGLFilter
struct _GstGLFilterClass
{
GstBaseTransformClass base_transform_class;
GstGLAPI supported_gl_api;
gboolean (*set_caps) (GstGLFilter* filter, GstCaps* incaps, GstCaps* outcaps);
gboolean (*filter) (GstGLFilter *filter, GstBuffer *inbuf, GstBuffer *outbuf);