From eefdb2ed8675b738202b074470e097c6215bea9b Mon Sep 17 00:00:00 2001 From: Jan Schmidt Date: Sat, 30 May 2015 02:29:04 +1000 Subject: [PATCH] gl: Add glviewconvert, glstereomix and glstereosplit elements Conversion elements for transforming multiview/stereoscopic video https://bugzilla.gnome.org/show_bug.cgi?id=611157 --- ext/gl/Makefile.am | 6 + ext/gl/gstglstereomix.c | 702 ++++++++++++++++++++++++++++ ext/gl/gstglstereomix.h | 83 ++++ ext/gl/gstglstereosplit.c | 928 ++++++++++++++++++++++++++++++++++++++ ext/gl/gstglstereosplit.h | 66 +++ ext/gl/gstglviewconvert.c | 353 +++++++++++++++ ext/gl/gstglviewconvert.h | 53 +++ ext/gl/gstopengl.c | 17 + 8 files changed, 2208 insertions(+) create mode 100644 ext/gl/gstglstereomix.c create mode 100644 ext/gl/gstglstereomix.h create mode 100644 ext/gl/gstglstereosplit.c create mode 100644 ext/gl/gstglstereosplit.h create mode 100644 ext/gl/gstglviewconvert.c create mode 100644 ext/gl/gstglviewconvert.h diff --git a/ext/gl/Makefile.am b/ext/gl/Makefile.am index 5d02afcc97..1a634ef2ea 100644 --- a/ext/gl/Makefile.am +++ b/ext/gl/Makefile.am @@ -61,6 +61,9 @@ if USE_OPENGL libgstopengl_la_SOURCES += \ gstglfilterglass.c \ gstgldeinterlace.c \ + gstglviewconvert.c \ + gstglstereosplit.c \ + gstglstereomix.c \ gltestsrc.c \ gstgltestsrc.c \ gstglmosaic.c @@ -68,6 +71,9 @@ libgstopengl_la_SOURCES += \ noinst_HEADERS += \ gstglfilterglass.h \ gstgldeinterlace.h \ + gstglstereosplit.h \ + gstglstereomix.h \ + gstglviewconvert.h \ gltestsrc.h \ gstgltestsrc.h \ gstglmosaic.h \ diff --git a/ext/gl/gstglstereomix.c b/ext/gl/gstglstereomix.c new file mode 100644 index 0000000000..1422823f77 --- /dev/null +++ b/ext/gl/gstglstereomix.c @@ -0,0 +1,702 @@ +/* + * Combine video streams to 3D stereo + * + * GStreamer + * Copyright (C) 2009 Julien Isorce + * Copyright (C) 2014 Jan Schmidt + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstglstereomix.h" + +#define GST_CAT_DEFAULT gst_gl_stereo_mix_debug +GST_DEBUG_CATEGORY (gst_gl_stereo_mix_debug); + +#define gst_gl_stereo_mix_parent_class parent_class +G_DEFINE_TYPE (GstGLStereoMix, gst_gl_stereo_mix, GST_TYPE_GL_MIXER); + +static GstCaps *_update_caps (GstVideoAggregator * vagg, GstCaps * caps); +static gboolean _negotiated_caps (GstVideoAggregator * videoaggregator, + GstCaps * caps); +gboolean gst_gl_stereo_mix_make_output (GstGLStereoMix * mix); +static gboolean gst_gl_stereo_mix_process_frames (GstGLStereoMix * mixer, + GPtrArray * in_frames); + +#define DEFAULT_DOWNMIX GST_GL_STEREO_DOWNMIX_ANAGLYPH_GREEN_MAGENTA_DUBOIS + +/* GLStereoMix signals and args */ +enum +{ + /* FILL ME */ + LAST_SIGNAL +}; + +enum +{ + PROP_0, + PROP_DOWNMIX_MODE +}; + +static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src", + GST_PAD_SRC, + GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES + (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, + "RGBA") "; " + GST_VIDEO_CAPS_MAKE_WITH_FEATURES + (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, + "RGBA") + "; " GST_VIDEO_CAPS_MAKE (GST_GL_COLOR_CONVERT_FORMATS)) + ); + +static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink_%u", + GST_PAD_SINK, + GST_PAD_REQUEST, + GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES + (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, + "RGBA") "; " + GST_VIDEO_CAPS_MAKE_WITH_FEATURES + (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, + "RGBA") + "; " GST_VIDEO_CAPS_MAKE (GST_GL_COLOR_CONVERT_FORMATS)) + ); + +static GstFlowReturn gst_gl_stereo_mix_get_output_buffer (GstVideoAggregator * + videoaggregator, GstBuffer ** outbuf); +static gboolean gst_gl_stereo_mix_stop (GstAggregator * agg); +static gboolean gst_gl_stereo_mix_start (GstAggregator * agg); +static gboolean gst_gl_stereo_mix_src_query (GstAggregator * agg, + GstQuery * query); + +static void +gst_gl_stereo_mix_find_best_format (GstVideoAggregator * vagg, + GstCaps * downstream_caps, GstVideoInfo * best_info, + gboolean * at_least_one_alpha); + +static void gst_gl_stereo_mix_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec); +static void gst_gl_stereo_mix_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec); + +static void gst_gl_stereo_mix_finalize (GObject * object); + +static GstFlowReturn +gst_gl_stereo_mix_aggregate_frames (GstVideoAggregator * vagg, + GstBuffer * outbuffer); + +static void +gst_gl_stereo_mix_class_init (GstGLStereoMixClass * klass) +{ + GObjectClass *gobject_class = (GObjectClass *) klass; + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); + GstVideoAggregatorClass *videoaggregator_class = + (GstVideoAggregatorClass *) klass; + GstAggregatorClass *agg_class = (GstAggregatorClass *) klass; + GstGLBaseMixerClass *base_mix_class = (GstGLBaseMixerClass *) klass; + + GST_DEBUG_CATEGORY_INIT (GST_CAT_DEFAULT, "glstereomixer", 0, + "opengl stereoscopic mixer"); + + gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_gl_stereo_mix_finalize); + + gobject_class->get_property = gst_gl_stereo_mix_get_property; + gobject_class->set_property = gst_gl_stereo_mix_set_property; + + gst_element_class_set_metadata (element_class, "OpenGL stereo video combiner", + "Filter/Effect/Video", "OpenGL stereo video combiner", + "Jan Schmidt "); + + g_object_class_install_property (gobject_class, PROP_DOWNMIX_MODE, + g_param_spec_enum ("downmix-mode", "Mode for 
mono downmixed output", + "Output anaglyph type to generate when downmixing to mono", + GST_TYPE_GL_STEREO_DOWNMIX_MODE_TYPE, DEFAULT_DOWNMIX, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&src_factory)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&sink_factory)); + + agg_class->stop = gst_gl_stereo_mix_stop; + agg_class->start = gst_gl_stereo_mix_start; + agg_class->src_query = gst_gl_stereo_mix_src_query; + + videoaggregator_class->aggregate_frames = gst_gl_stereo_mix_aggregate_frames; + videoaggregator_class->update_caps = _update_caps; + videoaggregator_class->negotiated_caps = _negotiated_caps; + videoaggregator_class->get_output_buffer = + gst_gl_stereo_mix_get_output_buffer; + videoaggregator_class->find_best_format = gst_gl_stereo_mix_find_best_format; + videoaggregator_class->preserve_update_caps_result = TRUE; + + base_mix_class->supported_gl_api = GST_GL_API_OPENGL | GST_GL_API_OPENGL3; +} + +static void +gst_gl_stereo_mix_init (GstGLStereoMix * mix) +{ +} + +static void +gst_gl_stereo_mix_finalize (GObject * object) +{ + //GstGLStereoMix *mix = GST_GL_STEREO_MIX (object); + + G_OBJECT_CLASS (parent_class)->finalize (object); +} + +static gboolean +gst_gl_stereo_mix_query_caps (GstPad * pad, GstAggregator * agg, + GstQuery * query) +{ + GstCaps *filter, *caps; + + gst_query_parse_caps (query, &filter); + + caps = gst_pad_get_current_caps (agg->srcpad); + if (caps == NULL) { + caps = gst_pad_get_pad_template_caps (agg->srcpad); + } + + if (filter) + caps = gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST); + + gst_query_set_caps_result (query, caps); + gst_caps_unref (caps); + + return TRUE; +} + +static gboolean +gst_gl_stereo_mix_src_query (GstAggregator * agg, GstQuery * query) +{ + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_CAPS: + return gst_gl_stereo_mix_query_caps (agg->srcpad, agg, query); + break; + default: + break; + } + + return GST_AGGREGATOR_CLASS (parent_class)->src_query (agg, query); +} + + +static GstFlowReturn +gst_gl_stereo_mix_get_output_buffer (GstVideoAggregator * videoaggregator, + GstBuffer ** outbuf) +{ + GstGLStereoMix *mix = GST_GL_STEREO_MIX (videoaggregator); + GstFlowReturn ret = GST_FLOW_OK; + +#if 0 + + if (!mix->priv->pool_active) { + if (!gst_buffer_pool_set_active (mix->priv->pool, TRUE)) { + GST_ELEMENT_ERROR (mix, RESOURCE, SETTINGS, + ("failed to activate bufferpool"), ("failed to activate bufferpool")); + return GST_FLOW_ERROR; + } + mix->priv->pool_active = TRUE; + } + + return gst_buffer_pool_acquire_buffer (mix->priv->pool, outbuf, NULL); +#endif + + if (!gst_gl_stereo_mix_make_output (mix)) { + gst_buffer_replace (&mix->primary_out, NULL); + gst_buffer_replace (&mix->auxilliary_out, NULL); + GST_ELEMENT_ERROR (mix, RESOURCE, SETTINGS, + ("Failed to generate output"), ("failed to generate output")); + ret = GST_FLOW_ERROR; + } + + if (mix->auxilliary_out) { + *outbuf = mix->auxilliary_out; + mix->auxilliary_out = NULL; + } else { + *outbuf = mix->primary_out; + mix->primary_out = NULL; + } + return ret; +} + +gboolean +gst_gl_stereo_mix_make_output (GstGLStereoMix * mix) +{ + guint i; + GList *walk; + gboolean res = FALSE; + guint array_index = 0; + GstElement *element = GST_ELEMENT (mix); + gboolean missing_buffer = FALSE; + + GST_LOG_OBJECT (mix, "Processing buffers"); + + GST_OBJECT_LOCK (mix); + walk = element->sinkpads; + + i = mix->frames->len; + g_ptr_array_set_size (mix->frames, 
element->numsinkpads); + for (; i < element->numsinkpads; i++) + mix->frames->pdata[i] = g_slice_new0 (GstGLStereoMixFrameData); + while (walk) { + GstGLMixerPad *pad = GST_GL_MIXER_PAD (walk->data); + GstVideoAggregatorPad *vaggpad = walk->data; + GstGLStereoMixFrameData *frame; + + GST_LOG_OBJECT (mix, "Checking pad %" GST_PTR_FORMAT, vaggpad); + + frame = g_ptr_array_index (mix->frames, array_index); + frame->base.pad = pad; + frame->buf = NULL; + + walk = g_list_next (walk); + + if (vaggpad->buffer != NULL) { + frame->buf = vaggpad->buffer; + + GST_DEBUG_OBJECT (pad, "Got buffer %" GST_PTR_FORMAT, frame->buf); + } else { + GST_LOG_OBJECT (mix, "No buffer on pad %" GST_PTR_FORMAT, vaggpad); + missing_buffer = TRUE; + } + ++array_index; + } + if (missing_buffer) { + /* We're still waiting for a buffer to turn up on at least one input */ + GST_WARNING_OBJECT (mix, "Not generating output - need more input buffers"); + res = TRUE; + goto out; + } + + /* Copy GL memory from each input frame to the output */ + if (!gst_gl_stereo_mix_process_frames (mix, mix->frames)) { + GST_LOG_OBJECT (mix, "Failed to process frames to output"); + goto out; + } + + if (mix->primary_out == NULL) + goto out; + + res = TRUE; + +out: + GST_OBJECT_UNLOCK (mix); + + return res; +} + +static GstFlowReturn +gst_gl_stereo_mix_aggregate_frames (GstVideoAggregator * vagg, + GstBuffer * outbuf) +{ + GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg); + /* If we're operating in frame-by-frame mode, push + * the primary view now, and let the parent class + * push the remaining auxilliary view */ + if (GST_VIDEO_INFO_MULTIVIEW_MODE (&vagg->info) == + GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME) { + /* Transfer the timestamps video-agg put on the aux buffer */ + gst_buffer_copy_into (mix->primary_out, outbuf, + GST_BUFFER_COPY_TIMESTAMPS, 0, -1); + gst_aggregator_finish_buffer (GST_AGGREGATOR (vagg), mix->primary_out); + mix->primary_out = NULL; + + /* And actually, we don't want timestamps on the aux buffer */ + GST_BUFFER_TIMESTAMP (outbuf) = GST_CLOCK_TIME_NONE; + GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE; + } + return GST_FLOW_OK; +} + +static void +gst_gl_stereo_mix_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec) +{ + GstGLStereoMix *mix = GST_GL_STEREO_MIX (object); + + switch (prop_id) { + case PROP_DOWNMIX_MODE: + g_value_set_enum (value, mix->downmix_mode); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_gl_stereo_mix_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec) +{ + GstGLStereoMix *mix = GST_GL_STEREO_MIX (object); + + switch (prop_id) { + case PROP_DOWNMIX_MODE: + mix->downmix_mode = g_value_get_enum (value); + if (mix->viewconvert) + g_object_set_property (G_OBJECT (mix->viewconvert), "downmix-mode", + value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +_free_glmixer_frame_data (GstGLStereoMixFrameData * frame) +{ + if (frame == NULL) + return; + if (frame->buf) + gst_buffer_unref (frame->buf); + g_slice_free1 (sizeof (GstGLStereoMixFrameData), frame); +} + +static gboolean +gst_gl_stereo_mix_start (GstAggregator * agg) +{ + guint i; + GstGLStereoMix *mix = GST_GL_STEREO_MIX (agg); + GstElement *element = GST_ELEMENT (agg); + + if (!GST_AGGREGATOR_CLASS (parent_class)->start (agg)) + return FALSE; + + GST_OBJECT_LOCK (mix); + mix->array_buffers = g_ptr_array_new_full (element->numsinkpads, + 
(GDestroyNotify) _free_glmixer_frame_data); + mix->frames = g_ptr_array_new_full (element->numsinkpads, NULL); + + g_ptr_array_set_size (mix->array_buffers, element->numsinkpads); + g_ptr_array_set_size (mix->frames, element->numsinkpads); + + for (i = 0; i < element->numsinkpads; i++) + mix->frames->pdata[i] = g_slice_new0 (GstGLStereoMixFrameData); + + mix->viewconvert = gst_gl_view_convert_new (); + g_object_set (G_OBJECT (mix->viewconvert), "downmix-mode", + mix->downmix_mode, NULL); + + GST_OBJECT_UNLOCK (mix); + + return TRUE; +} + +static gboolean +gst_gl_stereo_mix_stop (GstAggregator * agg) +{ + GstGLStereoMix *mix = GST_GL_STEREO_MIX (agg); + + if (!GST_AGGREGATOR_CLASS (parent_class)->stop (agg)) + return FALSE; + + GST_OBJECT_LOCK (agg); + g_ptr_array_free (mix->frames, TRUE); + mix->frames = NULL; + g_ptr_array_free (mix->array_buffers, TRUE); + mix->array_buffers = NULL; + GST_OBJECT_UNLOCK (agg); + + if (mix->viewconvert) { + gst_object_unref (mix->viewconvert); + mix->viewconvert = NULL; + } + + return TRUE; +} + +/* Convert to caps that can be accepted by this element... */ +static GstCaps * +get_converted_caps (GstGLStereoMix * mix, GstCaps * caps) +{ +#if 0 + GstGLContext *context = GST_GL_BASE_MIXER (mix)->context; + GstCaps *result, *tmp; + + GST_LOG_OBJECT (mix, "Converting caps %" GST_PTR_FORMAT, caps); + result = gst_gl_upload_transform_caps (context, GST_PAD_SINK, caps, NULL); + tmp = result; + GST_TRACE_OBJECT (mix, "transfer returned caps %" GST_PTR_FORMAT, tmp); + + result = + gst_gl_color_convert_transform_caps (context, GST_PAD_SINK, tmp, NULL); + gst_caps_unref (tmp); + GST_TRACE_OBJECT (mix, "convert returned caps %" GST_PTR_FORMAT, tmp); + + tmp = result; + result = gst_gl_view_convert_transform_caps (mix->viewconvert, + GST_PAD_SINK, tmp, NULL); + gst_caps_unref (tmp); +#else + GstCaps *result; + + GST_LOG_OBJECT (mix, "Converting caps %" GST_PTR_FORMAT, caps); + result = gst_gl_view_convert_transform_caps (mix->viewconvert, + GST_PAD_SINK, caps, NULL); +#endif + + GST_LOG_OBJECT (mix, "returning caps %" GST_PTR_FORMAT, result); + + return result; +} + +/* Return the possible output caps we decided in find_best_format() */ +static GstCaps * +_update_caps (GstVideoAggregator * vagg, GstCaps * caps) +{ + GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg); + + return gst_caps_ref (mix->out_caps); +} + +/* Called after videoaggregator fixates our caps */ +static gboolean +_negotiated_caps (GstVideoAggregator * vagg, GstCaps * caps) +{ + GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg); + + GST_LOG_OBJECT (mix, "Configured output caps %" GST_PTR_FORMAT, caps); + + if (GST_VIDEO_AGGREGATOR_CLASS (parent_class)->negotiated_caps) + if (!GST_VIDEO_AGGREGATOR_CLASS (parent_class)->negotiated_caps (vagg, + caps)) + return FALSE; + + /* Update the glview_convert output */ + if (!gst_video_info_from_caps (&mix->out_info, caps)) + return FALSE; + + /* We can configure the view_converter now */ + gst_gl_view_convert_set_context (mix->viewconvert, + GST_GL_BASE_MIXER (mix)->context); + gst_gl_view_convert_set_format (mix->viewconvert, &mix->mix_info, + &mix->out_info); + + return TRUE; + +} + +static gboolean +gst_gl_stereo_mix_process_frames (GstGLStereoMix * mixer, GPtrArray * frames) +{ + GstVideoAggregator *vagg = GST_VIDEO_AGGREGATOR (mixer); + GstBuffer *converted_buffer, *inbuf; + GstVideoInfo *out_info = &vagg->info; + gint count = 0, n; + gint v, views; + gint valid_views = 0; + + inbuf = gst_buffer_new (); + while (count < frames->len) { + GstGLStereoMixFrameData 
*frame; + GstMemory *in_mem; + + frame = g_ptr_array_index (frames, count); + GST_LOG_OBJECT (mixer, "Handling frame %d", count); + + if (!frame) { + GST_DEBUG ("skipping texture, null frame"); + count++; + continue; + } + + in_mem = gst_buffer_get_memory (frame->buf, 0); + + GST_LOG_OBJECT (mixer, + "Appending memory %" GST_PTR_FORMAT " to intermediate buffer", in_mem); + /* Appending the memory to a 2nd buffer locks it + * exclusive a 2nd time, which will mark it for + * copy-on-write. The ref will keep the memory + * alive but we add a parent_buffer_meta to also + * prevent the input buffer from returning to any buffer + * pool it might belong to + */ + gst_buffer_append_memory (inbuf, in_mem); + /* Use parent buffer meta to keep input buffer alive */ + gst_buffer_add_parent_buffer_meta (inbuf, frame->buf); + + count++; + valid_views++; + } + + if (mixer->mix_info.views != valid_views) { + GST_WARNING_OBJECT (mixer, "Not enough input views to process"); + return FALSE; + } + + if (GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) == + GST_VIDEO_MULTIVIEW_MODE_SEPARATED) + views = out_info->views; + else + views = 1; + + if (gst_gl_view_convert_submit_input_buffer (mixer->viewconvert, + FALSE, inbuf) != GST_FLOW_OK) + return FALSE; + + /* Clear any existing buffers, just in case */ + gst_buffer_replace (&mixer->primary_out, NULL); + gst_buffer_replace (&mixer->auxilliary_out, NULL); + + if (gst_gl_view_convert_get_output (mixer->viewconvert, + &mixer->primary_out) != GST_FLOW_OK) + return FALSE; + + if (GST_VIDEO_INFO_MULTIVIEW_MODE (out_info) == + GST_VIDEO_MULTIVIEW_MODE_FRAME_BY_FRAME) { + if (gst_gl_view_convert_get_output (mixer->viewconvert, + &mixer->auxilliary_out) != GST_FLOW_OK) + return FALSE; + } + + if (mixer->primary_out == NULL) + return FALSE; + + converted_buffer = mixer->primary_out; + v = 0; + n = gst_buffer_n_memory (converted_buffer); + g_assert (n == GST_VIDEO_INFO_N_PLANES (out_info) * views); + for (v = 0; v < views; v++) { + gst_buffer_add_video_meta_full (converted_buffer, v, + GST_VIDEO_INFO_FORMAT (out_info), + GST_VIDEO_INFO_WIDTH (out_info), + GST_VIDEO_INFO_HEIGHT (out_info), + GST_VIDEO_INFO_N_PLANES (out_info), out_info->offset, out_info->stride); + if (mixer->auxilliary_out) { + gst_buffer_add_video_meta_full (mixer->auxilliary_out, v, + GST_VIDEO_INFO_FORMAT (out_info), + GST_VIDEO_INFO_WIDTH (out_info), + GST_VIDEO_INFO_HEIGHT (out_info), + GST_VIDEO_INFO_N_PLANES (out_info), out_info->offset, + out_info->stride); + } + } + + return TRUE; +} + +/* Iterate the input sink pads, and choose the blend format + * we will generate before output conversion, which is RGBA + * at some suitable size */ +static void +gst_gl_stereo_mix_find_best_format (GstVideoAggregator * vagg, + GstCaps * downstream_caps, GstVideoInfo * best_info, + gboolean * at_least_one_alpha) +{ + GstGLStereoMix *mix = GST_GL_STEREO_MIX (vagg); + GList *l; + gint best_width = -1, best_height = -1; + gdouble best_fps = -1, cur_fps; + gint best_fps_n = 0, best_fps_d = 1; + GstVideoInfo *mix_info; + GstCaps *blend_caps, *tmp_caps; + + /* We'll deal with alpha internally, so just tell aggregator to + * be quiet */ + *at_least_one_alpha = FALSE; + + GST_OBJECT_LOCK (vagg); + + for (l = GST_ELEMENT (vagg)->sinkpads; l; l = l->next) { + GstVideoAggregatorPad *pad = l->data; + GstVideoInfo tmp = pad->info; + gint this_width, this_height; + gint fps_n, fps_d; + + if (!pad->info.finfo) + continue; + + /* This can happen if we release a pad and another pad hasn't been negotiated_caps yet */ + if 
(GST_VIDEO_INFO_FORMAT (&pad->info) == GST_VIDEO_FORMAT_UNKNOWN) + continue; + + /* Convert to per-view width/height for unpacked forms */ + gst_video_multiview_video_info_change_mode (&tmp, + GST_VIDEO_MULTIVIEW_MODE_SEPARATED, GST_VIDEO_MULTIVIEW_FLAGS_NONE); + + this_width = GST_VIDEO_INFO_WIDTH (&tmp); + this_height = GST_VIDEO_INFO_HEIGHT (&tmp); + fps_n = GST_VIDEO_INFO_FPS_N (&tmp); + fps_d = GST_VIDEO_INFO_FPS_D (&tmp); + + GST_INFO_OBJECT (vagg, "Input pad %" GST_PTR_FORMAT + " w %u h %u", pad, this_width, this_height); + + if (this_width == 0 || this_height == 0) + continue; + + if (best_width < this_width) + best_width = this_width; + if (best_height < this_height) + best_height = this_height; + + if (fps_d == 0) + cur_fps = 0.0; + else + gst_util_fraction_to_double (fps_n, fps_d, &cur_fps); + + if (best_fps < cur_fps) { + best_fps = cur_fps; + best_fps_n = fps_n; + best_fps_d = fps_d; + } + + /* FIXME: Preserve PAR for at least one input when different sized inputs */ + } + GST_OBJECT_UNLOCK (vagg); + + mix_info = &mix->mix_info; + gst_video_info_set_format (mix_info, GST_VIDEO_FORMAT_RGBA, best_width, + best_height); + + GST_VIDEO_INFO_FPS_N (mix_info) = best_fps_n; + GST_VIDEO_INFO_FPS_D (mix_info) = best_fps_d; + + GST_VIDEO_INFO_MULTIVIEW_MODE (mix_info) = GST_VIDEO_MULTIVIEW_MODE_SEPARATED; + GST_VIDEO_INFO_VIEWS (mix_info) = 2; + + /* FIXME: If input is marked as flipped or flopped, preserve those flags */ + GST_VIDEO_INFO_MULTIVIEW_FLAGS (mix_info) = GST_VIDEO_MULTIVIEW_FLAGS_NONE; + + /* Choose our output format based on downstream preferences */ + blend_caps = gst_video_info_to_caps (mix_info); + + gst_caps_set_features (blend_caps, 0, + gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY)); + + tmp_caps = get_converted_caps (GST_GL_STEREO_MIX (vagg), blend_caps); + gst_caps_unref (blend_caps); + + if (mix->out_caps) + gst_caps_unref (mix->out_caps); + + mix->out_caps = gst_caps_intersect (downstream_caps, tmp_caps); + gst_caps_unref (tmp_caps); + + GST_DEBUG_OBJECT (vagg, "Possible output caps %" GST_PTR_FORMAT, + mix->out_caps); + /* Tell videoaggregator our preferred size. Actual info gets + * overridden during caps nego */ + *best_info = *mix_info; +} diff --git a/ext/gl/gstglstereomix.h b/ext/gl/gstglstereomix.h new file mode 100644 index 0000000000..debe347c49 --- /dev/null +++ b/ext/gl/gstglstereomix.h @@ -0,0 +1,83 @@ +/* + * GStreamer + * Copyright (C) 2009 Julien Isorce + * Copyright (C) 2014 Jan Schmidt + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#ifndef __GST_GL_STEREO_MIX_H__ +#define __GST_GL_STEREO_MIX_H__ + +#include "gstglmixer.h" + +G_BEGIN_DECLS + +#define GST_TYPE_GL_STEREO_MIX (gst_gl_stereo_mix_get_type()) +#define GST_GL_STEREO_MIX(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_GL_STEREO_MIX, GstGLStereoMix)) +#define GST_GL_STEREO_MIX_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_GL_STEREO_MIX, GstGLStereoMixClass)) +#define GST_IS_GL_STEREO_MIX(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_GL_STEREO_MIX)) +#define GST_IS_GL_STEREO_MIX_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_GL_STEREO_MIX)) +#define GST_GL_STEREO_MIX_GET_CLASS(obj) \ + (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_GL_STEREO_MIX,GstGLStereoMixClass)) + +typedef struct _GstGLStereoMix GstGLStereoMix; +typedef struct _GstGLStereoMixClass GstGLStereoMixClass; +typedef struct _GstGLStereoMixFrameData GstGLStereoMixFrameData; + +struct _GstGLStereoMix +{ + GstGLMixer mixer; + + GPtrArray *array_buffers; + GPtrArray *frames; + + GLuint out_tex_id; + GstGLDownload *download; + + GstGLViewConvert *viewconvert; + GstGLStereoDownmix downmix_mode; + + GstCaps *out_caps; + GstVideoInfo out_info; + + GstVideoInfo mix_info; + + GPtrArray *input_frames; + GstBuffer *primary_out; + GstBuffer *auxilliary_out; +}; + +struct _GstGLStereoMixClass +{ + GstGLMixerClass mixer_class; +}; + +struct _GstGLStereoMixFrameData +{ + GstGLMixerFrameData base; + gboolean mapped; + GstBuffer *buf; +}; + +GType gst_gl_stereo_mix_get_type(void); + +G_END_DECLS +#endif /* __GST_GL_STEREO_MIX_H__ */ diff --git a/ext/gl/gstglstereosplit.c b/ext/gl/gstglstereosplit.c new file mode 100644 index 0000000000..a9e500c0b9 --- /dev/null +++ b/ext/gl/gstglstereosplit.c @@ -0,0 +1,928 @@ +/* + * GStreamer + * Copyright (C) 2015 Jan Schmidt + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +/** + * SECTION:element-glstereosplit + * + * Receive a stereoscopic video stream and split into left/right + * + * + * Examples + * |[ + * gst-launch videotestsrc ! glstereosplit name=s ! queue ! glimagesink s. ! queue ! glimagesink + * ]| + * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. 
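+ *
+ * As an additional illustrative sketch (not part of this patch, and untested
+ * here), the incoming packing can be stated explicitly with a capsfilter so
+ * the splitter knows the layout of the stream it receives; the multiview-mode
+ * value below is an assumption about how the source is packed, not something
+ * this element sets:
+ * |[
+ * gst-launch-1.0 videotestsrc ! video/x-raw,multiview-mode=side-by-side ! glstereosplit name=s ! queue ! glimagesink s. ! queue ! glimagesink
+ * ]|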
+ * + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstglstereosplit.h" + +#if GST_GL_HAVE_PLATFORM_EGL +#include +#endif + +#define GST_CAT_DEFAULT gst_gl_stereosplit_debug +GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); + +#define SUPPORTED_GL_APIS GST_GL_API_OPENGL | GST_GL_API_OPENGL3 +#define DEBUG_INIT \ + GST_DEBUG_CATEGORY_INIT (gst_gl_stereosplit_debug, "glstereosplit", 0, "glstereosplit element"); + +G_DEFINE_TYPE_WITH_CODE (GstGLStereoSplit, gst_gl_stereosplit, + GST_TYPE_ELEMENT, DEBUG_INIT); + +static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink", + GST_PAD_SINK, GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES + (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, + "RGBA") "; " +#if GST_GL_HAVE_PLATFORM_EGL + GST_VIDEO_CAPS_MAKE_WITH_FEATURES (GST_CAPS_FEATURE_MEMORY_EGL_IMAGE, + "RGBA") "; " +#endif + GST_VIDEO_CAPS_MAKE_WITH_FEATURES + (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, "RGBA") "; " + GST_VIDEO_CAPS_MAKE (GST_GL_COLOR_CONVERT_FORMATS) + ) + ); + +static GstStaticPadTemplate src_left_template = GST_STATIC_PAD_TEMPLATE ("left", + GST_PAD_SRC, GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES + (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, + "RGBA") +#if 0 + "; " +#if GST_GL_HAVE_PLATFORM_EGL + GST_VIDEO_CAPS_MAKE_WITH_FEATURES (GST_CAPS_FEATURE_MEMORY_EGL_IMAGE, + "RGBA") "; " +#endif + GST_VIDEO_CAPS_MAKE_WITH_FEATURES + (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, "RGBA") "; " + GST_VIDEO_CAPS_MAKE (GST_GL_COLOR_CONVERT_FORMATS) +#endif + ) + ); + +static GstStaticPadTemplate src_right_template = + GST_STATIC_PAD_TEMPLATE ("right", + GST_PAD_SRC, GST_PAD_ALWAYS, + GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE_WITH_FEATURES + (GST_CAPS_FEATURE_MEMORY_GL_MEMORY, + "RGBA") +#if 0 + "; " +#if GST_GL_HAVE_PLATFORM_EGL + GST_VIDEO_CAPS_MAKE_WITH_FEATURES (GST_CAPS_FEATURE_MEMORY_EGL_IMAGE, + "RGBA") "; " +#endif + GST_VIDEO_CAPS_MAKE_WITH_FEATURES + (GST_CAPS_FEATURE_META_GST_VIDEO_GL_TEXTURE_UPLOAD_META, "RGBA") "; " + GST_VIDEO_CAPS_MAKE (GST_GL_COLOR_CONVERT_FORMATS) +#endif + ) + ); + +static void stereosplit_reset (GstGLStereoSplit * self); +static void stereosplit_finalize (GstGLStereoSplit * self); +static void stereosplit_set_context (GstElement * element, + GstContext * context); +static GstFlowReturn stereosplit_chain (GstPad * pad, GstGLStereoSplit * split, + GstBuffer * buf); +static GstStateChangeReturn stereosplit_change_state (GstElement * element, + GstStateChange transition); +static gboolean stereosplit_sink_query (GstPad * pad, GstObject * parent, + GstQuery * query); +static gboolean stereosplit_sink_event (GstPad * pad, GstObject * parent, + GstEvent * event); +static gboolean stereosplit_src_query (GstPad * pad, GstObject * parent, + GstQuery * query); +static gboolean stereosplit_src_event (GstPad * pad, GstObject * parent, + GstEvent * event); +static gboolean ensure_context (GstGLStereoSplit * self); + +static void +gst_gl_stereosplit_class_init (GstGLStereoSplitClass * klass) +{ + GObjectClass *gobject_class = (GObjectClass *) klass; + GstElementClass *element_class = GST_ELEMENT_CLASS (klass); + + gst_element_class_set_static_metadata (element_class, + "GLStereoSplit", "Codec/Converter", + "Splits a stereoscopic stream into separate left/right streams", + "Jan Schmidt \n" + "Matthew Waters "); + + gobject_class->finalize = (GObjectFinalizeFunc) (stereosplit_finalize); + + element_class->change_state = stereosplit_change_state; + element_class->set_context = 
stereosplit_set_context; + + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&sink_template)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&src_left_template)); + gst_element_class_add_pad_template (element_class, + gst_static_pad_template_get (&src_right_template)); +} + +static void +gst_gl_stereosplit_init (GstGLStereoSplit * self) +{ + GstPad *pad; + + pad = self->sink_pad = + gst_pad_new_from_static_template (&sink_template, "sink"); + + gst_pad_set_chain_function (pad, (GstPadChainFunction) (stereosplit_chain)); + gst_pad_set_query_function (pad, stereosplit_sink_query); + gst_pad_set_event_function (pad, stereosplit_sink_event); + + gst_element_add_pad (GST_ELEMENT (self), self->sink_pad); + + pad = self->left_pad = + gst_pad_new_from_static_template (&src_left_template, "left"); + gst_pad_set_query_function (pad, stereosplit_src_query); + gst_pad_set_event_function (pad, stereosplit_src_event); + gst_element_add_pad (GST_ELEMENT (self), self->left_pad); + + pad = self->right_pad = + gst_pad_new_from_static_template (&src_right_template, "right"); + gst_pad_set_query_function (pad, stereosplit_src_query); + gst_pad_set_event_function (pad, stereosplit_src_event); + gst_element_add_pad (GST_ELEMENT (self), self->right_pad); + + self->viewconvert = gst_gl_view_convert_new (); +} + +static void +stereosplit_reset (GstGLStereoSplit * self) +{ + if (self->upload) + gst_object_replace ((GstObject **) & self->upload, NULL); + if (self->convert) + gst_object_replace ((GstObject **) & self->convert, NULL); + if (self->context) + gst_object_replace ((GstObject **) & self->context, NULL); + if (self->display) + gst_object_replace ((GstObject **) & self->display, NULL); +} + +static void +stereosplit_finalize (GstGLStereoSplit * self) +{ + GObjectClass *klass = G_OBJECT_CLASS (gst_gl_stereosplit_parent_class); + + if (self->viewconvert) + gst_object_replace ((GstObject **) & self->viewconvert, NULL); + + klass->finalize ((GObject *) (self)); +} + +static void +stereosplit_set_context (GstElement * element, GstContext * context) +{ + GstGLStereoSplit *stereosplit = GST_GL_STEREOSPLIT (element); + + gst_gl_handle_set_context (element, context, &stereosplit->display, + &stereosplit->other_context); + + if (stereosplit->display) + gst_gl_display_filter_gl_api (stereosplit->display, SUPPORTED_GL_APIS); +} + +static GstStateChangeReturn +stereosplit_change_state (GstElement * element, GstStateChange transition) +{ + GstGLStereoSplit *stereosplit = GST_GL_STEREOSPLIT (element); + GstStateChangeReturn result; + + switch (transition) { + case GST_STATE_CHANGE_NULL_TO_READY: + if (!gst_gl_ensure_element_data (element, &stereosplit->display, + &stereosplit->other_context)) + return GST_STATE_CHANGE_FAILURE; + + gst_gl_display_filter_gl_api (stereosplit->display, SUPPORTED_GL_APIS); + break; + default: + break; + } + + result = + GST_ELEMENT_CLASS (gst_gl_stereosplit_parent_class)->change_state + (element, transition); + + switch (transition) { + case GST_STATE_CHANGE_READY_TO_NULL: + if (stereosplit->other_context) { + gst_object_unref (stereosplit->other_context); + stereosplit->other_context = NULL; + } + + if (stereosplit->display) { + gst_object_unref (stereosplit->display); + stereosplit->display = NULL; + } + break; + case GST_STATE_CHANGE_PAUSED_TO_READY: + stereosplit_reset (stereosplit); + break; + default: + break; + } + + return result; +} + +static GstCaps * +stereosplit_transform_caps (GstGLStereoSplit * self, 
GstPadDirection direction, + GstCaps * caps, GstCaps * filter) +{ + GstCaps *next_caps; + + /* FIXME: Is this the right way to ensure a context here ? */ + if (!ensure_context (self)) + return NULL; + + if (direction == GST_PAD_SINK) { + next_caps = + gst_gl_upload_transform_caps (self->context, direction, caps, filter); + caps = next_caps; + + next_caps = + gst_gl_color_convert_transform_caps (self->context, direction, caps, + NULL); + gst_caps_unref (caps); + caps = next_caps; + + next_caps = + gst_gl_view_convert_transform_caps (self->viewconvert, direction, caps, + NULL); + gst_caps_unref (caps); + } else { + next_caps = + gst_gl_view_convert_transform_caps (self->viewconvert, direction, caps, + filter); + caps = next_caps; + + next_caps = + gst_gl_color_convert_transform_caps (self->context, direction, caps, + NULL); + gst_caps_unref (caps); + caps = next_caps; + + next_caps = + gst_gl_upload_transform_caps (self->context, direction, caps, NULL); + gst_caps_unref (caps); + } + + return next_caps; +} + +static GstCaps * +strip_mview_fields (GstCaps * incaps, GstVideoMultiviewFlags keep_flags) +{ + GstCaps *outcaps = gst_caps_make_writable (incaps); + + gint i, n; + + n = gst_caps_get_size (outcaps); + for (i = 0; i < n; i++) { + GstStructure *st = gst_caps_get_structure (outcaps, i); + GstVideoMultiviewFlags flags, mask; + + gst_structure_remove_field (st, "multiview-mode"); + if (gst_structure_get_flagset (st, "multiview-flags", &flags, &mask)) { + flags &= keep_flags; + mask = keep_flags; + gst_structure_set (st, "multiview-flags", + GST_TYPE_VIDEO_MULTIVIEW_FLAGSET, flags, mask, NULL); + } + } + + return outcaps; +} + +static gboolean stereosplit_do_bufferpool (GstGLStereoSplit * self, + GstCaps * caps); + +static GstCaps * +stereosplit_get_src_caps (GstGLStereoSplit * split, + GstPad * pad, GstVideoMultiviewMode preferred_mode) +{ + GstCaps *outcaps, *tmp, *templ_caps; + GValue item = G_VALUE_INIT, list = G_VALUE_INIT; + + /* Get the template format */ + templ_caps = gst_pad_get_pad_template_caps (pad); + + /* And limit down to the preferred mode or mono */ + templ_caps = gst_caps_make_writable (templ_caps); + + g_value_init (&item, G_TYPE_STRING); + g_value_init (&list, GST_TYPE_LIST); + g_value_set_static_string (&item, + gst_video_multiview_mode_to_caps_string (preferred_mode)); + gst_value_list_append_value (&list, &item); + g_value_set_static_string (&item, + gst_video_multiview_mode_to_caps_string (GST_VIDEO_MULTIVIEW_MODE_MONO)); + gst_value_list_append_value (&list, &item); + + gst_caps_set_value (templ_caps, "multiview-mode", &list); + + g_value_unset (&list); + g_value_unset (&item); + + /* And intersect with the peer */ + if ((tmp = gst_pad_peer_query_caps (pad, NULL)) == NULL) { + gst_caps_unref (templ_caps); + return NULL; + } + + outcaps = gst_caps_intersect_full (tmp, templ_caps, GST_CAPS_INTERSECT_FIRST); + gst_caps_unref (tmp); + gst_caps_unref (templ_caps); + + GST_DEBUG_OBJECT (split, "Src pad %" GST_PTR_FORMAT " caps %" GST_PTR_FORMAT, + pad, outcaps); + return outcaps; +} + +static gboolean +stereosplit_set_output_caps (GstGLStereoSplit * split, GstCaps * sinkcaps) +{ + GstCaps *left = NULL, *right = NULL, *tridcaps = NULL; + GstCaps *tmp, *combined; + gboolean res = FALSE; + + /* Choose some preferred output caps. 
+ * Keep input width/height and PAR, preserve preferred output + * multiview flags for flipping/flopping if any, and set each + * left right pad to either left/mono and right/mono, as they prefer + */ + + /* Calculate what downstream can collectively support */ + left = + stereosplit_get_src_caps (split, split->left_pad, + GST_VIDEO_MULTIVIEW_MODE_LEFT); + if (left == NULL) + goto fail; + right = + stereosplit_get_src_caps (split, split->right_pad, + GST_VIDEO_MULTIVIEW_MODE_RIGHT); + if (right == NULL) + goto fail; + + tridcaps = stereosplit_transform_caps (split, GST_PAD_SINK, sinkcaps, NULL); + + if (!tridcaps || gst_caps_is_empty (tridcaps)) { + GST_ERROR_OBJECT (split, + "Failed to transform input caps %" GST_PTR_FORMAT, sinkcaps); + goto fail; + } + + /* Preserve downstream preferred flipping/flopping */ + tmp = + strip_mview_fields (gst_caps_ref (left), + GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLIPPED | + GST_VIDEO_MULTIVIEW_FLAGS_LEFT_FLOPPED); + combined = gst_caps_intersect (tridcaps, tmp); + gst_caps_unref (tridcaps); + gst_caps_unref (tmp); + tridcaps = combined; + + tmp = + strip_mview_fields (gst_caps_ref (right), + GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLIPPED | + GST_VIDEO_MULTIVIEW_FLAGS_RIGHT_FLOPPED); + combined = gst_caps_intersect (tridcaps, tmp); + gst_caps_unref (tridcaps); + gst_caps_unref (tmp); + tridcaps = combined; + + if (G_UNLIKELY (gst_caps_is_empty (tridcaps))) { + gst_caps_unref (tridcaps); + goto fail; + } + + /* Now generate the version for each output pad */ + GST_DEBUG_OBJECT (split, "Attempting to set output caps %" GST_PTR_FORMAT, + tridcaps); + tmp = gst_caps_intersect (tridcaps, left); + gst_caps_unref (left); + left = tmp; + left = gst_caps_fixate (left); + if (!gst_pad_set_caps (split->left_pad, left)) { + GST_ERROR_OBJECT (split, + "Failed to set left output caps %" GST_PTR_FORMAT, left); + goto fail; + } + + tmp = gst_caps_intersect (tridcaps, right); + gst_caps_unref (right); + right = tmp; + right = gst_caps_fixate (right); + if (!gst_pad_set_caps (split->right_pad, right)) { + GST_ERROR_OBJECT (split, + "Failed to set right output caps %" GST_PTR_FORMAT, right); + goto fail; + } + + /* FIXME: Provide left and right caps to do_bufferpool */ + stereosplit_do_bufferpool (split, left); + + res = TRUE; + +fail: + if (left) + gst_caps_unref (left); + if (right) + gst_caps_unref (right); + if (tridcaps) + gst_caps_unref (tridcaps); + return res; +} + +static gboolean +_find_local_gl_context (GstGLStereoSplit * split) +{ + GstQuery *query; + GstContext *context; + const GstStructure *s; + + if (split->context) + return TRUE; + + query = gst_query_new_context ("gst.gl.local_context"); + if (!split->context + && gst_gl_run_query (GST_ELEMENT (split), query, GST_PAD_SRC)) { + gst_query_parse_context (query, &context); + if (context) { + s = gst_context_get_structure (context); + gst_structure_get (s, "context", GST_GL_TYPE_CONTEXT, &split->context, + NULL); + } + } + if (!split->context + && gst_gl_run_query (GST_ELEMENT (split), query, GST_PAD_SINK)) { + gst_query_parse_context (query, &context); + if (context) { + s = gst_context_get_structure (context); + gst_structure_get (s, "context", GST_GL_TYPE_CONTEXT, &split->context, + NULL); + } + } + + GST_DEBUG_OBJECT (split, "found local context %p", split->context); + + gst_query_unref (query); + + if (split->context) + return TRUE; + + return FALSE; +} + +static void +_init_upload (GstGLStereoSplit * split) +{ + GstGLContext *context = split->context; + + if (!split->upload) { + GstCaps *in_caps = 
gst_pad_get_current_caps (GST_PAD (split->sink_pad)); + GstCaps *split_caps = gst_pad_get_current_caps (split->left_pad); + GstCaps *upload_caps = gst_caps_copy (in_caps); + GstCapsFeatures *gl_features = + gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY); + GstCaps *gl_caps; + + split->upload = gst_gl_upload_new (context); + + gst_caps_set_features (upload_caps, 0, + gst_caps_features_copy (gl_features)); + gst_gl_upload_set_caps (split->upload, in_caps, upload_caps); + gst_caps_unref (in_caps); + + gl_caps = gst_caps_copy (upload_caps); + gst_caps_set_simple (gl_caps, "format", G_TYPE_STRING, "RGBA", NULL); + gst_caps_set_features (gl_caps, 0, gst_caps_features_copy (gl_features)); + + if (!split->convert) { + split->convert = gst_gl_color_convert_new (context); + gst_gl_color_convert_set_caps (split->convert, upload_caps, gl_caps); + } + + gst_caps_unref (upload_caps); + gst_caps_features_free (gl_features); + + gst_gl_view_convert_set_context (split->viewconvert, split->context); + + split_caps = gst_caps_make_writable (split_caps); + gst_caps_set_simple (split_caps, "multiview-mode", G_TYPE_STRING, + "separated", "views", G_TYPE_INT, 2, NULL); + + gst_gl_view_convert_set_caps (split->viewconvert, gl_caps, split_caps); + + gst_caps_unref (split_caps); + gst_caps_unref (gl_caps); + } +} + +static gboolean +ensure_context (GstGLStereoSplit * self) +{ + GError *error = NULL; + + if (!gst_gl_ensure_element_data (self, &self->display, &self->other_context)) + return FALSE; + + gst_gl_display_filter_gl_api (self->display, SUPPORTED_GL_APIS); + + _find_local_gl_context (self); + + if (!self->context) { + GST_OBJECT_LOCK (self->display); + do { + if (self->context) + gst_object_unref (self->context); + /* just get a GL context. we don't care */ + self->context = + gst_gl_display_get_gl_context_for_thread (self->display, NULL); + if (!self->context) { + self->context = gst_gl_context_new (self->display); + if (!gst_gl_context_create (self->context, self->other_context, &error)) + goto context_error; + } + } while (!gst_gl_display_add_context (self->display, self->context)); + GST_OBJECT_UNLOCK (self->display); + } + + return TRUE; + +context_error: + { + GST_ELEMENT_ERROR (self, RESOURCE, NOT_FOUND, ("%s", error->message), + (NULL)); + return FALSE; + } +} + +static gboolean +stereosplit_decide_allocation (GstGLStereoSplit * self, GstQuery * query) +{ + if (!ensure_context (self)) + return FALSE; + if (self->upload) + gst_object_replace ((GstObject **) & self->upload, NULL); + if (self->convert) + gst_object_replace ((GstObject **) & self->convert, NULL); + + return TRUE; + +} + +static gboolean +stereosplit_propose_allocation (GstGLStereoSplit * self, GstQuery * query) +{ + + if (!gst_gl_ensure_element_data (self, &self->display, &self->other_context)) + return FALSE; + + _init_upload (self); + + gst_gl_upload_propose_allocation (self->upload, NULL, query); + + return TRUE; +} + +static gboolean +stereosplit_do_bufferpool (GstGLStereoSplit * self, GstCaps * caps) +{ + GstQuery *query; + + query = gst_query_new_allocation (caps, TRUE); + if (!gst_pad_peer_query (self->left_pad, query)) { + if (!gst_pad_peer_query (self->right_pad, query)) { + GST_DEBUG_OBJECT (self, "peer ALLOCATION query failed on both src pads"); + } + } + + if (!stereosplit_decide_allocation (self, query)) { + gst_query_unref (query); + return FALSE; + } + + gst_query_unref (query); + return TRUE; +} + +static GstFlowReturn +stereosplit_chain (GstPad * pad, GstGLStereoSplit * split, GstBuffer * buf) +{ + 
GstBuffer *uploaded_buffer, *converted_buffer, *left, *right; + GstBuffer *split_buffer = NULL; + GstFlowReturn ret; + gint i, n_planes; + + if (!split->upload) + _init_upload (split); + + n_planes = GST_VIDEO_INFO_N_PLANES (&split->viewconvert->out_info); + + GST_LOG_OBJECT (split, "chaining buffer %" GST_PTR_FORMAT, buf); + + if (GST_GL_UPLOAD_DONE != gst_gl_upload_perform_with_buffer (split->upload, + buf, &uploaded_buffer)) { + gst_buffer_unref (buf); + GST_ELEMENT_ERROR (split, RESOURCE, NOT_FOUND, ("%s", + "Failed to upload buffer"), (NULL)); + return GST_FLOW_ERROR; + } + gst_buffer_unref (buf); + + if (!(converted_buffer = + gst_gl_color_convert_perform (split->convert, uploaded_buffer))) { + GST_ELEMENT_ERROR (split, RESOURCE, NOT_FOUND, ("%s", + "Failed to convert buffer"), (NULL)); + gst_buffer_unref (uploaded_buffer); + return GST_FLOW_ERROR; + } + gst_buffer_unref (uploaded_buffer); + + if (gst_gl_view_convert_submit_input_buffer (split->viewconvert, + GST_BUFFER_IS_DISCONT (converted_buffer), + converted_buffer) != GST_FLOW_OK) { + GST_ELEMENT_ERROR (split, RESOURCE, NOT_FOUND, ("%s", + "Failed to 3d convert buffer"), + ("Could not get submit input buffer")); + return GST_FLOW_ERROR; + } + + ret = gst_gl_view_convert_get_output (split->viewconvert, &split_buffer); + if (ret != GST_FLOW_OK) { + GST_ELEMENT_ERROR (split, RESOURCE, NOT_FOUND, ("%s", + "Failed to 3d convert buffer"), ("Could not get output buffer")); + return GST_FLOW_ERROR; + } + if (split_buffer == NULL) + return GST_FLOW_OK; /* Need another input buffer */ + + left = gst_buffer_new (); + gst_buffer_copy_into (left, buf, + GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS, 0, -1); + GST_BUFFER_FLAG_UNSET (left, GST_VIDEO_BUFFER_FLAG_FIRST_IN_BUNDLE); + + gst_buffer_add_parent_buffer_meta (left, split_buffer); + + for (i = 0; i < n_planes; i++) { + GstMemory *mem = gst_buffer_get_memory (split_buffer, i); + gst_buffer_append_memory (left, mem); + } + + ret = gst_pad_push (split->left_pad, gst_buffer_ref (left)); + /* Allow unlinked on the first pad - as long as the 2nd isn't unlinked */ + gst_buffer_unref (left); + if (G_UNLIKELY (ret != GST_FLOW_OK && ret != GST_FLOW_NOT_LINKED)) { + gst_buffer_unref (split_buffer); + return ret; + } + + right = gst_buffer_new (); + gst_buffer_copy_into (right, buf, + GST_BUFFER_COPY_FLAGS | GST_BUFFER_COPY_TIMESTAMPS, 0, -1); + GST_BUFFER_FLAG_UNSET (left, GST_VIDEO_BUFFER_FLAG_FIRST_IN_BUNDLE); + gst_buffer_add_parent_buffer_meta (right, split_buffer); + for (i = n_planes; i < n_planes * 2; i++) { + GstMemory *mem = gst_buffer_get_memory (split_buffer, i); + gst_buffer_append_memory (right, mem); + } + + ret = gst_pad_push (split->right_pad, gst_buffer_ref (right)); + gst_buffer_unref (right); + gst_buffer_unref (split_buffer); + return ret; +} + +static gboolean +stereosplit_src_query (GstPad * pad, GstObject * parent, GstQuery * query) +{ + GstGLStereoSplit *split = GST_GL_STEREOSPLIT (parent); + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_CONTEXT: + { + const gchar *context_type; + GstContext *context, *old_context; + gboolean ret; + + ret = gst_gl_handle_context_query ((GstElement *) split, query, + &split->display, &split->other_context); + if (split->display) + gst_gl_display_filter_gl_api (split->display, SUPPORTED_GL_APIS); + gst_query_parse_context_type (query, &context_type); + + if (g_strcmp0 (context_type, "gst.gl.local_context") == 0) { + GstStructure *s; + + gst_query_parse_context (query, &old_context); + + if (old_context) + context = 
gst_context_copy (old_context); + else + context = gst_context_new ("gst.gl.local_context", FALSE); + + s = gst_context_writable_structure (context); + gst_structure_set (s, "context", GST_GL_TYPE_CONTEXT, split->context, + NULL); + gst_query_set_context (query, context); + gst_context_unref (context); + + ret = split->context != NULL; + } + GST_LOG_OBJECT (split, "context query of type %s %i", context_type, ret); + + if (ret) + return ret; + + return gst_pad_query_default (pad, parent, query); + } + /* FIXME: Handle caps query */ + default: + return gst_pad_query_default (pad, parent, query); + } +} + +static gboolean +stereosplit_src_event (GstPad * pad, GstObject * parent, GstEvent * event) +{ + return gst_pad_event_default (pad, parent, event); +} + +static gboolean +stereosplit_sink_query (GstPad * pad, GstObject * parent, GstQuery * query) +{ + GstGLStereoSplit *split = GST_GL_STEREOSPLIT (parent); + + GST_DEBUG_OBJECT (split, "sink query %s", + gst_query_type_get_name (GST_QUERY_TYPE (query))); + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_CONTEXT: + { + const gchar *context_type; + GstContext *context, *old_context; + gboolean ret; + + ret = gst_gl_handle_context_query ((GstElement *) split, query, + &split->display, &split->other_context); + if (split->display) + gst_gl_display_filter_gl_api (split->display, SUPPORTED_GL_APIS); + gst_query_parse_context_type (query, &context_type); + + if (g_strcmp0 (context_type, "gst.gl.local_context") == 0) { + GstStructure *s; + + gst_query_parse_context (query, &old_context); + + if (old_context) + context = gst_context_copy (old_context); + else + context = gst_context_new ("gst.gl.local_context", FALSE); + + s = gst_context_writable_structure (context); + gst_structure_set (s, "context", GST_GL_TYPE_CONTEXT, split->context, + NULL); + gst_query_set_context (query, context); + gst_context_unref (context); + + ret = split->context != NULL; + } + GST_LOG_OBJECT (split, "context query of type %s %i", context_type, ret); + + if (ret) + return ret; + + return gst_pad_query_default (pad, parent, query); + } + case GST_QUERY_ALLOCATION: + { + return stereosplit_propose_allocation (split, query); + } + case GST_QUERY_ACCEPT_CAPS: + { + GstCaps *possible, *caps; + gboolean allowed; + + gst_query_parse_accept_caps (query, &caps); + + if (!(possible = gst_pad_query_caps (split->sink_pad, caps))) + return FALSE; + + allowed = gst_caps_is_subset (caps, possible); + gst_caps_unref (possible); + + gst_query_set_accept_caps_result (query, allowed); + return allowed; + } + case GST_QUERY_CAPS: + { + GstCaps *filter, *left, *right, *combined, *ret, *templ_caps; + + gst_query_parse_caps (query, &filter); + + /* Calculate what downstream can collectively support */ + if (!(left = gst_pad_peer_query_caps (split->left_pad, NULL))) + return FALSE; + if (!(right = gst_pad_peer_query_caps (split->right_pad, NULL))) + return FALSE; + + /* Strip out multiview mode and flags that might break the + * intersection, since we can convert. 
+ * We could keep downstream preferred flip/flopping and list + * separated as preferred in the future which might + * theoretically allow us an easier conversion, but it's not essential + */ + left = strip_mview_fields (left, GST_VIDEO_MULTIVIEW_FLAGS_NONE); + right = strip_mview_fields (right, GST_VIDEO_MULTIVIEW_FLAGS_NONE); + + combined = gst_caps_intersect (left, right); + gst_caps_unref (left); + gst_caps_unref (right); + + /* Intersect peer caps with our template formats */ + templ_caps = gst_pad_get_pad_template_caps (split->left_pad); + ret = + gst_caps_intersect_full (combined, templ_caps, + GST_CAPS_INTERSECT_FIRST); + gst_caps_unref (templ_caps); + + gst_caps_unref (combined); + combined = ret; + + if (!combined || gst_caps_is_empty (combined)) { + gst_caps_unref (combined); + return FALSE; + } + + /* Convert from the src pad caps to input formats we support */ + ret = stereosplit_transform_caps (split, GST_PAD_SRC, combined, filter); + gst_caps_unref (combined); + combined = ret; + + /* Intersect with the sink pad template then */ + templ_caps = gst_pad_get_pad_template_caps (split->sink_pad); + ret = + gst_caps_intersect_full (combined, templ_caps, + GST_CAPS_INTERSECT_FIRST); + gst_caps_unref (templ_caps); + + GST_LOG_OBJECT (split, "Returning sink pad caps %" GST_PTR_FORMAT, ret); + + gst_query_set_caps_result (query, ret); + return !gst_caps_is_empty (ret); + } + default: + return gst_pad_query_default (pad, parent, query); + } +} + +static gboolean +stereosplit_sink_event (GstPad * pad, GstObject * parent, GstEvent * event) +{ + GstGLStereoSplit *split = GST_GL_STEREOSPLIT (parent); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_CAPS: + { + GstCaps *caps; + + gst_event_parse_caps (event, &caps); + + return stereosplit_set_output_caps (split, caps); + } + default: + return gst_pad_event_default (pad, parent, event); + } +} diff --git a/ext/gl/gstglstereosplit.h b/ext/gl/gstglstereosplit.h new file mode 100644 index 0000000000..5a1e3c6b0b --- /dev/null +++ b/ext/gl/gstglstereosplit.h @@ -0,0 +1,66 @@ +/* + * GStreamer + * Copyright (C) 2015 Jan Schmidt + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. 
+ */ + +#ifndef __GST_GL_STEREOSPLIT_H__ +#define __GST_GL_STEREOSPLIT_H__ + +#include +#include +#include + +G_BEGIN_DECLS + +#define GST_TYPE_GL_STEREOSPLIT (gst_gl_stereosplit_get_type()) +#define GST_GL_STEREOSPLIT(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_GL_STEREOSPLIT,GstGLStereoSplit)) +#define GST_IS_GL_STEREOSPLIT(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_GL_STEREOSPLIT)) +#define GST_GL_STEREOSPLIT_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_GL_STEREOSPLIT,GstGLStereoSplitClass)) +#define GST_IS_GL_STEREOSPLIT_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_GL_STEREOSPLIT)) +#define GST_GL_STEREOSPLIT_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_GL_STEREOSPLIT,GstGLStereoSplitClass)) + +typedef struct _GstGLStereoSplit GstGLStereoSplit; +typedef struct _GstGLStereoSplitClass GstGLStereoSplitClass; + +struct _GstGLStereoSplit +{ + GstElement parent; + + GstPad *sink_pad; + GstPad *left_pad; + GstPad *right_pad; + + GstGLDisplay *display; + GstGLContext *context; + GstGLContext *other_context; + + GstGLUpload *upload; + GstGLColorConvert *convert; + GstGLViewConvert *viewconvert; +}; + +struct _GstGLStereoSplitClass +{ + GstElementClass parent_class; +}; + +GType gst_gl_stereosplit_get_type (void); + +G_END_DECLS + +#endif diff --git a/ext/gl/gstglviewconvert.c b/ext/gl/gstglviewconvert.c new file mode 100644 index 0000000000..6dcad7db26 --- /dev/null +++ b/ext/gl/gstglviewconvert.c @@ -0,0 +1,353 @@ +/* + * GStreamer + * Copyright (C) 2009 Julien Isorce + * Copyright (C) 2014 Jan Schmidt + * Copyright (C) 2015 Matthew Waters + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +/** + * SECTION:element-glviewconvert + * + * Convert stereoscopic video between different representations using fragment shaders. + * + * The element can use either property settings or caps negotiation to choose the + * input and output formats to process. + * + * + * Examples + * |[ + * gst-launch-1.0 videotestsrc ! glviewconvert ! glimagesink + * ]| + * |[ + * gst-launch-1.0 videotestsrc pattern=checkers-1 ! glviewconvert input-mode-override=side-by-side ! glimagesink -v + * ]| + * FBO (Frame Buffer Object) and GLSL (OpenGL Shading Language) are required. 
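+ *
+ * A further sketch (untested here) combining the two override properties this
+ * element declares: the input is treated as side-by-side packed and a
+ * top-bottom packed output is requested; the chosen modes are only examples:
+ * |[
+ * gst-launch-1.0 videotestsrc ! glviewconvert input-mode-override=side-by-side output-mode-override=top-bottom ! glimagesink
+ * ]|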
+ * + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include +#include "gstglviewconvert.h" + +#define GST_CAT_DEFAULT gst_gl_view_convert_element_debug +GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT); + +enum +{ + PROP_0, + PROP_INPUT_LAYOUT, + PROP_INPUT_FLAGS, + PROP_OUTPUT_LAYOUT, + PROP_OUTPUT_FLAGS, + PROP_OUTPUT_DOWNMIX_MODE +}; + +#define DEFAULT_DOWNMIX GST_GL_STEREO_DOWNMIX_ANAGLYPH_GREEN_MAGENTA_DUBOIS + +#define DEBUG_INIT \ + GST_DEBUG_CATEGORY_INIT (gst_gl_view_convert_element_debug, "glview_convertelement", 0, "glview_convert element"); + +G_DEFINE_TYPE_WITH_CODE (GstGLViewConvertElement, gst_gl_view_convert_element, + GST_TYPE_GL_FILTER, DEBUG_INIT); +#define parent_class gst_gl_view_convert_element_parent_class + +static void gst_gl_view_convert_dispose (GObject * object); +static void gst_gl_view_convert_element_set_property (GObject * object, + guint prop_id, const GValue * value, GParamSpec * pspec); +static void gst_gl_view_convert_element_get_property (GObject * object, + guint prop_id, GValue * value, GParamSpec * pspec); + +static gboolean gst_gl_view_convert_element_stop (GstBaseTransform * bt); +static gboolean +gst_gl_view_convert_element_set_caps (GstGLFilter * filter, GstCaps * incaps, + GstCaps * outcaps); +static GstCaps *gst_gl_view_convert_element_transform_internal_caps (GstGLFilter + * filter, GstPadDirection direction, GstCaps * caps, GstCaps * filter_caps); +static GstCaps *gst_gl_view_convert_element_fixate_caps (GstBaseTransform * + trans, GstPadDirection direction, GstCaps * caps, GstCaps * othercaps); +static GstFlowReturn +gst_gl_view_convert_element_submit_input_buffer (GstBaseTransform * trans, + gboolean is_discont, GstBuffer * input); +static gboolean +gst_gl_view_convert_element_generate_output_buffer (GstBaseTransform * bt, + GstBuffer ** outbuf); + +static void +gst_gl_view_convert_element_class_init (GstGLViewConvertElementClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *element_class; + + gobject_class = (GObjectClass *) klass; + element_class = GST_ELEMENT_CLASS (klass); + + gobject_class->set_property = gst_gl_view_convert_element_set_property; + gobject_class->get_property = gst_gl_view_convert_element_get_property; + gobject_class->dispose = gst_gl_view_convert_dispose; + + gst_element_class_set_metadata (element_class, + "OpenGL Multiview/3D conversion filter", "Filter", + "Convert stereoscopic/multiview video formats", + "Jan Schmidt \n" + "Matthew Waters "); + + GST_GL_FILTER_CLASS (klass)->set_caps = gst_gl_view_convert_element_set_caps; + + GST_GL_FILTER_CLASS (klass)->transform_internal_caps = + gst_gl_view_convert_element_transform_internal_caps; + GST_BASE_TRANSFORM_CLASS (klass)->stop = gst_gl_view_convert_element_stop; + GST_BASE_TRANSFORM_CLASS (klass)->fixate_caps = + gst_gl_view_convert_element_fixate_caps; + GST_BASE_TRANSFORM_CLASS (klass)->submit_input_buffer = + gst_gl_view_convert_element_submit_input_buffer; + GST_BASE_TRANSFORM_CLASS (klass)->generate_output = + gst_gl_view_convert_element_generate_output_buffer; + + g_object_class_install_property (gobject_class, PROP_INPUT_LAYOUT, + g_param_spec_enum ("input-mode-override", + "Input Multiview Mode Override", + "Override any input information about multiview layout", + GST_TYPE_VIDEO_MULTIVIEW_FRAME_PACKING, + GST_VIDEO_MULTIVIEW_MODE_NONE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_INPUT_FLAGS, + g_param_spec_flags ("input-flags-override", + "Input Multiview Flags Override", + 
"Override any input information about multiview layout flags", + GST_TYPE_VIDEO_MULTIVIEW_FLAGS, GST_VIDEO_MULTIVIEW_FLAGS_NONE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_OUTPUT_LAYOUT, + g_param_spec_enum ("output-mode-override", + "Output Multiview Mode Override", + "Override automatic output mode selection for multiview layout", + GST_TYPE_VIDEO_MULTIVIEW_MODE, GST_VIDEO_MULTIVIEW_MODE_NONE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_OUTPUT_FLAGS, + g_param_spec_flags ("output-flags-override", + "Output Multiview Flags Override", + "Override automatic negotiation for output multiview layout flags", + GST_TYPE_VIDEO_MULTIVIEW_FLAGS, GST_VIDEO_MULTIVIEW_FLAGS_NONE, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); + g_object_class_install_property (gobject_class, PROP_OUTPUT_DOWNMIX_MODE, + g_param_spec_enum ("downmix-mode", "Mode for mono downmixed output", + "Output anaglyph type to generate when downmixing to mono", + GST_TYPE_GL_STEREO_DOWNMIX_MODE_TYPE, DEFAULT_DOWNMIX, + G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS)); +} + +static void +gst_gl_view_convert_element_init (GstGLViewConvertElement * convert) +{ + convert->viewconvert = gst_gl_view_convert_new (); +} + +static void +gst_gl_view_convert_dispose (GObject * object) +{ + GstGLViewConvertElement *convert = GST_GL_VIEW_CONVERT_ELEMENT (object); + + if (convert->viewconvert) { + gst_object_unref (convert->viewconvert); + convert->viewconvert = NULL; + } + G_OBJECT_CLASS (parent_class)->dispose (object); +} + +static gboolean +gst_gl_view_convert_element_set_caps (GstGLFilter * filter, GstCaps * incaps, + GstCaps * outcaps) +{ + GstGLViewConvertElement *viewconvert_filter = + GST_GL_VIEW_CONVERT_ELEMENT (filter); + GstCapsFeatures *gl_features; + gboolean ret; + + GST_DEBUG_OBJECT (filter, "incaps %" GST_PTR_FORMAT + " outcaps %" GST_PTR_FORMAT, incaps, outcaps); + /* The view_convert component needs RGBA caps */ + incaps = gst_caps_copy (incaps); + outcaps = gst_caps_copy (outcaps); + + gst_caps_set_simple (incaps, "format", G_TYPE_STRING, "RGBA", NULL); + gl_features = + gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY); + gst_caps_set_features (incaps, 0, gl_features); + + gst_caps_set_simple (outcaps, "format", G_TYPE_STRING, "RGBA", NULL); + gl_features = + gst_caps_features_from_string (GST_CAPS_FEATURE_MEMORY_GL_MEMORY); + gst_caps_set_features (outcaps, 0, gl_features); + + ret = gst_gl_view_convert_set_caps (viewconvert_filter->viewconvert, + incaps, outcaps); + + gst_caps_unref (incaps); + gst_caps_unref (outcaps); + + return ret; +} + +static GstCaps * +gst_gl_view_convert_element_transform_internal_caps (GstGLFilter * filter, + GstPadDirection direction, GstCaps * caps, GstCaps * filter_caps) +{ + GstGLViewConvertElement *viewconvert_filter = + GST_GL_VIEW_CONVERT_ELEMENT (filter); + GstCaps *result; + + GST_DEBUG_OBJECT (filter, "dir %s transforming caps: %" GST_PTR_FORMAT, + direction == GST_PAD_SINK ? 
"sink" : "src", caps); + + result = + gst_gl_view_convert_transform_caps (viewconvert_filter->viewconvert, + direction, caps, NULL); + + GST_DEBUG_OBJECT (filter, "returning caps: %" GST_PTR_FORMAT, result); + + return result; +} + +static GstCaps * +gst_gl_view_convert_element_fixate_caps (GstBaseTransform * trans, + GstPadDirection direction, GstCaps * caps, GstCaps * othercaps) +{ + GstGLViewConvertElement *viewconvert_filter = + GST_GL_VIEW_CONVERT_ELEMENT (trans); + + othercaps = gst_gl_view_convert_fixate_caps (viewconvert_filter->viewconvert, + direction, caps, othercaps); + + if (gst_caps_is_empty (othercaps)) + return othercaps; + + /* Let GLfilter do the rest */ + return + GST_BASE_TRANSFORM_CLASS + (gst_gl_view_convert_element_parent_class)->fixate_caps (trans, direction, + caps, othercaps); +} + +static gboolean +gst_gl_view_convert_element_stop (GstBaseTransform * bt) +{ + GstGLViewConvertElement *viewconvert_filter = + GST_GL_VIEW_CONVERT_ELEMENT (bt); + + gst_gl_view_convert_reset (viewconvert_filter->viewconvert); + + return GST_BASE_TRANSFORM_CLASS (parent_class)->stop (bt); +} + +static void +gst_gl_view_convert_element_set_property (GObject * object, guint prop_id, + const GValue * value, GParamSpec * pspec) +{ + GstGLViewConvertElement *convert = GST_GL_VIEW_CONVERT_ELEMENT (object); + + switch (prop_id) { + case PROP_INPUT_LAYOUT: + case PROP_INPUT_FLAGS: + g_object_set_property (G_OBJECT (convert->viewconvert), pspec->name, + value); + gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM (convert)); + break; + case PROP_OUTPUT_LAYOUT: + case PROP_OUTPUT_FLAGS: + g_object_set_property (G_OBJECT (convert->viewconvert), pspec->name, + value); + gst_base_transform_reconfigure_src (GST_BASE_TRANSFORM (convert)); + break; + case PROP_OUTPUT_DOWNMIX_MODE: + g_object_set_property (G_OBJECT (convert->viewconvert), pspec->name, + value); + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static void +gst_gl_view_convert_element_get_property (GObject * object, guint prop_id, + GValue * value, GParamSpec * pspec) +{ + GstGLViewConvertElement *convert = GST_GL_VIEW_CONVERT_ELEMENT (object); + + switch (prop_id) { + case PROP_INPUT_LAYOUT: + case PROP_INPUT_FLAGS: + case PROP_OUTPUT_LAYOUT: + case PROP_OUTPUT_FLAGS: + case PROP_OUTPUT_DOWNMIX_MODE: + g_object_get_property (G_OBJECT (convert->viewconvert), pspec->name, + value); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec); + break; + } +} + +static GstFlowReturn +gst_gl_view_convert_element_submit_input_buffer (GstBaseTransform * trans, + gboolean is_discont, GstBuffer * input) +{ + GstGLContext *context = GST_GL_BASE_FILTER (trans)->context; + GstGLViewConvertElement *viewconvert_filter = + GST_GL_VIEW_CONVERT_ELEMENT (trans); + GstFlowReturn ret; + + ret = + GST_BASE_TRANSFORM_CLASS (parent_class)->submit_input_buffer (trans, + is_discont, input); + if (ret != GST_FLOW_OK || trans->queued_buf == NULL) + return ret; + + gst_gl_view_convert_set_context (viewconvert_filter->viewconvert, context); + + /* Takes the ref to the input buffer */ + ret = + gst_gl_view_convert_submit_input_buffer (viewconvert_filter->viewconvert, + is_discont, input); + trans->queued_buf = NULL; + + return ret; +} + +static GstFlowReturn +gst_gl_view_convert_element_generate_output_buffer (GstBaseTransform * bt, + GstBuffer ** outbuf_ptr) +{ + GstGLFilter *filter = GST_GL_FILTER (bt); + GstGLViewConvertElement *viewconvert_filter = + GST_GL_VIEW_CONVERT_ELEMENT (bt); + 
GstFlowReturn ret = GST_FLOW_OK; + + ret = gst_gl_view_convert_get_output (viewconvert_filter->viewconvert, + outbuf_ptr); + + if (ret != GST_FLOW_OK) { + GST_ELEMENT_ERROR (filter, RESOURCE, SETTINGS, + ("failed to perform view conversion on input buffer"), (NULL)); + return ret; + } + + return ret; +} diff --git a/ext/gl/gstglviewconvert.h b/ext/gl/gstglviewconvert.h new file mode 100644 index 0000000000..1403d27a12 --- /dev/null +++ b/ext/gl/gstglviewconvert.h @@ -0,0 +1,53 @@ +/* + * GStreamer + * Copyright (C) 2014 Jan Schmidt + * Copyright (C) 2015 Matthew Waters + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, + * Boston, MA 02110-1301, USA. + */ + +#ifndef _GST_GL_VIEW_CONVERT_ELEMENT_H_ +#define _GST_GL_VIEW_CONVERT_ELEMENT_H_ + +#include + +G_BEGIN_DECLS +#define GST_TYPE_GL_VIEW_CONVERT_ELEMENT (gst_gl_view_convert_element_get_type()) +#define GST_GL_VIEW_CONVERT_ELEMENT(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_GL_VIEW_CONVERT_ELEMENT,GstGLViewConvertElement)) +#define GST_IS_GL_VIEW_CONVERT_ELEMENT(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_GL_VIEW_CONVERT_ELEMENT)) +#define GST_GL_VIEW_CONVERT_ELEMENT_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass) ,GST_TYPE_GL_VIEW_CONVERT_ELEMENT,GstGLViewConvertElementClass)) +#define GST_IS_GL_VIEW_CONVERT_ELEMENT_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass) ,GST_TYPE_GL_VIEW_CONVERT_ELEMENT)) +#define GST_GL_VIEW_CONVERT_ELEMENT_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS((obj) ,GST_TYPE_GL_VIEW_CONVERT_ELEMENT,GstGLViewConvertElementClass)) + +typedef struct _GstGLViewConvertElement GstGLViewConvertElement; +typedef struct _GstGLViewConvertElementClass GstGLViewConvertElementClass; + +struct _GstGLViewConvertElement +{ + GstGLFilter filter; + + GstGLViewConvert *viewconvert; +}; + +struct _GstGLViewConvertElementClass +{ + GstGLFilterClass filter_class; +}; + +GType gst_gl_view_convert_element_get_type (void); + +G_END_DECLS +#endif /* _GST_GL_VIEW_CONVERT_H_ */ diff --git a/ext/gl/gstopengl.c b/ext/gl/gstopengl.c index 308c65c08e..7b43e49e12 100644 --- a/ext/gl/gstopengl.c +++ b/ext/gl/gstopengl.c @@ -58,6 +58,9 @@ #include "gstglvideomixer.h" #include "gstglfiltershader.h" #include "gstglfilterapp.h" +#include "gstglstereosplit.h" +#include "gstglstereomix.h" + #if HAVE_GRAPHENE #include "gstgltransformation.h" #endif @@ -72,6 +75,7 @@ #include "gstglfilterglass.h" /* #include "gstglfilterreflectedscreen.h" */ #include "gstgldeinterlace.h" +#include "gstglviewconvert.h" #include "gstglmosaic.h" #if HAVE_PNG #include "gstgldifferencematte.h" @@ -203,6 +207,14 @@ plugin_init (GstPlugin * plugin) } #endif /* HAVE_PNG */ #endif /* HAVE_JPEG */ + if (!gst_element_register (plugin, "glstereosplit", + GST_RANK_NONE, GST_TYPE_GL_STEREOSPLIT)) { + return FALSE; + } + if (!gst_element_register (plugin, "glstereomix", + GST_RANK_NONE, GST_TYPE_GL_STEREO_MIX)) { + return 
FALSE; + } #if GST_GL_HAVE_OPENGL if (!gst_element_register (plugin, "gltestsrc", GST_RANK_NONE, GST_TYPE_GL_TEST_SRC)) { @@ -224,6 +236,11 @@ plugin_init (GstPlugin * plugin) return FALSE; } + if (!gst_element_register (plugin, "glviewconvert", + GST_RANK_NONE, GST_TYPE_GL_VIEW_CONVERT_ELEMENT)) { + return FALSE; + } + if (!gst_element_register (plugin, "glmosaic", GST_RANK_NONE, GST_TYPE_GL_MOSAIC)) { return FALSE;
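/* Editor's note: an illustrative usage sketch, not part of this patch.
 * Once the plugin is built, the new glviewconvert element can be driven from
 * a pipeline description. The element and property names below come from the
 * code added above; the neighbouring elements (videotestsrc, glupload,
 * glimagesink) and the enum nicknames ("side-by-side", "mono") are
 * assumptions based on GstVideoMultiviewMode, not something this patch
 * defines.
 *
 *   gst-launch-1.0 videotestsrc ! glupload ! \
 *       glviewconvert input-mode-override=side-by-side \
 *                     output-mode-override=mono ! \
 *       glimagesink
 *
 * The same properties ("input-mode-override", "output-mode-override",
 * "input-flags-override", "output-flags-override", "downmix-mode") can also
 * be set programmatically with g_object_set() on an instance created via
 * gst_element_factory_make ("glviewconvert", NULL); glstereomix and
 * glstereosplit are registered above with gst_element_register() in the same
 * way and can be exercised with analogous pipelines.
 */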