gl: take the affine transformation in NDC

Provide a function to get the affine matrix from the meta in terms of NDC
coordinates and use it as a standard OpenGL matrix.

Also advertise support for the affine transformation meta in the allocation
query.
Matthew Waters 2016-05-14 16:27:26 +03:00 committed by Tim-Philipp Müller
parent d23808a877
commit 2d62b2910c
7 changed files with 115 additions and 33 deletions
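
Editor's sketch (not part of this commit): typical consumer-side usage of the new
helper. The apply_affine_meta wrapper and its shader/buffer arguments are
hypothetical placeholders; the GStreamer calls themselves are the ones used in the
diff below.

/* Hypothetical consumer of the new NDC helper. */
#include <gst/gl/gl.h>
#include <gst/video/gstvideoaffinetransformationmeta.h>

static void
apply_affine_meta (GstGLShader * shader, GstBuffer * buffer)
{
  GstVideoAffineTransformationMeta *af_meta;
  gfloat matrix[16];

  /* May be NULL; the helper then fills in an identity matrix. */
  af_meta = gst_buffer_get_video_affine_transformation_meta (buffer);
  gst_gl_get_affine_transformation_meta_as_ndc (af_meta, matrix);

  /* The matrix is now expressed in NDC and can be used as a standard
   * OpenGL mat4 uniform. */
  gst_gl_shader_set_uniform_matrix_4fv (shader, "u_transformation",
      1, FALSE, matrix);
}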

View file

@@ -1698,6 +1698,8 @@ gst_glimage_sink_propose_allocation (GstBaseSink * bsink, GstQuery * query)
gst_query_add_allocation_meta (query,
GST_VIDEO_OVERLAY_COMPOSITION_META_API_TYPE, allocation_meta);
gst_query_add_allocation_meta (query,
GST_VIDEO_AFFINE_TRANSFORMATION_META_API_TYPE, 0);
if (allocation_meta)
gst_structure_free (allocation_meta);
@@ -1776,7 +1778,7 @@ gst_glimage_sink_thread_init_redisplay (GstGLImageSink * gl_sink)
vert_stage = gst_glsl_stage_new_with_string (gl_sink->context,
GL_VERTEX_SHADER, GST_GLSL_VERSION_NONE,
GST_GLSL_PROFILE_ES | GST_GLSL_PROFILE_COMPATIBILITY,
gst_gl_shader_string_vertex_mat4_texture_transform);
gst_gl_shader_string_vertex_mat4_vertex_transform);
if (gl_sink->texture_target == GST_GL_TEXTURE_TARGET_EXTERNAL_OES) {
frag_stage = gst_glsl_stage_new_with_string (gl_sink->context,
GL_FRAGMENT_SHADER, GST_GLSL_VERSION_NONE,
@@ -1939,13 +1941,6 @@ gst_glimage_sink_on_resize (GstGLImageSink * gl_sink, gint width, gint height)
GST_GLIMAGE_SINK_UNLOCK (gl_sink);
}
static const gfloat identity_matrix[] = {
1.0f, 0.0f, 0.0, 0.0f,
0.0f, 1.0f, 0.0, 0.0f,
0.0f, 0.0f, 1.0, 0.0f,
0.0f, 0.0f, 0.0, 1.0f,
};
static void
gst_glimage_sink_on_draw (GstGLImageSink * gl_sink)
{
@@ -2028,16 +2023,15 @@ gst_glimage_sink_on_draw (GstGLImageSink * gl_sink)
gst_gl_shader_set_uniform_1i (gl_sink->redisplay_shader, "tex", 0);
{
GstVideoAffineTransformationMeta *af_meta;
gfloat matrix[16];
af_meta =
gst_buffer_get_video_affine_transformation_meta
(gl_sink->stored_buffer[0]);
if (af_meta)
gst_gl_shader_set_uniform_matrix_4fv (gl_sink->redisplay_shader,
"u_transformation", 1, FALSE, af_meta->matrix);
else
gst_gl_shader_set_uniform_matrix_4fv (gl_sink->redisplay_shader,
"u_transformation", 1, FALSE, identity_matrix);
gst_gl_get_affine_transformation_meta_as_ndc (af_meta, matrix);
gst_gl_shader_set_uniform_matrix_4fv (gl_sink->redisplay_shader,
"u_transformation", 1, FALSE, matrix);
}
gl->DrawElements (GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, 0);

View file

@@ -454,6 +454,7 @@ enum
#define DEBUG_INIT \
GST_DEBUG_CATEGORY_INIT (gst_gl_video_mixer_debug, "glvideomixer", 0, "glvideomixer element");
#define gst_gl_video_mixer_parent_class parent_class
G_DEFINE_TYPE_WITH_CODE (GstGLVideoMixer, gst_gl_video_mixer, GST_TYPE_GL_MIXER,
DEBUG_INIT);
@@ -465,6 +466,9 @@ static void gst_gl_video_mixer_get_property (GObject * object, guint prop_id,
static GstCaps *_update_caps (GstVideoAggregator * vagg, GstCaps * caps,
GstCaps * filter);
static GstCaps *_fixate_caps (GstVideoAggregator * vagg, GstCaps * caps);
static gboolean gst_gl_video_mixer_propose_allocation (GstGLBaseMixer *
base_mix, GstGLBaseMixerPad * base_pad, GstQuery * decide_query,
GstQuery * query);
static void gst_gl_video_mixer_reset (GstGLMixer * mixer);
static gboolean gst_gl_video_mixer_init_shader (GstGLMixer * mixer,
GstCaps * outcaps);
@@ -856,6 +860,7 @@ gst_gl_video_mixer_class_init (GstGLVideoMixerClass * klass)
GstElementClass *element_class;
GstAggregatorClass *agg_class = (GstAggregatorClass *) klass;
GstVideoAggregatorClass *vagg_class = (GstVideoAggregatorClass *) klass;
GstGLBaseMixerClass *mix_class = GST_GL_BASE_MIXER_CLASS (klass);
gobject_class = (GObjectClass *) klass;
element_class = GST_ELEMENT_CLASS (klass);
@@ -883,6 +888,8 @@ gst_gl_video_mixer_class_init (GstGLVideoMixerClass * klass)
agg_class->sinkpads_type = GST_TYPE_GL_VIDEO_MIXER_PAD;
mix_class->propose_allocation = gst_gl_video_mixer_propose_allocation;
GST_GL_BASE_MIXER_CLASS (klass)->supported_gl_api =
GST_GL_API_OPENGL | GST_GL_API_OPENGL3 | GST_GL_API_GLES2;
}
@@ -926,6 +933,20 @@ gst_gl_video_mixer_get_property (GObject * object, guint prop_id,
}
}
static gboolean
gst_gl_video_mixer_propose_allocation (GstGLBaseMixer * base_mix,
GstGLBaseMixerPad * base_pad, GstQuery * decide_query, GstQuery * query)
{
if (!GST_GL_BASE_MIXER_CLASS (parent_class)->propose_allocation (base_mix,
base_pad, decide_query, query))
return FALSE;
gst_query_add_allocation_meta (query,
GST_VIDEO_AFFINE_TRANSFORMATION_META_API_TYPE, 0);
return TRUE;
}
static void
_mixer_pad_get_output_size (GstGLVideoMixer * mix,
GstGLVideoMixerPad * mix_pad, gint out_par_n, gint out_par_d, gint * width,
@@ -1130,7 +1151,7 @@ gst_gl_video_mixer_init_shader (GstGLMixer * mixer, GstCaps * outcaps)
video_mixer->shader);
return gst_gl_context_gen_shader (GST_GL_BASE_MIXER (mixer)->context,
gst_gl_shader_string_vertex_mat4_texture_transform,
gst_gl_shader_string_vertex_mat4_vertex_transform,
video_mixer_f_src, &video_mixer->shader);
}
@@ -1361,13 +1382,6 @@ _set_blend_state (GstGLVideoMixer * video_mixer, GstGLVideoMixerPad * mix_pad)
return TRUE;
}
static const gfloat identity_matrix[] = {
1.0f, 0.0f, 0.0f, 0.0f,
0.0f, 1.0f, 0.0f, 0.0f,
0.0f, 0.0f, 1.0f, 0.0f,
0.0f, 0.0f, 0.0f, 1.0f,
};
/* opengl scene, params: input texture (not the output mixer->texture) */
static void
gst_gl_video_mixer_callback (gpointer stuff)
@@ -1495,15 +1509,13 @@ gst_gl_video_mixer_callback (gpointer stuff)
{
GstVideoAffineTransformationMeta *af_meta;
gfloat matrix[16];
af_meta =
gst_buffer_get_video_affine_transformation_meta (vagg_pad->buffer);
if (af_meta)
gst_gl_shader_set_uniform_matrix_4fv (video_mixer->shader,
"u_transformation", 1, FALSE, af_meta->matrix);
else
gst_gl_shader_set_uniform_matrix_4fv (video_mixer->shader,
"u_transformation", 1, FALSE, identity_matrix);
gst_gl_get_affine_transformation_meta_as_ndc (af_meta, matrix);
gst_gl_shader_set_uniform_matrix_4fv (video_mixer->shader,
"u_transformation", 1, FALSE, matrix);
}
gl->EnableVertexAttribArray (attr_position_loc);

View file

@@ -46,6 +46,17 @@ const gchar *gst_gl_shader_string_vertex_mat4_texture_transform =
" v_texcoord = (u_transformation * vec4(a_texcoord, 0, 1)).xy;\n"
"}\n";
const gchar *gst_gl_shader_string_vertex_mat4_vertex_transform =
"uniform mat4 u_transformation;\n"
"attribute vec4 a_position;\n"
"attribute vec2 a_texcoord;\n"
"varying vec2 v_texcoord;\n"
"void main()\n"
"{\n"
" gl_Position = u_transformation * a_position;\n"
" v_texcoord = a_texcoord;\n"
"}\n";
const gchar *gst_gl_shader_string_fragment_default =
"#ifdef GL_ES\n"
"precision mediump float;\n"

View file

@@ -29,6 +29,7 @@ extern const gchar *gst_gl_shader_string_vertex_default;
extern const gchar *gst_gl_shader_string_fragment_default;
extern const gchar *gst_gl_shader_string_vertex_mat4_texture_transform;
extern const gchar *gst_gl_shader_string_vertex_mat4_vertex_transform;
extern const gchar *gst_gl_shader_string_fragment_external_oes_default;
G_END_DECLS

View file

@@ -1067,3 +1067,59 @@ gst_gl_value_set_texture_target_from_mask (GValue * value,
return ret;
}
}
static const gfloat identity_matrix[] = {
1.0f, 0.0f, 0.0, 0.0f,
0.0f, 1.0f, 0.0, 0.0f,
0.0f, 0.0f, 1.0, 0.0f,
0.0f, 0.0f, 0.0, 1.0f,
};
static const gfloat from_ndc_matrix[] = {
0.5f, 0.0f, 0.0, 0.5f,
0.0f, 0.5f, 0.0, 0.5f,
0.0f, 0.0f, 0.5, 0.5f,
0.0f, 0.0f, 0.0, 1.0f,
};
static const gfloat to_ndc_matrix[] = {
2.0f, 0.0f, 0.0, -1.0f,
0.0f, 2.0f, 0.0, -1.0f,
0.0f, 0.0f, 2.0, -1.0f,
0.0f, 0.0f, 0.0, 1.0f,
};
static void
_multiply_matrix4 (const gfloat * a, const gfloat * b, gfloat * result)
{
int i, j, k;
for (i = 0; i < 16; i++)
result[i] = 0.0f;
for (i = 0; i < 4; i++) {
for (j = 0; j < 4; j++) {
for (k = 0; k < 4; k++) {
result[i + (j * 4)] += a[i + (k * 4)] * b[k + (j * 4)];
}
}
}
}
void
gst_gl_get_affine_transformation_meta_as_ndc (GstVideoAffineTransformationMeta *
meta, gfloat * matrix)
{
if (!meta) {
int i;
for (i = 0; i < 16; i++) {
matrix[i] = identity_matrix[i];
}
} else {
gfloat tmp[16] = { 0.0f };
_multiply_matrix4 (from_ndc_matrix, meta->matrix, tmp);
_multiply_matrix4 (tmp, to_ndc_matrix, matrix);
}
}
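
Editor's note on the helper above (not part of the commit): when no meta is
attached it simply copies the identity matrix, and otherwise it composes
meta->matrix with the fixed [0,1] <-> [-1,1] range-conversion matrices so the
result applies to NDC vertex positions, matching the new
gst_gl_shader_string_vertex_mat4_vertex_transform shader which multiplies
a_position by u_transformation. A minimal sketch of the NULL-meta behaviour,
using a hypothetical check function:

/* Hypothetical sanity check: the NULL-meta path should yield an identity
 * matrix, so callers may invoke the helper unconditionally as the call
 * sites in this commit do. */
#include <gst/gl/gl.h>

static gboolean
check_identity_without_meta (void)
{
  gfloat matrix[16];
  gint i, j;

  gst_gl_get_affine_transformation_meta_as_ndc (NULL, matrix);

  for (i = 0; i < 4; i++)
    for (j = 0; j < 4; j++)
      if (matrix[i * 4 + j] != (i == j ? 1.0f : 0.0f))
        return FALSE;

  return TRUE;
}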

View file

@@ -22,6 +22,7 @@
#define __GST_GL_UTILS_H__
#include <gst/video/video.h>
#include <gst/video/gstvideoaffinetransformationmeta.h>
#include <gst/gl/gstgl_fwd.h>
@@ -116,6 +117,8 @@ gboolean gst_gl_value_set_texture_target_from_mask (GValue * value,
gboolean gst_gl_value_set_texture_target (GValue * value, GstGLTextureTarget target);
GstGLTextureTarget gst_gl_value_get_texture_target_mask (const GValue * value);
void gst_gl_get_affine_transformation_meta_as_ndc (GstVideoAffineTransformationMeta * meta, gfloat * matrix);
G_END_DECLS
#endif /* __GST_GL_UTILS_H__ */

View file

@@ -1742,7 +1742,7 @@ _init_view_convert (GstGLViewConvert * viewconvert)
tmp =
_gst_glsl_mangle_shader
(gst_gl_shader_string_vertex_mat4_texture_transform, GL_VERTEX_SHADER,
(gst_gl_shader_string_vertex_mat4_vertex_transform, GL_VERTEX_SHADER,
GST_GL_TEXTURE_TARGET_2D, viewconvert->from_texture_target, gl_api,
gl_major, gl_minor, &version, &profile);
@@ -1869,7 +1869,6 @@ _do_view_convert_draw (GstGLContext * context, GstGLViewConvert * viewconvert)
GstVideoMultiviewMode out_mode = priv->output_mode;
guint from_gl_target =
gst_gl_texture_target_to_gl (viewconvert->from_texture_target);
GstVideoAffineTransformationMeta *af_meta;
gl = context->gl_vtable;
out_width = GST_VIDEO_INFO_WIDTH (&viewconvert->out_info);
@@ -1883,10 +1882,16 @@ _do_view_convert_draw (GstGLContext * context, GstGLViewConvert * viewconvert)
}
/* FIXME: the auxillary buffer could have a different transform matrix */
af_meta = gst_buffer_get_video_affine_transformation_meta (priv->primary_in);
if (af_meta)
{
GstVideoAffineTransformationMeta *af_meta;
gfloat matrix[16];
af_meta =
gst_buffer_get_video_affine_transformation_meta (priv->primary_in);
gst_gl_get_affine_transformation_meta_as_ndc (af_meta, matrix);
gst_gl_shader_set_uniform_matrix_4fv (viewconvert->shader,
"u_transformation", 1, FALSE, af_meta->matrix);
"u_transformation", 1, FALSE, matrix);
}
/* attach the texture to the FBO to renderer to */
for (i = 0; i < out_views; i++) {