v4l2: Add Video Encoder support

This implements H264 encoding support using the generic V4L2 interface. It is
reported to work with the Samsung MFC driver, the i.MX6 CODA driver and the
Qualcomm mainline Venus driver. Other platforms should be supported too, as
none of this work is platform specific.

The implementation consists of a GstV4l2VideoEnc base class, which
implements the core streaming functionality, and a GstV4l2H264Enc subclass
that implements the caps negotiation specific to H264 profiles and levels.
This implementation supports hardware with multiple H264 encoders. To make it
simpler to use, the first discovered H264 encoder will be named v4l2h264enc;
other encoders found during discovery will have a unique name like
v4l2video0h264enc.

This work is the combined work of multiple developers over the last 3
years. Thanks to all of the contributors:

  Ayaka <ayaka@soulik.info>
  Frédéric Sureau <frederic.sureau@vodalys.com>
  Jean-Michel Hautbois <jean-michel.hautbois@veo-labs.com>
  Nicolas Dufresne <nicolas.dufresne@collabora.com>
  Pablo Anton <pablo.anton@vodalys-labs.com>

https://bugzilla.gnome.org/show_bug.cgi?id=728438
This commit is contained in:
Ayaka 2017-05-23 14:40:56 -04:00 committed by Nicolas Dufresne
parent 7dc57e381b
commit 27310365d5
7 changed files with 1656 additions and 0 deletions

View file

@ -14,6 +14,8 @@ libgstvideo4linux2_la_SOURCES = gstv4l2.c \
gstv4l2tuner.c \
gstv4l2transform.c \
gstv4l2videodec.c \
gstv4l2videoenc.c \
gstv4l2h264enc.c \
gstv4l2vidorient.c \
v4l2_calls.c \
v4l2-utils.c \
@ -54,6 +56,8 @@ noinst_HEADERS = \
gstv4l2tuner.h \
gstv4l2transform.h \
gstv4l2videodec.h \
gstv4l2videoenc.h \
gstv4l2h264enc.h \
gstv4l2vidorient.h \
v4l2_calls.h \
v4l2-utils.h \

View file

@ -47,6 +47,7 @@
#include "gstv4l2sink.h"
#include "gstv4l2radio.h"
#include "gstv4l2videodec.h"
#include "gstv4l2h264enc.h"
#include "gstv4l2deviceprovider.h"
#include "gstv4l2transform.h"
@ -185,6 +186,9 @@ gst_v4l2_probe_and_register (GstPlugin * plugin)
if (gst_v4l2_is_video_dec (sink_caps, src_caps))
ret = gst_v4l2_video_dec_register (plugin, basename, it->device_path,
sink_caps, src_caps);
else if (gst_v4l2_is_h264_enc (sink_caps, src_caps))
ret = gst_v4l2_h264_enc_register (plugin, basename, it->device_path,
sink_caps, src_caps);
else if (gst_v4l2_is_transform (sink_caps, src_caps))
ret = gst_v4l2_transform_register (plugin, basename, it->device_path,
sink_caps, src_caps);

541
sys/v4l2/gstv4l2h264enc.c Normal file
View file

@ -0,0 +1,541 @@
/*
* Copyright (C) 2014 SUMOMO Computer Association
* Author: ayaka <ayaka@soulik.info>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>
#include "gstv4l2h264enc.h"
#include "v4l2_calls.h"
#include <string.h>
#include <gst/gst-i18n-plugin.h>
GST_DEBUG_CATEGORY_STATIC (gst_v4l2_h264_enc_debug);
#define GST_CAT_DEFAULT gst_v4l2_h264_enc_debug

/* Fixed src caps: this element only produces byte-stream, AU-aligned H264. */
static GstStaticCaps src_template_caps =
    GST_STATIC_CAPS ("video/x-h264, stream-format=(string) byte-stream, "
    "alignment=(string) au");

/* Property IDs. Only the shared V4L2 object properties are wired up so far;
 * the codec-specific H264 controls are listed below as future work. */
enum
{
  PROP_0,
  V4L2_STD_OBJECT_PROPS,
/* TODO add H264 controls
 * PROP_I_FRAME_QP,
 * PROP_P_FRAME_QP,
 * PROP_B_FRAME_QP,
 * PROP_MIN_QP,
 * PROP_MAX_QP,
 * PROP_8x8_TRANSFORM,
 * PROP_CPB_SIZE,
 * PROP_ENTROPY_MODE,
 * PROP_I_PERIOD,
 * PROP_LOOP_FILTER_ALPHA,
 * PROP_LOOP_FILTER_BETA,
 * PROP_LOOP_FILTER_MODE,
 * PROP_VUI_EXT_SAR_HEIGHT,
 * PROP_VUI_EXT_SAR_WIDTH,
 * PROP_VUI_SAR_ENABLED,
 * PROP_VUI_SAR_IDC,
 * PROP_SEI_FRAME_PACKING,
 * PROP_SEI_FP_CURRENT_FRAME_0,
 * PROP_SEI_FP_ARRANGEMENT_TYP,
 * ...
 * */
};
#define gst_v4l2_h264_enc_parent_class parent_class
G_DEFINE_TYPE (GstV4l2H264Enc, gst_v4l2_h264_enc, GST_TYPE_V4L2_VIDEO_ENC);

/* GObject property setter. No H264-specific properties are implemented yet
 * (see the TODO list in the property enum); all standard properties are
 * handled by the GstV4l2VideoEnc base class. */
static void
gst_v4l2_h264_enc_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  /* TODO */
}
/* GObject property getter. No H264-specific properties are implemented yet
 * (see the TODO list in the property enum). */
static void
gst_v4l2_h264_enc_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  /* TODO */
}
/* Map a GStreamer H264 profile string onto the corresponding
 * V4L2_MPEG_VIDEO_H264_PROFILE_* control value.
 *
 * Returns the V4L2 profile id, or -1 (after logging a warning) when the
 * profile string is not recognized. */
static gint
v4l2_profile_from_string (const gchar * profile)
{
  static const struct
  {
    const gchar *name;
    gint value;
  } profile_map[] = {
    {"baseline", V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE},
    {"constrained-baseline",
        V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE},
    {"main", V4L2_MPEG_VIDEO_H264_PROFILE_MAIN},
    {"extended", V4L2_MPEG_VIDEO_H264_PROFILE_EXTENDED},
    {"high", V4L2_MPEG_VIDEO_H264_PROFILE_HIGH},
    {"high-10", V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10},
    {"high-4:2:2", V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422},
    {"high-4:4:4", V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE},
    {"high-10-intra", V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10_INTRA},
    {"high-4:2:2-intra", V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422_INTRA},
    {"high-4:4:4-intra", V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_INTRA},
    {"cavlc-4:4:4-intra", V4L2_MPEG_VIDEO_H264_PROFILE_CAVLC_444_INTRA},
    {"scalable-baseline", V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_BASELINE},
    {"scalable-high", V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH},
    {"scalable-high-intra", V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH_INTRA},
    {"stereo-high", V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH},
    {"multiview-high", V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH},
  };
  guint i;

  for (i = 0; i < G_N_ELEMENTS (profile_map); i++) {
    if (g_str_equal (profile, profile_map[i].name))
      return profile_map[i].value;
  }

  GST_WARNING ("Unsupported profile string '%s'", profile);
  return -1;
}
/* Map a V4L2_MPEG_VIDEO_H264_PROFILE_* control value back to its GStreamer
 * caps profile string.
 *
 * Returns a static string, or NULL (after logging a warning) for an unknown
 * V4L2 profile value. */
static const gchar *
v4l2_profile_to_string (gint v4l2_profile)
{
  static const struct
  {
    gint value;
    const gchar *name;
  } profile_names[] = {
    {V4L2_MPEG_VIDEO_H264_PROFILE_BASELINE, "baseline"},
    {V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE,
        "constrained-baseline"},
    {V4L2_MPEG_VIDEO_H264_PROFILE_MAIN, "main"},
    {V4L2_MPEG_VIDEO_H264_PROFILE_EXTENDED, "extended"},
    {V4L2_MPEG_VIDEO_H264_PROFILE_HIGH, "high"},
    {V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10, "high-10"},
    {V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422, "high-4:2:2"},
    {V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_PREDICTIVE, "high-4:4:4"},
    {V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_10_INTRA, "high-10-intra"},
    {V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_422_INTRA, "high-4:2:2-intra"},
    {V4L2_MPEG_VIDEO_H264_PROFILE_HIGH_444_INTRA, "high-4:4:4-intra"},
    {V4L2_MPEG_VIDEO_H264_PROFILE_CAVLC_444_INTRA, "cavlc-4:4:4-intra"},
    {V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_BASELINE, "scalable-baseline"},
    {V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH, "scalable-high"},
    {V4L2_MPEG_VIDEO_H264_PROFILE_SCALABLE_HIGH_INTRA, "scalable-high-intra"},
    {V4L2_MPEG_VIDEO_H264_PROFILE_STEREO_HIGH, "stereo-high"},
    {V4L2_MPEG_VIDEO_H264_PROFILE_MULTIVIEW_HIGH, "multiview-high"},
  };
  guint i;

  for (i = 0; i < G_N_ELEMENTS (profile_names); i++) {
    if (profile_names[i].value == v4l2_profile)
      return profile_names[i].name;
  }

  GST_WARNING ("Unsupported V4L2 profile %i", v4l2_profile);
  return NULL;
}
static gint
v4l2_level_from_string (const gchar * level)
{
gint v4l2_level = -1;
if (g_str_equal (level, "1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_1_0;
else if (g_str_equal (level, "1b"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_1B;
else if (g_str_equal (level, "1.1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_1_1;
else if (g_str_equal (level, "1.2"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_1_2;
else if (g_str_equal (level, "1.3"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_1_3;
else if (g_str_equal (level, "2"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_2_0;
else if (g_str_equal (level, "2.1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_2_1;
else if (g_str_equal (level, "2.2"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_2_2;
else if (g_str_equal (level, "3"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_3_0;
else if (g_str_equal (level, "3.1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_3_1;
else if (g_str_equal (level, "3.2"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_3_2;
else if (g_str_equal (level, "4"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_4_0;
else if (g_str_equal (level, "4.1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_4_1;
else if (g_str_equal (level, "4.2"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_4_2;
else if (g_str_equal (level, "5"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_5_0;
else if (g_str_equal (level, "5.1"))
v4l2_level = V4L2_MPEG_VIDEO_H264_LEVEL_5_1;
else
GST_WARNING ("Unsupported level '%s'", level);
return v4l2_level;
}
/* Map a V4L2_MPEG_VIDEO_H264_LEVEL_* control value back to its GStreamer
 * caps level string.
 *
 * Returns a static string, or NULL (after logging a warning) for an unknown
 * V4L2 level value.
 *
 * Fix: level 3.0 now returns "3" instead of "3.0", matching both the
 * GStreamer H264 caps convention for integer levels ("1", "2", "4", "5")
 * and v4l2_level_from_string() above, which only parses "3". */
static const gchar *
v4l2_level_to_string (gint v4l2_level)
{
  switch (v4l2_level) {
    case V4L2_MPEG_VIDEO_H264_LEVEL_1_0:
      return "1";
    case V4L2_MPEG_VIDEO_H264_LEVEL_1B:
      return "1b";
    case V4L2_MPEG_VIDEO_H264_LEVEL_1_1:
      return "1.1";
    case V4L2_MPEG_VIDEO_H264_LEVEL_1_2:
      return "1.2";
    case V4L2_MPEG_VIDEO_H264_LEVEL_1_3:
      return "1.3";
    case V4L2_MPEG_VIDEO_H264_LEVEL_2_0:
      return "2";
    case V4L2_MPEG_VIDEO_H264_LEVEL_2_1:
      return "2.1";
    case V4L2_MPEG_VIDEO_H264_LEVEL_2_2:
      return "2.2";
    case V4L2_MPEG_VIDEO_H264_LEVEL_3_0:
      return "3";
    case V4L2_MPEG_VIDEO_H264_LEVEL_3_1:
      return "3.1";
    case V4L2_MPEG_VIDEO_H264_LEVEL_3_2:
      return "3.2";
    case V4L2_MPEG_VIDEO_H264_LEVEL_4_0:
      return "4";
    case V4L2_MPEG_VIDEO_H264_LEVEL_4_1:
      return "4.1";
    case V4L2_MPEG_VIDEO_H264_LEVEL_4_2:
      return "4.2";
    case V4L2_MPEG_VIDEO_H264_LEVEL_5_0:
      return "5";
    case V4L2_MPEG_VIDEO_H264_LEVEL_5_1:
      return "5.1";
    default:
      GST_WARNING ("Unsupported V4L2 level %i", v4l2_level);
      break;
  }
  return NULL;
}
/* Context passed to negotiate_profile_and_level() through
 * gst_caps_foreach(); the callback fills in the selected strings. */
struct ProfileLevelCtx
{
  GstV4l2H264Enc *self;
  const gchar *profile;         /* selected profile string, NULL until chosen */
  const gchar *level;           /* selected level string, NULL until chosen */
};
/* Collect every string found in the structure field @field into @queue.
 * Returns FALSE when the field is missing or holds an empty list, TRUE
 * otherwise (the queue may still be empty for non-string values). */
static gboolean
get_string_list (GstStructure * s, const gchar * field, GQueue * queue)
{
  const GValue *value = gst_structure_get_value (s, field);

  if (value == NULL)
    return FALSE;

  if (G_VALUE_HOLDS_STRING (value)) {
    g_queue_push_tail (queue, g_value_dup_string (value));
    return TRUE;
  }

  if (GST_VALUE_HOLDS_LIST (value)) {
    guint idx, len = gst_value_list_get_size (value);

    if (len == 0)
      return FALSE;

    for (idx = 0; idx < len; idx++) {
      const GValue *entry = gst_value_list_get_value (value, idx);

      if (G_VALUE_HOLDS_STRING (entry))
        g_queue_push_tail (queue, g_value_dup_string (entry));
    }
    return TRUE;
  }

  return TRUE;
}
/* gst_caps_foreach() callback: try each candidate profile/level from the
 * structure against the driver via VIDIOC_S_CTRL, recording the first value
 * the driver accepts (or adjusts to another candidate in the caps).
 *
 * Returns TRUE on failure so gst_caps_foreach() keeps iterating; a final
 * TRUE from the foreach means no structure offered a usable profile/level.
 *
 * Fix: the g_list_find_custom() calls used g_str_equal as comparator, but
 * GCompareFunc must return 0 on a match while g_str_equal returns TRUE (1)
 * for equal strings — the search was inverted. g_strcmp0 is the correct
 * comparator (and also tolerates a NULL string from the to_string
 * helpers). */
static gboolean
negotiate_profile_and_level (GstCapsFeatures * features, GstStructure * s,
    gpointer user_data)
{
  struct ProfileLevelCtx *ctx = user_data;
  GstV4l2Object *v4l2object = GST_V4L2_VIDEO_ENC (ctx->self)->v4l2output;
  GQueue profiles = G_QUEUE_INIT;
  GQueue levels = G_QUEUE_INIT;
  gboolean failed = FALSE;

  if (get_string_list (s, "profile", &profiles)) {
    GList *l;

    for (l = profiles.head; l; l = l->next) {
      struct v4l2_control control = { 0, };
      gint v4l2_profile;
      const gchar *profile = l->data;

      GST_TRACE_OBJECT (ctx->self, "Trying profile %s", profile);

      control.id = V4L2_CID_MPEG_VIDEO_H264_PROFILE;
      control.value = v4l2_profile = v4l2_profile_from_string (profile);

      if (control.value < 0)
        continue;

      if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_S_CTRL, &control) < 0) {
        GST_WARNING_OBJECT (ctx->self, "Failed to set H264 profile: '%s'",
            g_strerror (errno));
        break;
      }

      profile = v4l2_profile_to_string (control.value);

      /* driver kept the requested profile */
      if (control.value == v4l2_profile) {
        ctx->profile = profile;
        break;
      }

      /* driver adjusted the profile; accept it if the adjusted value is
       * also among the remaining candidates */
      if (g_list_find_custom (l, profile, (GCompareFunc) g_strcmp0)) {
        ctx->profile = profile;
        break;
      }
    }

    if (profiles.length && !ctx->profile)
      failed = TRUE;

    g_queue_foreach (&profiles, (GFunc) g_free, NULL);
    g_queue_clear (&profiles);
  }

  if (!failed && get_string_list (s, "level", &levels)) {
    GList *l;

    for (l = levels.head; l; l = l->next) {
      struct v4l2_control control = { 0, };
      gint v4l2_level;
      const gchar *level = l->data;

      GST_TRACE_OBJECT (ctx->self, "Trying level %s", level);

      control.id = V4L2_CID_MPEG_VIDEO_H264_LEVEL;
      control.value = v4l2_level = v4l2_level_from_string (level);

      if (control.value < 0)
        continue;

      if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_S_CTRL, &control) < 0) {
        GST_WARNING_OBJECT (ctx->self, "Failed to set H264 level: '%s'",
            g_strerror (errno));
        break;
      }

      level = v4l2_level_to_string (control.value);

      /* driver kept the requested level */
      if (control.value == v4l2_level) {
        ctx->level = level;
        break;
      }

      /* driver adjusted the level; accept it if the adjusted value is also
       * among the remaining candidates */
      if (g_list_find_custom (l, level, (GCompareFunc) g_strcmp0)) {
        ctx->level = level;
        break;
      }
    }

    if (levels.length && !ctx->level)
      failed = TRUE;

    g_queue_foreach (&levels, (GFunc) g_free, NULL);
    g_queue_clear (&levels);
  }

  /* If it failed, we continue */
  return failed;
}
/* Negotiate the H264 profile and level with the driver before chaining up
 * to the base-class negotiate.
 *
 * Candidate profiles/levels from the downstream-allowed caps are tried
 * against the driver (see negotiate_profile_and_level); when the caps do
 * not constrain them, the driver's current control values are queried
 * instead. The selection is written into the output state caps.
 *
 * Fixes: allowed_caps was leaked on the success path, and the
 * GstVideoCodecState reference returned by
 * gst_video_encoder_get_output_state() was never released. */
static gboolean
gst_v4l2_h264_enc_negotiate (GstVideoEncoder * encoder)
{
  GstV4l2H264Enc *self = GST_V4L2_H264_ENC (encoder);
  GstV4l2VideoEnc *venc = GST_V4L2_VIDEO_ENC (encoder);
  GstV4l2Object *v4l2object = venc->v4l2output;
  GstCaps *allowed_caps;
  struct ProfileLevelCtx ctx = { self, NULL, NULL };
  GstVideoCodecState *state;
  GstStructure *s;

  GST_DEBUG_OBJECT (self, "Negotiating H264 profile and level.");

  allowed_caps = gst_pad_get_allowed_caps (GST_VIDEO_ENCODER_SRC_PAD (encoder));

  if (allowed_caps) {

    if (gst_caps_is_empty (allowed_caps))
      goto not_negotiated;

    allowed_caps = gst_caps_make_writable (allowed_caps);

    /* negotiate_profile_and_level() will return TRUE on failure to keep
     * iterating, if gst_caps_foreach() returns TRUE it means there was no
     * compatible profile and level in any of the structure */
    if (gst_caps_foreach (allowed_caps, negotiate_profile_and_level, &ctx)) {
      goto no_profile_level;
    }

    /* done with the caps; previously leaked on this path */
    gst_caps_unref (allowed_caps);
    allowed_caps = NULL;
  }

  if (!ctx.profile) {
    struct v4l2_control control = { 0, };

    control.id = V4L2_CID_MPEG_VIDEO_H264_PROFILE;

    if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) < 0)
      goto g_ctrl_failed;

    ctx.profile = v4l2_profile_to_string (control.value);
  }

  if (!ctx.level) {
    struct v4l2_control control = { 0, };

    control.id = V4L2_CID_MPEG_VIDEO_H264_LEVEL;

    if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_G_CTRL, &control) < 0)
      goto g_ctrl_failed;

    ctx.level = v4l2_level_to_string (control.value);
  }

  GST_DEBUG_OBJECT (self, "Selected H264 profile %s at level %s",
      ctx.profile, ctx.level);

  state = gst_video_encoder_get_output_state (encoder);
  s = gst_caps_get_structure (state->caps, 0);
  gst_structure_set (s, "profile", G_TYPE_STRING, ctx.profile,
      "level", G_TYPE_STRING, ctx.level, NULL);
  /* get_output_state() returned a new reference; release it */
  gst_video_codec_state_unref (state);

  return GST_VIDEO_ENCODER_CLASS (parent_class)->negotiate (encoder);

g_ctrl_failed:
  GST_WARNING_OBJECT (self, "Failed to get H264 profile and level: '%s'",
      g_strerror (errno));
  goto not_negotiated;

no_profile_level:
  GST_WARNING_OBJECT (self, "No compatible level and profile in caps: %"
      GST_PTR_FORMAT, allowed_caps);
  goto not_negotiated;

not_negotiated:
  if (allowed_caps)
    gst_caps_unref (allowed_caps);
  return FALSE;
}
/* Instance init: nothing to do — all state lives in the base class. */
static void
gst_v4l2_h264_enc_init (GstV4l2H264Enc * self)
{
}
/* Class init: register element metadata, the (stub) property accessors and
 * the H264-specific negotiate vfunc. */
static void
gst_v4l2_h264_enc_class_init (GstV4l2H264EncClass * klass)
{
  GstElementClass *element_class;
  GObjectClass *gobject_class;
  GstVideoEncoderClass *baseclass;

  /* Note: parent_class (aliased to gst_v4l2_h264_enc_parent_class) is
   * already initialized by the G_DEFINE_TYPE machinery before class_init
   * runs; the previous manual g_type_class_peek_parent() assignment was
   * redundant and has been dropped. */
  element_class = (GstElementClass *) klass;
  gobject_class = (GObjectClass *) klass;
  baseclass = GST_VIDEO_ENCODER_CLASS (klass);

  GST_DEBUG_CATEGORY_INIT (gst_v4l2_h264_enc_debug, "v4l2h264enc", 0,
      "V4L2 H.264 Encoder");

  gst_element_class_set_static_metadata (element_class,
      "V4L2 H.264 Encoder",
      "Codec/Encoder/Video",
      "Encode H.264 video streams via V4L2 API", "ayaka <ayaka@soulik.info>");

  gobject_class->set_property =
      GST_DEBUG_FUNCPTR (gst_v4l2_h264_enc_set_property);
  gobject_class->get_property =
      GST_DEBUG_FUNCPTR (gst_v4l2_h264_enc_get_property);

  baseclass->negotiate = GST_DEBUG_FUNCPTR (gst_v4l2_h264_enc_negotiate);
}
/* Probing functions */

/* Returns TRUE when the probed device looks like an H264 encoder: its sink
 * side accepts raw video and its src side can produce byte-stream H264. */
gboolean
gst_v4l2_is_h264_enc (GstCaps * sink_caps, GstCaps * src_caps)
{
  GstCaps *codec_caps = gst_static_caps_get (&src_template_caps);
  gboolean is_h264_enc =
      gst_caps_is_subset (sink_caps, gst_v4l2_object_get_raw_caps ())
      && gst_caps_can_intersect (src_caps, codec_caps);

  gst_caps_unref (codec_caps);

  return is_h264_enc;
}
/* Register a GstV4l2H264Enc-derived element bound to @device_path with the
 * plugin; naming (v4l2h264enc vs v4l2videoNh264enc) is handled by the
 * base-class registration.
 * NOTE(review): gst_static_caps_get() returns a reference — presumably
 * gst_v4l2_video_enc_register() takes ownership of it; confirm against its
 * implementation. */
gboolean
gst_v4l2_h264_enc_register (GstPlugin * plugin, const gchar * basename,
    const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps)
{
  return gst_v4l2_video_enc_register (plugin, GST_TYPE_V4L2_H264_ENC,
      "h264", basename, device_path, sink_caps,
      gst_static_caps_get (&src_template_caps), src_caps);
}

62
sys/v4l2/gstv4l2h264enc.h Normal file
View file

@ -0,0 +1,62 @@
/*
* Copyright (C) 2014 SUMOMO Computer Association.
* Author: ayaka <ayaka@soulik.info>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifndef __GST_V4L2_H264_ENC_H__
#define __GST_V4L2_H264_ENC_H__

#include <gst/gst.h>
#include "gstv4l2videoenc.h"

G_BEGIN_DECLS

/* Note: the previous GST_DEBUG_CATEGORY_EXTERN (v4l2h264enc_debug) was
 * removed — no translation unit defines that category (gstv4l2h264enc.c
 * declares its category static, under a different name), so any user of
 * the extern would fail to link. */

#define GST_TYPE_V4L2_H264_ENC \
  (gst_v4l2_h264_enc_get_type())
#define GST_V4L2_H264_ENC(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_H264_ENC,GstV4l2H264Enc))
#define GST_V4L2_H264_ENC_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_H264_ENC,GstV4l2H264EncClass))
#define GST_IS_V4L2_H264_ENC(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_H264_ENC))
/* Fix: the macro parameter must be named 'klass' — the expansion uses
 * 'klass', but the parameter was declared as 'obj', so any use of this
 * macro failed to compile. */
#define GST_IS_V4L2_H264_ENC_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_H264_ENC))

typedef struct _GstV4l2H264Enc GstV4l2H264Enc;
typedef struct _GstV4l2H264EncClass GstV4l2H264EncClass;

/* H264-specific encoder element; all state lives in the base class. */
struct _GstV4l2H264Enc
{
  GstV4l2VideoEnc parent;
};

struct _GstV4l2H264EncClass
{
  GstV4l2VideoEncClass parent_class;
};

GType gst_v4l2_h264_enc_get_type (void);

gboolean gst_v4l2_is_h264_enc (GstCaps * sink_caps, GstCaps * src_caps);

gboolean gst_v4l2_h264_enc_register (GstPlugin * plugin, const gchar * basename,
    const gchar * device_path, GstCaps * sink_caps, GstCaps * src_caps);

G_END_DECLS
#endif /* __GST_V4L2_H264_ENC_H__ */

956
sys/v4l2/gstv4l2videoenc.c Normal file
View file

@ -0,0 +1,956 @@
/*
* Copyright (C) 2014-2017 SUMOMO Computer Association
* Authors Ayaka <ayaka@soulik.info>
* Copyright (C) 2017 Collabora Ltd.
* Author: Nicolas Dufresne <nicolas.dufresne@collabora.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>
#include "gstv4l2videoenc.h"
#include "v4l2_calls.h"
#include <string.h>
#include <gst/gst-i18n-plugin.h>
GST_DEBUG_CATEGORY_STATIC (gst_v4l2_video_enc_debug);
#define GST_CAT_DEFAULT gst_v4l2_video_enc_debug

/* Per-subclass data carried alongside each dynamically registered encoder
 * type: the device path plus the caps probed from that device. */
typedef struct
{
  gchar *device;
  GstCaps *sink_caps;
  GstCaps *src_caps;
} GstV4l2VideoEncCData;

/* Only the shared V4L2 object properties are exposed by the base class. */
enum
{
  PROP_0,
  V4L2_STD_OBJECT_PROPS,
};

#define gst_v4l2_video_enc_parent_class parent_class
G_DEFINE_ABSTRACT_TYPE (GstV4l2VideoEnc, gst_v4l2_video_enc,
    GST_TYPE_VIDEO_ENCODER);
/* GObject property setter: route the capture io-mode to the capture
 * GstV4l2Object; everything else (including output io-mode) goes to the
 * output object. */
static void
gst_v4l2_video_enc_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (object);

  switch (prop_id) {
    case PROP_OUTPUT_IO_MODE:
      gst_v4l2_object_set_property_helper (self->v4l2output,
          prop_id, value, pspec);
      break;
    case PROP_CAPTURE_IO_MODE:
      gst_v4l2_object_set_property_helper (self->v4l2capture,
          prop_id, value, pspec);
      break;

      /* By default, only set on output */
    default:
      if (!gst_v4l2_object_set_property_helper (self->v4l2output,
              prop_id, value, pspec)) {
        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      }
      break;
  }
}
/* GObject property getter: mirror of set_property — the capture io-mode is
 * read from the capture GstV4l2Object, everything else from the output
 * object.
 *
 * Fix: PROP_CAPTURE_IO_MODE previously read from self->v4l2output with a
 * hard-coded PROP_IO_MODE id, returning the output queue's io-mode instead
 * of the capture queue's (set_property correctly targets v4l2capture). */
static void
gst_v4l2_video_enc_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (object);

  switch (prop_id) {
    case PROP_OUTPUT_IO_MODE:
      gst_v4l2_object_get_property_helper (self->v4l2output,
          prop_id, value, pspec);
      break;
    case PROP_CAPTURE_IO_MODE:
      gst_v4l2_object_get_property_helper (self->v4l2capture,
          prop_id, value, pspec);
      break;

      /* By default read from output */
    default:
      if (!gst_v4l2_object_get_property_helper (self->v4l2output,
              prop_id, value, pspec)) {
        G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      }
      break;
  }
}
/* Open the V4L2 device. The output (raw input) queue is opened first and
 * the capture (encoded output) queue shares its fd; then the supported raw
 * and codec caps are probed from the respective queues. On any failure
 * everything opened so far is closed again. */
static gboolean
gst_v4l2_video_enc_open (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Opening");

  if (!gst_v4l2_object_open (self->v4l2output))
    goto failure;

  /* capture side shares the fd already opened for the output side */
  if (!gst_v4l2_object_open_shared (self->v4l2capture, self->v4l2output))
    goto failure;

  self->probed_sinkcaps = gst_v4l2_object_get_caps (self->v4l2output,
      gst_v4l2_object_get_raw_caps ());

  if (gst_caps_is_empty (self->probed_sinkcaps))
    goto no_raw_format;

  self->probed_srccaps = gst_v4l2_object_get_caps (self->v4l2capture,
      gst_v4l2_object_get_codec_caps ());

  if (gst_caps_is_empty (self->probed_srccaps))
    goto no_encoded_format;

  return TRUE;

no_encoded_format:
  GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
      (_("Encoder on device %s has no supported output format"),
          self->v4l2output->videodev), (NULL));
  goto failure;

no_raw_format:
  GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
      (_("Encoder on device %s has no supported input format"),
          self->v4l2output->videodev), (NULL));
  goto failure;

failure:
  /* close whichever side was opened and drop any probed caps */
  if (GST_V4L2_IS_OPEN (self->v4l2output))
    gst_v4l2_object_close (self->v4l2output);

  if (GST_V4L2_IS_OPEN (self->v4l2capture))
    gst_v4l2_object_close (self->v4l2capture);

  gst_caps_replace (&self->probed_srccaps, NULL);
  gst_caps_replace (&self->probed_sinkcaps, NULL);

  return FALSE;
}
/* Close both V4L2 objects and release the caps probed at open time. */
static gboolean
gst_v4l2_video_enc_close (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Closing");

  /* drop the probed caps, then shut down both sides of the device */
  gst_caps_replace (&self->probed_srccaps, NULL);
  gst_caps_replace (&self->probed_sinkcaps, NULL);

  gst_v4l2_object_close (self->v4l2output);
  gst_v4l2_object_close (self->v4l2capture);

  return TRUE;
}
/* Prepare for streaming: reset flow bookkeeping and mark the element
 * active. */
static gboolean
gst_v4l2_video_enc_start (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Starting");

  gst_v4l2_object_unlock (self->v4l2output);

  self->output_flow = GST_FLOW_OK;
  g_atomic_int_set (&self->active, TRUE);

  return TRUE;
}
/* Stop streaming: unlock both queues so any blocked operation returns,
 * join the capture task, then tear down both V4L2 queues and release the
 * cached input state. */
static gboolean
gst_v4l2_video_enc_stop (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Stopping");

  gst_v4l2_object_unlock (self->v4l2output);
  gst_v4l2_object_unlock (self->v4l2capture);

  /* Wait for capture thread to stop */
  gst_pad_stop_task (encoder->srcpad);

  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  self->output_flow = GST_FLOW_OK;
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  /* Should have been flushed already */
  g_assert (g_atomic_int_get (&self->active) == FALSE);
  g_assert (g_atomic_int_get (&self->processing) == FALSE);

  gst_v4l2_object_stop (self->v4l2output);
  gst_v4l2_object_stop (self->v4l2capture);

  if (self->input_state) {
    gst_video_codec_state_unref (self->input_state);
    self->input_state = NULL;
  }

  GST_DEBUG_OBJECT (self, "Stopped");

  return TRUE;
}
/* Issue a VIDIOC_ENCODER_CMD ioctl on the device.
 * Returns TRUE on success; on failure logs (INFO for ENOTTY, since that
 * just means the driver doesn't implement encoder commands, ERROR
 * otherwise) and returns FALSE. */
static gboolean
gst_v4l2_encoder_cmd (GstV4l2Object * v4l2object, guint cmd, guint flags)
{
  struct v4l2_encoder_cmd ecmd = { 0, };

  GST_DEBUG_OBJECT (v4l2object->element,
      "sending v4l2 encoder command %u with flags %u", cmd, flags);

  if (!GST_V4L2_IS_OPEN (v4l2object))
    return FALSE;

  ecmd.cmd = cmd;
  ecmd.flags = flags;

  if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_ENCODER_CMD, &ecmd) >= 0)
    return TRUE;

  if (errno == ENOTTY) {
    GST_INFO_OBJECT (v4l2object->element,
        "Failed to send encoder command %u with flags %u for '%s'. (%s)",
        cmd, flags, v4l2object->videodev, g_strerror (errno));
  } else {
    GST_ERROR_OBJECT (v4l2object->element,
        "Failed to send encoder command %u with flags %u for '%s'. (%s)",
        cmd, flags, v4l2object->videodev, g_strerror (errno));
  }

  return FALSE;
}
/* Drain the encoder: send V4L2_ENC_CMD_STOP and wait for the capture task
 * to push all remaining buffers, then stop the task. Returns the flow
 * result recorded by the capture task. */
static GstFlowReturn
gst_v4l2_video_enc_finish (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstFlowReturn ret = GST_FLOW_OK;

  /* nothing to drain if the capture task never started */
  if (gst_pad_get_task_state (encoder->srcpad) != GST_TASK_STARTED)
    goto done;

  GST_DEBUG_OBJECT (self, "Finishing encoding");

  /* drop the stream lock while draining, so remaining buffers can be
   * pushed from the src pad task thread */
  GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

  if (gst_v4l2_encoder_cmd (self->v4l2capture, V4L2_ENC_CMD_STOP, 0)) {
    GstTask *task = encoder->srcpad->task;

    /* Wait for the task to be drained */
    GST_OBJECT_LOCK (task);
    while (GST_TASK_STATE (task) == GST_TASK_STARTED)
      GST_TASK_WAIT (task);
    GST_OBJECT_UNLOCK (task);
    /* FLUSHING marks "task stopped"; the real result is taken from
     * output_flow below */
    ret = GST_FLOW_FLUSHING;
  }

  /* and ensure the processing thread has stopped in case another error
   * occured. */
  gst_v4l2_object_unlock (self->v4l2capture);
  gst_pad_stop_task (encoder->srcpad);
  GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

  if (ret == GST_FLOW_FLUSHING)
    ret = self->output_flow;

  GST_DEBUG_OBJECT (encoder, "Done draining buffers");

done:
  return ret;
}
/* Configure the encoder for new input caps. If caps are compatible with the
 * current configuration this is a no-op; otherwise the encoder is drained
 * first, then the output state is (re)negotiated and the V4L2 output queue
 * format is set.
 *
 * Fix: the GstVideoCodecState returned by
 * gst_video_encoder_set_output_state() (a new reference) was previously
 * ignored and leaked. */
static gboolean
gst_v4l2_video_enc_set_format (GstVideoEncoder * encoder,
    GstVideoCodecState * state)
{
  gboolean ret = TRUE;
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstV4l2Error error = GST_V4L2_ERROR_INIT;
  GstVideoCodecState *output_state;
  GstCaps *outcaps;

  GST_DEBUG_OBJECT (self, "Setting format: %" GST_PTR_FORMAT, state->caps);

  if (self->input_state) {
    if (gst_v4l2_object_caps_equal (self->v4l2output, state->caps)) {
      GST_DEBUG_OBJECT (self, "Compatible caps");
      return TRUE;
    }

    /* caps changed: drain everything queued with the old format first */
    if (gst_v4l2_video_enc_finish (encoder) != GST_FLOW_OK)
      return FALSE;

    gst_video_codec_state_unref (self->input_state);
    self->input_state = NULL;
  }

  outcaps = gst_pad_get_pad_template_caps (encoder->srcpad);
  outcaps = gst_caps_make_writable (outcaps);

  /* set_output_state() takes ownership of outcaps and returns a new state
   * reference which must be released */
  output_state = gst_video_encoder_set_output_state (encoder, outcaps, state);
  gst_video_codec_state_unref (output_state);

  if (!gst_video_encoder_negotiate (encoder))
    return FALSE;

  if (!gst_v4l2_object_set_format (self->v4l2output, state->caps, &error)) {
    gst_v4l2_error (self, &error);
    return FALSE;
  }

  self->input_state = gst_video_codec_state_ref (state);

  GST_DEBUG_OBJECT (self, "output caps: %" GST_PTR_FORMAT, state->caps);

  return ret;
}
/* Flush the encoder: stop the capture task if it is still processing and
 * reset the flushing/poison state on both queues.
 *
 * Fix: the branch below unlocked the stream lock twice — flush is entered
 * with the stream lock held, so after stopping the task it must re-take
 * the lock (STREAM_LOCK), not unlock again. */
static gboolean
gst_v4l2_video_enc_flush (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  GST_DEBUG_OBJECT (self, "Flushing");

  /* Ensure the processing thread has stopped for the reverse playback
   * discont case */
  if (g_atomic_int_get (&self->processing)) {
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);

    gst_v4l2_object_unlock_stop (self->v4l2output);
    gst_v4l2_object_unlock_stop (self->v4l2capture);
    gst_pad_stop_task (encoder->srcpad);

    GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
  }

  self->output_flow = GST_FLOW_OK;

  gst_v4l2_object_unlock_stop (self->v4l2output);
  gst_v4l2_object_unlock_stop (self->v4l2capture);

  return TRUE;
}
/* Chain up to the base-class negotiate, then activate the capture buffer
 * pool so encoded buffers can be dequeued.
 * NOTE(review): the pool is activated even when the base-class negotiate
 * already failed — confirm whether activation should be skipped in that
 * case. */
static gboolean
gst_v4l2_video_enc_negotiate (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  gboolean ret;

  ret = GST_VIDEO_ENCODER_CLASS (parent_class)->negotiate (encoder);

  if (!gst_buffer_pool_set_active (GST_BUFFER_POOL (self->v4l2capture->pool),
          TRUE)) {
    GST_WARNING_OBJECT (self, "Could not activate capture buffer pool.");
    ret = FALSE;
  }

  return ret;
}
/* Return (with a new reference) the pending frame with the smallest PTS,
 * or NULL when no frames are pending. */
static GstVideoCodecFrame *
gst_v4l2_video_enc_get_oldest_frame (GstVideoEncoder * encoder)
{
  GstVideoCodecFrame *oldest = NULL;
  GList *pending, *walk;
  gint num_frames = 0;

  pending = gst_video_encoder_get_frames (encoder);

  for (walk = pending; walk != NULL; walk = walk->next) {
    GstVideoCodecFrame *candidate = walk->data;

    if (oldest == NULL || oldest->pts > candidate->pts)
      oldest = candidate;
    num_frames++;
  }

  if (oldest != NULL) {
    GST_LOG_OBJECT (encoder,
        "Oldest frame is %d %" GST_TIME_FORMAT
        " and %d frames left",
        oldest->system_frame_number, GST_TIME_ARGS (oldest->pts),
        num_frames - 1);
    gst_video_codec_frame_ref (oldest);
  }

  g_list_free_full (pending, (GDestroyNotify) gst_video_codec_frame_unref);

  return oldest;
}
/* Capture (src pad) task: allocate an output buffer, fill it from the
 * capture queue, attach it to the oldest pending frame and push it
 * downstream. On any error the task pauses and records the flow return in
 * output_flow for the streaming thread to pick up. */
static void
gst_v4l2_video_enc_loop (GstVideoEncoder * encoder)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstVideoCodecFrame *frame;
  GstBuffer *buffer = NULL;
  GstFlowReturn ret;

  GST_LOG_OBJECT (encoder, "Allocate output buffer");
  buffer = gst_video_encoder_allocate_output_buffer (encoder,
      self->v4l2capture->info.size);

  if (NULL == buffer) {
    ret = GST_FLOW_FLUSHING;
    goto beach;
  }

  /* FIXME Check if buffer isn't the last one here */

  GST_LOG_OBJECT (encoder, "Process output buffer");
  ret =
      gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL
      (self->v4l2capture->pool), &buffer);

  if (ret != GST_FLOW_OK)
    goto beach;

  /* encoded buffers are matched to pending frames oldest-PTS first */
  frame = gst_v4l2_video_enc_get_oldest_frame (encoder);

  if (frame) {
    frame->output_buffer = buffer;
    buffer = NULL;
    ret = gst_video_encoder_finish_frame (encoder, frame);

    if (ret != GST_FLOW_OK)
      goto beach;
  } else {
    GST_WARNING_OBJECT (encoder, "Encoder is producing too many buffers");
    gst_buffer_unref (buffer);
  }

  return;

beach:
  GST_DEBUG_OBJECT (encoder, "Leaving output thread");

  gst_buffer_replace (&buffer, NULL);
  self->output_flow = ret;
  g_atomic_int_set (&self->processing, FALSE);
  /* unblock the streaming thread that may be queuing on the output side */
  gst_v4l2_object_unlock (self->v4l2output);
  gst_pad_pause_task (encoder->srcpad);
}
/* GDestroyNotify for the capture task: if the task quit while still marked
 * as processing it was stopped early, so record FLUSHING. */
static void
gst_v4l2_video_enc_loop_stopped (GstV4l2VideoEnc * self)
{
  gboolean was_processing = g_atomic_int_get (&self->processing);

  if (was_processing) {
    GST_DEBUG_OBJECT (self, "Early stop of encoding thread");
    self->output_flow = GST_FLOW_FLUSHING;
    g_atomic_int_set (&self->processing, FALSE);
  }

  GST_DEBUG_OBJECT (self, "Encoding task destroyed: %s",
      gst_flow_get_name (self->output_flow));
}
/* handle_frame vfunc: queues one raw frame on the V4L2 OUTPUT queue.  On
 * the first frame (or after a stop), it also activates the input buffer
 * pool and starts the srcpad streaming task that dequeues encoded buffers.
 *
 * Ownership: @frame is unreffed on success, or finished (dropped) on the
 * error paths. */
static GstFlowReturn
gst_v4l2_video_enc_handle_frame (GstVideoEncoder * encoder,
    GstVideoCodecFrame * frame)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstFlowReturn ret = GST_FLOW_OK;

  GST_DEBUG_OBJECT (self, "Handling frame %d", frame->system_frame_number);

  if (G_UNLIKELY (!g_atomic_int_get (&self->active)))
    goto flushing;

  /* Fix: use the encoder pad accessor; the original used
   * GST_VIDEO_DECODER_SRC_PAD on this encoder object, which only worked
   * because GstVideoDecoder and GstVideoEncoder happen to share layout. */
  if (gst_pad_get_task_state (GST_VIDEO_ENCODER_SRC_PAD (self)) ==
      GST_TASK_STOPPED) {
    GstBufferPool *pool = GST_BUFFER_POOL (self->v4l2output->pool);

    /* It possible that the processing thread stopped due to an error */
    if (self->output_flow != GST_FLOW_OK &&
        self->output_flow != GST_FLOW_FLUSHING) {
      GST_DEBUG_OBJECT (self, "Processing loop stopped with error, leaving");
      ret = self->output_flow;
      goto drop;
    }

    /* Ensure input internal pool is active */
    if (!gst_buffer_pool_is_active (pool)) {
      GstStructure *config = gst_buffer_pool_get_config (pool);
      gst_buffer_pool_config_set_params (config, self->input_state->caps,
          self->v4l2output->info.size, self->v4l2output->min_buffers,
          self->v4l2output->min_buffers);

      /* There is no reason to refuse this config */
      if (!gst_buffer_pool_set_config (pool, config))
        goto activate_failed;

      if (!gst_buffer_pool_set_active (pool, TRUE))
        goto activate_failed;
    }

    GST_DEBUG_OBJECT (self, "Starting encoding thread");

    /* Start the processing task, when it quits, the task will disable input
     * processing to unlock input if draining, or prevent potential block */
    if (!gst_pad_start_task (encoder->srcpad,
            (GstTaskFunction) gst_v4l2_video_enc_loop, self,
            (GDestroyNotify) gst_v4l2_video_enc_loop_stopped))
      goto start_task_failed;
  }

  if (frame->input_buffer) {
    /* Queuing on the driver may block; release the stream lock so the
     * output task can make progress meanwhile */
    GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
    ret =
        gst_v4l2_buffer_pool_process (GST_V4L2_BUFFER_POOL
        (self->v4l2output->pool), &frame->input_buffer);
    GST_VIDEO_ENCODER_STREAM_LOCK (encoder);

    if (ret == GST_FLOW_FLUSHING) {
      /* Fix: encoder pad accessor here too (see above) */
      if (gst_pad_get_task_state (GST_VIDEO_ENCODER_SRC_PAD (self)) !=
          GST_TASK_STARTED)
        ret = self->output_flow;
      goto drop;
    } else if (ret != GST_FLOW_OK) {
      goto process_failed;
    }
  }

  gst_video_codec_frame_unref (frame);
  return ret;

  /* ERRORS */
activate_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, SETTINGS,
        (_("Failed to allocate required memory.")),
        ("Buffer pool activation failed"));
    return GST_FLOW_ERROR;
  }
flushing:
  {
    ret = GST_FLOW_FLUSHING;
    goto drop;
  }
start_task_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to start encoding thread.")), (NULL));
    g_atomic_int_set (&self->processing, FALSE);
    ret = GST_FLOW_ERROR;
    goto drop;
  }
process_failed:
  {
    GST_ELEMENT_ERROR (self, RESOURCE, FAILED,
        (_("Failed to process frame.")),
        ("Maybe be due to not enough memory or failing driver"));
    ret = GST_FLOW_ERROR;
    goto drop;
  }
drop:
  {
    gst_video_encoder_finish_frame (encoder, frame);
    return ret;
  }
}
/* decide_allocation vfunc: sets the encoded (CAPTURE) format on the driver,
 * lets the V4L2 object and the base class negotiate downstream allocation,
 * and reports an estimated latency.
 *
 * Returns: TRUE on success, FALSE if the format could not be set or
 * allocation could not be decided. */
static gboolean
gst_v4l2_video_enc_decide_allocation (GstVideoEncoder *
    encoder, GstQuery * query)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  GstVideoCodecState *state = gst_video_encoder_get_output_state (encoder);
  GstV4l2Error error = GST_V4L2_ERROR_INIT;
  GstClockTime latency;
  gboolean ret = FALSE;

  /* We need to set the format here, since this is called right after
   * GstVideoEncoder have set the width, height and framerate into the state
   * caps. These are needed by the driver to calculate the buffer size and to
   * implement bitrate adaptation. */
  if (!gst_v4l2_object_set_format (self->v4l2capture, state->caps, &error)) {
    gst_v4l2_error (self, &error);
    ret = FALSE;
    goto done;
  }

  if (gst_v4l2_object_decide_allocation (self->v4l2capture, query)) {
    /* Only chain up to the base class when the V4L2 side succeeded */
    GstVideoEncoderClass *enc_class = GST_VIDEO_ENCODER_CLASS (parent_class);
    ret = enc_class->decide_allocation (encoder, query);
  }

  /* FIXME This may not be entirely correct, as encoder may keep some
   * observation withouth delaying the encoding. Linux Media API need some
   * more work to explicitly expressed the decoder / encoder latency. This
   * value will then become max latency, and the reported driver latency would
   * become the min latency. */
  latency = self->v4l2capture->min_buffers * self->v4l2capture->duration;
  gst_video_encoder_set_latency (encoder, latency, latency);

done:
  gst_video_codec_state_unref (state);
  return ret;
}
/* propose_allocation vfunc: lets the V4L2 OUTPUT object fill in the
 * upstream allocation query, then chains up to the base class.  A NULL
 * query is accepted as-is. */
static gboolean
gst_v4l2_video_enc_propose_allocation (GstVideoEncoder *
    encoder, GstQuery * query)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  gboolean ok;

  GST_DEBUG_OBJECT (self, "called");

  ok = (query == NULL)
      || gst_v4l2_object_propose_allocation (self->v4l2output, query);
  if (!ok)
    return FALSE;

  return GST_VIDEO_ENCODER_CLASS (parent_class)->propose_allocation (encoder,
      query);
}
/* src_query vfunc: answers CAPS queries from the caps probed on the device
 * (intersected with the pad template); everything else is delegated to the
 * base class. */
static gboolean
gst_v4l2_video_enc_src_query (GstVideoEncoder * encoder, GstQuery * query)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  if (GST_QUERY_TYPE (query) != GST_QUERY_CAPS)
    return GST_VIDEO_ENCODER_CLASS (parent_class)->src_query (encoder, query);

  {
    GstPad *pad = GST_VIDEO_ENCODER_SRC_PAD (encoder);
    GstCaps *filter = NULL;
    GstCaps *caps;

    gst_query_parse_caps (query, &filter);

    /* FIXME Try and not probe the entire encoder, but only the implement
     * subclass format */
    if (self->probed_srccaps != NULL) {
      GstCaps *tmpl = gst_pad_get_pad_template_caps (pad);

      caps = gst_caps_intersect (tmpl, self->probed_srccaps);
      gst_caps_unref (tmpl);
    } else {
      caps = gst_pad_get_pad_template_caps (pad);
    }

    if (filter != NULL) {
      GstCaps *unfiltered = caps;

      caps = gst_caps_intersect_full (filter, unfiltered,
          GST_CAPS_INTERSECT_FIRST);
      gst_caps_unref (unfiltered);
    }

    GST_DEBUG_OBJECT (self, "Returning src caps %" GST_PTR_FORMAT, caps);
    gst_query_set_caps_result (query, caps);
    gst_caps_unref (caps);
  }

  return TRUE;
}
/* sink_query vfunc: answers CAPS queries from the raw formats probed on the
 * device, falling back to the pad template; everything else is delegated to
 * the base class. */
static gboolean
gst_v4l2_video_enc_sink_query (GstVideoEncoder * encoder, GstQuery * query)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);

  if (GST_QUERY_TYPE (query) != GST_QUERY_CAPS)
    return GST_VIDEO_ENCODER_CLASS (parent_class)->sink_query (encoder, query);

  {
    GstPad *pad = GST_VIDEO_ENCODER_SINK_PAD (encoder);
    GstCaps *filter = NULL;
    GstCaps *caps;

    gst_query_parse_caps (query, &filter);

    if (self->probed_sinkcaps != NULL)
      caps = gst_caps_ref (self->probed_sinkcaps);
    else
      caps = gst_pad_get_pad_template_caps (pad);

    if (filter != NULL) {
      GstCaps *unfiltered = caps;

      caps = gst_caps_intersect_full (filter, unfiltered,
          GST_CAPS_INTERSECT_FIRST);
      gst_caps_unref (unfiltered);
    }

    GST_DEBUG_OBJECT (self, "Returning sink caps %" GST_PTR_FORMAT, caps);
    gst_query_set_caps_result (query, caps);
    gst_caps_unref (caps);
  }

  return TRUE;
}
/* sink_event vfunc.  Before forwarding a FLUSH_START, both V4L2 queues are
 * unlocked so any blocked thread can return; after the base class handled
 * it, the streaming task is stopped. */
static gboolean
gst_v4l2_video_enc_sink_event (GstVideoEncoder * encoder, GstEvent * event)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (encoder);
  gboolean ret;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_START:
      GST_DEBUG_OBJECT (self, "flush start");
      gst_v4l2_object_unlock (self->v4l2output);
      gst_v4l2_object_unlock (self->v4l2capture);
      break;
    default:
      break;
  }

  ret = GST_VIDEO_ENCODER_CLASS (parent_class)->sink_event (encoder, event);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_FLUSH_START:
      /* The streaming task was unblocked above, so it can be joined now */
      gst_pad_stop_task (encoder->srcpad);
      GST_DEBUG_OBJECT (self, "flush start done");
      break;                    /* fix: was silently falling through */
    default:
      break;
  }

  return ret;
}
/* change_state vfunc: on PAUSED->READY, mark the element inactive and
 * unlock both V4L2 queues so any blocked streaming thread can exit, then
 * chain up. */
static GstStateChangeReturn
gst_v4l2_video_enc_change_state (GstElement * element,
    GstStateChange transition)
{
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (element);

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      g_atomic_int_set (&self->active, FALSE);
      gst_v4l2_object_unlock (self->v4l2output);
      gst_v4l2_object_unlock (self->v4l2capture);
      break;
    default:
      break;
  }

  return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
}
/* GObject dispose: drop the caps probed at open time.  May run more than
 * once, which gst_caps_replace() handles safely. */
static void
gst_v4l2_video_enc_dispose (GObject * object)
{
  GstV4l2VideoEnc *enc = GST_V4L2_VIDEO_ENC (object);

  gst_caps_replace (&enc->probed_sinkcaps, NULL);
  gst_caps_replace (&enc->probed_srccaps, NULL);

  G_OBJECT_CLASS (parent_class)->dispose (object);
}
/* GObject finalize: destroy the two V4L2 objects created in
 * subinstance_init, then chain up. */
static void
gst_v4l2_video_enc_finalize (GObject * object)
{
  GstV4l2VideoEnc *enc = GST_V4L2_VIDEO_ENC (object);

  gst_v4l2_object_destroy (enc->v4l2capture);
  gst_v4l2_object_destroy (enc->v4l2output);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
/* Instance init is intentionally empty: the V4L2 objects are created in
 * gst_v4l2_video_enc_subinstance_init(), which has access to the
 * per-subclass data (default device path). */
static void
gst_v4l2_video_enc_init (GstV4l2VideoEnc * self)
{
  /* V4L2 object are created in subinstance_init */
}
/* GTypeInstance init used by the dynamically registered subclasses: creates
 * the V4L2 OUTPUT (raw frames in) and CAPTURE (encoded stream out) objects
 * bound to the subclass' default device. */
static void
gst_v4l2_video_enc_subinstance_init (GTypeInstance * instance, gpointer g_class)
{
  GstV4l2VideoEncClass *klass = GST_V4L2_VIDEO_ENC_CLASS (g_class);
  GstV4l2VideoEnc *self = GST_V4L2_VIDEO_ENC (instance);

  self->v4l2output = gst_v4l2_object_new (GST_ELEMENT (self),
      V4L2_BUF_TYPE_VIDEO_OUTPUT, klass->default_device,
      gst_v4l2_get_output, gst_v4l2_set_output, NULL);
  self->v4l2output->no_initial_format = TRUE;
  self->v4l2output->keep_aspect = FALSE;

  self->v4l2capture = gst_v4l2_object_new (GST_ELEMENT (self),
      V4L2_BUF_TYPE_VIDEO_CAPTURE, klass->default_device,
      gst_v4l2_get_input, gst_v4l2_set_input, NULL);
  self->v4l2capture->no_initial_format = TRUE;
  /* Fix copy-paste: the original re-assigned v4l2output->keep_aspect here
   * (already set above); this hunk configures the capture object. */
  self->v4l2capture->keep_aspect = FALSE;
}
/* Class init for the abstract GstV4l2VideoEnc base class: wires up the
 * GObject, GstElement and GstVideoEncoder virtual methods and installs the
 * common V4L2 memory-to-memory properties (device, io-mode, ...). */
static void
gst_v4l2_video_enc_class_init (GstV4l2VideoEncClass * klass)
{
  GstElementClass *element_class;
  GObjectClass *gobject_class;
  GstVideoEncoderClass *video_encoder_class;

  parent_class = g_type_class_peek_parent (klass);

  element_class = (GstElementClass *) klass;
  gobject_class = (GObjectClass *) klass;
  video_encoder_class = (GstVideoEncoderClass *) klass;

  GST_DEBUG_CATEGORY_INIT (gst_v4l2_video_enc_debug, "v4l2videoenc", 0,
      "V4L2 Video Encoder");

  gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_dispose);
  gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_finalize);
  gobject_class->set_property =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_set_property);
  gobject_class->get_property =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_get_property);

  video_encoder_class->open = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_open);
  video_encoder_class->close = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_close);
  video_encoder_class->start = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_start);
  video_encoder_class->stop = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_stop);
  video_encoder_class->finish = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_finish);
  video_encoder_class->flush = GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_flush);
  video_encoder_class->set_format =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_set_format);
  video_encoder_class->negotiate =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_negotiate);
  video_encoder_class->decide_allocation =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_decide_allocation);
  video_encoder_class->propose_allocation =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_propose_allocation);
  video_encoder_class->sink_query =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_sink_query);
  video_encoder_class->src_query =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_src_query);
  video_encoder_class->sink_event =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_sink_event);
  video_encoder_class->handle_frame =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_handle_frame);

  element_class->change_state =
      GST_DEBUG_FUNCPTR (gst_v4l2_video_enc_change_state);

  /* Shared "device", "io-mode", etc. properties for M2M elements */
  gst_v4l2_object_install_m2m_properties_helper (gobject_class);
}
/* GClassInitFunc for the dynamically registered subclasses: stores the
 * device path and installs the pad templates built from the probed caps.
 * Consumes and frees the GstV4l2VideoEncCData passed as class data. */
static void
gst_v4l2_video_enc_subclass_init (gpointer g_class, gpointer data)
{
  GstV4l2VideoEncClass *klass = GST_V4L2_VIDEO_ENC_CLASS (g_class);
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
  GstV4l2VideoEncCData *cdata = data;
  GstPadTemplate *sink_templ, *src_templ;

  klass->default_device = cdata->device;

  /* Note: gst_pad_template_new() take the floating ref from the caps */
  sink_templ = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
      cdata->sink_caps);
  src_templ = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
      cdata->src_caps);

  gst_element_class_add_pad_template (element_class, sink_templ);
  gst_element_class_add_pad_template (element_class, src_templ);

  g_free (cdata);
}
/* Probing functions */
gboolean
gst_v4l2_is_video_enc (GstCaps * sink_caps, GstCaps * src_caps)
{
gboolean ret = FALSE;
if (gst_caps_is_subset (sink_caps, gst_v4l2_object_get_raw_caps ())
&& gst_caps_is_subset (src_caps, gst_v4l2_object_get_codec_caps ()))
ret = TRUE;
return ret;
}
/* Registers a GstV4l2VideoEnc subclass for one encoder device.
 *
 * @codec: short codec name used in the element name (e.g. "h264")
 * @basename: device basename (e.g. "video0") used to build a unique name
 *            when the short name is already taken
 * @sink_caps/@codec_caps/@src_caps: probed/filter caps; the registered src
 *            template is the intersection of src_caps and codec_caps.
 *
 * Returns: TRUE (registration failure is only warned about, matching the
 * original best-effort behavior). */
gboolean
gst_v4l2_video_enc_register (GstPlugin * plugin, GType type,
    const char *codec, const gchar * basename, const gchar * device_path,
    GstCaps * sink_caps, GstCaps * codec_caps, GstCaps * src_caps)
{
  GstCaps *filtered_caps;
  GTypeQuery type_query;
  GTypeInfo type_info = { 0, };
  GType subtype;
  gchar *type_name;
  GstV4l2VideoEncCData *cdata;

  filtered_caps = gst_caps_intersect (src_caps, codec_caps);

  cdata = g_new0 (GstV4l2VideoEncCData, 1);
  cdata->device = g_strdup (device_path);
  cdata->sink_caps = gst_caps_ref (sink_caps);
  /* Transfer the intersection result directly; the original took an extra
   * ref here and leaked the local filtered_caps reference. */
  cdata->src_caps = filtered_caps;

  g_type_query (type, &type_query);
  /* type_info is already zero-initialized by the { 0, } initializer */
  type_info.class_size = type_query.class_size;
  type_info.instance_size = type_query.instance_size;
  type_info.class_init = gst_v4l2_video_enc_subclass_init;
  type_info.class_data = cdata;
  type_info.instance_init = gst_v4l2_video_enc_subinstance_init;

  /* The first encoder to be registered should use a constant name, like
   * v4l2h264enc, for any additional encoders, we create unique names. Encoder
   * names may change between boots, so this should help gain stable names for
   * the most common use cases. */
  type_name = g_strdup_printf ("v4l2%senc", codec);
  if (g_type_from_name (type_name) != 0) {
    g_free (type_name);
    type_name = g_strdup_printf ("v4l2%s%senc", basename, codec);
  }

  subtype = g_type_register_static (type, type_name, &type_info, 0);

  if (!gst_element_register (plugin, type_name, GST_RANK_PRIMARY + 1, subtype))
    GST_WARNING ("Failed to register element '%s'", type_name);

  g_free (type_name);

  return TRUE;
}

View file

@ -0,0 +1,87 @@
/*
 * Copyright (C) 2014 SUMOMO Computer Association.
 * Author: ayaka <ayaka@soulik.info>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 *
 */

#ifndef __GST_V4L2_VIDEO_ENC_H__
#define __GST_V4L2_VIDEO_ENC_H__

#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstvideoencoder.h>
#include <gst/video/gstvideometa.h>

#include <gstv4l2object.h>
#include <gstv4l2bufferpool.h>

/* NOTE(review): the .c file initializes a category named
 * gst_v4l2_video_enc_debug; confirm a category named v4l2videoenc_debug is
 * actually defined somewhere, otherwise any user of this extern will fail
 * to link. */
GST_DEBUG_CATEGORY_EXTERN (v4l2videoenc_debug);

G_BEGIN_DECLS
#define GST_TYPE_V4L2_VIDEO_ENC \
  (gst_v4l2_video_enc_get_type())
#define GST_V4L2_VIDEO_ENC(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4L2_VIDEO_ENC,GstV4l2VideoEnc))
#define GST_V4L2_VIDEO_ENC_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4L2_VIDEO_ENC,GstV4l2VideoEncClass))
#define GST_IS_V4L2_VIDEO_ENC(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4L2_VIDEO_ENC))
/* Fix: parameter was named 'obj' while the body referenced 'klass', so any
 * use of this macro failed to compile. */
#define GST_IS_V4L2_VIDEO_ENC_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2_VIDEO_ENC))

typedef struct _GstV4l2VideoEnc GstV4l2VideoEnc;
typedef struct _GstV4l2VideoEncClass GstV4l2VideoEncClass;

struct _GstV4l2VideoEnc
{
  GstVideoEncoder parent;

  /* < private > */
  GstV4l2Object *v4l2output;    /* OUTPUT queue: raw frames in */
  GstV4l2Object *v4l2capture;   /* CAPTURE queue: encoded stream out */

  /* pads */
  GstCaps *probed_srccaps;
  GstCaps *probed_sinkcaps;

  /* State */
  GstVideoCodecState *input_state;
  gboolean active;              /* element accepting input (atomic flag) */
  gboolean processing;          /* srcpad streaming task running (atomic) */
  GstFlowReturn output_flow;    /* last flow return of the output loop */
};

struct _GstV4l2VideoEncClass
{
  GstVideoEncoderClass parent_class;

  gchar *default_device;        /* device path bound at registration time */

  GstFlowReturn (*get_output_caps) (GstVideoEncoder * encoder,
      GstCaps ** outcaps);
};

GType gst_v4l2_video_enc_get_type (void);

gboolean gst_v4l2_video_enc_register (GstPlugin * plugin, GType type,
    const char *codec, const gchar * basename, const gchar * device_path,
    GstCaps * sink_caps, GstCaps * codec_caps, GstCaps * src_caps);

G_END_DECLS
#endif /* __GST_V4L2_VIDEO_ENC_H__ */

View file

@ -11,6 +11,8 @@ v4l2_sources = [
'gstv4l2tuner.c',
'gstv4l2transform.c',
'gstv4l2videodec.c',
'gstv4l2videoenc.c',
'gstv4l2h264enc.c',
'gstv4l2vidorient.c',
'v4l2_calls.c',
'v4l2-utils.c',