UVC H264 plugin

Youness Alaoui 2012-09-10 16:09:26 -04:00 committed by Olivier Crête
parent 1ef529601b
commit 1ba24e1306
28 changed files with 6921 additions and 6 deletions

View file

@ -325,7 +325,7 @@ GST_PLUGINS_NONPORTED=" aiff \
gsettings jasper ladspa \
musepack musicbrainz nas neon ofa openal rsvg sdl sndfile spandsp timidity \
directsound directdraw direct3d9 acm wininet \
wildmidi xvid lv2 teletextdec sndio"
wildmidi xvid lv2 teletextdec sndio uvch264"
AC_SUBST(GST_PLUGINS_NONPORTED)
dnl these are all the gst plug-ins, compilable without additional libs
@ -689,6 +689,27 @@ AG_GST_CHECK_FEATURE(VCD, [Video CD], vcdsrc, [
AC_CHECK_HEADER(linux/cdrom.h, HAVE_VCD="yes", HAVE_VCD="no")
])
dnl *** UVC H264 ***
translit(dnm, m, l) AM_CONDITIONAL(USE_UVCH264, true)
AG_GST_CHECK_FEATURE(UVCH264, [UVC H264], uvch264, [
AC_CHECK_HEADER(linux/uvcvideo.h, HAVE_UVCH264=yes, HAVE_UVCH264=no)
AG_GST_PKG_CHECK_MODULES(GST_VIDEO, gstreamer-video-0.10 >= 0.10.36)
PKG_CHECK_MODULES(G_UDEV, gudev-1.0 , [
AC_DEFINE([HAVE_GUDEV], 1, [Define if gudev is installed])
HAVE_GUDEV="yes" ],
[HAVE_GUDEV="no"])
PKG_CHECK_MODULES(LIBUSB, libusb-1.0 , [
AC_DEFINE([HAVE_LIBUSB], 1, [Define if libusb 1.x is installed])
HAVE_LIBUSB="yes" ],
[HAVE_LIBUSB="no"])
])
AC_SUBST(LIBUDEV_CFLAGS)
AC_SUBST(LIBUDEV_LIBS)
AC_SUBST(LIBUSB_CFLAGS)
AC_SUBST(LIBUSB_LIBS)
dnl *** ext plug-ins ***
dnl keep this list sorted alphabetically !
@ -1905,6 +1926,7 @@ AM_CONDITIONAL(USE_VP8, false)
AM_CONDITIONAL(USE_RTMP, false)
AM_CONDITIONAL(USE_TELETEXTDEC, false)
AM_CONDITIONAL(USE_SNDIO, false)
AM_CONDITIONAL(USE_UVCH264, false)
fi dnl of EXT plugins
@ -2079,6 +2101,7 @@ sys/linsys/Makefile
sys/osxvideo/Makefile
sys/qtwrapper/Makefile
sys/shm/Makefile
sys/uvch264/Makefile
sys/vcd/Makefile
sys/vdpau/Makefile
sys/vdpau/gstvdp/Makefile
@ -2097,6 +2120,7 @@ tests/examples/directfb/Makefile
tests/examples/mxf/Makefile
tests/examples/scaletempo/Makefile
tests/examples/opencv/Makefile
tests/examples/uvch264/Makefile
tests/icles/Makefile
ext/voamrwbenc/Makefile
ext/voaacenc/Makefile

View file

@ -130,9 +130,15 @@ else
WINSCREENCAP_DIR=
endif
SUBDIRS = $(ACM_DIR) $(APPLE_MEDIA_DIR) $(AVC_DIR) $(D3DVIDEOSINK_DIR) $(DECKLINK_DIR) $(DIRECTDRAW_DIR) $(DIRECTSOUND_DIR) $(DIRECTSHOW_DIR) $(DVB_DIR) $(FBDEV_DIR) $(LINSYS_DIR) $(OSX_VIDEO_DIR) $(PVR_DIR) $(QT_DIR) $(SHM_DIR) $(VCD_DIR) $(VDPAU_DIR) $(WININET_DIR) $(WINSCREENCAP_DIR)
if USE_UVCH264
UVCH264_DIR=uvch264
else
UVCH264_DIR=
endif
SUBDIRS = $(ACM_DIR) $(APPLE_MEDIA_DIR) $(AVC_DIR) $(D3DVIDEOSINK_DIR) $(DECKLINK_DIR) $(DIRECTDRAW_DIR) $(DIRECTSOUND_DIR) $(DIRECTSHOW_DIR) $(DVB_DIR) $(FBDEV_DIR) $(LINSYS_DIR) $(OSX_VIDEO_DIR) $(PVR_DIR) $(QT_DIR) $(SHM_DIR) $(UVCH264_DIR) $(VCD_DIR) $(VDPAU_DIR) $(WININET_DIR) $(WINSCREENCAP_DIR)
DIST_SUBDIRS = acmenc acmmp3dec applemedia avc d3dvideosink decklink directdraw directsound dvb linsys fbdev dshowdecwrapper dshowsrcwrapper dshowvideosink \
osxvideo pvr2d qtwrapper shm vcd vdpau wasapi wininet winks winscreencap
osxvideo pvr2d qtwrapper shm uvch264 vcd vdpau wasapi wininet winks winscreencap
include $(top_srcdir)/common/parallel-subdirs.mak

48
sys/uvch264/Makefile.am Normal file
View file

@ -0,0 +1,48 @@
glib_gen_prefix = __gst_uvc_h264
glib_gen_basename = gstuvch264
include $(top_srcdir)/common/gst-glib-gen.mak
built_sources = gstuvch264-marshal.c
built_headers = gstuvch264-marshal.h
BUILT_SOURCES = $(built_sources) $(built_headers)
CLEANFILES = $(BUILT_SOURCES)
EXTRA_DIST = gstuvch264-marshal.list
plugin_LTLIBRARIES = libgstuvch264.la
libgstuvch264_la_SOURCES = gstuvch264.c \
gstuvch264_mjpgdemux.c \
gstuvch264_src.c \
uvc_h264.c
nodist_libgstuvch264_la_SOURCES = $(built_sources)
libgstuvch264_la_CFLAGS = $(GST_PLUGINS_BAD_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS) \
$(GST_BASE_CFLAGS) \
$(GST_VIDEO_CFLAGS) \
$(GST_CFLAGS) \
$(G_UDEV_CFLAGS) \
$(LIBUSB_CFLAGS) \
-DGST_USE_UNSTABLE_API
libgstuvch264_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstuvch264_la_LIBTOOLFLAGS = --tag=disable-static
libgstuvch264_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) \
$(GST_BASE_LIBS) \
$(GST_PLUGINS_BASE_LIBS) \
$(GST_VIDEO_LIBS) \
$(GST_LIBS) \
$(G_UDEV_LIBS) \
$(LIBUSB_LIBS) \
$(top_builddir)/gst-libs/gst/basecamerabinsrc/libgstbasecamerabinsrc-$(GST_MAJORMINOR).la
noinst_HEADERS = gstuvch264_mjpgdemux.h \
gstuvch264_src.h \
uvc_h264.h

View file

@ -0,0 +1,3 @@
BOOLEAN:STRING,POINTER,POINTER,POINTER
BOOLEAN:STRING,POINTER,POINTER
BOOLEAN:STRING,POINTER

50
sys/uvch264/gstuvch264.c Normal file
View file

@ -0,0 +1,50 @@
/* GStreamer
*
* uvch264: a plugin for handling UVC compliant H264 encoding cameras
*
* Copyright (C) 2012 Cisco Systems, Inc.
* Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <gst/gst.h>
#include "gstuvch264_mjpgdemux.h"
#include "gstuvch264_src.h"
static gboolean
plugin_init (GstPlugin * plugin)
{
if (!gst_element_register (plugin, "uvch264_mjpgdemux", GST_RANK_NONE,
GST_TYPE_UVC_H264_MJPG_DEMUX))
return FALSE;
if (!gst_element_register (plugin, "uvch264_src", GST_RANK_NONE,
GST_TYPE_UVC_H264_SRC))
return FALSE;
return TRUE;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"uvch264",
"UVC compliant H264 encoding cameras plugin",
plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
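/* Assumed smoke test, not part of the plugin itself: once installed,
 * `gst-inspect-0.10 uvch264` should list both uvch264_mjpgdemux and
 * uvch264_src registered above. */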

View file

@ -0,0 +1,723 @@
/* GStreamer
*
* uvch264_mjpg_demux: a demuxer for the streams muxed into UVC H264 compliant MJPG
*
* Copyright (C) 2012 Cisco Systems, Inc.
* Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/**
* SECTION:element-uvch264-mjpgdemux
* @short_description: UVC H264 compliant MJPG demuxer
*
* Parses an MJPG stream from a UVC H264 compliant encoding camera and extracts
* each muxed auxiliary stream onto its own source pad.
*
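* A hypothetical launch line (a sketch only: it assumes the camera has already
* been configured, e.g. through uvch264_src or the UVC extension unit, to mux
* an H264 auxiliary stream into its MJPG container, and /dev/video1 is a
* placeholder device):
* |[
* gst-launch v4l2src device=/dev/video1 ! image/jpeg,width=320,height=240,framerate=30/1 ! \
*     uvch264_mjpgdemux name=demux demux.jpeg ! jpegdec ! ffmpegcolorspace ! xvimagesink \
*     demux.h264 ! queue ! filesink location=out.h264
* ]|
*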
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <string.h>
#include <linux/uvcvideo.h>
#include <linux/usb/video.h>
#include <sys/ioctl.h>
#ifndef UVCIOC_GET_LAST_SCR
#include <time.h>
struct uvc_last_scr_sample
{
__u32 dev_frequency;
__u32 dev_stc;
__u16 dev_sof;
struct timespec host_ts;
__u16 host_sof;
};
#define UVCIOC_GET_LAST_SCR _IOR('u', 0x23, struct uvc_last_scr_sample)
#endif
#include "gstuvch264_mjpgdemux.h"
enum
{
PROP_0,
PROP_DEVICE_FD,
PROP_NUM_CLOCK_SAMPLES
};
#define DEFAULT_NUM_CLOCK_SAMPLES 32
static GstStaticPadTemplate mjpgsink_pad_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("image/jpeg, "
"width = (int) [ 0, MAX ],"
"height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
);
static GstStaticPadTemplate jpegsrc_pad_template =
GST_STATIC_PAD_TEMPLATE ("jpeg",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("image/jpeg, "
"width = (int) [ 0, MAX ],"
"height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
);
static GstStaticPadTemplate h264src_pad_template =
GST_STATIC_PAD_TEMPLATE ("h264",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-h264, "
"width = (int) [ 0, MAX ], "
"height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
);
static GstStaticPadTemplate yuy2src_pad_template =
GST_STATIC_PAD_TEMPLATE ("yuy2",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-raw-yuv, "
"format = (fourcc) YUY2, "
"width = (int) [ 0, MAX ], "
"height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
);
static GstStaticPadTemplate nv12src_pad_template =
GST_STATIC_PAD_TEMPLATE ("nv12",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-raw-yuv, "
"format = (fourcc) NV12, "
"width = (int) [ 0, MAX ], "
"height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0/1, MAX ] ")
);
GST_DEBUG_CATEGORY_STATIC (uvc_h264_mjpg_demux_debug);
#define GST_CAT_DEFAULT uvc_h264_mjpg_demux_debug
typedef struct
{
guint32 dev_stc;
guint32 dev_sof;
GstClockTime host_ts;
guint32 host_sof;
} GstUvcH264ClockSample;
struct _GstUvcH264MjpgDemuxPrivate
{
int device_fd;
int num_clock_samples;
GstUvcH264ClockSample *clock_samples;
int last_sample;
int num_samples;
GstPad *sink_pad;
GstPad *jpeg_pad;
GstPad *h264_pad;
GstPad *yuy2_pad;
GstPad *nv12_pad;
GstCaps *h264_caps;
GstCaps *yuy2_caps;
GstCaps *nv12_caps;
guint16 h264_width;
guint16 h264_height;
guint16 yuy2_width;
guint16 yuy2_height;
guint16 nv12_width;
guint16 nv12_height;
};
typedef struct
{
guint16 version;
guint16 header_len;
guint32 type;
guint16 width;
guint16 height;
guint32 frame_interval;
guint16 delay;
guint32 pts;
} __attribute__ ((packed)) AuxiliaryStreamHeader;
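/* Sketch of the APP4 segment layout as parsed by the chain function below
 * (derived from the parsing code, not a normative definition):
 *   0xFF 0xE4              APP4 marker
 *   guint16 (big endian)   segment size, including these two size bytes
 *   AuxiliaryStreamHeader  only in the first APP4 segment of an aux stream
 *   guint32 (little end.)  total auxiliary payload size, first segment only
 *   payload bytes          continued in later APP4 segments until the
 *                          announced payload size has been collected */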
static void gst_uvc_h264_mjpg_demux_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_uvc_h264_mjpg_demux_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
static void gst_uvc_h264_mjpg_demux_dispose (GObject * object);
static GstFlowReturn gst_uvc_h264_mjpg_demux_chain (GstPad * pad,
GstBuffer * buffer);
static gboolean gst_uvc_h264_mjpg_demux_sink_setcaps (GstPad * pad,
GstCaps * caps);
static GstCaps *gst_uvc_h264_mjpg_demux_getcaps (GstPad * pad);
#define _do_init(x) \
GST_DEBUG_CATEGORY_INIT (uvc_h264_mjpg_demux_debug, \
"uvch264_mjpgdemux", 0, "UVC H264 MJPG Demuxer");
GST_BOILERPLATE_FULL (GstUvcH264MjpgDemux, gst_uvc_h264_mjpg_demux, GstElement,
GST_TYPE_ELEMENT, _do_init);
static void
gst_uvc_h264_mjpg_demux_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
GstPadTemplate *pt;
/* do not use gst_element_class_add_static_pad_template to stay compatible
* with gstreamer 0.10.35 */
pt = gst_static_pad_template_get (&mjpgsink_pad_template);
gst_element_class_add_pad_template (element_class, pt);
gst_object_unref (pt);
pt = gst_static_pad_template_get (&jpegsrc_pad_template);
gst_element_class_add_pad_template (element_class, pt);
gst_object_unref (pt);
pt = gst_static_pad_template_get (&h264src_pad_template);
gst_element_class_add_pad_template (element_class, pt);
gst_object_unref (pt);
pt = gst_static_pad_template_get (&yuy2src_pad_template);
gst_element_class_add_pad_template (element_class, pt);
gst_object_unref (pt);
pt = gst_static_pad_template_get (&nv12src_pad_template);
gst_element_class_add_pad_template (element_class, pt);
gst_object_unref (pt);
gst_element_class_set_details_simple (element_class,
"UVC H264 MJPG Demuxer",
"Video/Demuxer",
"Demux UVC H264 auxiliary streams from MJPG images",
"Youness Alaoui <youness.alaoui@collabora.co.uk>");
}
static void
gst_uvc_h264_mjpg_demux_class_init (GstUvcH264MjpgDemuxClass * klass)
{
GObjectClass *gobject_class = (GObjectClass *) klass;
g_type_class_add_private (gobject_class, sizeof (GstUvcH264MjpgDemuxPrivate));
gobject_class->set_property = gst_uvc_h264_mjpg_demux_set_property;
gobject_class->get_property = gst_uvc_h264_mjpg_demux_get_property;
gobject_class->dispose = gst_uvc_h264_mjpg_demux_dispose;
g_object_class_install_property (gobject_class, PROP_DEVICE_FD,
g_param_spec_int ("device-fd", "device-fd",
"File descriptor of the v4l2 device",
-1, G_MAXINT, -1, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_NUM_CLOCK_SAMPLES,
g_param_spec_int ("num-clock-samples", "num-clock-samples",
"Number of clock samples to gather for the PTS synchronization"
" (-1 = unlimited)",
0, G_MAXINT, DEFAULT_NUM_CLOCK_SAMPLES,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
}
static void
gst_uvc_h264_mjpg_demux_init (GstUvcH264MjpgDemux * self,
GstUvcH264MjpgDemuxClass * g_class)
{
self->priv = G_TYPE_INSTANCE_GET_PRIVATE (self, GST_TYPE_UVC_H264_MJPG_DEMUX,
GstUvcH264MjpgDemuxPrivate);
self->priv->device_fd = -1;
/* create the sink and src pads */
self->priv->sink_pad =
gst_pad_new_from_static_template (&mjpgsink_pad_template, "sink");
gst_pad_set_chain_function (self->priv->sink_pad,
GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_chain));
gst_pad_set_setcaps_function (self->priv->sink_pad,
GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_sink_setcaps));
gst_pad_set_getcaps_function (self->priv->sink_pad,
GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_getcaps));
gst_element_add_pad (GST_ELEMENT (self), self->priv->sink_pad);
/* JPEG */
self->priv->jpeg_pad =
gst_pad_new_from_static_template (&jpegsrc_pad_template, "jpeg");
gst_pad_set_getcaps_function (self->priv->jpeg_pad,
GST_DEBUG_FUNCPTR (gst_uvc_h264_mjpg_demux_getcaps));
gst_element_add_pad (GST_ELEMENT (self), self->priv->jpeg_pad);
/* H264 */
self->priv->h264_pad =
gst_pad_new_from_static_template (&h264src_pad_template, "h264");
gst_pad_use_fixed_caps (self->priv->h264_pad);
gst_element_add_pad (GST_ELEMENT (self), self->priv->h264_pad);
/* YUY2 */
self->priv->yuy2_pad =
gst_pad_new_from_static_template (&yuy2src_pad_template, "yuy2");
gst_pad_use_fixed_caps (self->priv->yuy2_pad);
gst_element_add_pad (GST_ELEMENT (self), self->priv->yuy2_pad);
/* NV12 */
self->priv->nv12_pad =
gst_pad_new_from_static_template (&nv12src_pad_template, "nv12");
gst_pad_use_fixed_caps (self->priv->nv12_pad);
gst_element_add_pad (GST_ELEMENT (self), self->priv->nv12_pad);
self->priv->h264_caps = gst_caps_new_simple ("video/x-h264", NULL);
self->priv->yuy2_caps = gst_caps_new_simple ("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'), NULL);
self->priv->nv12_caps = gst_caps_new_simple ("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('N', 'V', '1', '2'), NULL);
self->priv->h264_width = self->priv->h264_height = 0;
self->priv->yuy2_width = self->priv->yuy2_height = 0;
self->priv->nv12_width = self->priv->nv12_height = 0;
}
static void
gst_uvc_h264_mjpg_demux_dispose (GObject * object)
{
GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (object);
if (self->priv->h264_caps)
gst_caps_unref (self->priv->h264_caps);
self->priv->h264_caps = NULL;
if (self->priv->yuy2_caps)
gst_caps_unref (self->priv->yuy2_caps);
self->priv->yuy2_caps = NULL;
if (self->priv->nv12_caps)
gst_caps_unref (self->priv->nv12_caps);
self->priv->nv12_caps = NULL;
if (self->priv->clock_samples)
g_free (self->priv->clock_samples);
self->priv->clock_samples = NULL;
G_OBJECT_CLASS (parent_class)->dispose (object);
}
static void
gst_uvc_h264_mjpg_demux_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
{
GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (object);
switch (prop_id) {
case PROP_DEVICE_FD:
self->priv->device_fd = g_value_get_int (value);
break;
case PROP_NUM_CLOCK_SAMPLES:
self->priv->num_clock_samples = g_value_get_int (value);
if (self->priv->clock_samples) {
if (self->priv->num_clock_samples) {
self->priv->clock_samples = g_realloc_n (self->priv->clock_samples,
self->priv->num_clock_samples, sizeof (GstUvcH264ClockSample));
if (self->priv->num_samples > self->priv->num_clock_samples) {
self->priv->num_samples = self->priv->num_clock_samples;
if (self->priv->last_sample >= self->priv->num_samples)
self->priv->last_sample = self->priv->num_samples - 1;
}
} else {
g_free (self->priv->clock_samples);
self->priv->clock_samples = NULL;
self->priv->last_sample = -1;
self->priv->num_samples = 0;
}
} else if (self->priv->num_clock_samples > 0) {
self->priv->clock_samples = g_malloc0_n (self->priv->num_clock_samples,
sizeof (GstUvcH264ClockSample));
self->priv->last_sample = -1;
self->priv->num_samples = 0;
}
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
break;
}
}
static void
gst_uvc_h264_mjpg_demux_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
{
GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (object);
switch (prop_id) {
case PROP_DEVICE_FD:
g_value_set_int (value, self->priv->device_fd);
break;
case PROP_NUM_CLOCK_SAMPLES:
g_value_set_int (value, self->priv->num_clock_samples);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (self, prop_id, pspec);
break;
}
}
static gboolean
gst_uvc_h264_mjpg_demux_sink_setcaps (GstPad * pad, GstCaps * caps)
{
GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (GST_OBJECT_PARENT (pad));
return gst_pad_set_caps (self->priv->jpeg_pad, caps);
}
static GstCaps *
gst_uvc_h264_mjpg_demux_getcaps (GstPad * pad)
{
GstUvcH264MjpgDemux *self = GST_UVC_H264_MJPG_DEMUX (GST_OBJECT_PARENT (pad));
GstCaps *result = NULL;
if (pad == self->priv->jpeg_pad)
result = gst_pad_peer_get_caps (self->priv->sink_pad);
else if (pad == self->priv->sink_pad)
result = gst_pad_peer_get_caps (self->priv->jpeg_pad);
/* TODO: intersect with template and fixate caps */
if (result == NULL)
result = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
return result;
}
static gboolean
_pts_to_timestamp (GstUvcH264MjpgDemux * self, GstBuffer * buf, guint32 pts)
{
GstUvcH264MjpgDemuxPrivate *priv = self->priv;
GstUvcH264ClockSample *current_sample = NULL;
GstUvcH264ClockSample *oldest_sample = NULL;
guint32 next_sample;
struct uvc_last_scr_sample sample;
guint32 dev_sof;
if (self->priv->device_fd == -1 || priv->clock_samples == NULL)
return FALSE;
if (-1 == ioctl (priv->device_fd, UVCIOC_GET_LAST_SCR, &sample)) {
//GST_WARNING_OBJECT (self, " GET_LAST_SCR error");
return FALSE;
}
dev_sof = (guint32) (sample.dev_sof + 2048) << 16;
if (priv->num_samples > 0 &&
priv->clock_samples[priv->last_sample].dev_sof == dev_sof) {
current_sample = &priv->clock_samples[priv->last_sample];
} else {
next_sample = (priv->last_sample + 1) % priv->num_clock_samples;
current_sample = &priv->clock_samples[next_sample];
current_sample->dev_stc = sample.dev_stc;
current_sample->dev_sof = dev_sof;
current_sample->host_ts = sample.host_ts.tv_sec * GST_SECOND +
sample.host_ts.tv_nsec * GST_NSECOND;
current_sample->host_sof = (guint32) (sample.host_sof + 2048) << 16;
priv->num_samples++;
priv->last_sample = next_sample;
/* Debug printing */
GST_DEBUG_OBJECT (self, "device frequency: %u", sample.dev_frequency);
GST_DEBUG_OBJECT (self, "dev_sof: %u", sample.dev_sof);
GST_DEBUG_OBJECT (self, "dev_stc: %u", sample.dev_stc);
GST_DEBUG_OBJECT (self, "host_ts: %" G_GUINT64_FORMAT " -- %" GST_TIME_FORMAT,
current_sample->host_ts, GST_TIME_ARGS (current_sample->host_ts));
GST_DEBUG_OBJECT (self, "host_sof: %u", sample.host_sof);
GST_DEBUG_OBJECT (self, "PTS: %u", pts);
GST_DEBUG_OBJECT (self, "Diff: %u - %f\n", sample.dev_stc - pts,
(gdouble) (sample.dev_stc - pts) / sample.dev_frequency);
}
if (priv->num_samples < priv->num_clock_samples)
return FALSE;
next_sample = (priv->last_sample + 1) % priv->num_clock_samples;
oldest_sample = &priv->clock_samples[next_sample];
/* TODO: Use current_sample and oldest_sample to do the
* double linear regression and calculate a new PTS */
(void) oldest_sample;
return TRUE;
}
static GstFlowReturn
gst_uvc_h264_mjpg_demux_chain (GstPad * pad, GstBuffer * buf)
{
GstUvcH264MjpgDemux *self;
GstFlowReturn ret = GST_FLOW_OK;
GstBufferList *jpeg_buf = gst_buffer_list_new ();
GstBufferListIterator *jpeg_it = gst_buffer_list_iterate (jpeg_buf);
GstBufferList *aux_buf = NULL;
GstBufferListIterator *aux_it = NULL;
AuxiliaryStreamHeader aux_header = { 0 };
GstBuffer *sub_buffer = NULL;
guint32 aux_size = 0;
GstPad *aux_pad = NULL;
GstCaps **aux_caps = NULL;
guint last_offset;
guint i;
guchar *data;
guint size;
self = GST_UVC_H264_MJPG_DEMUX (GST_PAD_PARENT (pad));
last_offset = 0;
data = GST_BUFFER_DATA (buf);
size = GST_BUFFER_SIZE (buf);
if (data == NULL || size == 0) {
ret = gst_pad_push (self->priv->jpeg_pad, buf);
goto done;
}
gst_buffer_list_iterator_add_group (jpeg_it);
for (i = 0; i < size - 1; i++) {
/* Check for APP4 (0xe4) marker in the jpeg */
if (data[i] == 0xff && data[i + 1] == 0xe4) {
guint16 segment_size;
/* Sanity check sizes and get segment size */
if (i + 4 >= size) {
GST_ELEMENT_ERROR (self, STREAM, DEMUX,
("Not enough data to read marker size"), (NULL));
ret = GST_FLOW_ERROR;
goto done;
}
segment_size = GUINT16_FROM_BE (*((guint16 *) (data + i + 2)));
if (i + segment_size + 2 >= size) {
GST_ELEMENT_ERROR (self, STREAM, DEMUX,
("Not enough data to read marker content"), (NULL));
ret = GST_FLOW_ERROR;
goto done;
}
GST_DEBUG_OBJECT (self,
"Found APP4 marker (%d). JPG: %d-%d - APP4: %d - %d", segment_size,
last_offset, i, i, i + 2 + segment_size);
/* Add JPEG data between the last offset and this marker */
if (i - last_offset > 0) {
sub_buffer = gst_buffer_create_sub (buf, last_offset, i - last_offset);
gst_buffer_copy_metadata (sub_buffer, buf, GST_BUFFER_COPY_ALL);
gst_buffer_list_iterator_add (jpeg_it, sub_buffer);
}
last_offset = i + 2 + segment_size;
/* Reset i/segment size to the app4 data (ignore marker header/size) */
i += 4;
segment_size -= 2;
/* If this is a new auxiliary stream, initialize everything properly */
if (aux_buf == NULL) {
if (segment_size < sizeof (aux_header) + sizeof (aux_size)) {
GST_ELEMENT_ERROR (self, STREAM, DEMUX,
("Not enough data to read aux header"), (NULL));
ret = GST_FLOW_ERROR;
goto done;
}
aux_header = *((AuxiliaryStreamHeader *) (data + i));
/* version should be little endian but it looks more like BE */
aux_header.version = GUINT16_FROM_BE (aux_header.version);
aux_header.header_len = GUINT16_FROM_LE (aux_header.header_len);
aux_header.width = GUINT16_FROM_LE (aux_header.width);
aux_header.height = GUINT16_FROM_LE (aux_header.height);
aux_header.frame_interval = GUINT32_FROM_LE (aux_header.frame_interval);
aux_header.delay = GUINT16_FROM_LE (aux_header.delay);
aux_header.pts = GUINT32_FROM_LE (aux_header.pts);
GST_DEBUG_OBJECT (self, "New auxiliary stream : v%d - %d bytes - %"
GST_FOURCC_FORMAT " %dx%d -- %d *100ns -- %d ms -- %d",
aux_header.version, aux_header.header_len,
GST_FOURCC_ARGS (aux_header.type),
aux_header.width, aux_header.height,
aux_header.frame_interval, aux_header.delay, aux_header.pts);
aux_size = *((guint32 *) (data + i + aux_header.header_len));
GST_DEBUG_OBJECT (self, "Auxiliary stream size : %d bytes", aux_size);
if (aux_size > 0) {
guint16 *width = NULL;
guint16 *height = NULL;
/* Find the auxiliary stream's pad and caps */
switch (aux_header.type) {
case GST_MAKE_FOURCC ('H', '2', '6', '4'):
aux_pad = self->priv->h264_pad;
aux_caps = &self->priv->h264_caps;
width = &self->priv->h264_width;
height = &self->priv->h264_height;
break;
case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
aux_pad = self->priv->yuy2_pad;
aux_caps = &self->priv->yuy2_caps;
width = &self->priv->yuy2_width;
height = &self->priv->yuy2_height;
break;
case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
aux_pad = self->priv->nv12_pad;
aux_caps = &self->priv->nv12_caps;
width = &self->priv->nv12_width;
height = &self->priv->nv12_height;
break;
default:
GST_ELEMENT_ERROR (self, STREAM, DEMUX,
("Unknown auxiliary stream format : %" GST_FOURCC_FORMAT,
GST_FOURCC_ARGS (aux_header.type)), (NULL));
ret = GST_FLOW_ERROR;
break;
}
if (ret != GST_FLOW_OK)
goto done;
if (*width != aux_header.width || *height != aux_header.height) {
GstCaps *peercaps = gst_pad_peer_get_caps (aux_pad);
GstStructure *s = NULL;
gint fps_num = 1000000000 / aux_header.frame_interval;
gint fps_den = 100;
/* TODO: intersect with pad template */
GST_DEBUG ("peercaps : %" GST_PTR_FORMAT, peercaps);
if (peercaps && !gst_caps_is_any (peercaps))
s = gst_caps_get_structure (peercaps, 0);
if (s) {
/* TODO: make sure it contains the right format/width/height */
gst_structure_fixate_field_nearest_fraction (s, "framerate",
fps_num, fps_den);
GST_DEBUG ("Fixated struct : %" GST_PTR_FORMAT, s);
gst_structure_get_fraction (s, "framerate", &fps_num, &fps_den);
}
if (peercaps)
gst_caps_unref (peercaps);
*width = aux_header.width;
*height = aux_header.height;
*aux_caps = gst_caps_make_writable (*aux_caps);
/* FIXME: the framerate must be allowed by the negotiated caps and should
represent the actual rate of our first buffer */
gst_caps_set_simple (*aux_caps,
"width", G_TYPE_INT, aux_header.width,
"height", G_TYPE_INT, aux_header.height,
"framerate", GST_TYPE_FRACTION, fps_num, fps_den, NULL);
if (!gst_pad_set_caps (aux_pad, *aux_caps)) {
ret = GST_FLOW_NOT_NEGOTIATED;
goto done;
}
}
/* Create new auxiliary buffer list and adjust i/segment size */
aux_buf = gst_buffer_list_new ();
aux_it = gst_buffer_list_iterate (aux_buf);
gst_buffer_list_iterator_add_group (aux_it);
}
i += sizeof (aux_header) + sizeof (aux_size);
segment_size -= sizeof (aux_header) + sizeof (aux_size);
}
if (segment_size > aux_size) {
GST_ELEMENT_ERROR (self, STREAM, DEMUX,
("Expected %d auxiliary data, got %d bytes", aux_size,
segment_size), (NULL));
ret = GST_FLOW_ERROR;
goto done;
}
if (segment_size > 0) {
sub_buffer = gst_buffer_create_sub (buf, i, segment_size);
GST_BUFFER_DURATION (sub_buffer) =
aux_header.frame_interval * 100 * GST_NSECOND;
gst_buffer_copy_metadata (sub_buffer, buf, GST_BUFFER_COPY_TIMESTAMPS);
gst_buffer_set_caps (sub_buffer, *aux_caps);
_pts_to_timestamp (self, sub_buffer, aux_header.pts);
gst_buffer_list_iterator_add (aux_it, sub_buffer);
aux_size -= segment_size;
/* Push completed aux data */
if (aux_size == 0) {
gst_buffer_list_iterator_free (aux_it);
aux_it = NULL;
GST_DEBUG_OBJECT (self, "Pushing %" GST_FOURCC_FORMAT
" auxiliary buffer %" GST_PTR_FORMAT,
GST_FOURCC_ARGS (aux_header.type), *aux_caps);
ret = gst_pad_push_list (aux_pad, aux_buf);
aux_buf = NULL;
if (ret != GST_FLOW_OK) {
GST_WARNING_OBJECT (self, "Error pushing %" GST_FOURCC_FORMAT
" auxiliary data", GST_FOURCC_ARGS (aux_header.type));
goto done;
}
}
}
i += segment_size - 1;
} else if (data[i] == 0xff && data[i + 1] == 0xda) {
/* The APP4 markers must be before the SOS marker, so this is the end */
GST_DEBUG_OBJECT (self, "Found SOS marker.");
sub_buffer = gst_buffer_create_sub (buf, last_offset, size - last_offset);
gst_buffer_copy_metadata (sub_buffer, buf, GST_BUFFER_COPY_ALL);
gst_buffer_list_iterator_add (jpeg_it, sub_buffer);
last_offset = size;
break;
}
}
gst_buffer_list_iterator_free (jpeg_it);
jpeg_it = NULL;
if (aux_buf != NULL) {
GST_ELEMENT_ERROR (self, STREAM, DEMUX,
("Incomplete auxiliary stream. %d bytes missing", aux_size), (NULL));
ret = GST_FLOW_ERROR;
goto done;
}
if (last_offset != size) {
/* this means there was no SOS marker in the jpg, so we assume the JPG was
just a container */
GST_DEBUG_OBJECT (self, "SOS marker wasn't found. MJPG is container only");
gst_buffer_list_unref (jpeg_buf);
jpeg_buf = NULL;
} else {
ret = gst_pad_push_list (self->priv->jpeg_pad, jpeg_buf);
jpeg_buf = NULL;
}
if (ret != GST_FLOW_OK) {
GST_WARNING_OBJECT (self, "Error pushing jpeg data");
goto done;
}
done:
/* In case of error, unref whatever was left */
if (aux_it)
gst_buffer_list_iterator_free (aux_it);
if (aux_buf)
gst_buffer_list_unref (aux_buf);
if (jpeg_it)
gst_buffer_list_iterator_free (jpeg_it);
if (jpeg_buf)
gst_buffer_list_unref (jpeg_buf);
/* We must always unref the input buffer since we never push it out */
gst_buffer_unref (buf);
return ret;
}

View file

@ -0,0 +1,66 @@
/* GStreamer
*
* uvch264_mjpg_demux: a demuxer for the streams muxed into UVC H264 compliant MJPG
*
* Copyright (C) 2012 Cisco Systems, Inc.
* Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_UVC_H264_MJPG_DEMUX_H__
#define __GST_UVC_H264_MJPG_DEMUX_H__
#include <gst/gst.h>
G_BEGIN_DECLS
#define GST_TYPE_UVC_H264_MJPG_DEMUX \
(gst_uvc_h264_mjpg_demux_get_type())
#define GST_UVC_H264_MJPG_DEMUX(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj), \
GST_TYPE_UVC_H264_MJPG_DEMUX, \
GstUvcH264MjpgDemux))
#define GST_UVC_H264_MJPG_DEMUX_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass), \
GST_TYPE_UVC_H264_MJPG_DEMUX, \
GstUvcH264MjpgDemuxClass))
#define GST_IS_UVC_H264_MJPG_DEMUX(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj), \
GST_TYPE_UVC_H264_MJPG_DEMUX))
#define GST_IS_UVC_H264_MJPG_DEMUX_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass), \
GST_TYPE_UVC_H264_MJPG_DEMUX))
typedef struct _GstUvcH264MjpgDemux GstUvcH264MjpgDemux;
typedef struct _GstUvcH264MjpgDemuxPrivate GstUvcH264MjpgDemuxPrivate;
typedef struct _GstUvcH264MjpgDemuxClass GstUvcH264MjpgDemuxClass;
struct _GstUvcH264MjpgDemux {
GstElement element;
GstUvcH264MjpgDemuxPrivate *priv;
};
struct _GstUvcH264MjpgDemuxClass {
GstElementClass parent_class;
};
GType gst_uvc_h264_mjpg_demux_get_type (void);
G_END_DECLS
#endif /* __GST_UVC_H264_MJPG_DEMUX_H__ */

3180
sys/uvch264/gstuvch264_src.c Normal file

File diff suppressed because it is too large

View file

@ -0,0 +1,166 @@
/*
* GStreamer
*
* Copyright (C) 2012 Cisco Systems, Inc.
* Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_UVC_H264_SRC_H__
#define __GST_UVC_H264_SRC_H__
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
#include <gst/gst.h>
#include <gst/basecamerabinsrc/gstbasecamerasrc.h>
#if defined (HAVE_GUDEV) && defined (HAVE_LIBUSB)
#include <libusb.h>
#endif
#include "uvc_h264.h"
G_BEGIN_DECLS
#define GST_TYPE_UVC_H264_SRC \
(gst_uvc_h264_src_get_type())
#define GST_UVC_H264_SRC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj), GST_TYPE_UVC_H264_SRC, GstUvcH264Src))
#define GST_UVC_H264_SRC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass), GST_TYPE_UVC_H264_SRC, GstUvcH264SrcClass))
#define GST_IS_UVC_H264_SRC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj), GST_TYPE_UVC_H264_SRC))
#define GST_IS_UVC_H264_SRC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass), GST_TYPE_UVC_H264_SRC))
GType gst_uvc_h264_src_get_type (void);
typedef struct _GstUvcH264Src GstUvcH264Src;
typedef struct _GstUvcH264SrcClass GstUvcH264SrcClass;
enum GstVideoRecordingStatus {
GST_VIDEO_RECORDING_STATUS_DONE,
GST_VIDEO_RECORDING_STATUS_STARTING,
GST_VIDEO_RECORDING_STATUS_RUNNING,
GST_VIDEO_RECORDING_STATUS_FINISHING
};
enum {
QP_I_FRAME = 0,
QP_P_FRAME,
QP_B_FRAME,
QP_FRAMES
};
typedef enum {
UVC_H264_SRC_FORMAT_NONE,
UVC_H264_SRC_FORMAT_JPG,
UVC_H264_SRC_FORMAT_H264,
UVC_H264_SRC_FORMAT_RAW
} GstUvcH264SrcFormat;
/**
* GstUvcH264Src:
*
*/
struct _GstUvcH264Src
{
GstBaseCameraSrc parent;
GstPad *vfsrc;
GstPad *imgsrc;
GstPad *vidsrc;
/* source elements */
GstElement *v4l2_src;
GstElement *mjpg_demux;
GstElement *jpeg_dec;
GstElement *vid_colorspace;
GstElement *vf_colorspace;
GstUvcH264SrcFormat main_format;
guint16 main_width;
guint16 main_height;
guint32 main_frame_interval;
UvcH264StreamFormat main_stream_format;
guint16 main_profile;
GstUvcH264SrcFormat secondary_format;
guint16 secondary_width;
guint16 secondary_height;
guint32 secondary_frame_interval;
int v4l2_fd;
guint8 h264_unit_id;
#if defined (HAVE_GUDEV) && defined (HAVE_LIBUSB)
libusb_context *usb_ctx;
#endif
GstPadEventFunction srcpad_event_func;
GstEvent *key_unit_event;
GstSegment segment;
gboolean started;
/* When restarting the source */
gboolean reconfiguring;
gboolean vid_newseg;
gboolean vf_newseg;
gchar *colorspace_name;
gchar *jpeg_decoder_name;
int num_clock_samples;
/* v4l2src proxied properties */
guint32 num_buffers;
gchar *device;
/* Static controls */
guint32 initial_bitrate;
guint16 slice_units;
UvcH264SliceMode slice_mode;
guint16 iframe_period;
UvcH264UsageType usage_type;
UvcH264Entropy entropy;
gboolean enable_sei;
guint8 num_reorder_frames;
gboolean preview_flipped;
guint16 leaky_bucket_size;
/* Dynamic controls */
UvcH264RateControl rate_control;
gboolean fixed_framerate;
guint8 level_idc;
guint32 peak_bitrate;
guint32 average_bitrate;
gint8 min_qp[QP_FRAMES];
gint8 max_qp[QP_FRAMES];
guint8 ltr_buffer_size;
guint8 ltr_encoder_control;
};
/**
* GstUvcH264SrcClass:
*
*/
struct _GstUvcH264SrcClass
{
GstBaseCameraSrcClass parent;
};
G_END_DECLS
#endif /* __GST_UVC_H264_SRC_H__ */

122
sys/uvch264/uvc_h264.c Normal file
View file

@ -0,0 +1,122 @@
/*
* GStreamer
*
* Copyright (C) 2012 Cisco Systems, Inc.
* Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
#include "uvc_h264.h"
GType
uvc_h264_slicemode_get_type (void)
{
static GType type = 0;
static const GEnumValue types[] = {
{UVC_H264_SLICEMODE_IGNORED, "Ignored", "ignored"},
{UVC_H264_SLICEMODE_BITSPERSLICE, "Bits per slice", "bits/slice"},
{UVC_H264_SLICEMODE_MBSPERSLICE, "MBs per Slice", "MBs/slice"},
{UVC_H264_SLICEMODE_SLICEPERFRAME, "Slice Per Frame", "slice/frame"},
{0, NULL, NULL}
};
if (!type) {
type = g_enum_register_static ("UvcH264SliceMode", types);
}
return type;
}
GType
uvc_h264_usagetype_get_type (void)
{
static GType type = 0;
static const GEnumValue types[] = {
{UVC_H264_USAGETYPE_REALTIME, "Realtime (video conferencing)", "realtime"},
{UVC_H264_USAGETYPE_BROADCAST, "Broadcast", "broadcast"},
{UVC_H264_USAGETYPE_STORAGE, "Storage", "storage"},
{UVC_H264_USAGETYPE_UCCONFIG_0, "UCConfig 0", "ucconfig0"},
{UVC_H264_USAGETYPE_UCCONFIG_1, "UCConfig 1", "ucconfig1"},
{UVC_H264_USAGETYPE_UCCONFIG_2Q, "UCConfig 2Q", "ucconfig2q"},
{UVC_H264_USAGETYPE_UCCONFIG_2S, "UCConfig 2S", "ucconfig2s"},
{UVC_H264_USAGETYPE_UCCONFIG_3, "UCConfig 3", "ucconfig3"},
{0, NULL, NULL}
};
if (!type) {
type = g_enum_register_static ("UvcH264UsageType", types);
}
return type;
}
GType
uvc_h264_ratecontrol_get_type (void)
{
static GType type = 0;
static const GEnumValue types[] = {
{UVC_H264_RATECONTROL_CBR, "Constant bit rate", "cbr"},
{UVC_H264_RATECONTROL_VBR, "Variable bit rate", "vbr"},
{UVC_H264_RATECONTROL_CONST_QP, "Constant QP", "qp"},
{0, NULL, NULL}
};
if (!type) {
type = g_enum_register_static ("UvcH264RateControl", types);
}
return type;
}
GType
uvc_h264_streamformat_get_type (void)
{
static GType type = 0;
static const GEnumValue types[] = {
{UVC_H264_STREAMFORMAT_ANNEXB, "Byte stream format (Annex B)", "byte"},
{UVC_H264_STREAMFORMAT_NAL, "NAL stream format", "nal"},
{0, NULL, NULL}
};
if (!type) {
type = g_enum_register_static ("UvcH264StreamFormat", types);
}
return type;
}
GType
uvc_h264_entropy_get_type (void)
{
static GType type = 0;
static const GEnumValue types[] = {
{UVC_H264_ENTROPY_CAVLC, "CAVLC", "cavlc"},
{UVC_H264_ENTROPY_CABAC, "CABAC", "cabac"},
{0, NULL, NULL}
};
if (!type) {
type = g_enum_register_static ("UvcH264Entropy", types);
}
return type;
}

335
sys/uvch264/uvc_h264.h Normal file
View file

@ -0,0 +1,335 @@
/*
* uvc_h264.h - Definitions of the UVC H.264 Payload specification Version 1.0
*
* Copyright (c) 2011 USB Implementers Forum, Inc.
*
* Modification into glib-like header by:
* Copyright (C) 2012 Cisco Systems, Inc.
* Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
*
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#ifndef _UVC_H264_H_
#define _UVC_H264_H_
/* Header File for the little-endian platform */
#include <glib.h>
#include <glib-object.h>
/* bmHints defines */
#define UVC_H264_BMHINTS_RESOLUTION (0x0001)
#define UVC_H264_BMHINTS_PROFILE (0x0002)
#define UVC_H264_BMHINTS_RATECONTROL (0x0004)
#define UVC_H264_BMHINTS_USAGE (0x0008)
#define UVC_H264_BMHINTS_SLICEMODE (0x0010)
#define UVC_H264_BMHINTS_SLICEUNITS (0x0020)
#define UVC_H264_BMHINTS_MVCVIEW (0x0040)
#define UVC_H264_BMHINTS_TEMPORAL (0x0080)
#define UVC_H264_BMHINTS_SNR (0x0100)
#define UVC_H264_BMHINTS_SPATIAL (0x0200)
#define UVC_H264_BMHINTS_SPATIAL_RATIO (0x0400)
#define UVC_H264_BMHINTS_FRAME_INTERVAL (0x0800)
#define UVC_H264_BMHINTS_LEAKY_BKT_SIZE (0x1000)
#define UVC_H264_BMHINTS_BITRATE (0x2000)
#define UVC_H264_BMHINTS_ENTROPY (0x4000)
#define UVC_H264_BMHINTS_IFRAMEPERIOD (0x8000)
#define UVC_H264_QP_STEPS_I_FRAME_TYPE (0x01)
#define UVC_H264_QP_STEPS_P_FRAME_TYPE (0x02)
#define UVC_H264_QP_STEPS_B_FRAME_TYPE (0x04)
#define UVC_H264_QP_STEPS_ALL_FRAME_TYPES (UVC_H264_QP_STEPS_I_FRAME_TYPE | \
UVC_H264_QP_STEPS_P_FRAME_TYPE | UVC_H264_QP_STEPS_B_FRAME_TYPE)
/* wSliceMode defines */
typedef enum
{
UVC_H264_SLICEMODE_IGNORED = 0x0000,
UVC_H264_SLICEMODE_BITSPERSLICE = 0x0001,
UVC_H264_SLICEMODE_MBSPERSLICE = 0x0002,
UVC_H264_SLICEMODE_SLICEPERFRAME = 0x0003
} UvcH264SliceMode;
#define UVC_H264_SLICEMODE_TYPE (uvc_h264_slicemode_get_type())
GType uvc_h264_slicemode_get_type (void);
/* bUsageType defines */
typedef enum {
UVC_H264_USAGETYPE_REALTIME = 0x01,
UVC_H264_USAGETYPE_BROADCAST = 0x02,
UVC_H264_USAGETYPE_STORAGE = 0x03,
UVC_H264_USAGETYPE_UCCONFIG_0 = 0x04,
UVC_H264_USAGETYPE_UCCONFIG_1 = 0x05,
UVC_H264_USAGETYPE_UCCONFIG_2Q = 0x06,
UVC_H264_USAGETYPE_UCCONFIG_2S = 0x07,
UVC_H264_USAGETYPE_UCCONFIG_3 = 0x08,
} UvcH264UsageType;
#define UVC_H264_USAGETYPE_TYPE (uvc_h264_usagetype_get_type())
GType uvc_h264_usagetype_get_type (void);
/* bRateControlMode defines */
typedef enum {
UVC_H264_RATECONTROL_CBR = 0x01,
UVC_H264_RATECONTROL_VBR = 0x02,
UVC_H264_RATECONTROL_CONST_QP = 0x03,
} UvcH264RateControl;
#define UVC_H264_RATECONTROL_FIXED_FRM_FLG (0x10)
#define UVC_H264_RATECONTROL_TYPE (uvc_h264_ratecontrol_get_type())
GType uvc_h264_ratecontrol_get_type (void);
/* bStreamFormat defines */
typedef enum {
UVC_H264_STREAMFORMAT_ANNEXB = 0x00,
UVC_H264_STREAMFORMAT_NAL = 0x01,
} UvcH264StreamFormat;
#define UVC_H264_STREAMFORMAT_TYPE (uvc_h264_streamformat_get_type())
GType uvc_h264_streamformat_get_type (void);
/* bEntropyCABAC defines */
typedef enum {
UVC_H264_ENTROPY_CAVLC = 0x00,
UVC_H264_ENTROPY_CABAC = 0x01,
} UvcH264Entropy;
#define UVC_H264_ENTROPY_TYPE (uvc_h264_entropy_get_type())
GType uvc_h264_entropy_get_type (void);
/* bProfile defines */
#define UVC_H264_PROFILE_CONSTRAINED_BASELINE 0x4240
#define UVC_H264_PROFILE_BASELINE 0x4200
#define UVC_H264_PROFILE_MAIN 0x4D00
#define UVC_H264_PROFILE_HIGH 0x6400
/* bTimestamp defines */
#define UVC_H264_TIMESTAMP_SEI_DISABLE (0x00)
#define UVC_H264_TIMESTAMP_SEI_ENABLE (0x01)
/* bPreviewFlipped defines */
#define UVC_H264_PREFLIPPED_DISABLE (0x00)
#define UVC_H264_PREFLIPPED_HORIZONTAL (0x01)
/* wPicType defines */
#define UVC_H264_PICTYPE_I_FRAME (0x00)
#define UVC_H264_PICTYPE_IDR (0x01)
#define UVC_H264_PICTYPE_IDR_WITH_PPS_SPS (0x02)
/* wLayerID Macro */
/* wLayerID
|------------+------------+------------+----------------+------------|
| Reserved | StreamID | QualityID | DependencyID | TemporalID |
| (3 bits) | (3 bits) | (3 bits) | (4 bits) | (3 bits) |
|------------+------------+------------+----------------+------------|
|15 13|12 10|9 7|6 3|2 0|
|------------+------------+------------+----------------+------------|
*/
#define xLayerID(stream_id, quality_id, dependency_id, temporal_id) \
((((stream_id) & 7) << 10) | \
(((quality_id) & 7) << 7) | \
(((dependency_id) & 15) << 3) | \
((temporal_id) & 7))
/* id extraction from wLayerID */
#define xStream_id(layer_id) (((layer_id) >> 10) & 7)
#define xQuality_id(layer_id) (((layer_id) >> 7) & 7)
#define xDependency_id(layer_id) (((layer_id) >> 3) & 15)
#define xTemporal_id(layer_id) ((layer_id)&7)
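/* Illustrative example: xLayerID (1, 0, 0, 2) packs to
 * ((1 & 7) << 10) | ((0 & 7) << 7) | ((0 & 15) << 3) | (2 & 7) = 0x0402,
 * and conversely xStream_id (0x0402) == 1 and xTemporal_id (0x0402) == 2. */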
/* UVC H.264 control selectors */
typedef enum _uvcx_control_selector_t
{
UVCX_VIDEO_CONFIG_PROBE = 0x01,
UVCX_VIDEO_CONFIG_COMMIT = 0x02,
UVCX_RATE_CONTROL_MODE = 0x03,
UVCX_TEMPORAL_SCALE_MODE = 0x04,
UVCX_SPATIAL_SCALE_MODE = 0x05,
UVCX_SNR_SCALE_MODE = 0x06,
UVCX_LTR_BUFFER_SIZE_CONTROL = 0x07,
UVCX_LTR_PICTURE_CONTROL = 0x08,
UVCX_PICTURE_TYPE_CONTROL = 0x09,
UVCX_VERSION = 0x0A,
UVCX_ENCODER_RESET = 0x0B,
UVCX_FRAMERATE_CONFIG = 0x0C,
UVCX_VIDEO_ADVANCE_CONFIG = 0x0D,
UVCX_BITRATE_LAYERS = 0x0E,
UVCX_QP_STEPS_LAYERS = 0x0F,
} uvcx_control_selector_t;
typedef struct _uvcx_video_config_probe_commit_t
{
guint32 dwFrameInterval;
guint32 dwBitRate;
guint16 bmHints;
guint16 wConfigurationIndex;
guint16 wWidth;
guint16 wHeight;
guint16 wSliceUnits;
guint16 wSliceMode;
guint16 wProfile;
guint16 wIFramePeriod;
guint16 wEstimatedVideoDelay;
guint16 wEstimatedMaxConfigDelay;
guint8 bUsageType;
guint8 bRateControlMode;
guint8 bTemporalScaleMode;
guint8 bSpatialScaleMode;
guint8 bSNRScaleMode;
guint8 bStreamMuxOption;
guint8 bStreamFormat;
guint8 bEntropyCABAC;
guint8 bTimestamp;
guint8 bNumOfReorderFrames;
guint8 bPreviewFlipped;
guint8 bView;
guint8 bReserved1;
guint8 bReserved2;
guint8 bStreamID;
guint8 bSpatialLayerRatio;
guint16 wLeakyBucketSize;
} __attribute__((packed)) uvcx_video_config_probe_commit_t;
typedef struct _uvcx_rate_control_mode_t
{
guint16 wLayerID;
guint8 bRateControlMode;
} __attribute__((packed)) uvcx_rate_control_mode_t;
typedef struct _uvcx_temporal_scale_mode_t
{
guint16 wLayerID;
guint8 bTemporalScaleMode;
} __attribute__((packed)) uvcx_temporal_scale_mode_t;
typedef struct _uvcx_spatial_scale_mode_t
{
guint16 wLayerID;
guint8 bSpatialScaleMode;
} __attribute__((packed)) uvcx_spatial_scale_mode_t;
typedef struct _uvcx_snr_scale_mode_t
{
guint16 wLayerID;
guint8 bSNRScaleMode;
guint8 bMGSSublayerMode;
} __attribute__((packed)) uvcx_snr_scale_mode_t;
typedef struct _uvcx_ltr_buffer_size_control_t
{
guint16 wLayerID;
guint8 bLTRBufferSize;
guint8 bLTREncoderControl;
} __attribute__((packed)) uvcx_ltr_buffer_size_control_t;
typedef struct _uvcx_ltr_picture_control
{
guint16 wLayerID;
guint8 bPutAtPositionInLTRBuffer;
guint8 bEncodeUsingLTR;
} __attribute__((packed)) uvcx_ltr_picture_control;
typedef struct _uvcx_picture_type_control_t
{
guint16 wLayerID;
guint16 wPicType;
} __attribute__((packed)) uvcx_picture_type_control_t;
typedef struct _uvcx_version_t
{
guint16 wVersion;
} __attribute__((packed)) uvcx_version_t;
typedef struct _uvcx_encoder_reset
{
guint16 wLayerID;
} __attribute__((packed)) uvcx_encoder_reset;
typedef struct _uvcx_framerate_config_t
{
guint16 wLayerID;
guint32 dwFrameInterval;
} __attribute__((packed)) uvcx_framerate_config_t;
typedef struct _uvcx_video_advance_config_t
{
guint16 wLayerID;
guint32 dwMb_max;
guint8 blevel_idc;
guint8 bReserved;
} __attribute__((packed)) uvcx_video_advance_config_t;
typedef struct _uvcx_bitrate_layers_t
{
guint16 wLayerID;
guint32 dwPeakBitrate;
guint32 dwAverageBitrate;
} __attribute__((packed)) uvcx_bitrate_layers_t;
typedef struct _uvcx_qp_steps_layers_t
{
guint16 wLayerID;
guint8 bFrameType;
guint8 bMinQp;
guint8 bMaxQp;
} __attribute__((packed)) uvcx_qp_steps_layers_t;
#ifdef _WIN32
// GUID of the UVC H.264 extension unit: {A29E7641-DE04-47E3-8B2B-F4341AFF003B}
DEFINE_GUID(GUID_UVCX_H264_XU, 0xA29E7641, 0xDE04, 0x47E3, 0x8B, 0x2B, 0xF4, 0x34, 0x1A, 0xFF, 0x00, 0x3B);
#else
#define GUID_UVCX_H264_XU \
{0x41, 0x76, 0x9e, 0xa2, 0x04, 0xde, 0xe3, 0x47, 0x8b, 0x2b, 0xF4, 0x34, 0x1A, 0xFF, 0x00, 0x3B}
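/* Same GUID as the Windows definition above, serialized in the little-endian
 * byte order used by the guidExtensionCode field of the UVC extension unit
 * descriptor: the first three fields are byte-swapped, the last eight bytes
 * are kept verbatim. */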
#endif
#endif /*_UVC_H264_H_*/

View file

@ -156,6 +156,20 @@ else
check_curl =
endif
if USE_UVCH264
check_uvch264=elements/uvch264demux
else
check_uvch264=
endif
uvch264_dist_data = elements/uvch264demux_data/valid_h264_jpg.mjpg \
elements/uvch264demux_data/valid_h264_jpg.jpg \
elements/uvch264demux_data/valid_h264_jpg.h264 \
elements/uvch264demux_data/valid_h264_yuy2.mjpg \
elements/uvch264demux_data/valid_h264_yuy2.h264 \
elements/uvch264demux_data/valid_h264_yuy2.yuy2
VALGRIND_TO_FIX = \
elements/mpeg2enc \
elements/mplex \
@ -209,6 +223,7 @@ check_PROGRAMS = \
elements/rtpmux \
libs/mpegvideoparser \
libs/h264parser \
$(check_uvch264) \
libs/vc1parser \
$(check_schro) \
$(check_vp8) \
@ -332,8 +347,10 @@ elements_assrender_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstvideo-$(GST_API_VERSION
elements_mpegtsmux_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(AM_CFLAGS)
elements_mpegtsmux_LDADD = $(GST_PLUGINS_BASE_LIBS) -lgstvideo-$(GST_API_VERSION) $(GST_BASE_LIBS) $(LDADD)
elements_uvch264demux_CFLAGS = -DUVCH264DEMUX_DATADIR="$(srcdir)/elements/uvch264demux_data" \
$(AM_CFLAGS)
EXTRA_DIST = gst-plugins-bad.supp
EXTRA_DIST = gst-plugins-bad.supp $(uvch264_dist_data)
orc_bayer_CFLAGS = $(ORC_CFLAGS)
orc_bayer_LDADD = $(ORC_LIBS) -lorc-test-0.4

View file

@ -44,6 +44,7 @@ schroenc
spectrum
timidity
y4menc
uvch264demux
videorecordingbin
viewfinderbin
voaacenc

View file

@ -0,0 +1,696 @@
/* GStreamer
*
* unit test for uvch264_demux
*
* Copyright (C) <2012> Collabora Ltd.
* Author: Youness Alaoui <youness.alaoui@collabora.co.uk>
* Copyright (C) <2008> Sebastian Dröge <sebastian.droege@collabora.co.uk>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include <gst/check/gstcheck.h>
#include <string.h>
static GstElement *demux;
static GstPad *mjpg_pad, *h264_pad, *yuy2_pad, *nv12_pad, *jpg_pad;
static gboolean have_h264_eos, have_yuy2_eos, have_nv12_eos, have_jpg_eos;
static GstBuffer *buffer_h264, *buffer_yuy2, *buffer_nv12, *buffer_jpg;
static GError *gerror;
static gchar *error_debug;
static GstStaticPadTemplate mjpg_template =
GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
GST_STATIC_CAPS ("image/jpeg, width=640, height=480, framerate=15/1"));
static GstStaticPadTemplate sink_template =
GST_STATIC_PAD_TEMPLATE ("sink", GST_PAD_SINK, GST_PAD_ALWAYS,
GST_STATIC_CAPS_ANY);
#define STRINGIFY_(x) #x
#define STRINGIFY(x) STRINGIFY_ (x)
#define DATADIR STRINGIFY (UVCH264DEMUX_DATADIR)
#define VALID_H264_JPG_MJPG_FILENAME DATADIR "/valid_h264_jpg.mjpg"
#define VALID_H264_JPG_JPG_FILENAME DATADIR "/valid_h264_jpg.jpg"
#define VALID_H264_JPG_H264_FILENAME DATADIR "/valid_h264_jpg.h264"
#define VALID_H264_YUY2_MJPG_FILENAME DATADIR "/valid_h264_yuy2.mjpg"
#define VALID_H264_YUY2_YUY2_FILENAME DATADIR "/valid_h264_yuy2.yuy2"
#define VALID_H264_YUY2_H264_FILENAME DATADIR "/valid_h264_yuy2.h264"
#define _sink_chain_func(type) \
static GstFlowReturn \
_sink_##type##_chain (GstPad * pad, GstBuffer * buffer) \
{ \
fail_unless (GST_BUFFER_CAPS (buffer) != NULL); \
\
buffer_##type = buffer; \
\
return GST_FLOW_OK; \
}
#define _sink_event_func(type) \
static gboolean \
_sink_##type##_event (GstPad * pad, GstEvent * event) \
{ \
if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) \
have_##type##_eos = TRUE; \
\
gst_event_unref (event); \
\
return TRUE; \
}
_sink_chain_func (h264);
_sink_chain_func (yuy2);
_sink_chain_func (nv12);
_sink_chain_func (jpg);
_sink_event_func (h264);
_sink_event_func (yuy2);
_sink_event_func (nv12);
_sink_event_func (jpg);
static GstBusSyncReply
_bus_sync_handler (GstBus * bus, GstMessage * message, gpointer data)
{
if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ERROR) {
fail_unless (gerror == NULL && error_debug == NULL);
fail_unless (GST_MESSAGE_SRC (message) == GST_OBJECT (demux));
gst_message_parse_error (message, &gerror, &error_debug);
}
return GST_BUS_PASS;
}
static void
_teardown_test (void)
{
GstBus *bus;
gst_element_set_state (demux, GST_STATE_NULL);
bus = GST_ELEMENT_BUS (demux);
gst_bus_set_flushing (bus, TRUE);
gst_object_unref (bus);
gst_pad_set_active (mjpg_pad, FALSE);
gst_object_unref (mjpg_pad);
if (h264_pad) {
gst_pad_set_active (h264_pad, FALSE);
gst_object_unref (h264_pad);
}
if (yuy2_pad) {
gst_pad_set_active (yuy2_pad, FALSE);
gst_object_unref (yuy2_pad);
}
if (nv12_pad) {
gst_pad_set_active (nv12_pad, FALSE);
gst_object_unref (nv12_pad);
}
if (jpg_pad) {
gst_pad_set_active (jpg_pad, FALSE);
gst_object_unref (jpg_pad);
}
if (gerror) {
g_error_free (gerror);
gerror = NULL;
}
if (error_debug) {
g_free (error_debug);
error_debug = NULL;
}
gst_object_unref (demux);
mjpg_pad = h264_pad = yuy2_pad = nv12_pad = jpg_pad = NULL;
demux = NULL;
}
static void
_setup_test (gboolean link_h264, gboolean link_yuy2, gboolean link_nv12,
gboolean link_jpg)
{
GstBus *bus = gst_bus_new ();
GstPad *sinkpad, *h264pad, *yuy2pad, *nv12pad, *jpgpad;
have_h264_eos = have_yuy2_eos = have_nv12_eos = have_jpg_eos = FALSE;
buffer_h264 = buffer_yuy2 = buffer_nv12 = buffer_jpg = NULL;
demux = gst_element_factory_make ("uvch264_mjpgdemux", NULL);
fail_unless (demux != NULL);
gst_element_set_bus (demux, bus);
gst_bus_set_sync_handler (bus, _bus_sync_handler, NULL);
mjpg_pad = gst_pad_new_from_static_template (&mjpg_template, "src");
fail_unless (mjpg_pad != NULL);
sinkpad = gst_element_get_static_pad (demux, "sink");
fail_unless (sinkpad != NULL);
fail_unless (gst_pad_link (mjpg_pad, sinkpad) == GST_PAD_LINK_OK);
gst_object_unref (sinkpad);
gst_pad_set_active (mjpg_pad, TRUE);
if (link_h264) {
h264pad = gst_element_get_static_pad (demux, "h264");
fail_unless (h264pad != NULL);
h264_pad = gst_pad_new_from_static_template (&sink_template, "h264");
fail_unless (h264_pad != NULL);
gst_pad_set_chain_function (h264_pad, _sink_h264_chain);
gst_pad_set_event_function (h264_pad, _sink_h264_event);
fail_unless (gst_pad_link (h264pad, h264_pad) == GST_PAD_LINK_OK);
gst_object_unref (h264pad);
gst_pad_set_active (h264_pad, TRUE);
}
if (link_yuy2) {
yuy2pad = gst_element_get_static_pad (demux, "yuy2");
fail_unless (yuy2pad != NULL);
yuy2_pad = gst_pad_new_from_static_template (&sink_template, "yuy2");
fail_unless (yuy2_pad != NULL);
gst_pad_set_chain_function (yuy2_pad, _sink_yuy2_chain);
gst_pad_set_event_function (yuy2_pad, _sink_yuy2_event);
fail_unless (gst_pad_link (yuy2pad, yuy2_pad) == GST_PAD_LINK_OK);
gst_object_unref (yuy2pad);
gst_pad_set_active (yuy2_pad, TRUE);
}
if (link_nv12) {
nv12pad = gst_element_get_static_pad (demux, "nv12");
fail_unless (nv12pad != NULL);
nv12_pad = gst_pad_new_from_static_template (&sink_template, "nv12");
fail_unless (nv12_pad != NULL);
gst_pad_set_chain_function (nv12_pad, _sink_nv12_chain);
gst_pad_set_event_function (nv12_pad, _sink_nv12_event);
fail_unless (gst_pad_link (nv12pad, nv12_pad) == GST_PAD_LINK_OK);
gst_object_unref (nv12pad);
gst_pad_set_active (nv12_pad, TRUE);
}
if (link_jpg) {
jpgpad = gst_element_get_static_pad (demux, "jpeg");
fail_unless (jpgpad != NULL);
jpg_pad = gst_pad_new_from_static_template (&sink_template, "jpeg");
fail_unless (jpg_pad != NULL);
gst_pad_set_chain_function (jpg_pad, _sink_jpg_chain);
gst_pad_set_event_function (jpg_pad, _sink_jpg_event);
fail_unless (gst_pad_link (jpgpad, jpg_pad) == GST_PAD_LINK_OK);
gst_object_unref (jpgpad);
gst_pad_set_active (jpg_pad, TRUE);
}
gst_element_set_state (demux, GST_STATE_PLAYING);
}
static GstBuffer *
_buffer_from_file (const gchar * filename)
{
GstBuffer *buffer = gst_buffer_new ();
gchar *contents = NULL;
gsize length = 0;
fail_unless (g_file_get_contents (filename, &contents, &length, NULL));
GST_BUFFER_MALLOCDATA (buffer) = (guint8 *) contents;
GST_BUFFER_DATA (buffer) = (guint8 *) contents;
GST_BUFFER_SIZE (buffer) = length;
GST_BUFFER_OFFSET (buffer) = 0;
return buffer;
}
GST_START_TEST (test_valid_h264_jpg)
{
GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template);
GstCaps *h264_caps;
GstBuffer *buffer;
gchar *h264_data, *jpg_data;
gsize h264_size, jpg_size;
_setup_test (TRUE, TRUE, TRUE, TRUE);
h264_caps = gst_caps_new_simple ("video/x-h264",
"width", G_TYPE_INT, 640, "height", G_TYPE_INT, 480,
"framerate", GST_TYPE_FRACTION, 15, 1, NULL);
buffer = _buffer_from_file (VALID_H264_JPG_MJPG_FILENAME);
gst_buffer_set_caps (buffer, mjpg_caps);
fail_unless (g_file_get_contents (VALID_H264_JPG_H264_FILENAME,
&h264_data, &h264_size, NULL));
fail_unless (g_file_get_contents (VALID_H264_JPG_JPG_FILENAME,
&jpg_data, &jpg_size, NULL));
fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_OK);
fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ()));
fail_unless (have_h264_eos);
fail_unless (have_yuy2_eos);
fail_unless (have_nv12_eos);
fail_unless (have_jpg_eos);
fail_unless (buffer_h264 != NULL);
fail_unless (buffer_jpg != NULL);
fail_unless (buffer_nv12 == NULL);
fail_unless (buffer_yuy2 == NULL);
fail_unless (gerror == NULL && error_debug == NULL);
fail_unless (gst_caps_is_always_compatible (GST_BUFFER_CAPS (buffer_h264),
h264_caps));
fail_unless (gst_caps_is_always_compatible (GST_BUFFER_CAPS (buffer_jpg),
mjpg_caps));
fail_unless (GST_BUFFER_SIZE (buffer_h264) == h264_size);
fail_unless (GST_BUFFER_SIZE (buffer_jpg) == jpg_size);
fail_unless (memcmp (GST_BUFFER_DATA (buffer_h264), h264_data,
h264_size) == 0);
fail_unless (memcmp (GST_BUFFER_DATA (buffer_jpg), jpg_data, jpg_size) == 0);
gst_caps_unref (mjpg_caps);
gst_caps_unref (h264_caps);
g_free (h264_data);
g_free (jpg_data);
gst_buffer_unref (buffer_h264);
gst_buffer_unref (buffer_jpg);
_teardown_test ();
}
GST_END_TEST;
GST_START_TEST (test_valid_h264_yuy2)
{
GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template);
GstCaps *h264_caps;
GstCaps *yuy2_caps;
GstBuffer *buffer;
gchar *h264_data, *yuy2_data;
gsize h264_size, yuy2_size;
_setup_test (TRUE, TRUE, TRUE, TRUE);
h264_caps = gst_caps_new_simple ("video/x-h264",
"width", G_TYPE_INT, 640, "height", G_TYPE_INT, 480,
"framerate", GST_TYPE_FRACTION, 15, 1, NULL);
yuy2_caps = gst_caps_new_simple ("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'),
"width", G_TYPE_INT, 160, "height", G_TYPE_INT, 90,
"framerate", GST_TYPE_FRACTION, 15, 1, NULL);
buffer = _buffer_from_file (VALID_H264_YUY2_MJPG_FILENAME);
gst_buffer_set_caps (buffer, mjpg_caps);
fail_unless (g_file_get_contents (VALID_H264_YUY2_H264_FILENAME,
&h264_data, &h264_size, NULL));
fail_unless (g_file_get_contents (VALID_H264_YUY2_YUY2_FILENAME,
&yuy2_data, &yuy2_size, NULL));
fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_OK);
fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ()));
fail_unless (have_h264_eos);
fail_unless (have_yuy2_eos);
fail_unless (have_nv12_eos);
fail_unless (have_jpg_eos);
fail_unless (buffer_h264 != NULL);
fail_unless (buffer_jpg == NULL);
fail_unless (buffer_nv12 == NULL);
fail_unless (buffer_yuy2 != NULL);
fail_unless (gerror == NULL && error_debug == NULL);
fail_unless (gst_caps_is_always_compatible (GST_BUFFER_CAPS (buffer_h264),
h264_caps));
fail_unless (gst_caps_is_always_compatible (GST_BUFFER_CAPS (buffer_yuy2),
yuy2_caps));
fail_unless (GST_BUFFER_SIZE (buffer_h264) == h264_size);
fail_unless (GST_BUFFER_SIZE (buffer_yuy2) == yuy2_size);
fail_unless (memcmp (GST_BUFFER_DATA (buffer_h264), h264_data,
h264_size) == 0);
fail_unless (memcmp (GST_BUFFER_DATA (buffer_yuy2), yuy2_data,
yuy2_size) == 0);
gst_caps_unref (mjpg_caps);
gst_caps_unref (yuy2_caps);
gst_caps_unref (h264_caps);
g_free (h264_data);
g_free (yuy2_data);
gst_buffer_unref (buffer_h264);
gst_buffer_unref (buffer_yuy2);
_teardown_test ();
}
GST_END_TEST;
GST_START_TEST (test_no_data)
{
GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template);
GstBuffer *buffer = gst_buffer_new ();
_setup_test (TRUE, TRUE, TRUE, TRUE);
gst_buffer_set_caps (buffer, mjpg_caps);
fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_OK);
fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ()));
fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos);
fail_unless (buffer_h264 == NULL && buffer_jpg != NULL);
fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL);
fail_unless (gerror == NULL && error_debug == NULL);
_teardown_test ();
}
GST_END_TEST;
GST_START_TEST (test_data_zero)
{
GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template);
GstBuffer *buffer = gst_buffer_new_and_alloc (1024);
_setup_test (TRUE, TRUE, TRUE, TRUE);
memset (GST_BUFFER_DATA (buffer), 0, 1024);
GST_BUFFER_SIZE (buffer) = 1024;
gst_buffer_set_caps (buffer, mjpg_caps);
fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_OK);
fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ()));
fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos);
fail_unless (buffer_h264 == NULL && buffer_jpg == NULL);
fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL);
_teardown_test ();
}
GST_END_TEST;
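/* The following tests feed hand-crafted JPEG fragments: 0xff 0xd8 is the SOI
 * marker and 0xff 0xe4 the APP4 marker carrying the auxiliary stream data.
 * Each test truncates or corrupts the segment at a different point and checks
 * how the demux reacts. */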
GST_START_TEST (test_no_marker_size)
{
GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template);
GstBuffer *buffer = gst_buffer_new_and_alloc (1024);
const guchar data[] = {
0xff, 0xd8, 0xff, 0xe4, 0x00
};
_setup_test (TRUE, TRUE, TRUE, TRUE);
memcpy (GST_BUFFER_DATA (buffer), data, sizeof (data));
GST_BUFFER_SIZE (buffer) = sizeof (data);
gst_buffer_set_caps (buffer, mjpg_caps);
fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_ERROR);
fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ()));
fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos);
fail_unless (buffer_h264 == NULL && buffer_jpg == NULL);
fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL);
fail_unless (gerror != NULL);
fail_unless (gerror->domain == GST_STREAM_ERROR);
fail_unless (gerror->code == GST_STREAM_ERROR_DEMUX);
fail_unless (memcmp (gerror->message,
"Not enough data to read marker size",
strlen (gerror->message)) == 0);
_teardown_test ();
}
GST_END_TEST;
GST_START_TEST (test_not_enough_data)
{
GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template);
GstBuffer *buffer = gst_buffer_new_and_alloc (1024);
const guchar data[] = {
0xff, 0xd8, 0xff, 0xe4, 0x00, 0xff, 0x00, 0x00
};
_setup_test (TRUE, TRUE, TRUE, TRUE);
memcpy (GST_BUFFER_DATA (buffer), data, sizeof (data));
GST_BUFFER_SIZE (buffer) = sizeof (data);
gst_buffer_set_caps (buffer, mjpg_caps);
fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_ERROR);
fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ()));
fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos);
fail_unless (buffer_h264 == NULL && buffer_jpg == NULL);
fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL);
fail_unless (gerror != NULL);
fail_unless (gerror->domain == GST_STREAM_ERROR);
fail_unless (gerror->code == GST_STREAM_ERROR_DEMUX);
fail_unless (memcmp (gerror->message,
"Not enough data to read marker content",
strlen (gerror->message)) == 0);
_teardown_test ();
}
GST_END_TEST;
GST_START_TEST (test_no_aux_header)
{
GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template);
GstBuffer *buffer = gst_buffer_new_and_alloc (1024);
const guchar data[] = {
0xff, 0xd8, 0xff, 0xe4, 0x00, 0x02, 0x00, 0x00,
0xff, 0xd9
};
_setup_test (TRUE, TRUE, TRUE, TRUE);
memcpy (GST_BUFFER_DATA (buffer), data, sizeof (data));
GST_BUFFER_SIZE (buffer) = sizeof (data);
gst_buffer_set_caps (buffer, mjpg_caps);
fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_ERROR);
fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ()));
fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos);
fail_unless (buffer_h264 == NULL && buffer_jpg == NULL);
fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL);
fail_unless (gerror != NULL);
fail_unless (gerror->domain == GST_STREAM_ERROR);
fail_unless (gerror->code == GST_STREAM_ERROR_DEMUX);
fail_unless (memcmp (gerror->message,
"Not enough data to read aux header", strlen (gerror->message)) == 0);
_teardown_test ();
}
GST_END_TEST;
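/* A well-formed APP4 segment with what appears to be a complete auxiliary
 * stream header (version, header length, 'H264' fourcc, frame dimensions and
 * timing fields) followed by a 32-bit payload size of zero: an empty auxiliary
 * payload must be accepted without error and without producing any buffer. */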
GST_START_TEST (test_empty_aux_data)
{
GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template);
GstBuffer *buffer = gst_buffer_new_and_alloc (1024);
const guchar data[] = {
0xff, 0xd8, 0xff, 0xe4, 0x00, 0x1C, 0x00, 0x01,
0x16, 0x00, 0x48, 0x32, 0x36, 0x34, 0x80, 0x07,
0x38, 0x04, 0x2a, 0x2c, 0x0a, 0x00, 0x1b, 0x00,
0x40, 0x62, 0xcb, 0x0a, 0x00, 0x00, 0x00, 0x00,
0xff, 0xd9
};
_setup_test (TRUE, TRUE, TRUE, TRUE);
memcpy (GST_BUFFER_DATA (buffer), data, sizeof (data));
GST_BUFFER_SIZE (buffer) = sizeof (data);
gst_buffer_set_caps (buffer, mjpg_caps);
fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_OK);
fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ()));
fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos);
fail_unless (buffer_h264 == NULL && buffer_jpg == NULL);
fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL);
fail_unless (gerror == NULL);
_teardown_test ();
}
GST_END_TEST;
GST_START_TEST (test_unknown_fcc)
{
GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template);
GstBuffer *buffer = gst_buffer_new_and_alloc (1024);
const guchar data[] = {
0xff, 0xd8, 0xff, 0xe4, 0x00, 0x2C, 0x00, 0x01,
0x16, 0x00, 0x48, 0x30, 0x30, 0x30, 0x80, 0x07,
0x38, 0x04, 0x2a, 0x2c, 0x0a, 0x00, 0x1b, 0x00,
0x40, 0x62, 0xcb, 0x0a, 0x10, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0xff, 0xd9
};
_setup_test (TRUE, TRUE, TRUE, TRUE);
memcpy (GST_BUFFER_DATA (buffer), data, sizeof (data));
GST_BUFFER_SIZE (buffer) = sizeof (data);
gst_buffer_set_caps (buffer, mjpg_caps);
fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_ERROR);
fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ()));
fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos);
fail_unless (buffer_h264 == NULL && buffer_jpg == NULL);
fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL);
fail_unless (gerror != NULL);
fail_unless (gerror->domain == GST_STREAM_ERROR);
fail_unless (gerror->code == GST_STREAM_ERROR_DEMUX);
fail_unless (memcmp (gerror->message,
"Unknown auxiliary stream format : H000",
strlen (gerror->message)) == 0);
_teardown_test ();
}
GST_END_TEST;
GST_START_TEST (test_not_enough_aux_data)
{
GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template);
GstBuffer *buffer = gst_buffer_new_and_alloc (1024);
const guchar data[] = {
0xff, 0xd8, 0xff, 0xe4, 0x00, 0x1C, 0x00, 0x01,
0x16, 0x00, 0x48, 0x32, 0x36, 0x34, 0x80, 0x07,
0x38, 0x04, 0x2a, 0x2c, 0x0a, 0x00, 0x1b, 0x00,
0x40, 0x62, 0xcb, 0x0a, 0x10, 0x00, 0x00, 0x00,
0xff, 0xd9
};
_setup_test (TRUE, TRUE, TRUE, TRUE);
memcpy (GST_BUFFER_DATA (buffer), data, sizeof (data));
GST_BUFFER_SIZE (buffer) = sizeof (data);
gst_buffer_set_caps (buffer, mjpg_caps);
fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_ERROR);
fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ()));
fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos);
fail_unless (buffer_h264 == NULL && buffer_jpg == NULL);
fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL);
fail_unless (gerror != NULL);
fail_unless (gerror->domain == GST_STREAM_ERROR);
fail_unless (gerror->code == GST_STREAM_ERROR_DEMUX);
fail_unless (memcmp (gerror->message,
"Incomplete auxiliary stream. 16 bytes missing",
strlen (gerror->message)) == 0);
_teardown_test ();
}
GST_END_TEST;
GST_START_TEST (test_too_much_aux_data)
{
GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template);
GstBuffer *buffer = gst_buffer_new_and_alloc (1024);
const guchar data[] = {
0xff, 0xd8, 0xff, 0xe4, 0x00, 0x3C, 0x00, 0x01,
0x16, 0x00, 0x48, 0x32, 0x36, 0x34, 0x80, 0x07,
0x38, 0x04, 0x2a, 0x2c, 0x0a, 0x00, 0x1b, 0x00,
0x40, 0x62, 0xcb, 0x0a, 0x10, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0xff, 0xd9
};
_setup_test (TRUE, TRUE, TRUE, TRUE);
memcpy (GST_BUFFER_DATA (buffer), data, sizeof (data));
GST_BUFFER_SIZE (buffer) = sizeof (data);
gst_buffer_set_caps (buffer, mjpg_caps);
fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_ERROR);
fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ()));
fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos);
fail_unless (buffer_h264 == NULL && buffer_jpg == NULL);
fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL);
fail_unless (gerror != NULL);
fail_unless (gerror->domain == GST_STREAM_ERROR);
fail_unless (gerror->code == GST_STREAM_ERROR_DEMUX);
fail_unless (memcmp (gerror->message,
"Expected 16 auxiliary data, got 32 bytes",
strlen (gerror->message)) == 0);
_teardown_test ();
}
GST_END_TEST;
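/* Valid aux header and exactly 16 bytes of H.264 payload, but no JPEG SOS
 * marker afterwards: the payload must still come out of the h264 pad while the
 * jpg pad stays silent. */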
GST_START_TEST (test_no_sos_marker)
{
GstCaps *mjpg_caps = gst_static_pad_template_get_caps (&mjpg_template);
GstBuffer *buffer = gst_buffer_new_and_alloc (1024);
const guchar data[] = {
0xff, 0xd8, 0xff, 0xe4, 0x00, 0x2C, 0x00, 0x01,
0x16, 0x00, 0x48, 0x32, 0x36, 0x34, 0x80, 0x07,
0x38, 0x04, 0x2a, 0x2c, 0x0a, 0x00, 0x1b, 0x00,
0x40, 0x62, 0xcb, 0x0a, 0x10, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0xff, 0xd9
};
const guchar h264_data[] = {
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00
};
_setup_test (TRUE, TRUE, TRUE, TRUE);
memcpy (GST_BUFFER_DATA (buffer), data, sizeof (data));
GST_BUFFER_SIZE (buffer) = sizeof (data);
gst_buffer_set_caps (buffer, mjpg_caps);
fail_unless (gst_pad_push (mjpg_pad, buffer) == GST_FLOW_OK);
fail_unless (gst_pad_push_event (mjpg_pad, gst_event_new_eos ()));
fail_unless (have_h264_eos && have_yuy2_eos && have_nv12_eos && have_jpg_eos);
fail_unless (buffer_h264 != NULL && buffer_jpg == NULL);
fail_unless (buffer_nv12 == NULL && buffer_yuy2 == NULL);
fail_unless (gerror == NULL);
fail_unless (GST_BUFFER_SIZE (buffer_h264) == sizeof (h264_data));
fail_unless (memcmp (GST_BUFFER_DATA (buffer_h264), h264_data,
sizeof (h264_data)) == 0);
_teardown_test ();
}
GST_END_TEST;
static Suite *
uvch264demux_suite (void)
{
Suite *s = suite_create ("uvch264demux");
TCase *tc_chain = tcase_create ("general");
suite_add_tcase (s, tc_chain);
tcase_set_timeout (tc_chain, 180);
tcase_add_test (tc_chain, test_valid_h264_jpg);
tcase_add_test (tc_chain, test_valid_h264_yuy2);
tcase_add_test (tc_chain, test_no_data);
tcase_add_test (tc_chain, test_data_zero);
tcase_add_test (tc_chain, test_no_marker_size);
tcase_add_test (tc_chain, test_not_enough_data);
tcase_add_test (tc_chain, test_no_aux_header);
tcase_add_test (tc_chain, test_empty_aux_data);
tcase_add_test (tc_chain, test_unknown_fcc);
tcase_add_test (tc_chain, test_no_sos_marker);
tcase_add_test (tc_chain, test_not_enough_aux_data);
tcase_add_test (tc_chain, test_too_much_aux_data);
return s;
}
GST_CHECK_MAIN (uvch264demux);

(Three binary test data files added: 10 KiB, 16 KiB and 34 KiB. One additional file's diff was suppressed because its lines are too long.)

View file

@ -1,5 +1,11 @@
if USE_UVCH264
UVCH264_DIR=uvch264
else
UVCH264_DIR=
endif
if HAVE_GTK
GTK_EXAMPLES=mxf scaletempo camerabin2
GTK_EXAMPLES=mxf scaletempo camerabin2 $(UVCH264_DIR)
else
GTK_EXAMPLES=
endif
@ -13,6 +19,6 @@ endif
OPENCV_EXAMPLES=opencv
SUBDIRS= $(DIRECTFB_DIR) $(GTK_EXAMPLES) $(OPENCV_EXAMPLES)
DIST_SUBDIRS= camerabin2 directfb mxf scaletempo opencv
DIST_SUBDIRS= camerabin2 directfb mxf scaletempo opencv uvch264
include $(top_srcdir)/common/parallel-subdirs.mak

View file

@ -0,0 +1,36 @@
TEST_UVCH264_GLADE_FILES = window.glade \
boolean_property.glade \
enum_property.glade \
int_property.glade
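# Glade UI definitions loaded at run time from the current directory by
# test-uvch264; they are shipped in the tarball (EXTRA_DIST) but never installed.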
if HAVE_GTK
TEST_UVCH264_EXAMPLES = test-uvch264
test_uvch264_SOURCES = test-uvch264.c
test_uvch264_CFLAGS = \
$(GST_PLUGINS_BAD_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS) \
$(GST_VIDEO_CFLAGS) \
$(GST_CFLAGS) \
$(GTK_CFLAGS) \
$(GMODULE_EXPORT_CFLAGS) \
-DGST_USE_UNSTABLE_API
test_uvch264_LDADD = \
$(GST_PLUGINS_BASE_LIBS) \
$(GST_VIDEO_LIBS) \
$(GST_LIBS) \
-lgstinterfaces-@GST_MAJORMINOR@ \
$(GTK_LIBS) \
$(GMODULE_EXPORT_LIBS)
noinst_DATA = $(TEST_UVCH264_GLADE_FILES)
else
TEST_UVCH264_EXAMPLES =
endif
noinst_PROGRAMS = $(TEST_UVCH264_EXAMPLES)
EXTRA_DIST = $(TEST_UVCH264_GLADE_FILES)

View file

@ -0,0 +1,94 @@
<?xml version="1.0" encoding="UTF-8"?>
<interface>
<!-- interface-requires gtk+ 3.0 -->
<object class="GtkHBox" id="boolean-property">
<property name="visible">True</property>
<property name="can_focus">False</property>
<child>
<object class="GtkLabel" id="label">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="width_chars">18</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">0</property>
</packing>
</child>
<child>
<object class="GtkToggleButton" id="value">
<property name="label" translatable="yes"> Disabled </property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="toggled" handler="on_button_toggled" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">1</property>
</packing>
</child>
<child>
<object class="GtkButton" id="get">
<property name="label" translatable="yes">Get</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="clicked" handler="on_get_button_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">2</property>
</packing>
</child>
<child>
<object class="GtkButton" id="set">
<property name="label" translatable="yes">Set</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="clicked" handler="on_set_button_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">3</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label66">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Default</property>
<property name="width_chars">8</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">4</property>
</packing>
</child>
<child>
<object class="GtkToggleButton" id="default">
<property name="label" translatable="yes"> Disabled </property>
<property name="visible">True</property>
<property name="sensitive">False</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="toggled" handler="on_button_toggled" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">5</property>
</packing>
</child>
</object>
</interface>

View file

@ -0,0 +1,88 @@
<?xml version="1.0" encoding="UTF-8"?>
<interface>
<!-- interface-requires gtk+ 3.0 -->
<object class="GtkHBox" id="enum-property">
<property name="visible">True</property>
<property name="can_focus">False</property>
<child>
<object class="GtkLabel" id="label">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="width_chars">18</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">0</property>
</packing>
</child>
<child>
<object class="GtkComboBoxText" id="value">
<property name="visible">True</property>
<property name="can_focus">False</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">1</property>
</packing>
</child>
<child>
<object class="GtkButton" id="get">
<property name="label" translatable="yes">Get</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="clicked" handler="on_get_button_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">2</property>
</packing>
</child>
<child>
<object class="GtkButton" id="set">
<property name="label" translatable="yes">Set</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="clicked" handler="on_set_button_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">3</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label72">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Default</property>
<property name="width_chars">8</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">4</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="default">
<property name="visible">True</property>
<property name="sensitive">False</property>
<property name="can_focus">True</property>
<property name="invisible_char">●</property>
<property name="invisible_char_set">True</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">5</property>
</packing>
</child>
</object>
</interface>

View file

@ -0,0 +1,88 @@
<?xml version="1.0" encoding="UTF-8"?>
<interface>
<!-- interface-requires gtk+ 3.0 -->
<object class="GtkHBox" id="enum-property">
<property name="visible">True</property>
<property name="can_focus">False</property>
<child>
<object class="GtkLabel" id="label">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="width_chars">18</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">0</property>
</packing>
</child>
<child>
<object class="GtkComboBox" id="value">
<property name="visible">True</property>
<property name="can_focus">False</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">1</property>
</packing>
</child>
<child>
<object class="GtkButton" id="get">
<property name="label" translatable="yes">Get</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="clicked" handler="on_get_button_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">2</property>
</packing>
</child>
<child>
<object class="GtkButton" id="set">
<property name="label" translatable="yes">Set</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="clicked" handler="on_set_button_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">3</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label72">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Default</property>
<property name="width_chars">8</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">4</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="default">
<property name="visible">True</property>
<property name="sensitive">False</property>
<property name="can_focus">True</property>
<property name="invisible_char">●</property>
<property name="invisible_char_set">True</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">5</property>
</packing>
</child>
</object>
</interface>

View file

@ -0,0 +1,147 @@
<?xml version="1.0" encoding="UTF-8"?>
<interface>
<!-- interface-requires gtk+ 3.0 -->
<object class="GtkHBox" id="int-property">
<property name="visible">True</property>
<property name="can_focus">False</property>
<child>
<object class="GtkLabel" id="label">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="width_chars">18</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">0</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="value">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="invisible_char">●</property>
<property name="width_chars">10</property>
<property name="invisible_char_set">True</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">1</property>
</packing>
</child>
<child>
<object class="GtkButton" id="get">
<property name="label" translatable="yes">Get</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="clicked" handler="on_get_button_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">2</property>
</packing>
</child>
<child>
<object class="GtkButton" id="set">
<property name="label" translatable="yes">Set</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="clicked" handler="on_set_button_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">3</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label2">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Minimum</property>
<property name="width_chars">8</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">4</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="minimum">
<property name="visible">True</property>
<property name="sensitive">False</property>
<property name="can_focus">True</property>
<property name="invisible_char">●</property>
<property name="width_chars">10</property>
<property name="invisible_char_set">True</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">5</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label3">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Default</property>
<property name="width_chars">8</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">6</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="default">
<property name="visible">True</property>
<property name="sensitive">False</property>
<property name="can_focus">True</property>
<property name="invisible_char">●</property>
<property name="width_chars">10</property>
<property name="invisible_char_set">True</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">7</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label4">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Maximum</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">8</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="maximum">
<property name="visible">True</property>
<property name="sensitive">False</property>
<property name="can_focus">True</property>
<property name="invisible_char">●</property>
<property name="width_chars">10</property>
<property name="invisible_char_set">True</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">9</property>
</packing>
</child>
</object>
</interface>

View file

@ -0,0 +1,673 @@
#include <gst/gst.h>
#include <gtk/gtk.h>
#include <gdk/gdkx.h>
#include <gst/interfaces/xoverlay.h>
#include <gst/video/video.h>
#define WINDOW_GLADE "window.glade"
#define INT_PROPERTY_GLADE "int_property.glade"
#define ENUM_PROPERTY_GLADE "enum_property.glade"
#define BOOL_PROPERTY_GLADE "boolean_property.glade"
#define PROPERTY_TO_VBOX \
properties[i].dynamic ? GTK_BOX (dynamic_vbox) : GTK_BOX (static_vbox)
#define GET_WIDGET(object, type, name) \
type (gtk_builder_get_object ((object)->builder, name))
#define GET_PROP_WIDGET(type, name) GET_WIDGET (&(properties[i]), type, name)
static guint h264_xid, preview_xid;
typedef struct
{
GtkBuilder *builder;
GstElement *src;
enum
{ NONE, INT, ENUM, BOOL } type;
const gchar *property_name;
gboolean readonly;
gboolean dynamic;
} Prop;
typedef struct
{
GtkBuilder *builder;
GstElement *bin;
GstElement *src;
GstElement *identity;
GstElement *vid_capsfilter;
GstElement *vf_capsfilter;
} Main;
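/* One entry per uvch264_src property exposed in the UI: 'dynamic' marks
 * controls that may still be changed while streaming (static ones are skipped
 * when probing in PLAYING), and 'readonly' entries get their Set button
 * disabled. */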
Prop properties[] = {
{NULL, NULL, INT, "initial-bitrate", FALSE, FALSE},
{NULL, NULL, INT, "slice-units", FALSE, FALSE},
{NULL, NULL, ENUM, "slice-mode", FALSE, FALSE},
{NULL, NULL, INT, "iframe-period", FALSE, FALSE},
{NULL, NULL, ENUM, "usage-type", FALSE, FALSE},
{NULL, NULL, ENUM, "entropy", FALSE, FALSE},
{NULL, NULL, BOOL, "enable-sei", FALSE, FALSE},
{NULL, NULL, INT, "num-reorder-frames", FALSE, FALSE},
{NULL, NULL, BOOL, "preview-flipped", FALSE, FALSE},
{NULL, NULL, INT, "leaky-bucket-size", FALSE, FALSE},
{NULL, NULL, INT, "num-clock-samples", FALSE, TRUE},
{NULL, NULL, ENUM, "rate-control", FALSE, TRUE},
{NULL, NULL, BOOL, "fixed-framerate", FALSE, TRUE},
{NULL, NULL, INT, "max-mbps", TRUE, TRUE},
{NULL, NULL, INT, "level-idc", FALSE, TRUE},
{NULL, NULL, INT, "peak-bitrate", FALSE, TRUE},
{NULL, NULL, INT, "average-bitrate", FALSE, TRUE},
{NULL, NULL, INT, "min-iframe-qp", FALSE, TRUE},
{NULL, NULL, INT, "max-iframe-qp", FALSE, TRUE},
{NULL, NULL, INT, "min-pframe-qp", FALSE, TRUE},
{NULL, NULL, INT, "max-pframe-qp", FALSE, TRUE},
{NULL, NULL, INT, "min-bframe-qp", FALSE, TRUE},
{NULL, NULL, INT, "max-bframe-qp", FALSE, TRUE},
{NULL, NULL, INT, "ltr-buffer-size", FALSE, TRUE},
{NULL, NULL, INT, "ltr-encoder-control", FALSE, TRUE},
};
static void set_drop_probability (Main * self);
static void get_all_properties (void);
static void probe_all_properties (gboolean playing);
/* Callbacks */
void on_button_toggled (GtkToggleButton * button, gpointer user_data);
void on_get_button_clicked (GtkButton * button, gpointer user_data);
void on_set_button_clicked (GtkButton * button, gpointer user_data);
void on_button_ready_clicked (GtkButton * button, gpointer user_data);
void on_button_null_clicked (GtkButton * button, gpointer user_data);
void on_button_playing_clicked (GtkButton * button, gpointer user_data);
void on_iframe_button_clicked (GtkButton * button, gpointer user_data);
void on_renegotiate_button_clicked (GtkButton * button, gpointer user_data);
void on_start_capture_button_clicked (GtkButton * button, gpointer user_data);
void on_stop_capture_button_clicked (GtkButton * button, gpointer user_data);
void on_window_destroyed (GtkWindow * window, gpointer user_data);
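/* Build the custom upstream GstForceKeyUnit event used to ask uvch264_src for
 * a new keyframe, optionally preceded by SPS/PPS headers. */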
static GstEvent *
new_upstream_force_key_unit (GstClockTime running_time,
gboolean all_headers, guint count)
{
GstEvent *force_key_unit_event;
GstStructure *s;
s = gst_structure_new ("GstForceKeyUnit",
"running-time", GST_TYPE_CLOCK_TIME, running_time,
"all-headers", G_TYPE_BOOLEAN, all_headers,
"count", G_TYPE_UINT, count, NULL);
force_key_unit_event = gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM, s);
return force_key_unit_event;
}
void
on_get_button_clicked (GtkButton * button, gpointer user_data)
{
Prop *property = user_data;
switch (property->type) {
case INT:
{
gchar *val;
gint val_int;
g_object_get (property->src, property->property_name, &val_int, NULL);
val = g_strdup_printf ("%d", val_int);
gtk_entry_set_text (GET_WIDGET (property, GTK_ENTRY, "value"), val);
g_free (val);
}
break;
case ENUM:
{
GParamSpec *param;
gint val;
g_object_get (property->src, property->property_name, &val, NULL);
param = g_object_class_find_property (G_OBJECT_GET_CLASS (property->src),
property->property_name);
if (G_IS_PARAM_SPEC_ENUM (param)) {
GEnumValue *values;
guint i = 0;
values = G_ENUM_CLASS (g_type_class_ref (param->value_type))->values;
while (values[i].value_name) {
if (values[i].value == val) {
gtk_combo_box_set_active (GET_WIDGET (property,
(GtkComboBox *), "value"), i);
break;
}
i++;
}
}
}
break;
case BOOL:
{
gboolean val;
g_object_get (property->src, property->property_name, &val, NULL);
gtk_toggle_button_set_active (GET_WIDGET (property,
(GtkToggleButton *), "value"), val);
}
break;
case NONE:
default:
break;
}
}
void
on_set_button_clicked (GtkButton * button, gpointer user_data)
{
Prop *property = user_data;
switch (property->type) {
case INT:
{
int val_int;
const gchar *val;
val = gtk_entry_get_text (GET_WIDGET (property, GTK_ENTRY, "value"));
val_int = (int) g_ascii_strtoll (val, NULL, 0);
g_object_set (property->src, property->property_name, val_int, NULL);
}
break;
case ENUM:
{
GParamSpec *param;
param = g_object_class_find_property (G_OBJECT_GET_CLASS (property->src),
property->property_name);
if (G_IS_PARAM_SPEC_ENUM (param)) {
GEnumValue *values;
guint val = 0;
values = G_ENUM_CLASS (g_type_class_ref (param->value_type))->values;
val = gtk_combo_box_get_active (GET_WIDGET (property,
(GtkComboBox *), "value"));
g_object_set (property->src, property->property_name,
values[val].value, NULL);
}
}
break;
case BOOL:
{
gboolean val;
val = gtk_toggle_button_get_active (GET_WIDGET (property,
(GtkToggleButton *), "value"));
g_object_set (property->src, property->property_name, val, NULL);
}
break;
case NONE:
default:
break;
}
get_all_properties ();
}
void
on_button_toggled (GtkToggleButton * button, gpointer user_data)
{
if (gtk_toggle_button_get_active (button))
gtk_button_set_label (GTK_BUTTON (button), " Enabled ");
else
gtk_button_set_label (GTK_BUTTON (button), " Disabled ");
}
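/* Parse the caps strings typed into the two entries, apply them to the
 * capsfilters and, when send_event is TRUE, send the custom upstream
 * "renegotiate" event so uvch264_src renegotiates its streams on the fly. */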
static gboolean
set_caps (Main * self, gboolean send_event)
{
  const gchar *h264_filter;
  const gchar *raw_filter;
  GstCaps *h264_caps = NULL;
  GstCaps *raw_caps = NULL;
  gchar *h264_str, *raw_str;
  gboolean ret = TRUE;
  h264_filter = gtk_entry_get_text (GET_WIDGET (self, GTK_ENTRY, "h264_caps"));
  raw_filter =
      gtk_entry_get_text (GET_WIDGET (self, GTK_ENTRY, "preview_caps"));
  if (h264_filter)
    h264_caps = gst_caps_from_string (h264_filter);
  if (raw_filter)
    raw_caps = gst_caps_from_string (raw_filter);
  /* gst_caps_to_string () returns newly allocated strings, free them after logging */
  h264_str = gst_caps_to_string (h264_caps);
  raw_str = gst_caps_to_string (raw_caps);
  g_debug ("H264 caps : %s", h264_str);
  g_debug ("Preview caps : %s", raw_str);
  g_free (h264_str);
  g_free (raw_str);
if (!h264_caps || !raw_caps) {
g_debug ("Invalid caps");
ret = FALSE;
goto end;
}
g_object_set (self->vid_capsfilter, "caps", h264_caps, NULL);
g_object_set (self->vf_capsfilter, "caps", raw_caps, NULL);
if (send_event) {
gst_element_send_event (GST_ELEMENT (self->src),
gst_event_new_custom (GST_EVENT_CUSTOM_UPSTREAM,
gst_structure_new ("renegotiate", NULL)));
}
end:
if (h264_caps)
gst_caps_unref (h264_caps);
if (raw_caps)
gst_caps_unref (raw_caps);
return ret;
}
void
on_button_ready_clicked (GtkButton * button, gpointer user_data)
{
Main *self = user_data;
set_caps (self, FALSE);
gst_element_set_state (self->bin, GST_STATE_READY);
probe_all_properties (FALSE);
get_all_properties ();
}
void
on_button_null_clicked (GtkButton * button, gpointer user_data)
{
Main *self = user_data;
gst_element_set_state (self->bin, GST_STATE_NULL);
probe_all_properties (FALSE);
get_all_properties ();
}
void
on_button_playing_clicked (GtkButton * button, gpointer user_data)
{
Main *self = user_data;
if (gst_element_set_state (self->bin, GST_STATE_PLAYING) ==
GST_STATE_CHANGE_FAILURE) {
g_debug ("Unable to go to state PLAYING");
}
set_caps (self, FALSE);
probe_all_properties (TRUE);
get_all_properties ();
set_drop_probability (self);
}
void
on_iframe_button_clicked (GtkButton * button, gpointer user_data)
{
Main *self = user_data;
GstEvent *event;
gboolean pps_sps;
set_drop_probability (self);
pps_sps = gtk_toggle_button_get_active (GET_WIDGET (self, (GtkToggleButton *),
"pps_sps"));
event = new_upstream_force_key_unit (GST_CLOCK_TIME_NONE, pps_sps, 0);
gst_element_send_event (GST_ELEMENT (self->src), event);
}
void
on_renegotiate_button_clicked (GtkButton * button, gpointer user_data)
{
Main *self = user_data;
set_caps (self, TRUE);
probe_all_properties (GST_STATE (self->bin) >= GST_STATE_PAUSED);
get_all_properties ();
}
void
on_start_capture_button_clicked (GtkButton * button, gpointer user_data)
{
Main *self = user_data;
set_caps (self, FALSE);
g_signal_emit_by_name (G_OBJECT (self->src), "start-capture", NULL);
probe_all_properties (GST_STATE (self->bin) >= GST_STATE_PAUSED);
get_all_properties ();
}
void
on_stop_capture_button_clicked (GtkButton * button, gpointer user_data)
{
Main *self = user_data;
set_caps (self, FALSE);
g_signal_emit_by_name (G_OBJECT (self->src), "stop-capture", NULL);
probe_all_properties (GST_STATE (self->bin) >= GST_STATE_PAUSED);
get_all_properties ();
}
void
on_window_destroyed (GtkWindow * window, gpointer user_data)
{
gtk_main_quit ();
}
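/* Hand the X window handles to the video sinks: the prepare-xwindow-id
 * message coming from h264_sink gets the H.264 drawing area, any other sink
 * gets the preview area. */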
static gboolean
_bus_callback (GstBus * bus, GstMessage * message, gpointer user_data)
{
const GstStructure *s = gst_message_get_structure (message);
GstObject *source = NULL;
if (GST_MESSAGE_TYPE (message) == GST_MESSAGE_ELEMENT &&
gst_structure_has_name (s, "prepare-xwindow-id")) {
source = GST_MESSAGE_SRC (message);
if (!g_strcmp0 (gst_object_get_name (source), "h264_sink"))
gst_x_overlay_set_window_handle (GST_X_OVERLAY (source), h264_xid);
else
gst_x_overlay_set_window_handle (GST_X_OVERLAY (source), preview_xid);
}
return TRUE;
}
static void
set_drop_probability (Main * self)
{
const gchar *drop;
gdouble drop_probability = 0.0;
drop = gtk_entry_get_text (GET_WIDGET (self, GTK_ENTRY, "drop"));
drop_probability = g_ascii_strtod (drop, NULL);
g_debug ("Setting drop probability to : %f", drop_probability);
g_object_set (self->identity, "drop-probability", drop_probability, NULL);
}
static void
get_all_properties (void)
{
int i;
for (i = 0; i < G_N_ELEMENTS (properties); i++)
on_get_button_clicked (NULL, &properties[i]);
}
static void
probe_all_properties (gboolean playing)
{
int i;
for (i = 0; i < G_N_ELEMENTS (properties); i++) {
gboolean return_value, changeable, default_bool;
guint mask, minimum, maximum, default_int;
GParamSpec *param;
/* When playing, ignore static controls */
if (playing && !properties[i].dynamic)
continue;
switch (properties[i].type) {
case INT:
g_signal_emit_by_name (G_OBJECT (properties[i].src), "get-int-setting",
properties[i].property_name, &minimum, &default_int, &maximum,
&return_value, NULL);
if (return_value) {
gchar *min, *def, *max;
min = g_strdup_printf ("%d", minimum);
def = g_strdup_printf ("%d", default_int);
max = g_strdup_printf ("%d", maximum);
gtk_entry_set_text (GET_PROP_WIDGET (GTK_ENTRY, "minimum"), min);
gtk_entry_set_text (GET_PROP_WIDGET (GTK_ENTRY, "default"), def);
gtk_entry_set_text (GET_PROP_WIDGET (GTK_ENTRY, "maximum"), max);
g_free (min);
g_free (def);
g_free (max);
} else {
gtk_entry_set_text (GET_PROP_WIDGET (GTK_ENTRY, "minimum"), "");
gtk_entry_set_text (GET_PROP_WIDGET (GTK_ENTRY, "default"), "");
gtk_entry_set_text (GET_PROP_WIDGET (GTK_ENTRY, "maximum"), "");
}
break;
case ENUM:
g_signal_emit_by_name (G_OBJECT (properties[i].src), "get-enum-setting",
properties[i].property_name, &mask, &default_int, &return_value,
NULL);
param =
g_object_class_find_property (G_OBJECT_GET_CLASS (properties
[i].src), properties[i].property_name);
if (G_IS_PARAM_SPEC_ENUM (param)) {
GEnumValue *values;
guint j = 0;
values = G_ENUM_CLASS (g_type_class_ref (param->value_type))->values;
if (return_value) {
while (values[j].value_name) {
if (values[j].value == default_int) {
gtk_entry_set_text (GET_PROP_WIDGET (GTK_ENTRY, "default"),
values[j].value_name);
break;
}
j++;
}
} else {
gtk_entry_set_text (GET_PROP_WIDGET (GTK_ENTRY, "default"), "");
}
j = 0;
while (values[j].value_name) {
#if !GTK_CHECK_VERSION (2, 24, 0)
gtk_combo_box_remove_text (GET_PROP_WIDGET ((GtkComboBox *),
"value"), 0);
#else
gtk_combo_box_text_remove (GET_PROP_WIDGET ((GtkComboBoxText *),
"value"), 0);
#endif
j++;
}
j = 0;
while (values[j].value_name) {
gchar *val;
if (return_value && (mask & (1 << values[j].value)) != 0)
val = g_strdup_printf ("**%s**", values[j].value_name);
else
val = g_strdup (values[j].value_name);
#if !GTK_CHECK_VERSION (2, 24, 0)
gtk_combo_box_append_text (GET_PROP_WIDGET ((GtkComboBox *),
"value"), val);
#else
gtk_combo_box_text_append_text (GET_PROP_WIDGET ((GtkComboBoxText
*), "value"), val);
#endif
g_free (val);
j++;
}
}
break;
case BOOL:
g_signal_emit_by_name (G_OBJECT (properties[i].src),
"get-boolean-setting", properties[i].property_name,
&changeable, &default_bool, &return_value, NULL);
if (return_value) {
gtk_widget_set_sensitive (GET_PROP_WIDGET (GTK_WIDGET, "value"),
changeable);
gtk_widget_set_sensitive (GET_PROP_WIDGET (GTK_WIDGET, "get"),
changeable);
gtk_widget_set_sensitive (GET_PROP_WIDGET (GTK_WIDGET, "set"),
changeable);
gtk_toggle_button_set_active (GET_PROP_WIDGET ((GtkToggleButton *),
"default"), default_bool);
}
break;
case NONE:
default:
break;
}
}
}
int
main (int argc, char *argv[])
{
Main self = { NULL, NULL, NULL, NULL };
GstBus *bus = NULL;
GtkWidget *window, *static_vbox, *dynamic_vbox, *da;
gchar *drop;
gdouble drop_probability;
GdkWindow *gdk_win = NULL;
const char *device = "/dev/video0";
GError *error = NULL;
int i;
gtk_init (&argc, &argv);
gst_init (&argc, &argv);
if (argc > 1)
device = argv[1];
else
g_print ("Usage : %s [device]\nUsing default device : %s\n",
argv[0], device);
self.bin = gst_parse_launch ("uvch264_src name=src src.vidsrc ! queue ! "
"capsfilter name=vid_cf ! identity name=identity ! ffdec_h264 ! "
"xvimagesink name=h264_sink async=false "
"src.vfsrc ! queue ! capsfilter name=vf_cf ! "
"xvimagesink name=preview_sink async=false", NULL);
if (!self.bin)
return -1;
/* Listen to the bus for messages */
bus = gst_element_get_bus (self.bin);
gst_bus_add_watch (bus, _bus_callback, self.bin);
gst_object_unref (bus);
self.src = gst_bin_get_by_name (GST_BIN (self.bin), "src");
self.identity = gst_bin_get_by_name (GST_BIN (self.bin), "identity");
self.vid_capsfilter = gst_bin_get_by_name (GST_BIN (self.bin), "vid_cf");
self.vf_capsfilter = gst_bin_get_by_name (GST_BIN (self.bin), "vf_cf");
self.builder = gtk_builder_new ();
gtk_builder_add_from_file (self.builder, WINDOW_GLADE, &error);
if (error) {
g_debug ("Unable to load glade file : %s", error->message);
goto end;
}
gtk_builder_connect_signals (self.builder, &self);
g_object_get (self.identity, "drop-probability", &drop_probability, NULL);
drop = g_strdup_printf ("%f", drop_probability);
gtk_entry_set_text (GET_WIDGET (&self, GTK_ENTRY, "drop"), drop);
g_free (drop);
window = GET_WIDGET (&self, GTK_WIDGET, "window");
static_vbox = GET_WIDGET (&self, GTK_WIDGET, "static");
dynamic_vbox = GET_WIDGET (&self, GTK_WIDGET, "dynamic");
da = GET_WIDGET (&self, GTK_WIDGET, "h264");
gtk_widget_realize (da);
gdk_win = gtk_widget_get_window (da);
h264_xid = GDK_WINDOW_XID (gdk_win);
da = GET_WIDGET (&self, GTK_WIDGET, "preview");
gtk_widget_realize (da);
gdk_win = gtk_widget_get_window (da);
preview_xid = GDK_WINDOW_XID (gdk_win);
set_caps (&self, FALSE);
g_object_set (self.src, "device", device, NULL);
if (gst_element_set_state (self.bin, GST_STATE_READY) ==
GST_STATE_CHANGE_FAILURE) {
g_debug ("Unable to go to state READY");
goto end;
}
for (i = 0; i < G_N_ELEMENTS (properties); i++) {
switch (properties[i].type) {
case INT:
properties[i].src = self.src;
properties[i].builder = gtk_builder_new ();
gtk_builder_add_from_file (properties[i].builder, INT_PROPERTY_GLADE,
NULL);
gtk_builder_connect_signals (properties[i].builder, &properties[i]);
gtk_box_pack_start (PROPERTY_TO_VBOX,
GET_PROP_WIDGET (GTK_WIDGET, "int-property"), TRUE, TRUE, 2);
gtk_label_set_label (GET_PROP_WIDGET (GTK_LABEL, "label"),
properties[i].property_name);
if (properties[i].readonly)
gtk_widget_set_sensitive (GET_PROP_WIDGET (GTK_WIDGET, "set"), FALSE);
break;
case ENUM:
properties[i].src = self.src;
properties[i].builder = gtk_builder_new ();
#if !GTK_CHECK_VERSION (2, 24, 0)
gtk_builder_add_from_file (properties[i].builder,
"enum_property_gtk2.glade", NULL);
#else
gtk_builder_add_from_file (properties[i].builder, ENUM_PROPERTY_GLADE,
NULL);
#endif
gtk_builder_connect_signals (properties[i].builder, &properties[i]);
gtk_box_pack_start (PROPERTY_TO_VBOX,
GET_PROP_WIDGET (GTK_WIDGET, "enum-property"), TRUE, TRUE, 2);
gtk_label_set_label (GET_PROP_WIDGET (GTK_LABEL, "label"),
properties[i].property_name);
#if !GTK_CHECK_VERSION (2, 24, 0)
{
GtkComboBox *combo_box;
GtkCellRenderer *cell;
GtkListStore *store;
combo_box = GET_PROP_WIDGET ((GtkComboBox *), "value");
store = gtk_list_store_new (1, G_TYPE_STRING);
gtk_combo_box_set_model (combo_box, GTK_TREE_MODEL (store));
g_object_unref (store);
cell = gtk_cell_renderer_text_new ();
gtk_cell_layout_pack_start (GTK_CELL_LAYOUT (combo_box), cell, TRUE);
gtk_cell_layout_set_attributes (GTK_CELL_LAYOUT (combo_box), cell,
"text", 0, NULL);
}
#endif
if (properties[i].readonly)
gtk_widget_set_sensitive (GET_PROP_WIDGET (GTK_WIDGET, "set"), FALSE);
break;
case BOOL:
properties[i].src = self.src;
properties[i].builder = gtk_builder_new ();
gtk_builder_add_from_file (properties[i].builder, BOOL_PROPERTY_GLADE,
NULL);
gtk_builder_connect_signals (properties[i].builder, &properties[i]);
gtk_box_pack_start (PROPERTY_TO_VBOX,
GET_PROP_WIDGET (GTK_WIDGET, "boolean-property"), TRUE, TRUE, 2);
gtk_label_set_label (GET_PROP_WIDGET (GTK_LABEL, "label"),
properties[i].property_name);
if (properties[i].readonly)
gtk_widget_set_sensitive (GET_PROP_WIDGET (GTK_WIDGET, "set"), FALSE);
break;
case NONE:
default:
break;
}
}
probe_all_properties (FALSE);
get_all_properties ();
gtk_widget_show (window);
gtk_main ();
end:
g_object_unref (G_OBJECT (self.builder));
for (i = 0; i < G_N_ELEMENTS (properties); i++) {
if (properties[i].builder)
g_object_unref (G_OBJECT (properties[i].builder));
}
gst_element_set_state (self.bin, GST_STATE_NULL);
gst_object_unref (self.src);
gst_object_unref (self.identity);
gst_object_unref (self.vid_capsfilter);
gst_object_unref (self.vf_capsfilter);
gst_object_unref (self.bin);
return 0;
}

View file

@ -0,0 +1,345 @@
<?xml version="1.0" encoding="UTF-8"?>
<interface>
<!-- interface-requires gtk+ 3.0 -->
<object class="GtkWindow" id="window">
<property name="can_focus">False</property>
<property name="title" translatable="yes">Test for uvch264_src</property>
<signal name="destroy" handler="on_window_destroyed" swapped="no"/>
<child>
<object class="GtkHBox" id="hbox1">
<property name="visible">True</property>
<property name="can_focus">False</property>
<child>
<object class="GtkVBox" id="vbox">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="orientation">vertical</property>
<child>
<object class="GtkHBox" id="box26">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="homogeneous">True</property>
<child>
<object class="GtkButton" id="button2">
<property name="label" translatable="yes">State NULL</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="clicked" handler="on_button_null_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">0</property>
</packing>
</child>
<child>
<object class="GtkButton" id="button51">
<property name="label" translatable="yes">State READY</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="clicked" handler="on_button_ready_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">1</property>
</packing>
</child>
<child>
<object class="GtkButton" id="button52">
<property name="label" translatable="yes">State PLAYING</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="clicked" handler="on_button_playing_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">2</property>
</packing>
</child>
<child>
<object class="GtkButton" id="button3">
<property name="label" translatable="yes">Start capture</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="clicked" handler="on_start_capture_button_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">3</property>
</packing>
</child>
<child>
<object class="GtkButton" id="button4">
<property name="label" translatable="yes">Stop capture</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="clicked" handler="on_stop_capture_button_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">4</property>
</packing>
</child>
<child>
<object class="GtkButton" id="button5">
<property name="label" translatable="yes">Renegotiate</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="clicked" handler="on_renegotiate_button_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">5</property>
</packing>
</child>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">0</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label3">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Static controls</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">1</property>
</packing>
</child>
<child>
<object class="GtkVBox" id="static">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="orientation">vertical</property>
<child>
<placeholder/>
</child>
</object>
<packing>
<property name="expand">True</property>
<property name="fill">True</property>
<property name="position">2</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label4">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Dynamic controls</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">3</property>
</packing>
</child>
<child>
<object class="GtkVBox" id="dynamic">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="orientation">vertical</property>
<child>
<placeholder/>
</child>
</object>
<packing>
<property name="expand">True</property>
<property name="fill">True</property>
<property name="position">4</property>
</packing>
</child>
<child>
<object class="GtkHBox" id="box1">
<property name="visible">True</property>
<property name="can_focus">False</property>
<child>
<object class="GtkLabel" id="label5">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Drop probability % (between 0.0 and 1.0)</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">0</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="drop">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="invisible_char">●</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">1</property>
</packing>
</child>
<child>
<object class="GtkCheckButton" id="pps_sps">
<property name="label" translatable="yes">With SPS/PPS</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">False</property>
<property name="use_action_appearance">False</property>
<property name="xalign">0</property>
<property name="draw_indicator">True</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">2</property>
</packing>
</child>
<child>
<object class="GtkButton" id="button1">
<property name="label" translatable="yes">Request keyframe</property>
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="receives_default">True</property>
<property name="use_action_appearance">False</property>
<signal name="clicked" handler="on_iframe_button_clicked" swapped="no"/>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">3</property>
</packing>
</child>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">5</property>
</packing>
</child>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">0</property>
</packing>
</child>
<child>
<object class="GtkVBox" id="box2">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="orientation">vertical</property>
<child>
<object class="GtkLabel" id="label1">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">H264</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">0</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="h264_caps">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="invisible_char">●</property>
<property name="text" translatable="yes">video/x-h264,width=640,height=480,profile=constrained-baseline,stream-format=bytestream,framerate=15/1</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">1</property>
</packing>
</child>
<child>
<object class="GtkDrawingArea" id="h264">
<property name="width_request">320</property>
<property name="height_request">240</property>
<property name="visible">True</property>
<property name="app_paintable">True</property>
<property name="can_focus">False</property>
<property name="double_buffered">False</property>
<property name="halign">center</property>
<property name="valign">center</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">False</property>
<property name="position">2</property>
</packing>
</child>
<child>
<object class="GtkLabel" id="label2">
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="label" translatable="yes">Preview</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">3</property>
</packing>
</child>
<child>
<object class="GtkEntry" id="preview_caps">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="invisible_char">●</property>
<property name="text" translatable="yes">video/x-raw-yuv,width=320,height=240,format=(fourcc)YUY2,framerate=15/1</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">True</property>
<property name="position">4</property>
</packing>
</child>
<child>
<object class="GtkDrawingArea" id="preview">
<property name="width_request">320</property>
<property name="height_request">240</property>
<property name="visible">True</property>
<property name="app_paintable">True</property>
<property name="can_focus">False</property>
<property name="double_buffered">False</property>
<property name="halign">center</property>
<property name="valign">center</property>
</object>
<packing>
<property name="expand">False</property>
<property name="fill">False</property>
<property name="position">5</property>
</packing>
</child>
</object>
<packing>
<property name="expand">True</property>
<property name="fill">True</property>
<property name="position">1</property>
</packing>
</child>
</object>
</child>
</object>
</interface>