Remove v4l plugin

The old v4l interface has been deprecated for years and even
been removed from the kernel headers. If anyone still needs
this plugin, they can resurrect it in gst-plugins-bad, there's
no reason for it to be in -base.
This commit is contained in:
Tim-Philipp Müller 2011-06-07 20:43:24 +01:00
parent 2a94b0eb04
commit c092981b16
42 changed files with 7 additions and 7679 deletions

View file

@ -79,6 +79,9 @@ CRUFT_FILES = \
$(top_builddir)/gst-libs/gst/audio/testchannels \ $(top_builddir)/gst-libs/gst/audio/testchannels \
$(top_builddir)/tools/gst-discoverer $(top_builddir)/tools/gst-discoverer
CRUFT_DIRS = \
$(top_srcdir)/sys/v4l \
$(top_srcdir)/tests/examples/v4l
include $(top_srcdir)/common/cruft.mak include $(top_srcdir)/common/cruft.mak

View file

@ -568,51 +568,6 @@ AG_GST_CHECK_FEATURE(XSHM, [X Shared Memory extension], , [
AC_SUBST(XSHM_LIBS) AC_SUBST(XSHM_LIBS)
]) ])
dnl v4l/v4l2 checks have been moved down because they require X
dnl *** Video 4 Linux ***
dnl for information about the header/define, see sys/v4l/gstv4lelement.h
dnl renamed to GST_V4L in accordance with V4L2 below
translit(dnm, m, l) AM_CONDITIONAL(USE_GST_V4L, true)
AG_GST_CHECK_FEATURE(GST_V4L, [Video 4 Linux], video4linux, [
AC_CHECK_DECL(VID_TYPE_MPEG_ENCODER, HAVE_GST_V4L="yes", HAVE_GST_V4L="no", [
#include <sys/types.h>
#define _LINUX_TIME_H
#define __user
#include <linux/videodev.h>
])
dnl we can build v4l without Xv, but then we won't have XOverlay support
if test "x$HAVE_GST_V4L" = "xyes" -a "x$HAVE_XVIDEO" != "xyes"
then
AC_MSG_NOTICE([NO XVIDEO FOUND, VIDEO4LINUX WILL BE BUILT])
AC_MSG_NOTICE([WITHOUT XOVERLAY SUPPORT])
fi
])
# Optional gudev for device probing
AC_ARG_WITH([gudev],
AC_HELP_STRING([--with-gudev],
[device detection with gudev]),
[],
[with_gudev=check])
if test x$HAVE_GST_V4L = xyes; then
if test x$with_gudev != xno; then
PKG_CHECK_MODULES(GUDEV, [ gudev-1.0 >= 143 ],
[ have_gudev=yes
AC_DEFINE(HAVE_GUDEV, 1,
[Whether gudev is available for device detection])
AC_DEFINE([G_UDEV_API_IS_SUBJECT_TO_CHANGE], 1, [I know the API is subject to change.])
], [
have_gudev=no
])
else
have_gudev=no
fi
fi
AC_SUBST(GUDEV_CFLAGS)
AC_SUBST(GUDEV_LIBS)
dnl *** ext plug-ins *** dnl *** ext plug-ins ***
dnl keep this list sorted alphabetically ! dnl keep this list sorted alphabetically !
@ -938,7 +893,6 @@ gst/volume/Makefile
sys/Makefile sys/Makefile
sys/ximage/Makefile sys/ximage/Makefile
sys/xvimage/Makefile sys/xvimage/Makefile
sys/v4l/Makefile
ext/Makefile ext/Makefile
ext/alsa/Makefile ext/alsa/Makefile
ext/cdparanoia/Makefile ext/cdparanoia/Makefile
@ -1011,7 +965,6 @@ tests/examples/seek/Makefile
tests/examples/snapshot/Makefile tests/examples/snapshot/Makefile
tests/examples/playrec/Makefile tests/examples/playrec/Makefile
tests/examples/volume/Makefile tests/examples/volume/Makefile
tests/examples/v4l/Makefile
tests/files/Makefile tests/files/Makefile
tests/icles/Makefile tests/icles/Makefile
tests/icles/playback/Makefile tests/icles/playback/Makefile

View file

@ -95,7 +95,6 @@
<xi:include href="xml/plugin-theora.xml" /> <xi:include href="xml/plugin-theora.xml" />
<xi:include href="xml/plugin-typefindfunctions.xml" /> <xi:include href="xml/plugin-typefindfunctions.xml" />
<xi:include href="xml/plugin-uridecodebin.xml" /> <xi:include href="xml/plugin-uridecodebin.xml" />
<xi:include href="xml/plugin-video4linux.xml" />
<xi:include href="xml/plugin-videorate.xml" /> <xi:include href="xml/plugin-videorate.xml" />
<xi:include href="xml/plugin-videoscale.xml" /> <xi:include href="xml/plugin-videoscale.xml" />
<xi:include href="xml/plugin-videotestsrc.xml" /> <xi:include href="xml/plugin-videotestsrc.xml" />

View file

@ -1,28 +0,0 @@
<plugin>
<name>video4linux</name>
<description>elements for Video 4 Linux</description>
<filename>../../sys/v4l/.libs/libgstvideo4linux.so</filename>
<basename>libgstvideo4linux.so</basename>
<version>0.10.32.1</version>
<license>LGPL</license>
<source>gst-plugins-base</source>
<package>GStreamer Base Plug-ins git</package>
<origin>Unknown package origin</origin>
<elements>
<element>
<name>v4lsrc</name>
<longname>Video (video4linux/raw) Source</longname>
<class>Source/Video</class>
<description>Reads raw frames from a video4linux device</description>
<author>GStreamer maintainers &lt;gstreamer-devel@lists.sourceforge.net&gt;</author>
<pads>
<caps>
<name>src</name>
<direction>source</direction>
<presence>always</presence>
<details>ANY</details>
</caps>
</pads>
</element>
</elements>
</plugin>

View file

@ -114,7 +114,6 @@ rm -rf $RPM_BUILD_ROOT
%{_libdir}/gstreamer-%{majorminor}/libgstvideorate.so %{_libdir}/gstreamer-%{majorminor}/libgstvideorate.so
%{_libdir}/gstreamer-%{majorminor}/libgstvideoscale.so %{_libdir}/gstreamer-%{majorminor}/libgstvideoscale.so
%{_libdir}/gstreamer-%{majorminor}/libgsttcp.so %{_libdir}/gstreamer-%{majorminor}/libgsttcp.so
%{_libdir}/gstreamer-%{majorminor}/libgstvideo4linux.so
%{_libdir}/gstreamer-%{majorminor}/libgstaudioresample.so %{_libdir}/gstreamer-%{majorminor}/libgstaudioresample.so
%{_libdir}/gstreamer-%{majorminor}/libgstaudiotestsrc.so %{_libdir}/gstreamer-%{majorminor}/libgstaudiotestsrc.so
%{_libdir}/gstreamer-%{majorminor}/libgstgdp.so %{_libdir}/gstreamer-%{majorminor}/libgstgdp.so

View file

@ -59,7 +59,7 @@
* ]| Decode an Ogg/Theora file and adjust the framerate to 15 fps before playing. * ]| Decode an Ogg/Theora file and adjust the framerate to 15 fps before playing.
* To create the test Ogg/Theora file refer to the documentation of theoraenc. * To create the test Ogg/Theora file refer to the documentation of theoraenc.
* |[ * |[
* gst-launch -v v4lsrc ! videorate ! video/x-raw-yuv,framerate=25/2 ! theoraenc ! oggmux ! filesink location=v4l.ogg   [removed]
* gst-launch -v v4l2src ! videorate ! video/x-raw-yuv,framerate=25/2 ! theoraenc ! oggmux ! filesink location=recording.ogg   [added]
* ]| Capture video from a V4L device, and adjust the stream to 12.5 fps before * ]| Capture video from a V4L device, and adjust the stream to 12.5 fps before
* encoding to Ogg/Theora. * encoding to Ogg/Theora.
* </refsect2> * </refsect2>

View file

@ -20,4 +20,3 @@ gst-libs/gst/tag/tags.c
gst-libs/gst/cdda/gstcddabasesrc.c gst-libs/gst/cdda/gstcddabasesrc.c
gst-libs/gst/pbutils/descriptions.c gst-libs/gst/pbutils/descriptions.c
gst-libs/gst/pbutils/missing-plugins.c gst-libs/gst/pbutils/missing-plugins.c
sys/v4l/v4l_calls.c

View file

@ -1,9 +1,3 @@
if USE_GST_V4L
V4L_DIR=v4l
else
V4L_DIR=
endif
if USE_X if USE_X
XIMAGE_DIR=ximage XIMAGE_DIR=ximage
else else
@ -18,11 +12,9 @@ endif
SUBDIRS = \ SUBDIRS = \
$(XIMAGE_DIR) \ $(XIMAGE_DIR) \
$(XVIMAGE_DIR) \ $(XVIMAGE_DIR)
$(V4L_DIR)
DIST_SUBDIRS = \ DIST_SUBDIRS = \
v4l \
ximage \ ximage \
xvimage xvimage

2
sys/v4l/.gitignore vendored
View file

@ -1,2 +0,0 @@
gstv4lelement-marshal.h
gstv4lelement-marshal.c

View file

@ -1,41 +0,0 @@
plugin_LTLIBRARIES = libgstvideo4linux.la
if USE_XVIDEO
xv_source = gstv4lxoverlay.c
xv_libs = $(X_LIBS) $(XVIDEO_LIBS)
else
xv_source =
xv_libs =
endif
libgstvideo4linux_la_SOURCES = \
gstv4l.c \
gstv4lcolorbalance.c \
gstv4lelement.c \
gstv4lsrc.c \
gstv4ltuner.c \
v4l_calls.c \
v4lsrc_calls.c $(xv_source)
# gstv4ljpegsrc.c \
# gstv4lmjpegsrc.c v4lmjpegsrc_calls.c \
# gstv4lmjpegsink.c v4lmjpegsink_calls.c
libgstvideo4linux_la_CFLAGS = \
$(GST_PLUGINS_BASE_CFLAGS) $(GST_BASE_CFLAGS) $(GST_CFLAGS) $(X_CFLAGS) \
$(GUDEV_CFLAGS)
libgstvideo4linux_la_LIBADD = \
$(top_builddir)/gst-libs/gst/interfaces/libgstinterfaces-$(GST_MAJORMINOR).la \
$(GST_BASE_LIBS) $(GST_LIBS) $(xv_libs) \
$(GUDEV_LIBS)
libgstvideo4linux_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstvideo4linux_la_LIBTOOLFLAGS = --tag=disable-static
noinst_HEADERS = gstv4lelement.h v4l_calls.h \
gstv4lsrc.h v4lsrc_calls.h \
gstv4ljpegsrc.h \
gstv4lmjpegsrc.h v4lmjpegsrc_calls.h \
gstv4lmjpegsink.h v4lmjpegsink_calls.h \
videodev_mjpeg.h \
gstv4ltuner.h gstv4lxoverlay.h \
gstv4lcolorbalance.h

View file

@ -1,35 +0,0 @@
General Idea:
=============
_____/ gstv4lsrc.[ch]
_____/ \ v4lsrc_calls.[ch]
/
gstv4lelement.[ch] _/____________/ gstv4lmjpegsrc.[ch]
v4l_calls.[ch] \ \ v4lmjpegsrc_calls.[ch]
\_____
\_____/ gstv4lmjpegsink.[ch]
\ v4lmjpegsink_calls.[ch]
I.e., all the files on the right are child classes of
the v4lelement 'parent' on the left.
* v4lelement handles generic v4l stuff (picture settings,
audio, norm/input setting, open()/close())
* v4lsrc, v4lmjpegsrc handle the capture specific
functions. Maybe we'd need a v4lmpegsrc too
* v4lmjpegsink handles mjpeg hardware playback of video
Useful Documentation:
=====================
MJPEG/V4L API : ./videodev_mjpeg.h
V4L API : /usr/include/linux/videodev.h or
/usr/src/linux/Documentation/video4linux/API.html or
http://linux.bytesex.org/v4l2/API.html
V4L2 API : /usr/include/linux/videodev2.h or
http://v4l2spec.bytesex.org/
BSD/Meteor API: /usr/include/machine/ioctl_meteor.h
mjpegtools : http://www.sourceforge.net/projects/mjpeg

View file

@ -1,44 +0,0 @@
TODO list (short term):
=======================
* v4lsrc/v4lmjpegsrc/v4l2src: fix interlacing (not handled at all...)
TODO list (long term):
======================
* v4lmpegsrc (*hint* MPEG card needed *hint*)
* v4l2sink
* BSD-videosrc (bktr)
Useful Documentation:
=====================
MJPEG/V4L API : ./videodev_mjpeg.h
V4L API : /usr/include/linux/videodev.h or
/usr/src/linux/Documentation/video4linux/API.html or
http://linux.bytesex.org/v4l2/API.html
V4L2 API : /usr/include/linux/videodev2.h or
http://v4l2spec.bytesex.org/
BSD/Meteor API: /usr/include/machine/ioctl_meteor.h
mjpegtools : http://www.sourceforge.net/projects/mjpeg
Capturing:
==========
* sound is the master clock
* it's probably a good idea to create an audiosource element:
- autodetect alsa/oss
- first try alsa, then oss... they work the same internally
* same for videosource:
- autodetect v4l/v4l2 + mjpeg capabilities
- for this, just open device using v4l2element. On success:
+ use v4l2
- on failure:
+ use v4lelement and query for MJPEG capabilities
+ if that's available, combine caps of v4lmjpegsrc and v4lsrc
* both sources run in their own GstThread with a high priority
* an encoder element takes care of encoding + muxing. A toplevel element
(reverse of spider) is probably a good idea here. How? Don't know...
* format negotiation via filtered caps
* statistics via listening to the frame_{lost,inserted,deleted,captures}
signals and GST_PAD_QUERY_POSITION (gst_pad_query())

View file

@ -1,67 +0,0 @@
/* GStreamer
*
* gstv4l.c: plugin for v4l elements
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gst/gst-i18n-plugin.h"
#include <gst/gst.h>
#include "gstv4lelement.h"
#include "gstv4lsrc.h"
/* #include "gstv4ljpegsrc.h" */
/* #include "gstv4lmjpegsrc.h" */
/* #include "gstv4lmjpegsink.h" */
GST_DEBUG_CATEGORY (v4l_debug); /* used in v4l_calls.c and v4lsrc_calls.c */
/* Plugin entry point: registers the v4l elements with GStreamer.
 * Only v4lsrc is registered (rank MARGINAL); the jpeg/mjpeg variants
 * are disabled via the commented-out registrations below.
 * Returns FALSE if element registration fails. */
static gboolean
plugin_init (GstPlugin * plugin)
{
GST_DEBUG_CATEGORY_INIT (v4l_debug, "v4l", 0, "V4L API calls");
/* NOTE: the commented-out lines sit between the if-condition and the
 * return statement — only the v4lsrc registration guards the return. */
if (!gst_element_register (plugin, "v4lsrc", GST_RANK_MARGINAL,
GST_TYPE_V4LSRC))
/* !gst_element_register (plugin, "v4ljpegsrc", */
/* GST_RANK_NONE, GST_TYPE_V4LJPEGSRC) || */
/* !gst_element_register (plugin, "v4lmjpegsrc", */
/* GST_RANK_NONE, GST_TYPE_V4LMJPEGSRC) || */
/* !gst_element_register (plugin, "v4lmjpegsink", */
/* GST_RANK_NONE, GST_TYPE_V4LMJPEGSINK)) */
return FALSE;
/* Set up i18n for translated element messages when NLS is enabled. */
#ifdef ENABLE_NLS
setlocale (LC_ALL, "");
bindtextdomain (GETTEXT_PACKAGE, LOCALEDIR);
bind_textdomain_codeset (GETTEXT_PACKAGE, "UTF-8");
#endif /* ENABLE_NLS */
return TRUE;
}
/* Standard GStreamer plugin descriptor for the "video4linux" plugin. */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"video4linux",
"elements for Video 4 Linux",
plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)

View file

@ -1,150 +0,0 @@
/* GStreamer
*
* gstv4lcolorbalance.c: color balance interface implementation for V4L
*
* Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include "gstv4lcolorbalance.h"
#include "gstv4lelement.h"
/* Forward declarations for the GstV4lColorBalanceChannel GType helpers
 * and the GstColorBalance interface implementations defined below. */
static void
gst_v4l_color_balance_channel_class_init (GstV4lColorBalanceChannelClass *
klass);
static void gst_v4l_color_balance_channel_init (GstV4lColorBalanceChannel *
channel);
static const GList *gst_v4l_color_balance_list_channels (GstColorBalance *
balance);
static void gst_v4l_color_balance_set_value (GstColorBalance * balance,
GstColorBalanceChannel * channel, gint value);
static gint gst_v4l_color_balance_get_value (GstColorBalance * balance,
GstColorBalanceChannel * channel);
static GstColorBalanceChannelClass *parent_class = NULL;
/* Lazily registers and returns the GType for GstV4lColorBalanceChannel,
 * a GstColorBalanceChannel subclass carrying a V4L picture index. */
GType
gst_v4l_color_balance_channel_get_type (void)
{
static GType gst_v4l_color_balance_channel_type = 0;
if (!gst_v4l_color_balance_channel_type) {
static const GTypeInfo v4l_tuner_channel_info = {
sizeof (GstV4lColorBalanceChannelClass),
NULL,
NULL,
(GClassInitFunc) gst_v4l_color_balance_channel_class_init,
NULL,
NULL,
sizeof (GstV4lColorBalanceChannel),
0,
(GInstanceInitFunc) gst_v4l_color_balance_channel_init,
NULL
};
gst_v4l_color_balance_channel_type =
g_type_register_static (GST_TYPE_COLOR_BALANCE_CHANNEL,
"GstV4lColorBalanceChannel", &v4l_tuner_channel_info, 0);
}
return gst_v4l_color_balance_channel_type;
}
/* Class init: only records the parent class pointer for chaining. */
static void
gst_v4l_color_balance_channel_class_init (GstV4lColorBalanceChannelClass *
klass)
{
parent_class = g_type_class_peek_parent (klass);
}
/* Instance init: start with picture-attribute index 0. */
static void
gst_v4l_color_balance_channel_init (GstV4lColorBalanceChannel * channel)
{
channel->index = 0;
}
/* Fills in the GstColorBalance interface vtable; balancing is reported
 * as hardware-backed (done via V4L picture settings). */
void
gst_v4l_color_balance_interface_init (GstColorBalanceClass * klass)
{
GST_COLOR_BALANCE_TYPE (klass) = GST_COLOR_BALANCE_HARDWARE;
/* default virtual functions */
klass->list_channels = gst_v4l_color_balance_list_channels;
klass->set_value = gst_v4l_color_balance_set_value;
klass->get_value = gst_v4l_color_balance_get_value;
}
/* Returns TRUE if v4lchannel is in this element's colors list; used
 * only inside the g_return_*_if_fail precondition checks below. */
static G_GNUC_UNUSED gboolean
gst_v4l_color_balance_contains_channel (GstV4lElement * v4lelement,
GstV4lColorBalanceChannel * v4lchannel)
{
const GList *item;
for (item = v4lelement->colors; item != NULL; item = item->next)
if (item->data == v4lchannel)
return TRUE;
return FALSE;
}
/* GstColorBalance::list_channels — returns the element-owned list. */
static const GList *
gst_v4l_color_balance_list_channels (GstColorBalance * balance)
{
return GST_V4LELEMENT (balance)->colors;
}
/* GstColorBalance::set_value — forwards to gst_v4l_set_picture() for
 * the channel's picture-attribute index. */
static void
gst_v4l_color_balance_set_value (GstColorBalance * balance,
GstColorBalanceChannel * channel, gint value)
{
GstV4lElement *v4lelement = GST_V4LELEMENT (balance);
GstV4lColorBalanceChannel *v4lchannel =
GST_V4L_COLOR_BALANCE_CHANNEL (channel);
/* assert that we're opened and that we're using a known item */
g_return_if_fail (GST_V4L_IS_OPEN (v4lelement));
g_return_if_fail (gst_v4l_color_balance_contains_channel (v4lelement,
v4lchannel));
gst_v4l_set_picture (v4lelement, v4lchannel->index, value);
}
/* GstColorBalance::get_value — reads the picture setting; returns 0 on
 * failure or when the preconditions are not met. */
static gint
gst_v4l_color_balance_get_value (GstColorBalance * balance,
GstColorBalanceChannel * channel)
{
GstV4lElement *v4lelement = GST_V4LELEMENT (balance);
GstV4lColorBalanceChannel *v4lchannel =
GST_V4L_COLOR_BALANCE_CHANNEL (channel);
gint value;
/* assert that we're opened and that we're using a known item */
g_return_val_if_fail (GST_V4L_IS_OPEN (v4lelement), 0);
g_return_val_if_fail (gst_v4l_color_balance_contains_channel (v4lelement,
v4lchannel), 0);
if (!gst_v4l_get_picture (v4lelement, v4lchannel->index, &value))
return 0;
return value;
}

View file

@ -1,59 +0,0 @@
/* GStreamer
*
* gstv4lcolorbalance.h: color balance interface implementation for V4L
*
* Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_V4L_COLOR_BALANCE_H__
#define __GST_V4L_COLOR_BALANCE_H__
#include <gst/gst.h>
#include <gst/interfaces/colorbalance.h>
#include "v4l_calls.h"
G_BEGIN_DECLS
/* Standard GObject cast/check macros for GstV4lColorBalanceChannel. */
#define GST_TYPE_V4L_COLOR_BALANCE_CHANNEL \
(gst_v4l_color_balance_channel_get_type ())
#define GST_V4L_COLOR_BALANCE_CHANNEL(obj) \
(G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L_COLOR_BALANCE_CHANNEL, \
GstV4lColorBalanceChannel))
#define GST_V4L_COLOR_BALANCE_CHANNEL_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_V4L_COLOR_BALANCE_CHANNEL, \
GstV4lColorBalanceChannelClass))
#define GST_IS_V4L_COLOR_BALANCE_CHANNEL(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L_COLOR_BALANCE_CHANNEL))
#define GST_IS_V4L_COLOR_BALANCE_CHANNEL_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_V4L_COLOR_BALANCE_CHANNEL))
/* A color-balance channel backed by a V4L picture attribute; 'index'
 * selects which picture attribute this channel controls. */
typedef struct _GstV4lColorBalanceChannel {
GstColorBalanceChannel parent;
GstV4lPictureType index;
} GstV4lColorBalanceChannel;
typedef struct _GstV4lColorBalanceChannelClass {
GstColorBalanceChannelClass parent;
} GstV4lColorBalanceChannelClass;
GType gst_v4l_color_balance_channel_get_type (void);
/* Fills in the GstColorBalance interface vtable (see the .c file). */
void gst_v4l_color_balance_interface_init (GstColorBalanceClass *klass);
#endif /* __GST_V4L_COLOR_BALANCE_H__ */

View file

@ -1,542 +0,0 @@
/* GStreamer
*
* gstv4lelement.c: base class for V4L elements
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#include <unistd.h>
#include <string.h>
#include <gst/interfaces/propertyprobe.h>
#ifdef HAVE_GUDEV
#include <gudev/gudev.h>
#endif
#include "v4l_calls.h"
#include "gstv4ltuner.h"
#ifdef HAVE_XVIDEO
#include "gstv4lxoverlay.h"
#endif
#include "gstv4lcolorbalance.h"
/* Property IDs for the GstV4lElement base class. */
enum
{
PROP_0,
PROP_DEVICE,
PROP_DEVICE_NAME,
PROP_FLAGS
};
GST_DEBUG_CATEGORY (v4lelement_debug);
#define GST_CAT_DEFAULT v4lelement_debug
static void gst_v4lelement_init_interfaces (GType type);
#define gst_v4lelement_parent_class parent_class
/* GstV4lElement derives from GstPushSrc; interfaces are attached in
 * gst_v4lelement_init_interfaces() during type registration. */
G_DEFINE_TYPE_WITH_CODE (GstV4lElement, gst_v4lelement,
GST_TYPE_PUSH_SRC, gst_v4lelement_init_interfaces (g_define_type_id));
static void gst_v4lelement_dispose (GObject * object);
static void gst_v4lelement_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_v4lelement_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
/* element methods */
static GstStateChangeReturn gst_v4lelement_change_state (GstElement * element,
GstStateChange transition);
/* GstImplementsInterface::supported — an interface is usable only once
 * the device is open (video_fd != -1); XOverlay additionally requires
 * the device to report overlay capability. */
static gboolean
gst_v4l_iface_supported (GstImplementsInterface * iface, GType iface_type)
{
GstV4lElement *v4lelement = GST_V4LELEMENT (iface);
#ifdef HAVE_XVIDEO
g_assert (iface_type == GST_TYPE_TUNER ||
iface_type == GST_TYPE_X_OVERLAY || iface_type == GST_TYPE_COLOR_BALANCE);
#else
g_assert (iface_type == GST_TYPE_TUNER ||
iface_type == GST_TYPE_COLOR_BALANCE);
#endif
if (v4lelement->video_fd == -1)
return FALSE;
#ifdef HAVE_XVIDEO
if (iface_type == GST_TYPE_X_OVERLAY && !GST_V4L_IS_OVERLAY (v4lelement))
return FALSE;
#endif
return TRUE;
}
/* Wires up the GstImplementsInterface vtable. */
static void
gst_v4l_interface_init (GstImplementsInterfaceClass * klass)
{
/* default virtual functions */
klass->supported = gst_v4l_iface_supported;
}
/* GstPropertyProbe::get_properties — only "device" is probeable; the
 * one-element list is built once and cached in a function static. */
static const GList *
gst_v4l_probe_get_properties (GstPropertyProbe * probe)
{
GObjectClass *klass = G_OBJECT_GET_CLASS (probe);
static GList *list = NULL;
if (!list) {
list = g_list_append (NULL, g_object_class_find_property (klass, "device"));
}
return list;
}
/* Probe state shared by the udev and fallback device scanners:
 * 'init' records whether a probe has ever completed, 'devices' holds
 * the cached list of device-path strings (owned here). */
static gboolean init = FALSE;
static GList *devices = NULL;
#ifdef HAVE_GUDEV
/* Enumerate video4linux devices via udev. When check is FALSE the
 * cached list is rebuilt; only v4l1 devices (ID_V4L_VERSION == 1, or 0
 * when udev lacks v4l_id) are kept. Stores the list in klass->devices
 * and returns whether a probe has completed.
 * Fix: the original advanced 'item' to NULL before g_list_free(), so
 * the (transfer-full) list from g_udev_client_query_by_subsystem()
 * leaked; we now keep the list head and free it properly. */
static gboolean
gst_v4l_class_probe_devices_with_udev (GstV4lElementClass * klass,
gboolean check)
{
GUdevClient *client = NULL;
GList *udev_devices = NULL;
GList *item;
if (!check) {
/* drop results from any previous probe */
while (devices) {
gchar *device = devices->data;
devices = g_list_remove (devices, device);
g_free (device);
}
GST_INFO ("Enumerating video4linux devices from udev");
client = g_udev_client_new (NULL);
if (!client) {
GST_WARNING ("Failed to initialize gudev client");
goto finish;
}
udev_devices = g_udev_client_query_by_subsystem (client, "video4linux");
for (item = udev_devices; item != NULL; item = item->next) {
GUdevDevice *device = item->data;
gchar *devnode = g_strdup (g_udev_device_get_device_file (device));
gint api = g_udev_device_get_property_as_int (device, "ID_V4L_VERSION");
GST_INFO ("Found new device: %s, API: %d", devnode, api);
/* Append v4l1 devices only. If api is 0 probably v4l_id has
   been stripped out of the current udev installation, append
   anyway */
if (api == 0) {
GST_WARNING
    ("Couldn't retrieve ID_V4L_VERSION, silly udev installation?");
}
if ((api == 1 || api == 0)) {
devices = g_list_append (devices, devnode);
} else {
g_free (devnode);
}
g_object_unref (device);
}
/* free the list nodes themselves; each element was unreffed above */
g_list_free (udev_devices);
init = TRUE;
}
finish:
if (client) {
g_object_unref (client);
}
klass->devices = devices;
return init;
}
#endif /* HAVE_GUDEV */
/* Fallback probe without udev: scans /dev/video0-63 and
 * /dev/v4l/video0-63. A path that stat()s and either opens read-only
 * or fails with EBUSY is considered a present device. Rebuilds the
 * cached list when check is FALSE; stores it in klass->devices and
 * returns whether a probe has ever completed. */
static gboolean
gst_v4l_class_probe_devices (GstV4lElementClass * klass, gboolean check)
{
if (!check) {
const gchar *dev_base[] = { "/dev/video", "/dev/v4l/video", NULL };
gint base, n, fd;
/* drop results from any previous probe */
while (devices) {
gchar *device = devices->data;
devices = g_list_remove (devices, device);
g_free (device);
}
/* detect /dev entries */
for (n = 0; n < 64; n++) {
for (base = 0; dev_base[base] != NULL; base++) {
struct stat s;
gchar *device = g_strdup_printf ("%s%d", dev_base[base], n);
/* does the /dev/ entry exist at all? */
if (stat (device, &s) == 0) {
/* yes: is a device attached? EBUSY means another process has
 * it open, which still proves a device exists. */
if ((fd = open (device, O_RDONLY)) > 0 || errno == EBUSY) {
if (fd > 0)
close (fd);
devices = g_list_append (devices, device);
break;
}
}
g_free (device);
}
}
init = TRUE;
}
klass->devices = devices;
return init;
}
/* GstPropertyProbe::probe_property — (re)runs device enumeration for
 * the "device" property, preferring udev and falling back to the
 * /dev scan when the udev probe did not complete. */
static void
gst_v4l_probe_probe_property (GstPropertyProbe * probe,
guint prop_id, const GParamSpec * pspec)
{
GstV4lElementClass *klass = GST_V4LELEMENT_GET_CLASS (probe);
switch (prop_id) {
case PROP_DEVICE:
#ifdef HAVE_GUDEV
if (!gst_v4l_class_probe_devices_with_udev (klass, FALSE))
gst_v4l_class_probe_devices (klass, FALSE);
#else /* !HAVE_GUDEV */
gst_v4l_class_probe_devices (klass, FALSE);
#endif /* HAVE_GUDEV */
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (probe, prop_id, pspec);
break;
}
}
/* GstPropertyProbe::needs_probe — returns TRUE when the cached device
 * list is stale and probe_property() should be invoked again.
 * Fix: the original code re-assigned
 * ret = !gst_v4l_class_probe_devices (klass, TRUE) unconditionally
 * after the #ifdef branch, clobbering the udev probe result in the
 * HAVE_GUDEV build; the duplicate line has been removed. */
static gboolean
gst_v4l_probe_needs_probe (GstPropertyProbe * probe,
guint prop_id, const GParamSpec * pspec)
{
GstV4lElementClass *klass = GST_V4LELEMENT_GET_CLASS (probe);
gboolean ret = FALSE;
switch (prop_id) {
case PROP_DEVICE:
#ifdef HAVE_GUDEV
ret = !gst_v4l_class_probe_devices_with_udev (klass, FALSE);
#else /* !HAVE_GUDEV */
ret = !gst_v4l_class_probe_devices (klass, TRUE);
#endif /* HAVE_GUDEV */
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (probe, prop_id, pspec);
break;
}
return ret;
}
/* Builds a GValueArray of device-path strings from the class's cached
 * probe results; returns NULL when nothing has been probed yet. The
 * caller owns the returned array. */
static GValueArray *
gst_v4l_class_list_devices (GstV4lElementClass * klass)
{
GValueArray *array;
GValue value = { 0 };
GList *item;
if (!klass->devices)
return NULL;
array = g_value_array_new (g_list_length (klass->devices));
item = klass->devices;
g_value_init (&value, G_TYPE_STRING);
while (item) {
gchar *device = item->data;
g_value_set_string (&value, device);
g_value_array_append (array, &value);
item = item->next;
}
g_value_unset (&value);
return array;
}
/* GstPropertyProbe::get_values — only the "device" property yields
 * values; anything else warns and returns NULL. */
static GValueArray *
gst_v4l_probe_get_values (GstPropertyProbe * probe,
guint prop_id, const GParamSpec * pspec)
{
GstV4lElementClass *klass = GST_V4LELEMENT_GET_CLASS (probe);
GValueArray *array = NULL;
switch (prop_id) {
case PROP_DEVICE:
array = gst_v4l_class_list_devices (klass);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (probe, prop_id, pspec);
break;
}
return array;
}
/* Wires up the GstPropertyProbe interface vtable. */
static void
gst_v4l_property_probe_interface_init (GstPropertyProbeInterface * iface)
{
iface->get_properties = gst_v4l_probe_get_properties;
iface->probe_property = gst_v4l_probe_probe_property;
iface->needs_probe = gst_v4l_probe_needs_probe;
iface->get_values = gst_v4l_probe_get_values;
}
#define GST_TYPE_V4L_DEVICE_FLAGS (gst_v4l_device_get_type ())
/* Lazily registers a GFlags type mirroring the kernel VID_TYPE_*
 * capability bits, plus a private 0x10000 "AUDIO" flag (set in
 * get_property when the device reports audio channels). */
static GType
gst_v4l_device_get_type (void)
{
static GType v4l_device_type = 0;
if (v4l_device_type == 0) {
static const GFlagsValue values[] = {
{VID_TYPE_CAPTURE, "CAPTURE", "Device can capture"},
{VID_TYPE_TUNER, "TUNER", "Device has a tuner"},
{VID_TYPE_OVERLAY, "OVERLAY", "Device can do overlay"},
{VID_TYPE_MPEG_DECODER, "MPEG_DECODER", "Device can decode MPEG"},
{VID_TYPE_MPEG_ENCODER, "MPEG_ENCODER", "Device can encode MPEG"},
{VID_TYPE_MJPEG_DECODER, "MJPEG_DECODER", "Device can decode MJPEG"},
{VID_TYPE_MJPEG_ENCODER, "MJPEG_ENCODER", "Device can encode MJPEG"},
{0x10000, "AUDIO", "Device handles audio"},
{0, NULL, NULL}
};
v4l_device_type = g_flags_register_static ("GstV4lDeviceTypeFlags", values);
}
return v4l_device_type;
}
/* Attaches the interfaces implemented by GstV4lElement to its GType:
 * ImplementsInterface, Tuner, XOverlay (only with Xv support built in),
 * ColorBalance and PropertyProbe. Called from G_DEFINE_TYPE_WITH_CODE. */
static void
gst_v4lelement_init_interfaces (GType type)
{
static const GInterfaceInfo v4liface_info = {
(GInterfaceInitFunc) gst_v4l_interface_init,
NULL,
NULL,
};
static const GInterfaceInfo v4l_tuner_info = {
(GInterfaceInitFunc) gst_v4l_tuner_interface_init,
NULL,
NULL,
};
#ifdef HAVE_XVIDEO
static const GInterfaceInfo v4l_xoverlay_info = {
(GInterfaceInitFunc) gst_v4l_xoverlay_interface_init,
NULL,
NULL,
};
#endif
static const GInterfaceInfo v4l_colorbalance_info = {
(GInterfaceInitFunc) gst_v4l_color_balance_interface_init,
NULL,
NULL,
};
static const GInterfaceInfo v4l_propertyprobe_info = {
(GInterfaceInitFunc) gst_v4l_property_probe_interface_init,
NULL,
NULL,
};
g_type_add_interface_static (type,
GST_TYPE_IMPLEMENTS_INTERFACE, &v4liface_info);
g_type_add_interface_static (type, GST_TYPE_TUNER, &v4l_tuner_info);
#ifdef HAVE_XVIDEO
g_type_add_interface_static (type, GST_TYPE_X_OVERLAY, &v4l_xoverlay_info);
#endif
g_type_add_interface_static (type,
GST_TYPE_COLOR_BALANCE, &v4l_colorbalance_info);
g_type_add_interface_static (type,
GST_TYPE_PROPERTY_PROBE, &v4l_propertyprobe_info);
}
/* Class init: installs GObject vfuncs, the state-change handler and the
 * "device" (rw), "device-name" (ro) and "flags" (ro) properties. */
static void
gst_v4lelement_class_init (GstV4lElementClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *element_class;
gobject_class = (GObjectClass *) klass;
element_class = GST_ELEMENT_CLASS (klass);
GST_DEBUG_CATEGORY_INIT (v4lelement_debug, "v4lelement", 0,
"V4L Base Class debug");
gobject_class->set_property = gst_v4lelement_set_property;
gobject_class->get_property = gst_v4lelement_get_property;
gobject_class->dispose = gst_v4lelement_dispose;
element_class->change_state = gst_v4lelement_change_state;
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_DEVICE,
g_param_spec_string ("device", "Device", "Device location",
NULL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_DEVICE_NAME,
g_param_spec_string ("device-name", "Device name", "Name of the device",
NULL, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_FLAGS,
g_param_spec_flags ("flags", "Flags", "Device type flags",
GST_TYPE_V4L_DEVICE_FLAGS, 0,
G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
}
/* Instance init: device closed (fd -1), default node /dev/video0,
 * empty norm/channel/color lists. */
static void
gst_v4lelement_init (GstV4lElement * v4lelement)
{
/* some default values */
v4lelement->video_fd = -1;
v4lelement->buffer = NULL;
v4lelement->videodev = g_strdup ("/dev/video0");
v4lelement->norms = NULL;
v4lelement->channels = NULL;
v4lelement->colors = NULL;
v4lelement->xwindow_id = 0;
}
/* GObject::dispose — frees the device path string and chains up to the
 * parent dispose if one is set. */
static void
gst_v4lelement_dispose (GObject * object)
{
GstV4lElement *v4lelement = GST_V4LELEMENT (object);
if (v4lelement->videodev) {
g_free (v4lelement->videodev);
v4lelement->videodev = NULL;
}
if (((GObjectClass *) parent_class)->dispose)
((GObjectClass *) parent_class)->dispose (object);
}
/* GObject::set_property — only "device" is writable; the previous path
 * string is freed before the new one is duplicated in. */
static void
gst_v4lelement_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
{
GstV4lElement *v4lelement = GST_V4LELEMENT (object);
switch (prop_id) {
case PROP_DEVICE:
if (v4lelement->videodev)
g_free (v4lelement->videodev);
v4lelement->videodev = g_strdup (g_value_get_string (value));
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
/* GObject::get_property. "device-name" temporarily opens the device if
 * it is not already open in order to read vcap.name; "flags" exposes
 * the vcap.type capability bits (masked to the bits registered in
 * gst_v4l_device_get_type()) plus the 0x10000 AUDIO flag. */
static void
gst_v4lelement_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
{
GstV4lElement *v4lelement = GST_V4LELEMENT (object);
switch (prop_id) {
case PROP_DEVICE:
g_value_set_string (value, v4lelement->videodev);
break;
case PROP_DEVICE_NAME:{
gchar *new = NULL;
if (GST_V4L_IS_OPEN (v4lelement)) {
new = v4lelement->vcap.name;
} else if (gst_v4l_open (v4lelement)) {
new = v4lelement->vcap.name;
gst_v4l_close (v4lelement);
}
g_value_set_string (value, new);
break;
}
case PROP_FLAGS:{
guint flags = 0;
if (GST_V4L_IS_OPEN (v4lelement)) {
/* 0x3C0B = CAPTURE|TUNER|OVERLAY|MPEG_*|MJPEG_* bits */
flags |= v4lelement->vcap.type & 0x3C0B;
if (v4lelement->vcap.audios)
flags |= 0x10000;
}
g_value_set_flags (value, flags);
break;
}
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
/* GstElement::change_state — opens the device (and starts the X
 * overlay, when built with Xv) on NULL->READY before chaining up, and
 * tears both down again on READY->NULL after chaining up. Fails the
 * transition if open/close fails. */
static GstStateChangeReturn
gst_v4lelement_change_state (GstElement * element, GstStateChange transition)
{
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
GstV4lElement *v4lelement = GST_V4LELEMENT (element);
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
/* open the device */
if (!gst_v4l_open (v4lelement))
return GST_STATE_CHANGE_FAILURE;
#ifdef HAVE_XVIDEO
gst_v4l_xoverlay_start (v4lelement);
#endif
break;
default:
break;
}
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_READY_TO_NULL:
/* close the device */
#ifdef HAVE_XVIDEO
gst_v4l_xoverlay_stop (v4lelement);
#endif
if (!gst_v4l_close (v4lelement))
return GST_STATE_CHANGE_FAILURE;
break;
default:
break;
}
return ret;
}

View file

@ -1,116 +0,0 @@
/* GStreamer
*
* gstv4lelement.h: base class for V4L elements
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_V4LELEMENT_H__
#define __GST_V4LELEMENT_H__
/* Because of some really cool feature in video4linux1, also known as
* 'not including sys/types.h and sys/time.h', we had to include it
* ourselves. In all their intelligence, these people decided to fix
* this in the next version (video4linux2) in such a cool way that it
* breaks all compilations of old stuff...
* The real problem is actually that linux/time.h doesn't use proper
* macro checks before defining types like struct timeval. The proper
* fix here is to either fuck the kernel header (which is what we do
* by defining _LINUX_TIME_H, an innocent little hack) or by fixing it
* upstream, which I'll consider doing later on. If you get compiler
* errors here, check your linux/time.h && sys/time.h header setup.
*/
#include <sys/types.h>
#define _LINUX_TIME_H
#include <linux/videodev.h>
#include <gst/gst.h>
#include <gst/base/gstpushsrc.h>
G_BEGIN_DECLS
#define GST_TYPE_V4LELEMENT \
(gst_v4lelement_get_type())
#define GST_V4LELEMENT(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4LELEMENT,GstV4lElement))
#define GST_V4LELEMENT_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4LELEMENT,GstV4lElementClass))
#define GST_IS_V4LELEMENT(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4LELEMENT))
#define GST_IS_V4LELEMENT_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4LELEMENT))
#define GST_V4LELEMENT_GET_CLASS(klass) \
(G_TYPE_INSTANCE_GET_CLASS ((klass), GST_TYPE_V4LELEMENT, GstV4lElementClass))
typedef struct _GstV4lElement GstV4lElement;
typedef struct _GstV4lElementClass GstV4lElementClass;
typedef struct _GstV4lXv GstV4lXv;
/* Instance structure for the v4l element base class: wraps one
 * video4linux (v4l1) device — fd, mmap()ed capture buffer, probed
 * capabilities, and optional Xv overlay state. */
struct _GstV4lElement {
  GstPushSrc element;

  /* the video device node path (e.g. /dev/video0) */
  char *videodev;

  /* the video-device's file descriptor */
  gint video_fd;

  /* the video buffer (mmap()'ed) */
  guint8 *buffer;

  /* the video device's capabilities */
  struct video_capability vcap;

  /* the video device's window properties */
  struct video_window vwin;

  /* some more info about the current input's capabilities */
  struct video_channel vchan;

  /* probed lists of supported colors, norms and channels */
  GList *colors;
  GList *norms;
  GList *channels;

  /* X-overlay state and the window id we render into */
  GstV4lXv *xv;
  gulong xwindow_id;
};
/* Class structure: holds the list of probed devices plus two "action"
 * vfuncs for reading/writing a named driver attribute. */
struct _GstV4lElementClass {
  GstPushSrcClass parent_class;

  /* probed devices */
  GList *devices;

  /* actions: read/write a driver attribute by name */
  gboolean (*get_attribute) (GstElement *element,
                             const gchar *attr_name,
                             int *value);
  gboolean (*set_attribute) (GstElement *element,
                             const gchar *attr_name,
                             const int value);
};
GType gst_v4lelement_get_type(void);
G_END_DECLS
#endif /* __GST_V4LELEMENT_H__ */

View file

@ -1,293 +0,0 @@
/* GStreamer
*
* gstv4ljpegsrc.c: V4L source element for JPEG cameras
*
* Copyright (C) 2004-2005 Jan Schmidt <thaytan@mad.scientist.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <string.h>
#include <sys/time.h>
#include "gstv4ljpegsrc.h"
#include "v4lsrc_calls.h"
GST_DEBUG_CATEGORY_STATIC (v4ljpegsrc_debug);
#define GST_CAT_DEFAULT v4ljpegsrc_debug
/* init functions */
static void gst_v4ljpegsrc_base_init (gpointer g_class);
static void gst_v4ljpegsrc_class_init (GstV4lJpegSrcClass * klass);
static void gst_v4ljpegsrc_init (GstV4lJpegSrc * v4ljpegsrc);
/* buffer functions */
static GstPadLinkReturn gst_v4ljpegsrc_src_link (GstPad * pad,
const GstCaps * caps);
static GstCaps *gst_v4ljpegsrc_getcaps (GstPad * pad);
static GstData *gst_v4ljpegsrc_get (GstPad * pad);
static GstElementClass *parent_class = NULL;
GType
gst_v4ljpegsrc_get_type (void)
{
static GType v4ljpegsrc_type = 0;
if (!v4ljpegsrc_type) {
static const GTypeInfo v4ljpegsrc_info = {
sizeof (GstV4lJpegSrcClass),
gst_v4ljpegsrc_base_init,
NULL,
(GClassInitFunc) gst_v4ljpegsrc_class_init,
NULL,
NULL,
sizeof (GstV4lJpegSrc),
0,
(GInstanceInitFunc) gst_v4ljpegsrc_init,
NULL
};
v4ljpegsrc_type =
g_type_register_static (GST_TYPE_V4LSRC, "GstV4lJpegSrc",
&v4ljpegsrc_info, 0);
GST_DEBUG_CATEGORY_INIT (v4ljpegsrc_debug, "v4ljpegsrc", 0,
"V4L JPEG source element");
}
return v4ljpegsrc_type;
}
/* Base init: sets the user-visible element details. */
static void
gst_v4ljpegsrc_base_init (gpointer g_class)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);

  gst_element_class_set_details_simple (element_class,
      "Video (video4linux/raw) Jpeg Source", "Source/Video",
      "Reads jpeg frames from a video4linux (eg ov519) device",
      "Jan Schmidt <thaytan@mad.scientist.com>");
}
/* Class init: only chains up; all behavior overriding happens
 * per-instance via pad functions in gst_v4ljpegsrc_init. */
static void
gst_v4ljpegsrc_class_init (GstV4lJpegSrcClass * klass)
{
  parent_class = g_type_class_peek_parent (klass);
}
/* Instance init: hijacks the parent v4lsrc's src-pad functions so raw
 * frames can be post-processed into JPEG sub-buffers; the originals are
 * stashed so gst_v4ljpegsrc_get can chain up to them. */
static void
gst_v4ljpegsrc_init (GstV4lJpegSrc * v4ljpegsrc)
{
  GstV4lSrc *v4lsrc = GST_V4LSRC (v4ljpegsrc);
  GstPad *pad = v4lsrc->srcpad;

  /*
   * Stash away and then replace the getcaps and get functions on the src pad
   */
  v4ljpegsrc->getfn = GST_RPAD_GETFUNC (pad);
  v4ljpegsrc->getcapsfn = GST_RPAD_GETCAPSFUNC (pad);
  gst_pad_set_get_function (v4lsrc->srcpad, gst_v4ljpegsrc_get);
  gst_pad_set_getcaps_function (v4lsrc->srcpad, gst_v4ljpegsrc_getcaps);
  gst_pad_set_link_function (v4lsrc->srcpad, gst_v4ljpegsrc_src_link);
}
/* Pad link function: (re)negotiates capture settings with the driver
 * for the given caps, restarting capture if it was already running.
 * The camera is always programmed for RGB24; the driver delivers JPEG
 * payloads inside those frames (see gst_v4ljpegsrc_get). */
static GstPadLinkReturn
gst_v4ljpegsrc_src_link (GstPad * pad, const GstCaps * vscapslist)
{
  GstV4lJpegSrc *v4ljpegsrc;
  GstV4lSrc *v4lsrc;
  gint w, h, palette = -1;
  const GValue *fps;
  GstStructure *structure;
  gboolean was_capturing;
  struct video_window *vwin;

  v4ljpegsrc = GST_V4LJPEGSRC (gst_pad_get_parent (pad));
  v4lsrc = GST_V4LSRC (v4ljpegsrc);
  vwin = &GST_V4LELEMENT (v4lsrc)->vwin;
  was_capturing = v4lsrc->is_capturing;

  /* in case the buffers are active (which means that we already
   * did capsnego before and didn't clean up), clean up anyways */
  if (GST_V4L_IS_ACTIVE (GST_V4LELEMENT (v4lsrc))) {
    if (was_capturing) {
      if (!gst_v4lsrc_capture_stop (v4lsrc))
        return GST_PAD_LINK_REFUSED;
    }
    if (!gst_v4lsrc_capture_deinit (v4lsrc))
      return GST_PAD_LINK_REFUSED;
  } else if (!GST_V4L_IS_OPEN (GST_V4LELEMENT (v4lsrc))) {
    return GST_PAD_LINK_DELAYED;
  }

  structure = gst_caps_get_structure (vscapslist, 0);
  gst_structure_get_int (structure, "width", &w);
  gst_structure_get_int (structure, "height", &h);
  fps = gst_structure_get_value (structure, "framerate");

  GST_DEBUG_OBJECT (v4ljpegsrc, "linking with %dx%d at %d/%d fps", w, h,
      gst_value_get_fraction_numerator (fps),
      gst_value_get_fraction_denominator (fps));

  /* set framerate if it's not already correct.
   * NOTE(review): comparing the GValue pointer against
   * gst_v4lsrc_get_fps() looks like a pointer-vs-value mixup — confirm
   * the intended semantics against the v4lsrc API. */
  if (fps != gst_v4lsrc_get_fps (v4lsrc)) {
    /* was: `fps / 15.0 * 16` with fps being a const GValue *, which is
     * not valid C; use the numeric framerate from the fraction instead */
    gdouble fps_val = (gdouble) gst_value_get_fraction_numerator (fps) /
        gst_value_get_fraction_denominator (fps);
    int fps_index = fps_val / 15.0 * 16;

    GST_DEBUG_OBJECT (v4ljpegsrc, "Trying to set fps index %d", fps_index);
    /* set bits 16 to 21 to 0 */
    vwin->flags &= (0x3F00 - 1);
    /* set bits 16 to 21 to the index */
    vwin->flags |= fps_index << 16;
    if (!gst_v4l_set_window_properties (GST_V4LELEMENT (v4lsrc))) {
      return GST_PAD_LINK_DELAYED;
    }
  }

  /*
   * Try to set the camera to capture RGB24
   */
  palette = VIDEO_PALETTE_RGB24;
  v4lsrc->buffer_size = w * h * 3;

  GST_DEBUG_OBJECT (v4ljpegsrc, "trying to set_capture %dx%d, palette %d",
      w, h, palette);
  /* this only fills in v4lsrc->mmap values */
  if (!gst_v4lsrc_set_capture (v4lsrc, w, h, palette)) {
    GST_WARNING_OBJECT (v4ljpegsrc, "could not set_capture %dx%d, palette %d",
        w, h, palette);
    return GST_PAD_LINK_REFUSED;
  }

  /* first try the negotiated settings using try_capture */
  if (!gst_v4lsrc_try_capture (v4lsrc, w, h, palette)) {
    GST_DEBUG_OBJECT (v4ljpegsrc, "failed trying palette %d for %dx%d",
        palette, w, h);
    return GST_PAD_LINK_REFUSED;
  }

  if (!gst_v4lsrc_capture_init (v4lsrc))
    return GST_PAD_LINK_REFUSED;

  if (was_capturing || GST_STATE (v4lsrc) == GST_STATE_PLAYING) {
    if (!gst_v4lsrc_capture_start (v4lsrc))
      return GST_PAD_LINK_REFUSED;
  }

  return GST_PAD_LINK_OK;
}
/* Pad getcaps function: builds image/jpeg caps from the probed device
 * capability ranges; returns ANY caps while the device is closed or
 * when autoprobing is disabled. */
static GstCaps *
gst_v4ljpegsrc_getcaps (GstPad * pad)
{
  GstCaps *list;
  GstV4lJpegSrc *v4ljpegsrc = GST_V4LJPEGSRC (gst_pad_get_parent (pad));
  GstV4lSrc *v4lsrc = GST_V4LSRC (v4ljpegsrc);
  struct video_capability *vcap = &GST_V4LELEMENT (v4lsrc)->vcap;
  gfloat fps = 0.0;             /* only used in the debug message below */

  if (!GST_V4L_IS_OPEN (GST_V4LELEMENT (v4lsrc))) {
    return gst_caps_new_any ();
  }

  if (!v4lsrc->autoprobe) {
    /* FIXME: query current caps and return those, with _any appended */
    return gst_caps_new_any ();
  }

  list = gst_caps_new_simple ("image/jpeg", NULL);
  GST_DEBUG_OBJECT (v4ljpegsrc,
      "Device reports w: %d-%d, h: %d-%d, fps: %f",
      vcap->minwidth, vcap->maxwidth, vcap->minheight, vcap->maxheight, fps);

  /* use a range only when the device reports a real min < max span */
  if (vcap->minwidth < vcap->maxwidth) {
    gst_caps_set_simple (list, "width", GST_TYPE_INT_RANGE, vcap->minwidth,
        vcap->maxwidth, NULL);
  } else {
    gst_caps_set_simple (list, "width", G_TYPE_INT, vcap->minwidth, NULL);
  }
  if (vcap->minheight < vcap->maxheight) {
    gst_caps_set_simple (list, "height", GST_TYPE_INT_RANGE, vcap->minheight,
        vcap->maxheight, NULL);
  } else {
    gst_caps_set_simple (list, "height", G_TYPE_INT, vcap->minheight, NULL);
  }

  /* attach the probed framerate list, if any */
  if (v4lsrc->fps_list) {
    GstStructure *structure = gst_caps_get_structure (list, 0);

    gst_structure_set_value (structure, "framerate", v4lsrc->fps_list);
  }
  GST_DEBUG_OBJECT (v4ljpegsrc, "caps: %" GST_PTR_FORMAT, list);

  return list;
}
/* Pad get function: pulls a raw frame from the parent v4lsrc, validates
 * the embedded JPEG payload and hands it out as a sub-buffer.  The
 * camera prefixes each frame with a 16-bit little-endian length field
 * counted in units of 8 bytes. */
static GstData *
gst_v4ljpegsrc_get (GstPad * pad)
{
  GstV4lJpegSrc *v4ljpegsrc;
  GstV4lSrc *v4lsrc;
  GstData *data;
  GstBuffer *buf;
  GstBuffer *outbuf;
  int jpeg_size;

  g_return_val_if_fail (pad != NULL, NULL);

  v4ljpegsrc = GST_V4LJPEGSRC (gst_pad_get_parent (pad));
  v4lsrc = GST_V4LSRC (v4ljpegsrc);

  /* Fetch from the v4lsrc class get fn. */
  data = v4ljpegsrc->getfn (pad);

  /* If not a buffer (e.g. an event), return it unchanged */
  if (!data || (!GST_IS_BUFFER (data)))
    return data;
  buf = GST_BUFFER (data);

  /*
   * Create a new subbuffer from the jpeg data
   * The first 2 bytes in the buffer are the size of the jpeg data
   */
  if (GST_BUFFER_SIZE (buf) > 2) {
    jpeg_size = (int) (GST_READ_UINT16_LE (GST_BUFFER_DATA (buf))) * 8;
  } else
    jpeg_size = 0;

  /* Check that the size is sensible */
  if ((jpeg_size <= 0) || (jpeg_size > GST_BUFFER_SIZE (buf) - 2)) {
    GST_ELEMENT_ERROR (v4ljpegsrc, STREAM, FORMAT, (NULL),
        ("Invalid non-jpeg frame from camera"));
    /* fix: the raw frame was leaked on this error path before */
    gst_buffer_unref (buf);
    return NULL;
  }

  GST_DEBUG_OBJECT (v4ljpegsrc, "Creating JPEG subbuffer of size %d",
      jpeg_size);
  outbuf = gst_buffer_create_sub (buf, 2, jpeg_size);

  /* Copy timestamps onto the subbuffer */
  gst_buffer_stamp (outbuf, buf);

  /* Release the main buffer */
  gst_buffer_unref (buf);

  return GST_DATA (outbuf);
}

View file

@ -1,57 +0,0 @@
/* GStreamer
*
* gstv4ljpegsrc.h: V4L video source element for JPEG cameras
*
* Copyright (C) 2001-2005 Jan Schmidt <thaytan@mad.scientist.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_V4LJPEGSRC_H__
#define __GST_V4LJPEGSRC_H__
#include <gstv4lsrc.h>
G_BEGIN_DECLS
#define GST_TYPE_V4LJPEGSRC \
(gst_v4ljpegsrc_get_type())
#define GST_V4LJPEGSRC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4LJPEGSRC,GstV4lJpegSrc))
#define GST_V4LJPEGSRC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4LJPEGSRC,GstV4lJpegSrcClass))
#define GST_IS_V4LJPEGSRC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4LJPEGSRC))
#define GST_IS_V4LJPEGSRC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4LJPEGSRC))
typedef struct _GstV4lJpegSrc GstV4lJpegSrc;
typedef struct _GstV4lJpegSrcClass GstV4lJpegSrcClass;
/* Instance: extends GstV4lSrc, stashing the parent's src-pad get and
 * getcaps functions (saved in _init before overriding) so the JPEG
 * wrappers can chain up to them. */
struct _GstV4lJpegSrc
{
  GstV4lSrc v4lsrc;
  /* parent v4lsrc pad functions, saved before overriding */
  GstPadGetFunction getfn;
  GstPadGetCapsFunction getcapsfn;
};

/* Class: no additions over GstV4lSrcClass. */
struct _GstV4lJpegSrcClass
{
  GstV4lSrcClass parent_class;
};
GType gst_v4ljpegsrc_get_type (void);
G_END_DECLS
#endif /* __GST_V4LJPEGSRC_H__ */

View file

@ -1,431 +0,0 @@
/* GStreamer
*
* gstv4lmjpegsink.c: hardware MJPEG video sink plugin
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <string.h>
#include "v4lmjpegsink_calls.h"
GST_DEBUG_CATEGORY_STATIC (v4lmjpegsink_debug);
#define GST_CAT_DEFAULT v4lmjpegsink_debug
/* v4lmjpegsink signals and args */
enum
{
SIGNAL_FRAME_DISPLAYED,
LAST_SIGNAL
};
enum
{
ARG_0,
ARG_NUMBUFS,
ARG_BUFSIZE,
ARG_X_OFFSET,
ARG_Y_OFFSET,
ARG_FRAMES_DISPLAYED,
ARG_FRAME_TIME
};
/* init functions */
static void gst_v4lmjpegsink_base_init (gpointer g_class);
static void gst_v4lmjpegsink_class_init (GstV4lMjpegSinkClass * klass);
static void gst_v4lmjpegsink_init (GstV4lMjpegSink * v4lmjpegsink);
/* the chain of buffers */
static GstPadLinkReturn gst_v4lmjpegsink_sinkconnect (GstPad * pad,
const GstCaps * vscapslist);
static void gst_v4lmjpegsink_chain (GstPad * pad, GstData * _data);
/* get/set gst object functions */
static void gst_v4lmjpegsink_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_v4lmjpegsink_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
static GstStateChangeReturn gst_v4lmjpegsink_change_state (GstElement *
element);
static void gst_v4lmjpegsink_set_clock (GstElement * element, GstClock * clock);
static GstElementClass *parent_class = NULL;
static guint gst_v4lmjpegsink_signals[LAST_SIGNAL] = { 0 };
/* Registers (once) and returns the GType for GstV4lMjpegSink,
 * derived from GstV4lElement. */
GType
gst_v4lmjpegsink_get_type (void)
{
  static GType type = 0;

  if (type == 0) {
    static const GTypeInfo info = {
      sizeof (GstV4lMjpegSinkClass),
      gst_v4lmjpegsink_base_init,
      NULL,
      (GClassInitFunc) gst_v4lmjpegsink_class_init,
      NULL,
      NULL,
      sizeof (GstV4lMjpegSink),
      0,
      (GInstanceInitFunc) gst_v4lmjpegsink_init,
    };

    type = g_type_register_static (GST_TYPE_V4LELEMENT, "GstV4lMjpegSink",
        &info, 0);
  }
  return type;
}
/* Base init: registers the static image/jpeg sink pad template and the
 * user-visible element details. */
static void
gst_v4lmjpegsink_base_init (gpointer g_class)
{
  static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
      GST_PAD_SINK,
      GST_PAD_ALWAYS,
      GST_STATIC_CAPS ("image/jpeg, "
          "width = (int) [ 1, MAX ], "
          "height = (int) [ 1, MAX ], " "framerate = (fraction) [ 0, MAX ]")
      );
  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);

  gst_element_class_set_details_simple (gstelement_class,
      "Video (video4linux/MJPEG) sink", "Sink/Video",
      "Writes MJPEG-encoded frames to a zoran MJPEG/video4linux device",
      "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");

  gst_element_class_add_pad_template (gstelement_class,
      gst_static_pad_template_get (&sink_template));
}
/* Class init: installs buffer-layout/offset properties, the read-only
 * playback statistics, the frame-displayed signal, and the
 * change_state / set_clock vfuncs. */
static void
gst_v4lmjpegsink_class_init (GstV4lMjpegSinkClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;

  parent_class = g_type_class_peek_parent (klass);

  /* buffer layout (read/write) and overlay offsets (write-only caches) */
  g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_NUMBUFS,
      g_param_spec_int ("num-buffers", "num-buffers", "num-buffers",
          G_MININT, G_MAXINT, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BUFSIZE,
      g_param_spec_int ("buffer-size", "buffer-size", "buffer-size",
          G_MININT, G_MAXINT, 0, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_X_OFFSET,
      g_param_spec_int ("x-offset", "x-offset", "x-offset",
          G_MININT, G_MAXINT, 0, G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_Y_OFFSET,
      g_param_spec_int ("y-offset", "y-offset", "y-offset",
          G_MININT, G_MAXINT, 0, G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS));
  /* read-only playback statistics */
  g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_FRAMES_DISPLAYED,
      g_param_spec_int ("frames-displayed", "frames-displayed",
          "frames-displayed", G_MININT, G_MAXINT, 0,
          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_FRAME_TIME,
      g_param_spec_int ("frame-time", "frame-time", "frame-time", G_MININT,
          G_MAXINT, 0, G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));

  GST_DEBUG_CATEGORY_INIT (v4lmjpegsink_debug, "v4lmjpegsink", 0,
      "V4L MJPEG sink element");

  gobject_class->set_property = gst_v4lmjpegsink_set_property;
  gobject_class->get_property = gst_v4lmjpegsink_get_property;

  /* emitted from the chain function after each frame is queued */
  gst_v4lmjpegsink_signals[SIGNAL_FRAME_DISPLAYED] =
      g_signal_new ("frame-displayed", G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstV4lMjpegSinkClass,
          frame_displayed), NULL, NULL, g_cclosure_marshal_VOID__VOID,
      G_TYPE_NONE, 0);

  gstelement_class->change_state = gst_v4lmjpegsink_change_state;
  gstelement_class->set_clock = gst_v4lmjpegsink_set_clock;
}
/* Instance init: creates the sink pad from the template, hooks up the
 * chain and link functions, and seeds defaults (-1 = not set yet). */
static void
gst_v4lmjpegsink_init (GstV4lMjpegSink * v4lmjpegsink)
{
  GstElementClass *klass = GST_ELEMENT_GET_CLASS (v4lmjpegsink);

  v4lmjpegsink->sinkpad =
      gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
          "sink"), "sink");
  gst_element_add_pad (GST_ELEMENT (v4lmjpegsink), v4lmjpegsink->sinkpad);
  gst_pad_set_chain_function (v4lmjpegsink->sinkpad, gst_v4lmjpegsink_chain);
  gst_pad_set_link_function (v4lmjpegsink->sinkpad,
      gst_v4lmjpegsink_sinkconnect);

  v4lmjpegsink->clock = NULL;

  /* -1 means "not negotiated / not configured yet" */
  v4lmjpegsink->width = -1;
  v4lmjpegsink->height = -1;
  v4lmjpegsink->x_offset = -1;
  v4lmjpegsink->y_offset = -1;

  v4lmjpegsink->numbufs = 64;
  v4lmjpegsink->bufsize = 256;  /* in KB */

  GST_OBJECT_FLAG_SET (v4lmjpegsink, GST_ELEMENT_THREAD_SUGGESTED);
}
/* Sink pad link function: takes width/height from the negotiated caps,
 * programs device playback and buffer layout, and (re)initializes the
 * playback buffers. */
static GstPadLinkReturn
gst_v4lmjpegsink_sinkconnect (GstPad * pad, const GstCaps * vscapslist)
{
  GstV4lMjpegSink *v4lmjpegsink;
  GstStructure *structure;

  v4lmjpegsink = GST_V4LMJPEGSINK (gst_pad_get_parent (pad));

  /* in case the buffers are active (which means that we already
   * did capsnego before and didn't clean up), clean up anyways */
  if (GST_V4L_IS_ACTIVE (GST_V4LELEMENT (v4lmjpegsink)))
    if (!gst_v4lmjpegsink_playback_deinit (v4lmjpegsink))
      return GST_PAD_LINK_REFUSED;

  structure = gst_caps_get_structure (vscapslist, 0);
  gst_structure_get_int (structure, "width", &v4lmjpegsink->width);
  gst_structure_get_int (structure, "height", &v4lmjpegsink->height);

  if (!gst_v4lmjpegsink_set_playback (v4lmjpegsink, v4lmjpegsink->width, v4lmjpegsink->height, v4lmjpegsink->x_offset, v4lmjpegsink->y_offset, GST_V4LELEMENT (v4lmjpegsink)->vchan.norm, 0))   /* TODO: interlacing */
    return GST_PAD_LINK_REFUSED;

  /* set buffer info */
  if (!gst_v4lmjpegsink_set_buffer (v4lmjpegsink,
          v4lmjpegsink->numbufs, v4lmjpegsink->bufsize))
    return GST_PAD_LINK_REFUSED;
  if (!gst_v4lmjpegsink_playback_init (v4lmjpegsink))
    return GST_PAD_LINK_REFUSED;

  return GST_PAD_LINK_OK;
}
/* GstElement set_clock vfunc: remembers the clock that the chain
 * function uses for A/V sync waits. */
static void
gst_v4lmjpegsink_set_clock (GstElement * element, GstClock * clock)
{
  GstV4lMjpegSink *sink = GST_V4LMJPEGSINK (element);

  sink->clock = clock;
}
/* Sink pad chain function: waits on the clock for A/V sync, copies the
 * JPEG data into a free device buffer and queues it for playback,
 * emitting "frame-displayed" afterwards. */
static void
gst_v4lmjpegsink_chain (GstPad * pad, GstData * _data)
{
  GstBuffer *buf = GST_BUFFER (_data);
  GstV4lMjpegSink *v4lmjpegsink;
  gint num;

  g_return_if_fail (pad != NULL);
  g_return_if_fail (GST_IS_PAD (pad));
  g_return_if_fail (buf != NULL);

  v4lmjpegsink = GST_V4LMJPEGSINK (gst_pad_get_parent (pad));

  if (v4lmjpegsink->clock) {
    GST_DEBUG ("videosink: clock wait: %" G_GUINT64_FORMAT,
        GST_BUFFER_TIMESTAMP (buf));
    gst_element_wait (GST_ELEMENT (v4lmjpegsink), GST_BUFFER_TIMESTAMP (buf));
  }
#if 0
  if (GST_BUFFER_POOL (buf) == v4lmjpegsink->bufferpool) {
    num = GPOINTER_TO_INT (GST_BUFFER_POOL_PRIVATE (buf));
    gst_v4lmjpegsink_play_frame (v4lmjpegsink, num);
  } else {
#endif
  /* check size */
  if (GST_BUFFER_SIZE (buf) > v4lmjpegsink->breq.size) {
    GST_ELEMENT_ERROR (v4lmjpegsink, RESOURCE, WRITE, (NULL),
        ("Buffer too big (%d KB), max. buffersize is %ld KB",
            GST_BUFFER_SIZE (buf) / 1024, v4lmjpegsink->breq.size / 1024));
    /* fix: don't leak the rejected buffer */
    gst_buffer_unref (buf);
    return;
  }
  /* put JPEG data to the device */
  gst_v4lmjpegsink_wait_frame (v4lmjpegsink, &num);
  memcpy (gst_v4lmjpegsink_get_buffer (v4lmjpegsink, num),
      GST_BUFFER_DATA (buf), GST_BUFFER_SIZE (buf));
  gst_v4lmjpegsink_play_frame (v4lmjpegsink, num);
#if 0
  }
#endif

  g_signal_emit (G_OBJECT (v4lmjpegsink),
      gst_v4lmjpegsink_signals[SIGNAL_FRAME_DISPLAYED], 0);

  gst_buffer_unref (buf);
}
#if 0
/* Disabled bufferpool support: would hand out device-owned MJPEG
 * buffers directly so upstream can write into them without a memcpy.
 * Kept for reference only — not compiled. */
static GstBuffer *
gst_v4lmjpegsink_buffer_new (GstBufferPool * pool,
    guint64 offset, guint size, gpointer user_data)
{
  GstV4lMjpegSink *v4lmjpegsink = GST_V4LMJPEGSINK (user_data);
  GstBuffer *buffer = NULL;
  guint8 *data;
  gint num;

  if (!GST_V4L_IS_ACTIVE (GST_V4LELEMENT (v4lmjpegsink)))
    return NULL;
  if (v4lmjpegsink->breq.size < size) {
    GST_DEBUG ("Requested buffer size is too large (%d > %ld)",
        size, v4lmjpegsink->breq.size);
    return NULL;
  }
  if (!gst_v4lmjpegsink_wait_frame (v4lmjpegsink, &num))
    return NULL;
  data = gst_v4lmjpegsink_get_buffer (v4lmjpegsink, num);
  if (!data)
    return NULL;
  buffer = gst_buffer_new ();
  GST_BUFFER_DATA (buffer) = data;
  GST_BUFFER_MAXSIZE (buffer) = v4lmjpegsink->breq.size;
  GST_BUFFER_SIZE (buffer) = size;
  GST_BUFFER_POOL (buffer) = pool;
  GST_BUFFER_POOL_PRIVATE (buffer) = GINT_TO_POINTER (num);

  /* with this flag set, we don't need our own buffer_free() function */
  GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_DONTFREE);

  return buffer;
}
#endif
/* GObject set_property vfunc: caches buffer layout and overlay offsets;
 * they are applied when playback is (re)configured in sinkconnect. */
static void
gst_v4lmjpegsink_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstV4lMjpegSink *sink;

  g_return_if_fail (GST_IS_V4LMJPEGSINK (object));
  sink = GST_V4LMJPEGSINK (object);

  switch (prop_id) {
    case ARG_NUMBUFS:
      sink->numbufs = g_value_get_int (value);
      break;
    case ARG_BUFSIZE:
      sink->bufsize = g_value_get_int (value);
      break;
    case ARG_X_OFFSET:
      sink->x_offset = g_value_get_int (value);
      break;
    case ARG_Y_OFFSET:
      sink->y_offset = g_value_get_int (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject get_property vfunc: reports the cached buffer layout and the
 * playback statistics (frame-time is converted from ns to ms). */
static void
gst_v4lmjpegsink_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  GstV4lMjpegSink *sink = GST_V4LMJPEGSINK (object);

  switch (prop_id) {
    case ARG_FRAMES_DISPLAYED:
      g_value_set_int (value, sink->frames_displayed);
      break;
    case ARG_FRAME_TIME:
      /* stored in nanoseconds, reported in milliseconds */
      g_value_set_int (value, sink->frame_time / 1000000);
      break;
    case ARG_NUMBUFS:
      g_value_set_int (value, sink->numbufs);
      break;
    case ARG_BUFSIZE:
      g_value_set_int (value, sink->bufsize);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* Element state-change vfunc: starts/stops/deinitializes device
 * playback around the corresponding transitions, then chains up to the
 * parent class (returning SUCCESS if the parent has no handler). */
static GstStateChangeReturn
gst_v4lmjpegsink_change_state (GstElement * element, GstStateChange transition)
{
  GstV4lMjpegSink *v4lmjpegsink;

  g_return_val_if_fail (GST_IS_V4LMJPEGSINK (element),
      GST_STATE_CHANGE_FAILURE);
  v4lmjpegsink = GST_V4LMJPEGSINK (element);

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      /* we used to do buffer setup here, but that's now done
       * right after capsnego */
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      /* start */
      if (!gst_v4lmjpegsink_playback_start (v4lmjpegsink))
        return GST_STATE_CHANGE_FAILURE;
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      /* de-queue all queued buffers */
      if (!gst_v4lmjpegsink_playback_stop (v4lmjpegsink))
        return GST_STATE_CHANGE_FAILURE;
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      /* stop playback, unmap all buffers */
      if (!gst_v4lmjpegsink_playback_deinit (v4lmjpegsink))
        return GST_STATE_CHANGE_FAILURE;
      break;
    default:
      /* fix: unhandled transitions fell through the switch before */
      break;
  }

  /* fix: the old tail computed an unused FAILURE value and re-checked
   * the vfunc pointer twice; net behavior was exactly this */
  if (GST_ELEMENT_CLASS (parent_class)->change_state)
    return GST_ELEMENT_CLASS (parent_class)->change_state (element,
        transition);

  return GST_STATE_CHANGE_SUCCESS;
}

View file

@ -1,97 +0,0 @@
/* GStreamer
*
* gstv4lmjpegsink.h: hardware MJPEG video sink element
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_V4LMJPEGSINK_H__
#define __GST_V4LMJPEGSINK_H__
#include <gstv4lelement.h>
#include <sys/time.h>
#include <videodev_mjpeg.h>
G_BEGIN_DECLS
#define GST_TYPE_V4LMJPEGSINK \
(gst_v4lmjpegsink_get_type())
#define GST_V4LMJPEGSINK(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4LMJPEGSINK,GstV4lMjpegSink))
#define GST_V4LMJPEGSINK_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4LMJPEGSINK,GstV4lMjpegSinkClass))
#define GST_IS_V4LMJPEGSINK(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4LMJPEGSINK))
#define GST_IS_V4LMJPEGSINK_CLASS(klass) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4LMJPEGSINK))
typedef struct _GstV4lMjpegSink GstV4lMjpegSink;
typedef struct _GstV4lMjpegSinkClass GstV4lMjpegSinkClass;
/* Instance structure: hardware MJPEG playback state on top of the
 * shared GstV4lElement device handling. */
struct _GstV4lMjpegSink {
  GstV4lElement v4lelement;

  /* the sink pad */
  GstPad *sinkpad;

  /* frame properties for common players */
  gint frames_displayed;
  guint64 frame_time;

  /* system clock object (used for A/V sync in the chain function) */
  GstClock *clock;

  /* buffer/capture info */
  struct mjpeg_sync bsync;
  struct mjpeg_requestbuffers breq;

  /* thread to keep track of synced frames */
  gint8 *isqueued_queued_frames; /* 1 = queued, 0 = unqueued, -1 = error */
  GThread *thread_queued_frames;
  GMutex *mutex_queued_frames;
  GCond **cond_queued_frames;
  gint current_frame;

  /* width/height/norm of the jpeg stream */
  gint width;
  gint height;
  gint norm;

  /* cache values (applied at playback (re)configuration) */
  gint x_offset;
  gint y_offset;

  gint numbufs;
  gint bufsize; /* in KB */
};

/* Class structure: adds the frame-displayed signal. */
struct _GstV4lMjpegSinkClass {
  GstV4lElementClass parent_class;

  /* signals */
  void (*frame_displayed) (GstElement *element);
};
GType gst_v4lmjpegsink_get_type(void);
G_END_DECLS
#endif /* __GST_V4LMJPEGSINK_H__ */

View file

@ -1,868 +0,0 @@
/* GStreamer
*
* gstv4lmjpegsrc.c: hardware MJPEG video source plugin
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <string.h>
#include "v4lmjpegsrc_calls.h"
GST_DEBUG_CATEGORY (v4lmjpegsrc_debug);
#define GST_CAT_DEFAULT v4lmjpegsrc_debug
/* V4lMjpegSrc signals and args */
enum
{
SIGNAL_FRAME_CAPTURE,
SIGNAL_FRAME_DROP,
SIGNAL_FRAME_INSERT,
SIGNAL_FRAME_LOST,
LAST_SIGNAL
};
/* arguments */
enum
{
ARG_0,
#if 0
ARG_X_OFFSET,
ARG_Y_OFFSET,
ARG_F_WIDTH,
ARG_F_HEIGHT,
/* normally, we would want to use subframe capture, however,
* for the time being it's easier if we disable it first */
#endif
ARG_QUALITY,
ARG_NUMBUFS,
ARG_BUFSIZE,
ARG_USE_FIXED_FPS
};
GST_FORMATS_FUNCTION (GstPad *, gst_v4lmjpegsrc_get_formats,
GST_FORMAT_TIME, GST_FORMAT_DEFAULT);
GST_QUERY_TYPE_FUNCTION (GstPad *, gst_v4lmjpegsrc_get_query_types,
GST_QUERY_POSITION);
/* init functions */
static void gst_v4lmjpegsrc_base_init (gpointer g_class);
static void gst_v4lmjpegsrc_class_init (GstV4lMjpegSrcClass * klass);
static void gst_v4lmjpegsrc_init (GstV4lMjpegSrc * v4lmjpegsrc);
/* pad/info functions */
static gboolean gst_v4lmjpegsrc_src_convert (GstPad * pad,
GstFormat src_format,
gint64 src_value, GstFormat * dest_format, gint64 * dest_value);
static gboolean gst_v4lmjpegsrc_src_query (GstPad * pad,
GstQueryType type, GstFormat * format, gint64 * value);
/* buffer functions */
static GstPadLinkReturn gst_v4lmjpegsrc_srcconnect (GstPad * pad,
const GstCaps * caps);
static GstData *gst_v4lmjpegsrc_get (GstPad * pad);
static GstCaps *gst_v4lmjpegsrc_getcaps (GstPad * pad);
/* get/set params */
static void gst_v4lmjpegsrc_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_v4lmjpegsrc_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
/* set_clock function for A/V sync */
static void gst_v4lmjpegsrc_set_clock (GstElement * element, GstClock * clock);
/* state handling */
static GstStateChangeReturn gst_v4lmjpegsrc_change_state (GstElement * element);
/* requeue buffer after use */
static void gst_v4lmjpegsrc_buffer_free (GstBuffer * buffer);
static GstElementClass *parent_class = NULL;
static guint gst_v4lmjpegsrc_signals[LAST_SIGNAL] = { 0 };
/* Registers (once) and returns the GType for GstV4lMjpegSrc,
 * derived from GstV4lElement. */
GType
gst_v4lmjpegsrc_get_type (void)
{
  static GType type = 0;

  if (type == 0) {
    static const GTypeInfo info = {
      sizeof (GstV4lMjpegSrcClass),
      gst_v4lmjpegsrc_base_init,
      NULL,
      (GClassInitFunc) gst_v4lmjpegsrc_class_init,
      NULL,
      NULL,
      sizeof (GstV4lMjpegSrc),
      0,
      (GInstanceInitFunc) gst_v4lmjpegsrc_init,
      NULL
    };

    type = g_type_register_static (GST_TYPE_V4LELEMENT, "GstV4lMjpegSrc",
        &info, 0);
  }
  return type;
}
/* Base init: registers the static image/jpeg src pad template and the
 * user-visible element details. */
static void
gst_v4lmjpegsrc_base_init (gpointer g_class)
{
  static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
      GST_PAD_SRC,
      GST_PAD_ALWAYS,
      GST_STATIC_CAPS ("image/jpeg, "
          "width = (int) [ 0, MAX ], "
          "height = (int) [ 0, MAX ], " "framerate = (fraction) [ 0, MAX ]")
      );
  GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);

  /* fix: was misspelled gst_element_class_set_details_sinmple, which is
   * not a GStreamer symbol and cannot compile/link */
  gst_element_class_set_details_simple (gstelement_class,
      "Video (video4linux/MJPEG) Source", "Source/Video",
      "Reads MJPEG-encoded frames from a zoran MJPEG/video4linux device",
      "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");

  gst_element_class_add_pad_template (gstelement_class,
      gst_static_pad_template_get (&src_template));
}
/* class init: install the object properties and the
 * frame-capture/drop/insert/lost signals, then hook up the GObject and
 * GstElement virtual functions */
static void
gst_v4lmjpegsrc_class_init (GstV4lMjpegSrcClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;
  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;
  parent_class = g_type_class_peek_parent (klass);
  /* capture geometry properties - currently disabled */
#if 0
  g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_X_OFFSET,
      g_param_spec_int ("x-offset", "x_offset", "x_offset",
          G_MININT, G_MAXINT, 0, G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_Y_OFFSET,
      g_param_spec_int ("y-offset", "y_offset", "y_offset",
          G_MININT, G_MAXINT, 0, G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_F_WIDTH,
      g_param_spec_int ("frame-width", "frame_width", "frame_width",
          G_MININT, G_MAXINT, 0, G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_F_HEIGHT,
      g_param_spec_int ("frame-height", "frame_height", "frame_height",
          G_MININT, G_MAXINT, 0, G_PARAM_WRITABLE | G_PARAM_STATIC_STRINGS));
#endif
  /* JPEG quality, 1-100, default 50 */
  g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_QUALITY,
      g_param_spec_int ("quality", "Quality", "JPEG frame quality",
          1, 100, 50, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /* number of capture buffers to request from the driver */
  g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_NUMBUFS,
      g_param_spec_int ("num-buffers", "Num Buffers", "Number of Buffers",
          1, 256, 64, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /* per-buffer size in bytes; read-only, decided during caps negotiation */
  g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BUFSIZE,
      g_param_spec_int ("buffer-size", "Buffer Size", "Size of buffers",
          0, 512 * 1024, 128 * 1024,
          G_PARAM_READABLE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_USE_FIXED_FPS,
      g_param_spec_boolean ("use-fixed-fps", "Use Fixed FPS",
          "Drop/Insert frames to reach a certain FPS (TRUE) "
          "or adapt FPS to suit the number of grabbed frames",
          TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  /* signals */
  gst_v4lmjpegsrc_signals[SIGNAL_FRAME_CAPTURE] =
      g_signal_new ("frame-capture", G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstV4lMjpegSrcClass, frame_capture),
      NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
  gst_v4lmjpegsrc_signals[SIGNAL_FRAME_DROP] =
      g_signal_new ("frame-drop", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST,
      G_STRUCT_OFFSET (GstV4lMjpegSrcClass, frame_drop), NULL, NULL,
      g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
  gst_v4lmjpegsrc_signals[SIGNAL_FRAME_INSERT] =
      g_signal_new ("frame-insert", G_TYPE_FROM_CLASS (klass),
      G_SIGNAL_RUN_LAST, G_STRUCT_OFFSET (GstV4lMjpegSrcClass, frame_insert),
      NULL, NULL, g_cclosure_marshal_VOID__VOID, G_TYPE_NONE, 0);
  /* frame-lost carries the number of lost frames as its argument */
  gst_v4lmjpegsrc_signals[SIGNAL_FRAME_LOST] =
      g_signal_new ("frame-lost", G_TYPE_FROM_CLASS (klass), G_SIGNAL_RUN_LAST,
      G_STRUCT_OFFSET (GstV4lMjpegSrcClass, frame_lost), NULL, NULL,
      g_cclosure_marshal_VOID__INT, G_TYPE_NONE, 1, G_TYPE_INT);
  GST_DEBUG_CATEGORY_INIT (v4lmjpegsrc_debug, "v4lmjpegsrc", 0,
      "V4L MJPEG source element");
  gobject_class->set_property = gst_v4lmjpegsrc_set_property;
  gobject_class->get_property = gst_v4lmjpegsrc_get_property;
  gstelement_class->change_state = gst_v4lmjpegsrc_change_state;
  gstelement_class->set_clock = gst_v4lmjpegsrc_set_clock;
}
/* instance init: create and wire up the src pad and set the property
 * defaults (must match the param spec defaults in class_init) */
static void
gst_v4lmjpegsrc_init (GstV4lMjpegSrc * v4lmjpegsrc)
{
  GstElementClass *klass = GST_ELEMENT_GET_CLASS (v4lmjpegsrc);
  GST_OBJECT_FLAG_SET (GST_ELEMENT (v4lmjpegsrc), GST_ELEMENT_THREAD_SUGGESTED);
  v4lmjpegsrc->srcpad =
      gst_pad_new_from_template (gst_element_class_get_pad_template (klass,
          "src"), "src");
  gst_element_add_pad (GST_ELEMENT (v4lmjpegsrc), v4lmjpegsrc->srcpad);
  /* pad functions: data pull, caps handling, format conversion, queries */
  gst_pad_set_get_function (v4lmjpegsrc->srcpad, gst_v4lmjpegsrc_get);
  gst_pad_set_getcaps_function (v4lmjpegsrc->srcpad, gst_v4lmjpegsrc_getcaps);
  gst_pad_set_link_function (v4lmjpegsrc->srcpad, gst_v4lmjpegsrc_srcconnect);
  gst_pad_set_convert_function (v4lmjpegsrc->srcpad,
      gst_v4lmjpegsrc_src_convert);
  gst_pad_set_formats_function (v4lmjpegsrc->srcpad,
      gst_v4lmjpegsrc_get_formats);
  gst_pad_set_query_function (v4lmjpegsrc->srcpad, gst_v4lmjpegsrc_src_query);
  gst_pad_set_query_type_function (v4lmjpegsrc->srcpad,
      gst_v4lmjpegsrc_get_query_types);
#if 0
  v4lmjpegsrc->frame_width = 0;
  v4lmjpegsrc->frame_height = 0;
  v4lmjpegsrc->x_offset = -1;
  v4lmjpegsrc->y_offset = -1;
#endif
  /* property defaults */
  v4lmjpegsrc->quality = 50;
  v4lmjpegsrc->numbufs = 64;
  /* no clock */
  v4lmjpegsrc->clock = NULL;
  /* fps */
  v4lmjpegsrc->use_fixed_fps = TRUE;
  v4lmjpegsrc->is_capturing = FALSE;
}
/* gst_v4lmjpegsrc_get_fps():
 *   Fill @fps (a fraction GValue, already initialised by the caller)
 *   with the current framerate.  When not running at a fixed rate and a
 *   clock plus captured frames are available, the rate is derived from
 *   frames-handled per elapsed clock time; otherwise it comes from the
 *   video norm (NTSC: 30000/1001, else 25/1).
 *   Returns TRUE on success, FALSE if no rate could be determined.
 */
static gboolean
gst_v4lmjpegsrc_get_fps (GstV4lMjpegSrc * v4lmjpegsrc, GValue * fps)
{
  gint norm;

  g_return_val_if_fail (GST_VALUE_HOLDS_FRACTION (fps), FALSE);

  if (!v4lmjpegsrc->use_fixed_fps &&
      v4lmjpegsrc->clock != NULL && v4lmjpegsrc->handled > 0) {
    /* try to get time from clock master and calculate fps */
    GstClockTime time =
        gst_clock_get_time (v4lmjpegsrc->clock) - v4lmjpegsrc->substract_time;

    /* fixed: the original returned the computed rate as the gboolean
     * return value and never stored it in @fps; also guard against a
     * zero elapsed time to avoid dividing by zero */
    if (time > 0) {
      gst_value_set_fraction (fps,
          (gint) (v4lmjpegsrc->handled * GST_SECOND / time), 1);
      return TRUE;
    }
  }

  /* if that failed ... */
  if (!GST_V4L_IS_OPEN (GST_V4LELEMENT (v4lmjpegsrc)))
    return FALSE;
  if (!gst_v4l_get_chan_norm (GST_V4LELEMENT (v4lmjpegsrc), NULL, &norm))
    return FALSE;

  if (norm == VIDEO_MODE_NTSC)
    gst_value_set_fraction (fps, 30000, 1001);
  else
    gst_value_set_fraction (fps, 25, 1);

  return TRUE;
}
/* src-pad convert function: translate between GST_FORMAT_TIME and
 * GST_FORMAT_DEFAULT (frame count) using the current framerate.
 * Returns FALSE for unsupported conversions or when no framerate can
 * be determined. */
static gboolean
gst_v4lmjpegsrc_src_convert (GstPad * pad,
    GstFormat src_format,
    gint64 src_value, GstFormat * dest_format, gint64 * dest_value)
{
  GstV4lMjpegSrc *v4lmjpegsrc;
  GValue fps = { 0 };
  gboolean result = TRUE;
  v4lmjpegsrc = GST_V4LMJPEGSRC (gst_pad_get_parent (pad));
  g_value_init (&fps, GST_VALUE_FRACTION);
  if (!gst_v4lmjpegsrc_get_fps (v4lmjpegsrc, &fps))
    return FALSE;
  switch (src_format) {
    case GST_FORMAT_TIME:
      switch (*dest_format) {
        case GST_FORMAT_DEFAULT:
          /* time -> frames: value * fps_num / (fps_den * GST_SECOND) */
          *dest_value = gst_util_uint64_scale (src_value,
              gst_value_get_fraction_numerator (&fps),
              gst_value_get_fraction_denominator (&fps) * GST_SECOND);
          break;
        default:
          result = FALSE;
      }
      break;
    case GST_FORMAT_DEFAULT:
      switch (*dest_format) {
        case GST_FORMAT_TIME:
          /* frames -> time: value * duration of one frame */
          *dest_value = src_value * gst_util_uint64_scale_int (GST_SECOND,
              gst_value_get_fraction_denominator (&fps),
              gst_value_get_fraction_numerator (&fps));
          break;
        default:
          result = FALSE;
      }
      break;
    default:
      result = FALSE;
  }
  g_value_unset (&fps);
  return result;
}
/* pad query handler: reports the current position either as elapsed
 * stream time (frames handled * frame duration) or as the raw frame
 * count (GST_FORMAT_DEFAULT) */
static gboolean
gst_v4lmjpegsrc_src_query (GstPad * pad,
    GstQueryType type, GstFormat * format, gint64 * value)
{
  GstV4lMjpegSrc *v4lmjpegsrc = GST_V4LMJPEGSRC (gst_pad_get_parent (pad));
  gboolean res = TRUE;
  GValue fps = { 0 };
  g_value_init (&fps, GST_VALUE_FRACTION);
  if (!gst_v4lmjpegsrc_get_fps (v4lmjpegsrc, &fps))
    return FALSE;
  switch (type) {
    case GST_QUERY_POSITION:
      switch (*format) {
        case GST_FORMAT_TIME:
          /* frames handled so far times the duration of one frame */
          *value = v4lmjpegsrc->handled * gst_util_uint64_scale_int (GST_SECOND,
              gst_value_get_fraction_denominator (&fps),
              gst_value_get_fraction_numerator (&fps));
          break;
        case GST_FORMAT_DEFAULT:
          *value = v4lmjpegsrc->handled;
          break;
        default:
          res = FALSE;
          break;
      }
      break;
    default:
      res = FALSE;
      break;
  }
  g_value_unset (&fps);
  return res;
}
/* compute the per-buffer size for the given horizontal/vertical
 * decimation: round (512 KB / (hor_dec * ver_dec)) up to the next power
 * of two, clamped to the [8 KB, 512 KB] range */
static inline gulong
calc_bufsize (int hor_dec, int ver_dec)
{
  guint8 divider = hor_dec * ver_dec;
  guint32 target = (1024 * 512) / divider;
  guint32 pow2 = 2;
  guint32 bits;

  /* shift until all bits of (target - 1) are consumed; pow2 ends up as
   * the smallest power of two > target - 1 (times the initial 2) */
  for (bits = target - 1; bits != 0; bits >>= 1)
    pow2 <<= 1;

  if (pow2 > (512 * 1024))
    return (512 * 1024);
  if (pow2 < 8192)
    return 8192;
  return pow2;
}
/* link function: derive hardware decimation (1/2/4) from the requested
 * width/height relative to the device maximum, size and mmap the
 * capture buffers, and restart capturing if it was already running */
static GstPadLinkReturn
gst_v4lmjpegsrc_srcconnect (GstPad * pad, const GstCaps * caps)
{
  GstV4lMjpegSrc *v4lmjpegsrc = GST_V4LMJPEGSRC (gst_pad_get_parent (pad));
  gint hor_dec, ver_dec;
  gint w, h;
  gint max_w = GST_V4LELEMENT (v4lmjpegsrc)->vcap.maxwidth,
      max_h = GST_V4LELEMENT (v4lmjpegsrc)->vcap.maxheight;
  gulong bufsize;
  GstStructure *structure;
  gboolean was_capturing;
  /* in case the buffers are active (which means that we already
   * did capsnego before and didn't clean up), clean up anyways */
  if ((was_capturing = v4lmjpegsrc->is_capturing)) {
    if (!gst_v4lmjpegsrc_capture_stop (v4lmjpegsrc))
      return GST_PAD_LINK_REFUSED;
  }
  if (GST_V4L_IS_ACTIVE (GST_V4LELEMENT (v4lmjpegsrc))) {
    if (!gst_v4lmjpegsrc_capture_deinit (v4lmjpegsrc))
      return GST_PAD_LINK_REFUSED;
  } else if (!GST_V4L_IS_OPEN (GST_V4LELEMENT (v4lmjpegsrc))) {
    /* device not open yet: retry the link later */
    return GST_PAD_LINK_DELAYED;
  }
  /* Note: basically, we don't give a damn about the opposite caps here.
   * that might seem odd, but it isn't. we know that the opposite caps is
   * either NULL or has mime type image/jpeg, and in both cases, we'll set
   * our own mime type back and it'll work. Other properties are to be set
   * by the src, not by the opposite caps */
  structure = gst_caps_get_structure (caps, 0);
  gst_structure_get_int (structure, "width", &w);
  gst_structure_get_int (structure, "height", &h);
  /* figure out decimation: full size, half or quarter */
  if (w >= max_w) {
    hor_dec = 1;
  } else if (w * 2 >= max_w) {
    hor_dec = 2;
  } else {
    hor_dec = 4;
  }
  if (h >= max_h) {
    ver_dec = 1;
  } else if (h * 2 >= max_h) {
    ver_dec = 2;
  } else {
    ver_dec = 4;
  }
  /* calculate bufsize */
  bufsize = calc_bufsize (hor_dec, ver_dec);
  /* set buffer info */
  if (!gst_v4lmjpegsrc_set_buffer (v4lmjpegsrc, v4lmjpegsrc->numbufs, bufsize)) {
    return GST_PAD_LINK_REFUSED;
  }
  /* set capture parameters and mmap the buffers */
  if (hor_dec == ver_dec) {
    if (!gst_v4lmjpegsrc_set_capture (v4lmjpegsrc,
            hor_dec, v4lmjpegsrc->quality)) {
      return GST_PAD_LINK_REFUSED;
    }
  } else {
    /* asymmetric decimation needs the extended set_capture_m call */
    if (!gst_v4lmjpegsrc_set_capture_m (v4lmjpegsrc,
            0, 0, max_w, max_h, hor_dec, ver_dec, v4lmjpegsrc->quality)) {
      return GST_PAD_LINK_REFUSED;
    }
  }
#if 0
  if (!v4lmjpegsrc->frame_width && !v4lmjpegsrc->frame_height &&
      v4lmjpegsrc->x_offset < 0 && v4lmjpegsrc->y_offset < 0 &&
      v4lmjpegsrc->horizontal_decimation == v4lmjpegsrc->vertical_decimation) {
    if (!gst_v4lmjpegsrc_set_capture (v4lmjpegsrc,
            v4lmjpegsrc->horizontal_decimation, v4lmjpegsrc->quality))
      return GST_PAD_LINK_REFUSED;
  } else {
    if (!gst_v4lmjpegsrc_set_capture_m (v4lmjpegsrc,
            v4lmjpegsrc->x_offset, v4lmjpegsrc->y_offset,
            v4lmjpegsrc->frame_width, v4lmjpegsrc->frame_height,
            v4lmjpegsrc->horizontal_decimation,
            v4lmjpegsrc->vertical_decimation, v4lmjpegsrc->quality))
      return GST_PAD_LINK_REFUSED;
  }
#endif
  if (!gst_v4lmjpegsrc_capture_init (v4lmjpegsrc))
    return GST_PAD_LINK_REFUSED;
  /* resume capturing if we interrupted it above or we are PLAYING */
  if (was_capturing || GST_STATE (v4lmjpegsrc) == GST_STATE_PLAYING)
    if (!gst_v4lmjpegsrc_capture_start (v4lmjpegsrc))
      return GST_PAD_LINK_REFUSED;
  return GST_PAD_LINK_OK;
}
/* get function: grab the next MJPEG frame from the device and return it
 * wrapped in a GstBuffer.  In fixed-fps mode, frames are dropped or
 * duplicated against the provided clock to hit the nominal framerate. */
static GstData *
gst_v4lmjpegsrc_get (GstPad * pad)
{
  GstV4lMjpegSrc *v4lmjpegsrc;
  GstBuffer *buf;
  gint num;
  GValue fps = { 0 };
  GstClockTime duration = GST_CLOCK_TIME_NONE;
  GstClockTime cur_frame_time = GST_CLOCK_TIME_NONE;

  g_return_val_if_fail (pad != NULL, NULL);
  v4lmjpegsrc = GST_V4LMJPEGSRC (gst_pad_get_parent (pad));

  if (v4lmjpegsrc->use_fixed_fps) {
    g_value_init (&fps, GST_TYPE_FRACTION);
    /* fixed: fetch the framerate *before* reading the fraction - the
     * original computed duration/cur_frame_time from the still-zeroed
     * GValue and only called get_fps afterwards, dividing by a zero
     * numerator */
    if (!gst_v4lmjpegsrc_get_fps (v4lmjpegsrc, &fps)) {
      g_value_unset (&fps);
      return NULL;
    }
    /* duration of one frame and the running time this frame should get */
    duration = gst_util_uint64_scale_int (GST_SECOND,
        gst_value_get_fraction_denominator (&fps),
        gst_value_get_fraction_numerator (&fps));
    cur_frame_time =
        gst_util_uint64_scale_int (v4lmjpegsrc->handled * GST_SECOND,
        gst_value_get_fraction_denominator (&fps),
        gst_value_get_fraction_numerator (&fps));
    g_value_unset (&fps);
  }

  if (v4lmjpegsrc->need_writes > 0) {
    /* use last frame */
    num = v4lmjpegsrc->last_frame;
    v4lmjpegsrc->need_writes--;
  } else if (v4lmjpegsrc->clock && v4lmjpegsrc->use_fixed_fps) {
    GstClockTime time;
    gboolean have_frame = FALSE;

    do {
      /* by default, we use the frame once */
      v4lmjpegsrc->need_writes = 1;

      /* grab a frame from the device */
      if (!gst_v4lmjpegsrc_grab_frame (v4lmjpegsrc, &num,
              &v4lmjpegsrc->last_size))
        return NULL;
      v4lmjpegsrc->last_frame = num;
      time = GST_TIMEVAL_TO_TIME (v4lmjpegsrc->bsync.timestamp) -
          v4lmjpegsrc->substract_time;

      /* first check whether we lost any frames according to the device */
      if (v4lmjpegsrc->last_seq != 0) {
        if (v4lmjpegsrc->bsync.seq - v4lmjpegsrc->last_seq > 1) {
          v4lmjpegsrc->need_writes =
              v4lmjpegsrc->bsync.seq - v4lmjpegsrc->last_seq;
          g_signal_emit (G_OBJECT (v4lmjpegsrc),
              gst_v4lmjpegsrc_signals[SIGNAL_FRAME_LOST], 0,
              v4lmjpegsrc->bsync.seq - v4lmjpegsrc->last_seq - 1);
        }
      }
      v4lmjpegsrc->last_seq = v4lmjpegsrc->bsync.seq;

      /* decide how often we're going to write the frame - set
       * v4lmjpegsrc->need_writes to (that-1) and have_frame to TRUE
       * if we're going to write it - else, just continue.
       *
       * time is generally the system or audio clock. Let's
       * say that we've written one second of audio, then we want
       * to have written one second of video too, within the same
       * timeframe. This means that if time - begin_time = X sec,
       * we want to have written X*fps frames. If we've written
       * more - drop, if we've written less - dup... */
      if (cur_frame_time - time > 1.5 * duration) {
        /* yo dude, we've got too many frames here! Drop! DROP! */
        v4lmjpegsrc->need_writes--;     /* -= (v4lmjpegsrc->handled - (time / fps)); */
        g_signal_emit (G_OBJECT (v4lmjpegsrc),
            gst_v4lmjpegsrc_signals[SIGNAL_FRAME_DROP], 0);
      } else if (cur_frame_time - time < -1.5 * duration) {
        /* this means we're lagging far behind */
        v4lmjpegsrc->need_writes++;     /* += ((time / fps) - v4lmjpegsrc->handled); */
        g_signal_emit (G_OBJECT (v4lmjpegsrc),
            gst_v4lmjpegsrc_signals[SIGNAL_FRAME_INSERT], 0);
      }

      if (v4lmjpegsrc->need_writes > 0) {
        have_frame = TRUE;
        v4lmjpegsrc->use_num_times[num] = v4lmjpegsrc->need_writes;
        v4lmjpegsrc->need_writes--;
      } else {
        gst_v4lmjpegsrc_requeue_frame (v4lmjpegsrc, num);
      }
    } while (!have_frame);
  } else {
    /* grab a frame from the device */
    if (!gst_v4lmjpegsrc_grab_frame (v4lmjpegsrc, &num,
            &v4lmjpegsrc->last_size))
      return NULL;

    v4lmjpegsrc->use_num_times[num] = 1;
  }

  /* wrap the mmap'ed capture data; buffer_free requeues it when done */
  buf = gst_buffer_new ();
  GST_BUFFER_FREE_DATA_FUNC (buf) = gst_v4lmjpegsrc_buffer_free;
  GST_BUFFER_PRIVATE (buf) = v4lmjpegsrc;
  GST_BUFFER_DATA (buf) = gst_v4lmjpegsrc_get_buffer (v4lmjpegsrc, num);
  GST_BUFFER_SIZE (buf) = v4lmjpegsrc->last_size;
  GST_BUFFER_MAXSIZE (buf) = v4lmjpegsrc->breq.size;
  GST_BUFFER_FLAG_SET (buf, GST_BUFFER_READONLY);
  GST_BUFFER_FLAG_SET (buf, GST_BUFFER_DONTFREE);
  if (v4lmjpegsrc->use_fixed_fps)
    GST_BUFFER_TIMESTAMP (buf) = cur_frame_time;
  else                          /* calculate time based on our own clock */
    GST_BUFFER_TIMESTAMP (buf) =
        GST_TIMEVAL_TO_TIME (v4lmjpegsrc->bsync.timestamp) -
        v4lmjpegsrc->substract_time;

  v4lmjpegsrc->handled++;
  g_signal_emit (G_OBJECT (v4lmjpegsrc),
      gst_v4lmjpegsrc_signals[SIGNAL_FRAME_CAPTURE], 0);

  return GST_DATA (buf);
}
/* getcaps: report image/jpeg with the framerate of the current norm and
 * the width/height steps the hardware decimation (1/2/4) can produce */
static GstCaps *
gst_v4lmjpegsrc_getcaps (GstPad * pad)
{
  GstV4lMjpegSrc *v4lmjpegsrc = GST_V4LMJPEGSRC (gst_pad_get_parent (pad));
  struct video_capability *vcap = &GST_V4LELEMENT (v4lmjpegsrc)->vcap;
  GstCaps *caps;
  GstStructure *str;
  gint i;
  GValue w = { 0 }, h = { 0 }, w1 = { 0 }, h1 = { 0 }, fps = { 0 };

  /* without an open device we can only offer the template caps */
  if (!GST_V4L_IS_OPEN (GST_V4LELEMENT (v4lmjpegsrc))) {
    return gst_pad_get_pad_template_caps (pad);
  }

  g_value_init (&fps, GST_TYPE_FRACTION);
  /* fixed: was gst_return_val_if_fail(), which does not exist (the GLib
   * macro is g_return_val_if_fail()); an explicit check also avoids
   * putting a side-effecting call inside an assertion macro, which
   * would be compiled out with G_DISABLE_CHECKS */
  if (!gst_v4lmjpegsrc_get_fps (v4lmjpegsrc, &fps)) {
    g_value_unset (&fps);
    return NULL;
  }

  caps = gst_caps_new_simple ("image/jpeg", NULL);
  str = gst_caps_get_structure (caps, 0);
  gst_structure_set_value (str, "framerate", &fps);
  g_value_unset (&fps);

  /* width/height lists: full, half and quarter of the device maximum */
  g_value_init (&w, GST_TYPE_LIST);
  g_value_init (&h, GST_TYPE_LIST);
  g_value_init (&w1, G_TYPE_INT);
  g_value_init (&h1, G_TYPE_INT);
  for (i = 0; i <= 2; i++) {
    g_value_set_int (&w1, vcap->maxwidth / (1 << i));
    g_value_set_int (&h1, vcap->maxheight / (1 << i));
    gst_value_list_append_value (&w, &w1);
    gst_value_list_append_value (&h, &h1);
  }
  g_value_unset (&h1);
  g_value_unset (&w1);
  gst_structure_set_value (str, "width", &w);
  gst_structure_set_value (str, "height", &h);
  g_value_unset (&w);
  g_value_unset (&h);

  return caps;
}
/* GObject set_property: quality and num-buffers can be set any time;
 * use-fixed-fps only while the device buffers are not active */
static void
gst_v4lmjpegsrc_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstV4lMjpegSrc *v4lmjpegsrc;
  g_return_if_fail (GST_IS_V4LMJPEGSRC (object));
  v4lmjpegsrc = GST_V4LMJPEGSRC (object);
  switch (prop_id) {
#if 0
    case ARG_X_OFFSET:
      v4lmjpegsrc->x_offset = g_value_get_int (value);
      break;
    case ARG_Y_OFFSET:
      v4lmjpegsrc->y_offset = g_value_get_int (value);
      break;
    case ARG_F_WIDTH:
      v4lmjpegsrc->frame_width = g_value_get_int (value);
      break;
    case ARG_F_HEIGHT:
      v4lmjpegsrc->frame_height = g_value_get_int (value);
      break;
#endif
    case ARG_QUALITY:
      v4lmjpegsrc->quality = g_value_get_int (value);
      break;
    case ARG_NUMBUFS:
      v4lmjpegsrc->numbufs = g_value_get_int (value);
      break;
    case ARG_USE_FIXED_FPS:
      /* silently ignored while capturing is set up */
      if (!GST_V4L_IS_ACTIVE (GST_V4LELEMENT (v4lmjpegsrc))) {
        v4lmjpegsrc->use_fixed_fps = g_value_get_boolean (value);
      }
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject get_property: num-buffers and buffer-size report the values
 * the driver actually granted once the device buffers are active */
static void
gst_v4lmjpegsrc_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  GstV4lMjpegSrc *v4lmjpegsrc;
  g_return_if_fail (GST_IS_V4LMJPEGSRC (object));
  v4lmjpegsrc = GST_V4LMJPEGSRC (object);
  switch (prop_id) {
#if 0
    case ARG_X_OFFSET:
      g_value_set_int (value, v4lmjpegsrc->x_offset);
      break;
    case ARG_Y_OFFSET:
      g_value_set_int (value, v4lmjpegsrc->y_offset);
      break;
    case ARG_F_WIDTH:
      g_value_set_int (value, v4lmjpegsrc->frame_width);
      break;
    case ARG_F_HEIGHT:
      g_value_set_int (value, v4lmjpegsrc->frame_height);
      break;
#endif
    case ARG_QUALITY:
      g_value_set_int (value, v4lmjpegsrc->quality);
      break;
    case ARG_NUMBUFS:
      /* active: report the driver's granted count, else the request */
      if (GST_V4L_IS_ACTIVE (GST_V4LELEMENT (v4lmjpegsrc)))
        g_value_set_int (value, v4lmjpegsrc->breq.count);
      else
        g_value_set_int (value, v4lmjpegsrc->numbufs);
      break;
    case ARG_BUFSIZE:
      g_value_set_int (value, v4lmjpegsrc->breq.size);
      break;
    case ARG_USE_FIXED_FPS:
      g_value_set_boolean (value, v4lmjpegsrc->use_fixed_fps);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* state change handler: reset counters on READY->PAUSED, start/stop the
 * streaming capture around PAUSED<->PLAYING, and unmap the buffers when
 * going back to READY; chains up to the parent class */
static GstStateChangeReturn
gst_v4lmjpegsrc_change_state (GstElement * element, GstStateChange transition)
{
  GstV4lMjpegSrc *v4lmjpegsrc;
  GTimeVal time;

  g_return_val_if_fail (GST_IS_V4LMJPEGSRC (element), GST_STATE_CHANGE_FAILURE);
  v4lmjpegsrc = GST_V4LMJPEGSRC (element);

  switch (transition) {
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      /* actual buffer set-up used to be done here - but I moved
       * it to capsnego itself */
      v4lmjpegsrc->handled = 0;
      v4lmjpegsrc->need_writes = 0;
      v4lmjpegsrc->last_frame = 0;
      v4lmjpegsrc->substract_time = 0;
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      /* queue all buffer, start streaming capture */
      if (GST_V4LELEMENT (v4lmjpegsrc)->buffer &&
          !gst_v4lmjpegsrc_capture_start (v4lmjpegsrc))
        return GST_STATE_CHANGE_FAILURE;
      g_get_current_time (&time);
      v4lmjpegsrc->substract_time = GST_TIMEVAL_TO_TIME (time) -
          v4lmjpegsrc->substract_time;
      v4lmjpegsrc->last_seq = 0;
      break;
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      g_get_current_time (&time);
      v4lmjpegsrc->substract_time = GST_TIMEVAL_TO_TIME (time) -
          v4lmjpegsrc->substract_time;
      /* de-queue all queued buffers */
      if (v4lmjpegsrc->is_capturing &&
          !gst_v4lmjpegsrc_capture_stop (v4lmjpegsrc))
        return GST_STATE_CHANGE_FAILURE;
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      /* stop capturing, unmap all buffers */
      if (GST_V4LELEMENT (v4lmjpegsrc)->buffer &&
          !gst_v4lmjpegsrc_capture_deinit (v4lmjpegsrc))
        return GST_STATE_CHANGE_FAILURE;
      break;
    default:
      /* added: other transitions (NULL<->READY) need no work here and
       * are handled by the parent class below */
      break;
  }

  if (GST_ELEMENT_CLASS (parent_class)->change_state)
    return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  return GST_STATE_CHANGE_SUCCESS;
}
/* GstElement set_clock vfunc: remember the clock used for A/V sync in
 * the fixed-fps drop/duplicate logic */
static void
gst_v4lmjpegsrc_set_clock (GstElement * element, GstClock * clock)
{
  GstV4lMjpegSrc *v4lmjpegsrc = GST_V4LMJPEGSRC (element);

  v4lmjpegsrc->clock = clock;
}
/* disabled code: buffer-pool style allocator returning buffers sized to
 * the driver's buffer request, flagged DONTFREE */
#if 0
static GstBuffer *
gst_v4lmjpegsrc_buffer_new (GstBufferPool * pool,
    guint64 offset, guint size, gpointer user_data)
{
  GstBuffer *buffer;
  GstV4lMjpegSrc *v4lmjpegsrc = GST_V4LMJPEGSRC (user_data);
  if (!GST_V4L_IS_ACTIVE (GST_V4LELEMENT (v4lmjpegsrc)))
    return NULL;
  buffer = gst_buffer_new ();
  if (!buffer)
    return NULL;
  /* TODO: add interlacing info to buffer as metadata */
  GST_BUFFER_MAXSIZE (buffer) = v4lmjpegsrc->breq.size;
  GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_DONTFREE);
  return buffer;
}
#endif
/* buffer free callback: find which capture buffer backed this GstBuffer
 * and requeue it to the driver once its scheduled uses are exhausted */
static void
gst_v4lmjpegsrc_buffer_free (GstBuffer * buf)
{
  GstV4lMjpegSrc *v4lmjpegsrc = GST_V4LMJPEGSRC (GST_BUFFER_PRIVATE (buf));
  int n;
  if (gst_element_get_state (GST_ELEMENT (v4lmjpegsrc)) != GST_STATE_PLAYING)
    return;                     /* we've already cleaned up ourselves */
  /* locate the capture buffer by its mmap'ed data pointer */
  for (n = 0; n < v4lmjpegsrc->breq.count; n++)
    if (GST_BUFFER_DATA (buf) == gst_v4lmjpegsrc_get_buffer (v4lmjpegsrc, n)) {
      v4lmjpegsrc->use_num_times[n]--;
      if (v4lmjpegsrc->use_num_times[n] <= 0) {
        gst_v4lmjpegsrc_requeue_frame (v4lmjpegsrc, n);
      }
      break;
    }
  if (n == v4lmjpegsrc->breq.count)
    GST_ELEMENT_ERROR (v4lmjpegsrc, RESOURCE, TOO_LAZY, (NULL),
        ("Couldn't find the buffer"));
}

View file

@ -1,116 +0,0 @@
/* GStreamer
*
* gstv4lmjpegsrc.h: hardware MJPEG video source element
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_V4LMJPEGSRC_H__
#define __GST_V4LMJPEGSRC_H__
#include <gstv4lelement.h>
#include <sys/time.h>
#include <videodev_mjpeg.h>
G_BEGIN_DECLS
/* standard GObject type, cast and type-check boilerplate */
#define GST_TYPE_V4LMJPEGSRC \
  (gst_v4lmjpegsrc_get_type())
#define GST_V4LMJPEGSRC(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4LMJPEGSRC,GstV4lMjpegSrc))
#define GST_V4LMJPEGSRC_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4LMJPEGSRC,GstV4lMjpegSrcClass))
#define GST_IS_V4LMJPEGSRC(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4LMJPEGSRC))
#define GST_IS_V4LMJPEGSRC_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4LMJPEGSRC))
typedef struct _GstV4lMjpegSrc GstV4lMjpegSrc;
typedef struct _GstV4lMjpegSrcClass GstV4lMjpegSrcClass;
/* instance structure: extends the generic GstV4lElement with
 * MJPEG-specific capture and A/V-sync state */
struct _GstV4lMjpegSrc {
  GstV4lElement v4lelement;
  /* pads */
  GstPad *srcpad;
  /* buffer/capture info */
  struct mjpeg_sync bsync;
  struct mjpeg_requestbuffers breq;
  /* num of queued frames and some GThread stuff
   * to wait if there's not enough */
  gint8 *frame_queue_state;
  GMutex *mutex_queue_state;
  GCond *cond_queue_state;
  gint num_queued;
  gint queue_frame;
  /* True if we want to stop */
  gboolean quit, is_capturing;
  /* A/V sync... frame counter and internal cache */
  gulong handled;
  gint last_frame;
  gint last_size;
  gint need_writes;
  gulong last_seq;
  /* clock */
  GstClock *clock;
  /* time to substract from clock time to get back to timestamp */
  GstClockTime substract_time;
  /* how often are we going to use each frame? */
  gint *use_num_times;
  /* how are we going to push buffers? */
  gboolean use_fixed_fps;
  /* end size */
  gint end_width, end_height;
  /* caching values */
#if 0
  gint x_offset;
  gint y_offset;
  gint frame_width;
  gint frame_height;
#endif
  gint quality;
  gint numbufs;
};
/* class structure: vfunc slots backing the frame-capture, frame-drop,
 * frame-insert and frame-lost signals */
struct _GstV4lMjpegSrcClass {
  GstV4lElementClass parent_class;
  void (*frame_capture) (GObject *object);
  void (*frame_drop) (GObject *object);
  void (*frame_insert) (GObject *object);
  /* num_lost: number of frames the device reported as lost */
  void (*frame_lost) (GObject *object,
                      gint num_lost);
};
GType gst_v4lmjpegsrc_get_type(void);
G_END_DECLS
#endif /* __GST_V4LMJPEGSRC_H__ */

View file

@ -1,749 +0,0 @@
/* GStreamer
*
* gstv4lsrc.c: BT8x8/V4L source element
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <string.h>
#include <sys/time.h>
#include "v4lsrc_calls.h"
#include <sys/ioctl.h>
GST_DEBUG_CATEGORY_STATIC (v4lsrc_debug);
#define GST_CAT_DEFAULT v4lsrc_debug
/* element property ids */
enum
{
  PROP_0,
  PROP_AUTOPROBE,
  PROP_AUTOPROBE_FPS,
  PROP_COPY_MODE,
  PROP_TIMESTAMP_OFFSET
};
#define gst_v4lsrc_parent_class parent_class
G_DEFINE_TYPE (GstV4lSrc, gst_v4lsrc, GST_TYPE_V4LELEMENT);
/* src pad template: ANY, actual caps are probed from the device */
static GstStaticPadTemplate v4l_src_template = GST_STATIC_PAD_TEMPLATE ("src",
    GST_PAD_SRC,
    GST_PAD_ALWAYS,
    GST_STATIC_CAPS ("ANY")
    );
/* basesrc methods */
static gboolean gst_v4lsrc_start (GstBaseSrc * src);
static gboolean gst_v4lsrc_stop (GstBaseSrc * src);
static gboolean gst_v4lsrc_set_caps (GstBaseSrc * src, GstCaps * caps);
static GstCaps *gst_v4lsrc_get_caps (GstBaseSrc * src, GstCaps * filter);
static GstFlowReturn gst_v4lsrc_create (GstPushSrc * src, GstBuffer ** out);
static gboolean gst_v4lsrc_query (GstBaseSrc * bsrc, GstQuery * query);
static void gst_v4lsrc_fixate (GstBaseSrc * bsrc, GstCaps * caps);
static void gst_v4lsrc_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_v4lsrc_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);
/* class init: install properties, register the element metadata and the
 * pad template, and hook up the GstBaseSrc/GstPushSrc virtual methods */
static void
gst_v4lsrc_class_init (GstV4lSrcClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *element_class;
  GstBaseSrcClass *basesrc_class;
  GstPushSrcClass *pushsrc_class;
  gobject_class = (GObjectClass *) klass;
  element_class = (GstElementClass *) klass;
  basesrc_class = (GstBaseSrcClass *) klass;
  pushsrc_class = (GstPushSrcClass *) klass;
  GST_DEBUG_CATEGORY_INIT (v4lsrc_debug, "v4lsrc", 0, "V4L source element");
  gobject_class->set_property = gst_v4lsrc_set_property;
  gobject_class->get_property = gst_v4lsrc_get_property;
  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_AUTOPROBE,
      g_param_spec_boolean ("autoprobe", "Autoprobe",
          "Whether the device should be probed for all possible features",
          TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_AUTOPROBE_FPS,
      g_param_spec_boolean ("autoprobe-fps", "Autoprobe FPS",
          "Whether the device should be probed for framerates",
          TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (G_OBJECT_CLASS (klass), PROP_COPY_MODE,
      g_param_spec_boolean ("copy-mode", "Copy mode",
          "Whether to send out copies of buffers, or direct pointers to the mmap region",
          TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  g_object_class_install_property (G_OBJECT_CLASS (klass),
      PROP_TIMESTAMP_OFFSET, g_param_spec_int64 ("timestamp-offset",
          "Timestamp offset",
          "A time offset subtracted from timestamps set on buffers (in ns)",
          G_MININT64, G_MAXINT64, 0,
          G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
  gst_element_class_set_details_simple (element_class,
      "Video (video4linux/raw) Source", "Source/Video",
      "Reads raw frames from a video4linux device",
      "GStreamer maintainers <gstreamer-devel@lists.sourceforge.net>");
  gst_element_class_add_pad_template (element_class,
      gst_static_pad_template_get (&v4l_src_template));
  /* basesrc/pushsrc virtual methods */
  basesrc_class->get_caps = gst_v4lsrc_get_caps;
  basesrc_class->set_caps = gst_v4lsrc_set_caps;
  basesrc_class->start = gst_v4lsrc_start;
  basesrc_class->stop = gst_v4lsrc_stop;
  basesrc_class->fixate = gst_v4lsrc_fixate;
  basesrc_class->query = gst_v4lsrc_query;
  pushsrc_class->create = gst_v4lsrc_create;
}
/* instance init: set the property defaults and configure the base
 * source as a live, time-formatted source */
static void
gst_v4lsrc_init (GstV4lSrc * src)
{
  /* nothing probed yet, no capture running */
  src->buffer_size = 0;
  src->colorspaces = NULL;      /* no colorspaces */
  src->fps_list = NULL;
  src->is_capturing = FALSE;

  /* property defaults */
  src->autoprobe = TRUE;
  src->autoprobe_fps = TRUE;
  src->copy_mode = TRUE;
  src->timestamp_offset = 0;

  /* we operate in time and are a live source */
  gst_base_src_set_format (GST_BASE_SRC (src), GST_FORMAT_TIME);
  gst_base_src_set_live (GST_BASE_SRC (src), TRUE);
}
/* GObject set_property: the autoprobe flags may only be changed while
 * the device is not active */
static void
gst_v4lsrc_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstV4lSrc *v4lsrc = GST_V4LSRC (object);
  switch (prop_id) {
    case PROP_AUTOPROBE:
      /* refuse while the device is in use */
      g_return_if_fail (!GST_V4L_IS_ACTIVE (GST_V4LELEMENT (v4lsrc)));
      v4lsrc->autoprobe = g_value_get_boolean (value);
      break;
    case PROP_AUTOPROBE_FPS:
      g_return_if_fail (!GST_V4L_IS_ACTIVE (GST_V4LELEMENT (v4lsrc)));
      v4lsrc->autoprobe_fps = g_value_get_boolean (value);
      break;
    case PROP_COPY_MODE:
      v4lsrc->copy_mode = g_value_get_boolean (value);
      break;
    case PROP_TIMESTAMP_OFFSET:
      v4lsrc->timestamp_offset = g_value_get_int64 (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* GObject get_property: plain read-back of the stored property values */
static void
gst_v4lsrc_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  GstV4lSrc *v4lsrc = GST_V4LSRC (object);
  switch (prop_id) {
    case PROP_AUTOPROBE:
      g_value_set_boolean (value, v4lsrc->autoprobe);
      break;
    case PROP_AUTOPROBE_FPS:
      g_value_set_boolean (value, v4lsrc->autoprobe_fps);
      break;
    case PROP_COPY_MODE:
      g_value_set_boolean (value, v4lsrc->copy_mode);
      break;
    case PROP_TIMESTAMP_OFFSET:
      g_value_set_int64 (value, v4lsrc->timestamp_offset);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
/* this function is a bit of a last resort: fixate width/height towards
 * the device's current window size (or its minimum / a guess when the
 * device is closed), the framerate to 15/2, and collapse a list of
 * format fourccs down to its first entry */
static void
gst_v4lsrc_fixate (GstBaseSrc * bsrc, GstCaps * caps)
{
  GstStructure *structure;
  int i;
  int targetwidth, targetheight;
  GstV4lSrc *v4lsrc = GST_V4LSRC (bsrc);
  struct video_capability *vcap = &GST_V4LELEMENT (v4lsrc)->vcap;
  struct video_window *vwin = &GST_V4LELEMENT (v4lsrc)->vwin;
  if (GST_V4L_IS_OPEN (GST_V4LELEMENT (v4lsrc))) {
    GST_DEBUG_OBJECT (v4lsrc, "device reported w: %d-%d, h: %d-%d",
        vcap->minwidth, vcap->maxwidth, vcap->minheight, vcap->maxheight);
    targetwidth = vcap->minwidth;
    targetheight = vcap->minheight;
    /* if we can get the current vwin settings, we use those to fixate */
    if (!gst_v4l_get_capabilities (GST_V4LELEMENT (v4lsrc)))
      GST_DEBUG_OBJECT (v4lsrc, "failed getting capabilities");
    else {
      targetwidth = vwin->width;
      targetheight = vwin->height;
    }
  } else {
    GST_DEBUG_OBJECT (v4lsrc, "device closed, guessing");
    targetwidth = 320;
    targetheight = 200;
  }
  GST_DEBUG_OBJECT (v4lsrc, "targetting %dx%d", targetwidth, targetheight);
  for (i = 0; i < gst_caps_get_size (caps); ++i) {
    const GValue *v;
    structure = gst_caps_get_structure (caps, i);
    gst_structure_fixate_field_nearest_int (structure, "width", targetwidth);
    gst_structure_fixate_field_nearest_int (structure, "height", targetheight);
    gst_structure_fixate_field_nearest_fraction (structure, "framerate", 15, 2);
    /* collapse a fourcc list to its first entry */
    v = gst_structure_get_value (structure, "format");
    if (v && G_VALUE_TYPE (v) != GST_TYPE_FOURCC) {
      guint32 fourcc;
      g_return_if_fail (G_VALUE_TYPE (v) == GST_TYPE_LIST);
      fourcc = gst_value_get_fourcc (gst_value_list_get_value (v, 0));
      gst_structure_set (structure, "format", GST_TYPE_FOURCC, fourcc, NULL);
    }
  }
}
/* all V4L palettes we may be able to map to GStreamer caps, in probing
 * order; terminated by -1 */
static gint all_palettes[] = {
  VIDEO_PALETTE_YUV422,
  VIDEO_PALETTE_YUV420P,
  VIDEO_PALETTE_UYVY,
  VIDEO_PALETTE_YUV411P,
  VIDEO_PALETTE_YUV422P,
  VIDEO_PALETTE_YUV410P,
  VIDEO_PALETTE_YUV411,
  VIDEO_PALETTE_RGB555,
  VIDEO_PALETTE_RGB565,
  VIDEO_PALETTE_RGB24,
  VIDEO_PALETTE_RGB32,
  -1
};
/* Map a V4L1 palette constant to (unfixed) GStreamer caps describing that
 * pixel format, or NULL for palettes we do not handle.  Caller owns the
 * returned caps.  Width/height/framerate fields are filled in elsewhere. */
static GstCaps *
gst_v4lsrc_palette_to_caps (int palette)
{
  guint32 fourcc;
  GstCaps *caps;

  switch (palette) {
    case VIDEO_PALETTE_YUV422:
    case VIDEO_PALETTE_YUYV:
      fourcc = GST_MAKE_FOURCC ('Y', 'U', 'Y', '2');
      break;
    case VIDEO_PALETTE_YUV420P:
      fourcc = GST_MAKE_FOURCC ('I', '4', '2', '0');
      break;
    case VIDEO_PALETTE_UYVY:
      fourcc = GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y');
      break;
    case VIDEO_PALETTE_YUV411P:
      fourcc = GST_MAKE_FOURCC ('Y', '4', '1', 'B');
      break;
    case VIDEO_PALETTE_YUV411:
      fourcc = GST_MAKE_FOURCC ('Y', '4', '1', 'P');
      break;
    case VIDEO_PALETTE_YUV422P:
      fourcc = GST_MAKE_FOURCC ('Y', '4', '2', 'B');
      break;
    case VIDEO_PALETTE_YUV410P:
      fourcc = GST_MAKE_FOURCC ('Y', 'U', 'V', '9');
      break;
    case VIDEO_PALETTE_RGB555:
    case VIDEO_PALETTE_RGB565:
    case VIDEO_PALETTE_RGB24:
    case VIDEO_PALETTE_RGB32:
      /* all RGB variants share a fourcc; disambiguated by bpp/depth below */
      fourcc = GST_MAKE_FOURCC ('R', 'G', 'B', ' ');
      break;
    default:
      return NULL;
  }

  if (fourcc == GST_MAKE_FOURCC ('R', 'G', 'B', ' ')) {
    switch (palette) {
      case VIDEO_PALETTE_RGB555:
        caps = gst_caps_from_string ("video/x-raw-rgb, "
            "bpp = (int) 16, "
            "depth = (int) 15, "
            "endianness = (int) BYTE_ORDER, "
            "red_mask = 0x7c00, " "green_mask = 0x03e0, " "blue_mask = 0x001f");
        break;
      case VIDEO_PALETTE_RGB565:
        /* RGB565 is 5-6-5: green occupies bits 5-10, i.e. mask 0x07e0.
         * (The previous mask 0x07f0 was wrong: it overlapped the blue
         * channel and only covered bits 4-10.) */
        caps = gst_caps_from_string ("video/x-raw-rgb, "
            "bpp = (int) 16, "
            "depth = (int) 16, "
            "endianness = (int) BYTE_ORDER, "
            "red_mask = 0xf800, " "green_mask = 0x07e0, " "blue_mask = 0x001f");
        break;
      case VIDEO_PALETTE_RGB24:
        caps = gst_caps_from_string ("video/x-raw-rgb, "
            "bpp = (int) 24, "
            "depth = (int) 24, "
            "endianness = (int) BIG_ENDIAN, "
            "red_mask = 0xFF0000, "
            "green_mask = 0x00FF00, " "blue_mask = 0x0000FF");
        break;
      case VIDEO_PALETTE_RGB32:
        caps = gst_caps_from_string ("video/x-raw-rgb, "
            "bpp = (int) 32, "
            "depth = (int) 24, "
            "endianness = (int) BIG_ENDIAN, "
            "red_mask = 0xFF000000, "
            "green_mask = 0x00FF0000, " "blue_mask = 0x0000FF00");
        break;
      default:
        g_assert_not_reached ();
        return NULL;
    }
  } else {
    caps = gst_caps_new_simple ("video/x-raw-yuv",
        "format", GST_TYPE_FOURCC, fourcc, NULL);
  }
  return caps;
}
static GstCaps *
gst_v4lsrc_get_any_caps (void)
{
gint i;
GstCaps *caps = gst_caps_new_empty (), *one;
for (i = 0; all_palettes[i] != -1; i++) {
one = gst_v4lsrc_palette_to_caps (all_palettes[i]);
gst_caps_append (caps, one);
}
return caps;
}
/* Return the caps this source can produce, optionally intersected with
 * @filter.  When the device is closed or autoprobing is disabled we just
 * return the generic palette list; otherwise the supported palettes (and
 * optionally the framerates) are probed once and cached on the element. */
static GstCaps *
gst_v4lsrc_get_caps (GstBaseSrc * src, GstCaps * filter)
{
  GstCaps *list;
  GstV4lSrc *v4lsrc = GST_V4LSRC (src);
  struct video_capability *vcap = &GST_V4LELEMENT (v4lsrc)->vcap;
  gint width = GST_V4LELEMENT (src)->vcap.minwidth;
  gint height = GST_V4LELEMENT (src)->vcap.minheight;
  gint i;
  gint fps_n, fps_d;
  GList *item;

  /* device closed: we cannot probe, so offer everything we might support */
  if (!GST_V4L_IS_OPEN (GST_V4LELEMENT (v4lsrc))) {
    GstCaps *caps, *intersection;

    caps = gst_v4lsrc_get_any_caps ();
    if (filter) {
      intersection =
          gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
      gst_caps_unref (caps);
      caps = intersection;
    }
    return caps;
  }

  /* autoprobing disabled: likewise return the generic list */
  if (!v4lsrc->autoprobe) {
    GstCaps *caps, *intersection;

    /* FIXME: query current caps and return those, with _any appended */
    caps = gst_v4lsrc_get_any_caps ();
    if (filter) {
      intersection =
          gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
      gst_caps_unref (caps);
      caps = intersection;
    }
    return caps;
  }

  /* first call since start: probe which palettes actually capture, and
   * cache the result in v4lsrc->colorspaces (freed in _stop) */
  if (!v4lsrc->colorspaces) {
    GST_DEBUG_OBJECT (v4lsrc, "Checking supported palettes");
    for (i = 0; all_palettes[i] != -1; i++) {
      /* try palette out */
      if (!gst_v4lsrc_try_capture (v4lsrc, width, height, all_palettes[i]))
        continue;
      GST_DEBUG_OBJECT (v4lsrc, "Added palette %d (%s) to supported list",
          all_palettes[i], gst_v4lsrc_palette_name (all_palettes[i]));
      v4lsrc->colorspaces = g_list_append (v4lsrc->colorspaces,
          GINT_TO_POINTER (all_palettes[i]));
    }
    GST_DEBUG_OBJECT (v4lsrc, "%d palette(s) supported",
        g_list_length (v4lsrc->colorspaces));

    if (v4lsrc->autoprobe_fps) {
      GST_DEBUG_OBJECT (v4lsrc, "autoprobing framerates");
      v4lsrc->fps_list = gst_v4lsrc_get_fps_list (v4lsrc);
    }
  }

  /* fall back to 0/1 if the current framerate cannot be read */
  if (!gst_v4lsrc_get_fps (v4lsrc, &fps_n, &fps_d)) {
    fps_n = 0;
    fps_d = 1;
  }

  /* one caps structure per supported palette, with device size limits and
   * either the probed fps list, the current fps, or a permissive range */
  list = gst_caps_new_empty ();
  for (item = v4lsrc->colorspaces; item != NULL; item = item->next) {
    GstCaps *one;

    one = gst_v4lsrc_palette_to_caps (GPOINTER_TO_INT (item->data));
    if (!one) {
      GST_WARNING_OBJECT (v4lsrc, "Palette %d gave no caps\n",
          GPOINTER_TO_INT (item->data));
      continue;
    }

    GST_DEBUG_OBJECT (v4lsrc,
        "Device reports w: %d-%d, h: %d-%d, fps: %d/%d for palette %d",
        vcap->minwidth, vcap->maxwidth, vcap->minheight, vcap->maxheight,
        fps_n, fps_d, GPOINTER_TO_INT (item->data));

    /* use a fixed int when min == max, a range otherwise */
    if (vcap->minwidth < vcap->maxwidth) {
      gst_caps_set_simple (one, "width", GST_TYPE_INT_RANGE, vcap->minwidth,
          vcap->maxwidth, NULL);
    } else {
      gst_caps_set_simple (one, "width", G_TYPE_INT, vcap->minwidth, NULL);
    }
    if (vcap->minheight < vcap->maxheight) {
      gst_caps_set_simple (one, "height", GST_TYPE_INT_RANGE, vcap->minheight,
          vcap->maxheight, NULL);
    } else {
      gst_caps_set_simple (one, "height", G_TYPE_INT, vcap->minheight, NULL);
    }

    if (v4lsrc->autoprobe_fps) {
      GstStructure *structure = gst_caps_get_structure (one, 0);

      if (v4lsrc->fps_list) {
        gst_structure_set_value (structure, "framerate", v4lsrc->fps_list);
      } else {
        gst_structure_set (structure, "framerate", GST_TYPE_FRACTION,
            fps_n, fps_d, NULL);
      }
    } else {
      /* no fps probing: advertise a broad 1-100 fps range */
      gst_caps_set_simple (one, "framerate", GST_TYPE_FRACTION_RANGE,
          1, 1, 100, 1, NULL);
    }

    GST_DEBUG_OBJECT (v4lsrc, "caps: %" GST_PTR_FORMAT, one);
    gst_caps_append (list, one);
  }

  if (filter) {
    GstCaps *intersection;

    intersection =
        gst_caps_intersect_full (filter, list, GST_CAPS_INTERSECT_FIRST);
    gst_caps_unref (list);
    list = intersection;
  }

  return list;
}
/* Negotiate: map the (fixed) @caps onto a V4L1 palette, update the driver
 * framerate if needed, compute the expected per-frame buffer size, and
 * (re)start capture.  Returns FALSE if the device is closed or the format
 * cannot be handled.  The external behaviour is unchanged except that we
 * no longer clobber unrelated video_window flag bits (see below). */
static gboolean
gst_v4lsrc_set_caps (GstBaseSrc * src, GstCaps * caps)
{
  GstV4lSrc *v4lsrc;
  guint32 fourcc;
  gint bpp, depth, w, h, palette = -1;
  const GValue *new_fps;
  gint cur_fps_n, cur_fps_d;
  GstStructure *structure;
  struct video_window *vwin;

  v4lsrc = GST_V4LSRC (src);
  vwin = &GST_V4LELEMENT (v4lsrc)->vwin;

  /* if we're not open, punt -- we'll get setcaps'd later via negotiate */
  if (!GST_V4L_IS_OPEN (v4lsrc))
    return FALSE;

  /* make sure we stop capturing and dealloc buffers */
  if (GST_V4L_IS_ACTIVE (v4lsrc)) {
    if (!gst_v4lsrc_capture_stop (v4lsrc))
      return FALSE;
    if (!gst_v4lsrc_capture_deinit (v4lsrc))
      return FALSE;
  }

  /* it's fixed, one struct */
  structure = gst_caps_get_structure (caps, 0);

  if (strcmp (gst_structure_get_name (structure), "video/x-raw-yuv") == 0) {
    /* refuse the link rather than read an uninitialized fourcc if the
     * (supposedly fixed) caps are missing the "format" field */
    if (!gst_structure_get_fourcc (structure, "format", &fourcc))
      return FALSE;
  } else {
    fourcc = GST_MAKE_FOURCC ('R', 'G', 'B', ' ');
  }

  gst_structure_get_int (structure, "width", &w);
  gst_structure_get_int (structure, "height", &h);
  new_fps = gst_structure_get_value (structure, "framerate");

  /* set framerate if it's not already correct */
  if (!gst_v4lsrc_get_fps (v4lsrc, &cur_fps_n, &cur_fps_d))
    return FALSE;

  if (new_fps) {
    GST_DEBUG_OBJECT (v4lsrc, "linking with %dx%d at %d/%d fps", w, h,
        gst_value_get_fraction_numerator (new_fps),
        gst_value_get_fraction_denominator (new_fps));

    if (gst_value_get_fraction_numerator (new_fps) != cur_fps_n ||
        gst_value_get_fraction_denominator (new_fps) != cur_fps_d) {
      /* bttv-style fps encoding: fps = index * 15 / 16, with the index
       * stored in bits 16-21 of the video_window flags */
      int fps_index = (gst_value_get_fraction_numerator (new_fps) * 16) /
          (gst_value_get_fraction_denominator (new_fps) * 15);

      GST_DEBUG_OBJECT (v4lsrc, "Trying to set fps index %d", fps_index);
      /* clear bits 16 to 21 -- the old mask (0x3F00 - 1) did not match
       * this intent and wiped unrelated flag bits as well */
      vwin->flags &= ~(0x3F << 16);
      /* set bits 16 to 21 to the index */
      vwin->flags |= fps_index << 16;
      if (!gst_v4l_set_window_properties (GST_V4LELEMENT (v4lsrc))) {
        return FALSE;
      }
    }
  }

  /* map the fourcc (plus depth/bpp for RGB) to a V4L1 palette and remember
   * the per-frame buffer size, including any row-alignment padding */
  switch (fourcc) {
    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
      palette = VIDEO_PALETTE_YUV420P;
      v4lsrc->buffer_size = ((w + 1) & ~1) * ((h + 1) & ~1) * 1.5;
      break;
    case GST_MAKE_FOURCC ('Y', 'U', 'Y', '2'):
      palette = VIDEO_PALETTE_YUV422;
      v4lsrc->buffer_size = ((w + 1) & ~1) * h * 2;
      break;
    case GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y'):
      palette = VIDEO_PALETTE_UYVY;
      v4lsrc->buffer_size = ((w + 1) & ~1) * h * 2;
      break;
    case GST_MAKE_FOURCC ('Y', '4', '1', 'B'):
      palette = VIDEO_PALETTE_YUV411P;
      v4lsrc->buffer_size = ((w + 3) & ~3) * h * 1.5;
      break;
    case GST_MAKE_FOURCC ('Y', '4', '1', 'P'):
      palette = VIDEO_PALETTE_YUV411;
      v4lsrc->buffer_size = ((w + 3) & ~3) * h * 1.5;
      break;
    case GST_MAKE_FOURCC ('Y', 'U', 'V', '9'):
      palette = VIDEO_PALETTE_YUV410P;
      v4lsrc->buffer_size = ((w + 3) & ~3) * ((h + 3) & ~3) * 1.125;
      break;
    case GST_MAKE_FOURCC ('Y', '4', '2', 'B'):
      palette = VIDEO_PALETTE_YUV422P;
      v4lsrc->buffer_size = ((w + 1) & ~1) * h * 2;
      break;
    case GST_MAKE_FOURCC ('R', 'G', 'B', ' '):
      gst_structure_get_int (structure, "depth", &depth);
      switch (depth) {
        case 15:
          palette = VIDEO_PALETTE_RGB555;
          v4lsrc->buffer_size = w * h * 2;
          break;
        case 16:
          palette = VIDEO_PALETTE_RGB565;
          v4lsrc->buffer_size = w * h * 2;
          break;
        case 24:
          /* depth 24 can be packed (bpp 24) or padded to 32 bits */
          gst_structure_get_int (structure, "bpp", &bpp);
          switch (bpp) {
            case 24:
              palette = VIDEO_PALETTE_RGB24;
              v4lsrc->buffer_size = w * h * 3;
              break;
            case 32:
              palette = VIDEO_PALETTE_RGB32;
              v4lsrc->buffer_size = w * h * 4;
              break;
            default:
              break;
          }
          break;
        default:
          break;
      }
      break;
    default:
      break;
  }

  if (palette == -1) {
    GST_WARNING_OBJECT (v4lsrc, "palette for fourcc %" GST_FOURCC_FORMAT
        " is -1, refusing link", GST_FOURCC_ARGS (fourcc));
    return FALSE;
  }

  GST_DEBUG_OBJECT (v4lsrc, "trying to set_capture %dx%d, palette %d",
      w, h, palette);
  /* this only fills in v4lsrc->mmap values */
  if (!gst_v4lsrc_set_capture (v4lsrc, w, h, palette)) {
    GST_WARNING_OBJECT (v4lsrc, "could not set_capture %dx%d, palette %d",
        w, h, palette);
    return FALSE;
  }

  /* first try the negotiated settings using try_capture */
  if (!gst_v4lsrc_try_capture (v4lsrc, w, h, palette)) {
    GST_DEBUG_OBJECT (v4lsrc, "failed trying palette %d for %dx%d", palette,
        w, h);
    return FALSE;
  }

  if (!gst_v4lsrc_capture_init (v4lsrc))
    return FALSE;
  if (!gst_v4lsrc_capture_start (v4lsrc))
    return FALSE;

  return TRUE;
}
/* Handle queries; we only implement LATENCY ourselves, everything else is
 * delegated to the base class. */
static gboolean
gst_v4lsrc_query (GstBaseSrc * bsrc, GstQuery * query)
{
  GstV4lSrc *v4lsrc = GST_V4LSRC (bsrc);
  gboolean ret = FALSE;

  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_LATENCY:{
      gint fps_n, fps_d;
      GstClockTime frame_time, total_time;

      /* device must be open and have a known framerate */
      if (!GST_V4L_IS_OPEN (v4lsrc))
        goto done;
      if (!(ret = gst_v4lsrc_get_fps (v4lsrc, &fps_n, &fps_d)))
        goto done;

      /* min latency: one frame; max latency: the whole mmap frame buffer */
      frame_time = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
      total_time = v4lsrc->mbuf.frames * frame_time;

      GST_DEBUG_OBJECT (bsrc,
          "report latency min %" GST_TIME_FORMAT " max %" GST_TIME_FORMAT,
          GST_TIME_ARGS (frame_time), GST_TIME_ARGS (total_time));

      /* we are a live source */
      gst_query_set_latency (query, TRUE, frame_time, total_time);
      ret = TRUE;
      break;
    }
    default:
      ret = GST_BASE_SRC_CLASS (parent_class)->query (bsrc, query);
      break;
  }

done:
  return ret;
}
/* Note: start and stop are not symmetric.  Capture itself is started from
 * setcaps (invoked through basesrc's negotiate); _start only resets our
 * running offset, while _stop tears capture down completely. */
static gboolean
gst_v4lsrc_start (GstBaseSrc * src)
{
  GstV4lSrc *self = GST_V4LSRC (src);

  /* reset the buffer offset counter for the new streaming session */
  self->offset = 0;
  return TRUE;
}
/* Stop capture, release the mmap'ed buffers, and drop all cached probe
 * results so the next start re-probes the device. */
static gboolean
gst_v4lsrc_stop (GstBaseSrc * src)
{
  GstV4lSrc *self = GST_V4LSRC (src);

  /* halt a running capture first */
  if (GST_V4L_IS_ACTIVE (self)) {
    if (!gst_v4lsrc_capture_stop (self))
      return FALSE;
  }

  /* free the capture buffers, if any were allocated */
  if (GST_V4LELEMENT (self)->buffer != NULL) {
    if (!gst_v4lsrc_capture_deinit (self))
      return FALSE;
  }

  /* forget the probed palette list */
  g_list_free (self->colorspaces);
  self->colorspaces = NULL;

  /* forget the probed framerate list */
  if (self->fps_list != NULL) {
    g_value_unset (self->fps_list);
    g_free (self->fps_list);
    self->fps_list = NULL;
  }

  return TRUE;
}
/* Produce one captured frame as a GstBuffer.  In copy mode the mmap'ed
 * buffer is duplicated so the driver slot can be requeued immediately. */
static GstFlowReturn
gst_v4lsrc_create (GstPushSrc * src, GstBuffer ** buf)
{
  GstV4lSrc *self = GST_V4LSRC (src);
  gint frame;

  /* grab a frame from the device */
  if (!gst_v4lsrc_grab_frame (self, &frame))
    return GST_FLOW_ERROR;

  *buf = gst_v4lsrc_buffer_new (self, frame);

  if (self->copy_mode) {
    GstBuffer *dup;

    dup = gst_buffer_copy (*buf);
    gst_buffer_unref (*buf);
    *buf = dup;
  }

  return GST_FLOW_OK;
}

View file

@ -1,108 +0,0 @@
/* GStreamer
*
* gstv4lsrc.h: BT8x8/V4L video source element
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_V4LSRC_H__
#define __GST_V4LSRC_H__

#include <gstv4lelement.h>

G_BEGIN_DECLS

/* standard GObject type macros for GstV4lSrc */
#define GST_TYPE_V4LSRC \
  (gst_v4lsrc_get_type())
#define GST_V4LSRC(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_V4LSRC,GstV4lSrc))
#define GST_V4LSRC_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_V4LSRC,GstV4lSrcClass))
#define GST_IS_V4LSRC(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_V4LSRC))
#define GST_IS_V4LSRC_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4LSRC))

typedef struct _GstV4lSrc GstV4lSrc;
typedef struct _GstV4lSrcClass GstV4lSrcClass;

/* per-frame state in the capture queue */
enum
{
  QUEUE_STATE_ERROR = -1,
  QUEUE_STATE_READY_FOR_QUEUE,  /* the frame is ready to be queued for capture */
  QUEUE_STATE_QUEUED,           /* the frame is queued for capture */
  QUEUE_STATE_SYNCED            /* the frame is captured */
};

struct _GstV4lSrc
{
  GstV4lElement v4lelement;

  /* pads */
  GstPad *srcpad;

  /* capture/buffer info */
  struct video_mmap mmap;       /* current capture parameters */
  struct video_mbuf mbuf;       /* driver's mmap buffer layout */
  guint buffer_size;            /* expected size of one frame in bytes */
  GstClockTime timestamp_sync;

  /* num of queued frames and some GThread stuff
   * to wait if there's not enough */
  gint8 *frame_queue_state;     /* one QUEUE_STATE_* entry per mmap frame */
  GMutex *mutex_queue_state;
  GCond *cond_queue_state;
  gint num_queued;
  gint sync_frame, queue_frame;
  gboolean is_capturing;
  GstClockTimeDiff timestamp_offset;

  /* True if we want to stop */
  gboolean quit;

  gint offset;                  /* running buffer offset counter */

  /* list of supported colorspaces (as integers) */
  GList *colorspaces;

  gboolean autoprobe;           /* probe features on startup ? */
  gboolean autoprobe_fps;       /* probe fps on startup ? */
  gboolean copy_mode;           /* copy frames out of the mmap'ed area ? */

  GValue *fps_list;             /* list of fps probed */
};

struct _GstV4lSrcClass
{
  GstV4lElementClass parent_class;
};

GType gst_v4lsrc_get_type (void);

G_END_DECLS

#endif /* __GST_V4LSRC_H__ */

View file

@ -1,328 +0,0 @@
/* GStreamer
*
* gstv4ltuner.c: tuner interface implementation for V4L
*
* Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include "gstv4ltuner.h"
#include "gstv4lelement.h"
#include "v4l_calls.h"
/* GType boilerplate for our GstTunerChannel/GstTunerNorm subclasses */
static void gst_v4l_tuner_channel_class_init (GstV4lTunerChannelClass * klass);
static void gst_v4l_tuner_channel_init (GstV4lTunerChannel * channel);
static void gst_v4l_tuner_norm_class_init (GstV4lTunerNormClass * klass);
static void gst_v4l_tuner_norm_init (GstV4lTunerNorm * norm);

/* GstTuner interface implementations (installed in _interface_init) */
static const GList *gst_v4l_tuner_list_channels (GstTuner * tuner);
static void gst_v4l_tuner_set_channel (GstTuner * tuner,
    GstTunerChannel * channel);
static GstTunerChannel *gst_v4l_tuner_get_channel (GstTuner * tuner);
static const GList *gst_v4l_tuner_list_norms (GstTuner * tuner);
static void gst_v4l_tuner_set_norm (GstTuner * tuner, GstTunerNorm * norm);
static GstTunerNorm *gst_v4l_tuner_get_norm (GstTuner * tuner);
static void gst_v4l_tuner_set_frequency (GstTuner * tuner,
    GstTunerChannel * channel, gulong frequency);
static gulong gst_v4l_tuner_get_frequency (GstTuner * tuner,
    GstTunerChannel * channel);
static gint gst_v4l_tuner_signal_strength (GstTuner * tuner,
    GstTunerChannel * channel);

/* parent class pointers, set by the respective class_init functions */
static GstTunerNormClass *norm_parent_class = NULL;
static GstTunerChannelClass *channel_parent_class = NULL;
/* Register (once) and return the GType for GstV4lTunerChannel. */
GType
gst_v4l_tuner_channel_get_type (void)
{
  static GType type = 0;

  if (type == 0) {
    static const GTypeInfo info = {
      sizeof (GstV4lTunerChannelClass),
      NULL,
      NULL,
      (GClassInitFunc) gst_v4l_tuner_channel_class_init,
      NULL,
      NULL,
      sizeof (GstV4lTunerChannel),
      0,
      (GInstanceInitFunc) gst_v4l_tuner_channel_init,
      NULL
    };

    type = g_type_register_static (GST_TYPE_TUNER_CHANNEL,
        "GstV4lTunerChannel", &info, 0);
  }

  return type;
}

static void
gst_v4l_tuner_channel_class_init (GstV4lTunerChannelClass * klass)
{
  channel_parent_class = g_type_class_peek_parent (klass);
}

static void
gst_v4l_tuner_channel_init (GstV4lTunerChannel * channel)
{
  /* all indices start out at 0 until filled in during probing */
  channel->index = 0;
  channel->tuner = 0;
  channel->audio = 0;
}
/* Register (once) and return the GType for GstV4lTunerNorm. */
GType
gst_v4l_tuner_norm_get_type (void)
{
  static GType type = 0;

  if (type == 0) {
    static const GTypeInfo info = {
      sizeof (GstV4lTunerNormClass),
      NULL,
      NULL,
      (GClassInitFunc) gst_v4l_tuner_norm_class_init,
      NULL,
      NULL,
      sizeof (GstV4lTunerNorm),
      0,
      (GInstanceInitFunc) gst_v4l_tuner_norm_init,
      NULL
    };

    type = g_type_register_static (GST_TYPE_TUNER_NORM,
        "GstV4lTunerNorm", &info, 0);
  }

  return type;
}

static void
gst_v4l_tuner_norm_class_init (GstV4lTunerNormClass * klass)
{
  norm_parent_class = g_type_class_peek_parent (klass);
}

static void
gst_v4l_tuner_norm_init (GstV4lTunerNorm * norm)
{
  norm->index = 0;
}
/* Install our implementations into the GstTuner interface vtable. */
void
gst_v4l_tuner_interface_init (GstTunerClass * klass)
{
  klass->list_channels = gst_v4l_tuner_list_channels;
  klass->get_channel = gst_v4l_tuner_get_channel;
  klass->set_channel = gst_v4l_tuner_set_channel;

  klass->list_norms = gst_v4l_tuner_list_norms;
  klass->get_norm = gst_v4l_tuner_get_norm;
  klass->set_norm = gst_v4l_tuner_set_norm;

  klass->get_frequency = gst_v4l_tuner_get_frequency;
  klass->set_frequency = gst_v4l_tuner_set_frequency;
  klass->signal_strength = gst_v4l_tuner_signal_strength;
}
/* Sanity helper: TRUE if @v4lchannel is one of @v4lelement's channels. */
static G_GNUC_UNUSED gboolean
gst_v4l_tuner_contains_channel (GstV4lElement * v4lelement,
    GstV4lTunerChannel * v4lchannel)
{
  const GList *walk;

  for (walk = v4lelement->channels; walk != NULL; walk = walk->next) {
    if (walk->data == v4lchannel)
      return TRUE;
  }

  return FALSE;
}

/* GstTuner::list_channels -- the element owns the returned list. */
static const GList *
gst_v4l_tuner_list_channels (GstTuner * tuner)
{
  return GST_V4LELEMENT (tuner)->channels;
}
/* GstTuner::set_channel -- switch the input channel, keeping the norm. */
static void
gst_v4l_tuner_set_channel (GstTuner * tuner, GstTunerChannel * channel)
{
  GstV4lElement *element = GST_V4LELEMENT (tuner);
  GstV4lTunerChannel *v4lchannel = GST_V4L_TUNER_CHANNEL (channel);
  gint norm;

  /* assert that we're opened and that we're using a known item */
  g_return_if_fail (GST_V4L_IS_OPEN (element));
  g_return_if_fail (gst_v4l_tuner_contains_channel (element, v4lchannel));

  /* read the current norm, then re-apply it with the new channel */
  gst_v4l_get_chan_norm (element, NULL, &norm);
  gst_v4l_set_chan_norm (element, v4lchannel->index, norm);
}

/* GstTuner::get_channel -- find the channel object matching the driver's
 * currently selected input, or NULL if it is unknown to us. */
static GstTunerChannel *
gst_v4l_tuner_get_channel (GstTuner * tuner)
{
  GstV4lElement *element = GST_V4LELEMENT (tuner);
  GList *walk;
  gint current;

  /* assert that we're opened */
  g_return_val_if_fail (GST_V4L_IS_OPEN (element), NULL);

  gst_v4l_get_chan_norm (element, &current, NULL);

  for (walk = element->channels; walk != NULL; walk = walk->next) {
    if (GST_V4L_TUNER_CHANNEL (walk->data)->index == current)
      return GST_TUNER_CHANNEL (walk->data);
  }

  return NULL;
}
/* Sanity helper: TRUE if @v4lnorm is one of @v4lelement's norms. */
static G_GNUC_UNUSED gboolean
gst_v4l_tuner_contains_norm (GstV4lElement * v4lelement,
    GstV4lTunerNorm * v4lnorm)
{
  const GList *walk;

  for (walk = v4lelement->norms; walk != NULL; walk = walk->next) {
    if (walk->data == v4lnorm)
      return TRUE;
  }

  return FALSE;
}

/* GstTuner::list_norms -- the element owns the returned list. */
static const GList *
gst_v4l_tuner_list_norms (GstTuner * tuner)
{
  return GST_V4LELEMENT (tuner)->norms;
}
/* GstTuner::set_norm -- switch the video norm, keeping the channel. */
static void
gst_v4l_tuner_set_norm (GstTuner * tuner, GstTunerNorm * norm)
{
  GstV4lElement *element = GST_V4LELEMENT (tuner);
  GstV4lTunerNorm *v4lnorm = GST_V4L_TUNER_NORM (norm);
  gint channel;

  /* assert that we're opened and that we're using a known item */
  g_return_if_fail (GST_V4L_IS_OPEN (element));
  g_return_if_fail (gst_v4l_tuner_contains_norm (element, v4lnorm));

  /* read the current channel, then re-apply it with the new norm */
  gst_v4l_get_chan_norm (element, &channel, NULL);
  gst_v4l_set_chan_norm (element, channel, v4lnorm->index);
}

/* GstTuner::get_norm -- find the norm object matching the driver's
 * currently selected norm, or NULL if it is unknown to us. */
static GstTunerNorm *
gst_v4l_tuner_get_norm (GstTuner * tuner)
{
  GstV4lElement *element = GST_V4LELEMENT (tuner);
  GList *walk;
  gint current;

  /* assert that we're opened */
  g_return_val_if_fail (GST_V4L_IS_OPEN (element), NULL);

  gst_v4l_get_chan_norm (element, NULL, &current);

  for (walk = element->norms; walk != NULL; walk = walk->next) {
    if (GST_V4L_TUNER_NORM (walk->data)->index == current)
      return GST_TUNER_NORM (walk->data);
  }

  return NULL;
}
/* GstTuner::set_frequency -- tune @channel, but only if it is the
 * currently selected input. */
static void
gst_v4l_tuner_set_frequency (GstTuner * tuner,
    GstTunerChannel * channel, gulong frequency)
{
  GstV4lElement *element = GST_V4LELEMENT (tuner);
  GstV4lTunerChannel *v4lchannel = GST_V4L_TUNER_CHANNEL (channel);
  gint current;

  /* assert that we're opened, the channel is tunable, and known to us */
  g_return_if_fail (GST_V4L_IS_OPEN (element));
  g_return_if_fail (GST_TUNER_CHANNEL_HAS_FLAG (channel,
          GST_TUNER_CHANNEL_FREQUENCY));
  g_return_if_fail (gst_v4l_tuner_contains_channel (element, v4lchannel));

  gst_v4l_get_chan_norm (element, &current, NULL);
  if (current == v4lchannel->index)
    gst_v4l_set_frequency (element, v4lchannel->tuner, frequency);
}

/* GstTuner::get_frequency -- current frequency of @channel, or 0 when the
 * channel is not the selected input. */
static gulong
gst_v4l_tuner_get_frequency (GstTuner * tuner, GstTunerChannel * channel)
{
  GstV4lElement *element = GST_V4LELEMENT (tuner);
  GstV4lTunerChannel *v4lchannel = GST_V4L_TUNER_CHANNEL (channel);
  gulong frequency = 0;
  gint current;

  /* assert that we're opened, the channel is tunable, and known to us */
  g_return_val_if_fail (GST_V4L_IS_OPEN (element), 0);
  g_return_val_if_fail (GST_TUNER_CHANNEL_HAS_FLAG (channel,
          GST_TUNER_CHANNEL_FREQUENCY), 0);
  g_return_val_if_fail (gst_v4l_tuner_contains_channel (element,
          v4lchannel), 0);

  gst_v4l_get_chan_norm (element, &current, NULL);
  if (current == v4lchannel->index)
    gst_v4l_get_frequency (element, v4lchannel->tuner, &frequency);

  return frequency;
}
/* GstTuner::signal_strength -- signal level of @channel, or 0 when the
 * channel is not the selected input. */
static gint
gst_v4l_tuner_signal_strength (GstTuner * tuner, GstTunerChannel * channel)
{
  GstV4lElement *element = GST_V4LELEMENT (tuner);
  GstV4lTunerChannel *v4lchannel = GST_V4L_TUNER_CHANNEL (channel);
  guint strength = 0;
  gint current;

  /* assert that we're opened, the channel is tunable, and known to us */
  g_return_val_if_fail (GST_V4L_IS_OPEN (element), 0);
  g_return_val_if_fail (GST_TUNER_CHANNEL_HAS_FLAG (channel,
          GST_TUNER_CHANNEL_FREQUENCY), 0);
  g_return_val_if_fail (gst_v4l_tuner_contains_channel (element,
          v4lchannel), 0);

  gst_v4l_get_chan_norm (element, &current, NULL);
  if (current == v4lchannel->index &&
      GST_TUNER_CHANNEL_HAS_FLAG (channel, GST_TUNER_CHANNEL_FREQUENCY)) {
    gst_v4l_get_signal (element, v4lchannel->tuner, &strength);
  }

  return (gint) strength;
}

View file

@ -1,84 +0,0 @@
/* GStreamer
*
* gstv4ltuner.h: tuner interface implementation for V4L
*
* Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_V4L_TUNER_H__
#define __GST_V4L_TUNER_H__

#include <gst/gst.h>
#include <gst/interfaces/tuner.h>

G_BEGIN_DECLS

/* GstV4lTunerChannel: GstTunerChannel subclass carrying the V4L1 driver
 * indices needed to address the channel via ioctls. */
#define GST_TYPE_V4L_TUNER_CHANNEL \
  (gst_v4l_tuner_channel_get_type ())
#define GST_V4L_TUNER_CHANNEL(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L_TUNER_CHANNEL, \
                               GstV4lTunerChannel))
#define GST_V4L_TUNER_CHANNEL_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_V4L_TUNER_CHANNEL, \
                            GstV4lTunerChannelClass))
#define GST_IS_V4L_TUNER_CHANNEL(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L_TUNER_CHANNEL))
#define GST_IS_V4L_TUNER_CHANNEL_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_V4L_TUNER_CHANNEL))

typedef struct _GstV4lTunerChannel {
  GstTunerChannel parent;

  gint index;                   /* V4L1 channel (input) index */
  gint tuner;                   /* V4L1 tuner index for frequency ioctls */
  gint audio;                   /* V4L1 audio device index */
} GstV4lTunerChannel;

typedef struct _GstV4lTunerChannelClass {
  GstTunerChannelClass parent;
} GstV4lTunerChannelClass;

/* GstV4lTunerNorm: GstTunerNorm subclass carrying the V4L1 norm index. */
#define GST_TYPE_V4L_TUNER_NORM \
  (gst_v4l_tuner_norm_get_type ())
#define GST_V4L_TUNER_NORM(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_V4L_TUNER_NORM, \
                               GstV4lTunerNorm))
#define GST_V4L_TUNER_NORM_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_V4L_TUNER_NORM, \
                            GstV4lTunerNormClass))
#define GST_IS_V4L_TUNER_NORM(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_V4L_TUNER_NORM))
#define GST_IS_V4L_TUNER_NORM_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_V4L_TUNER_NORM))

typedef struct _GstV4lTunerNorm {
  GstTunerNorm parent;

  gint index;                   /* V4L1 norm index */
} GstV4lTunerNorm;

typedef struct _GstV4lTunerNormClass {
  GstTunerNormClass parent;
} GstV4lTunerNormClass;

GType gst_v4l_tuner_channel_get_type (void);
GType gst_v4l_tuner_norm_get_type (void);

/* fills in the GstTuner interface vtable */
void gst_v4l_tuner_interface_init (GstTunerClass *klass);

#endif /* __GST_V4L_TUNER_H__ */

View file

@ -1,246 +0,0 @@
/* GStreamer
*
* gstv4lxoverlay.c: X-based overlay interface implementation for V4L
*
* Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <string.h>
#include <sys/stat.h>
#include <X11/X.h>
#include <X11/Xlib.h>
#include <X11/extensions/Xv.h>
#include <X11/extensions/Xvlib.h>
#include "gstv4lxoverlay.h"
#include "gstv4lelement.h"
#include "v4l_calls.h"
GST_DEBUG_CATEGORY_STATIC (v4lxv_debug);
#define GST_CAT_DEFAULT v4lxv_debug
/* Private Xv overlay state, hung off GstV4lElement::xv while overlay is
 * active (NULL otherwise). */
struct _GstV4lXv
{
  Display *dpy;                 /* our own X display connection */
  gint port, idle_id;           /* Xv port for this device; pending idle source id (0 = none) */
  GMutex *mutex;                /* serializes Xv calls between set_window_handle and idle_refresh */
};
static void gst_v4l_xoverlay_set_window_handle (GstXOverlay * overlay,
    guintptr xwindow_id);

/* Install the GstXOverlay vfunc and set up our debug category. */
void
gst_v4l_xoverlay_interface_init (GstXOverlayClass * klass)
{
  GST_DEBUG_CATEGORY_INIT (v4lxv_debug, "v4lxv", 0,
      "V4L XOverlay interface debugging");

  klass->set_window_handle = gst_v4l_xoverlay_set_window_handle;
}
/* Open an X connection, locate the Xv port that belongs to this v4l device,
 * and set up the GstV4lXv state on @v4lelement.  Silently degrades to "no
 * overlay" (leaving v4lelement->xv NULL) on any failure. */
static void
gst_v4l_xoverlay_open (GstV4lElement * v4lelement)
{
  struct stat s;
  GstV4lXv *v4lxv;
  const gchar *name = g_getenv ("DISPLAY");
  unsigned int ver, rel, req, ev, err, anum;
  int i, id = 0, first_id = 0, min;
  XvAdaptorInfo *ai;
  Display *dpy;

  /* we need a display, obviously */
  if (!name || !(dpy = XOpenDisplay (name))) {
    GST_WARNING ("No $DISPLAY set or failed to open - no overlay");
    return;
  }

  /* First let's check that XVideo extension is available */
  if (!XQueryExtension (dpy, "XVideo", &i, &i, &i)) {
    GST_WARNING ("Xv extension not available - no overlay");
    XCloseDisplay (dpy);
    return;
  }

  /* find port that belongs to this device */
  if (XvQueryExtension (dpy, &ver, &rel, &req, &ev, &err) != Success) {
    GST_WARNING ("Xv extension not supported - no overlay");
    XCloseDisplay (dpy);
    return;
  }
  if (XvQueryAdaptors (dpy, DefaultRootWindow (dpy), &anum, &ai) != Success) {
    GST_WARNING ("Failed to query Xv adaptors");
    XCloseDisplay (dpy);
    return;
  }
  if (fstat (v4lelement->video_fd, &s) < 0) {
    GST_ERROR ("Failed to stat() file descriptor: %s", g_strerror (errno));
    XCloseDisplay (dpy);
    return;
  }
  /* the device minor number distinguishes /dev/videoN nodes; the matching
   * "video4linux" adaptor's port is assumed to be first_id + minor */
  min = s.st_rdev & 0xff;
  for (i = 0; i < anum; i++) {
    if (!strcmp (ai[i].name, "video4linux")) {
      if (first_id == 0)
        first_id = ai[i].base_id;

      /* hmm... */
      if (first_id != 0 && ai[i].base_id == first_id + min)
        id = ai[i].base_id;
    }
  }
  XvFreeAdaptorInfo (ai);

  if (id == 0) {
    GST_WARNING ("Did not find XvPortID for device - no overlay");
    XCloseDisplay (dpy);
    return;
  }

  /* success: allocate and attach our private overlay state */
  v4lxv = g_new0 (GstV4lXv, 1);
  v4lxv->dpy = dpy;
  v4lxv->port = id;
  v4lxv->mutex = g_mutex_new ();
  v4lxv->idle_id = 0;
  v4lelement->xv = v4lxv;

  /* if a window handle was set before we opened, apply it now */
  if (v4lelement->xwindow_id) {
    gst_v4l_xoverlay_set_window_handle (GST_X_OVERLAY (v4lelement),
        v4lelement->xwindow_id);
  }
}
/* Tear down the Xv overlay state created by gst_v4l_xoverlay_open().
 * No-op when overlay was never set up. */
static void
gst_v4l_xoverlay_close (GstV4lElement * v4lelement)
{
  GstV4lXv *v4lxv = v4lelement->xv;

  if (!v4lelement->xv)
    return;

  if (v4lelement->xwindow_id) {
    gst_v4l_xoverlay_set_window_handle (GST_X_OVERLAY (v4lelement), 0);
  }

  /* Remove any pending idle refresh BEFORE closing the display:
   * idle_refresh() dereferences v4lxv->dpy and v4lxv->mutex, so the old
   * order (close display / free mutex first) could let the idle callback
   * run against torn-down state. */
  if (v4lxv->idle_id)
    g_source_remove (v4lxv->idle_id);

  XCloseDisplay (v4lxv->dpy);
  g_mutex_free (v4lxv->mutex);
  g_free (v4lxv);
  v4lelement->xv = NULL;
}
/* Bring up the overlay at element start, but only if a window handle was
 * already provided by the application. */
void
gst_v4l_xoverlay_start (GstV4lElement * v4lelement)
{
  if (v4lelement->xwindow_id != 0)
    gst_v4l_xoverlay_open (v4lelement);
}

/* Tear the overlay down at element stop. */
void
gst_v4l_xoverlay_stop (GstV4lElement * v4lelement)
{
  gst_v4l_xoverlay_close (v4lelement);
}
/* One-shot idle callback: repaint the overlay video into the current
 * window geometry. */
static gboolean
idle_refresh (gpointer data)
{
  GstV4lElement *v4lelement = GST_V4LELEMENT (data);
  GstV4lXv *xv = v4lelement->xv;

  if (xv != NULL) {
    XWindowAttributes attr;

    g_mutex_lock (xv->mutex);

    XGetWindowAttributes (xv->dpy, v4lelement->xwindow_id, &attr);
    XvPutVideo (xv->dpy, xv->port, v4lelement->xwindow_id,
        DefaultGC (xv->dpy, DefaultScreen (xv->dpy)),
        0, 0, attr.width, attr.height, 0, 0, attr.width, attr.height);
    xv->idle_id = 0;

    g_mutex_unlock (xv->mutex);
  }

  /* run once only */
  return FALSE;
}
/* GstXOverlay::set_window_handle -- (re)target the Xv overlay at window
 * @id.  Passing 0 disables the overlay.  Handles being called both before
 * and after the Xv state has been set up. */
static void
gst_v4l_xoverlay_set_window_handle (GstXOverlay * overlay, guintptr id)
{
  XID xwindow_id = id;
  GstV4lElement *v4lelement = GST_V4LELEMENT (overlay);
  GstV4lXv *v4lxv;
  XWindowAttributes attr;
  gboolean change = (v4lelement->xwindow_id != xwindow_id);

  GST_LOG_OBJECT (v4lelement, "Changing port to %lx", xwindow_id);

  /* lazily set up the Xv state if the device is already open */
  if (!v4lelement->xv && GST_V4L_IS_OPEN (v4lelement))
    gst_v4l_xoverlay_open (v4lelement);

  v4lxv = v4lelement->xv;

  if (v4lxv)
    g_mutex_lock (v4lxv->mutex);

  if (change) {
    /* stop video on the previous window before switching */
    if (v4lelement->xwindow_id && v4lxv) {
      GST_DEBUG_OBJECT (v4lelement,
          "Disabling port %lx", v4lelement->xwindow_id);

      XvSelectPortNotify (v4lxv->dpy, v4lxv->port, 0);
      XvSelectVideoNotify (v4lxv->dpy, v4lelement->xwindow_id, 0);
      XvStopVideo (v4lxv->dpy, v4lxv->port, v4lelement->xwindow_id);
    }

    v4lelement->xwindow_id = xwindow_id;
  }

  /* nothing more to do if Xv is unavailable or overlay was disabled */
  if (!v4lxv || xwindow_id == 0) {
    if (v4lxv)
      g_mutex_unlock (v4lxv->mutex);
    return;
  }

  if (change) {
    GST_DEBUG_OBJECT (v4lelement, "Enabling port %lx", xwindow_id);

    /* draw */
    XvSelectPortNotify (v4lxv->dpy, v4lxv->port, 1);
    XvSelectVideoNotify (v4lxv->dpy, v4lelement->xwindow_id, 1);
  }

  /* paint now, and schedule one idle repaint (replacing any pending one) */
  XGetWindowAttributes (v4lxv->dpy, v4lelement->xwindow_id, &attr);
  XvPutVideo (v4lxv->dpy, v4lxv->port, v4lelement->xwindow_id,
      DefaultGC (v4lxv->dpy, DefaultScreen (v4lxv->dpy)),
      0, 0, attr.width, attr.height, 0, 0, attr.width, attr.height);

  if (v4lxv->idle_id)
    g_source_remove (v4lxv->idle_id);
  v4lxv->idle_id = g_idle_add (idle_refresh, v4lelement);
  g_mutex_unlock (v4lxv->mutex);
}

View file

@ -1,40 +0,0 @@
/* GStreamer
*
* gstv4lxoverlay.h: tv mixer interface implementation for V4L
*
* Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __GST_V4L_X_OVERLAY_H__
#define __GST_V4L_X_OVERLAY_H__
#include <gst/gst.h>
#include <gst/interfaces/xoverlay.h>
#include "gstv4lelement.h"
G_BEGIN_DECLS
/* hook the GstXOverlay interface implementation into the class vtable */
void gst_v4l_xoverlay_interface_init (GstXOverlayClass *klass);
/* start/stop overlay handling for the given element */
void gst_v4l_xoverlay_start (GstV4lElement * v4lelement);
void gst_v4l_xoverlay_stop (GstV4lElement * v4lelement);
G_END_DECLS
#endif /* __GST_V4L_X_OVERLAY_H__ */

View file

@ -1,724 +0,0 @@
/* GStreamer
*
* v4l_calls.c: generic V4L calls
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <string.h>
#include <errno.h>
#include <unistd.h>
#include <gst/gst.h>
#include <gst/interfaces/tuner.h>
#include <gst/interfaces/colorbalance.h>
#include "v4l_calls.h"
#include "gstv4ltuner.h"
#include "gstv4lcolorbalance.h"
#include "gstv4lsrc.h"
/* #include "gstv4lmjpegsrc.h" */
/* #include "gstv4lmjpegsink.h" */
GST_DEBUG_CATEGORY_EXTERN (v4l_debug);
#define GST_CAT_DEFAULT v4l_debug
/* labels for the V4L1 picture properties; array index corresponds to
 * GstV4lPictureType (see gst_v4l_get_picture/gst_v4l_set_picture) */
static const char *picture_name[] = {
  "Hue",
  "Brightness",
  "Contrast",
  "Saturation",
  NULL
};

/* labels for the V4L1 audio properties; array index corresponds to
 * GstV4lAudioType (see gst_v4l_get_audio/gst_v4l_set_audio) */
G_GNUC_UNUSED static const char *audio_name[] = {
  "Volume",
  "Mute",
  "Mode",
  NULL
};

/* labels for the supported TV norms; the array index is used as the v4l
 * norm number (index 1, NTSC, gets the 30000/1001 framerate in
 * gst_v4l_open) */
static const char *norm_name[] = {
  "PAL",
  "NTSC",
  "SECAM",
  NULL
};
/******************************************************
 * gst_v4l_get_capabilities():
 *   get the device's capturing capabilities
 *   sets v4lelement->vcap and v4lelement->vwin
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l_get_capabilities (GstV4lElement * v4lelement)
{
  GST_DEBUG_OBJECT (v4lelement, "getting capabilities");
  GST_V4L_CHECK_OPEN (v4lelement);

  /* VIDIOCGCAP fills the static capability record (name, type, limits) */
  if (ioctl (v4lelement->video_fd, VIDIOCGCAP, &(v4lelement->vcap)) < 0) {
    /* fixed error wording: was "… %s of from device %s" */
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
        ("error getting capabilities %s from device %s",
            g_strerror (errno), v4lelement->videodev));
    return FALSE;
  }

  /* VIDIOCGWIN fills the current capture window record */
  if (ioctl (v4lelement->video_fd, VIDIOCGWIN, &(v4lelement->vwin)) < 0) {
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
        ("error getting window properties %s from device %s",
            g_strerror (errno), v4lelement->videodev));
    return FALSE;
  }

  return TRUE;
}
/******************************************************
 * gst_v4l_set_window_properties():
 *   set the device's capturing parameters (vwin)
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l_set_window_properties (GstV4lElement * v4lelement)
{
  struct video_window vwin;

  GST_DEBUG_OBJECT (v4lelement, "setting window flags 0x%x to device %s",
      v4lelement->vwin.flags, v4lelement->videodev);
  GST_V4L_CHECK_OPEN (v4lelement);

  /* soft failure: only logged at debug level, no element error is posted */
  if (ioctl (v4lelement->video_fd, VIDIOCSWIN, &(v4lelement->vwin)) < 0) {
    GST_DEBUG_OBJECT (v4lelement,
        "could not ioctl window properties 0x%x to device %s",
        v4lelement->vwin.flags, v4lelement->videodev);
    return FALSE;
  }

  /* read the window back to verify the driver really accepted our flags */
  if (ioctl (v4lelement->video_fd, VIDIOCGWIN, &(vwin)) < 0) {
    /* fixed error wording: was "… %s of from device %s" */
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
        ("error getting window properties %s from device %s",
            g_strerror (errno), v4lelement->videodev));
    return FALSE;
  }

  if (vwin.flags != v4lelement->vwin.flags) {
    GST_DEBUG_OBJECT (v4lelement, "set 0x%x but got 0x%x back",
        v4lelement->vwin.flags, vwin.flags);
    return FALSE;
  }

  return TRUE;
}
/******************************************************
 * gst_v4l_open():
 *   open the video device (v4lelement->videodev), read its
 *   capabilities, and populate the tuner norms/channels and
 *   color-balance channel lists used by the interfaces
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l_open (GstV4lElement * v4lelement)
{
  int num;

  GST_DEBUG_OBJECT (v4lelement, "opening device %s", v4lelement->videodev);
  GST_V4L_CHECK_NOT_OPEN (v4lelement);
  GST_V4L_CHECK_NOT_ACTIVE (v4lelement);

  /* be sure we have a device */
  if (!v4lelement->videodev) {
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, NOT_FOUND,
        (_("No device specified.")), (NULL));
    return FALSE;
  }

  /* open the device */
  v4lelement->video_fd = open (v4lelement->videodev, O_RDWR);

  if (!GST_V4L_IS_OPEN (v4lelement)) {
    /* map the common errno values to translated, user-visible messages */
    if (errno == ENODEV || errno == ENOENT) {
      GST_ELEMENT_ERROR (v4lelement, RESOURCE, NOT_FOUND,
          (_("Device \"%s\" does not exist."), v4lelement->videodev), (NULL));
      return FALSE;
    }
    if (errno == EBUSY) {
      GST_ELEMENT_ERROR (v4lelement, RESOURCE, BUSY,
          (_("Device \"%s\" is already being used."), v4lelement->videodev),
          (NULL));
      return FALSE;
    }
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, OPEN_READ_WRITE,
        (_("Could not open device \"%s\" for reading and writing."),
            v4lelement->videodev), GST_ERROR_SYSTEM);
    return FALSE;
  }

  /* get capabilities; on failure roll back the open */
  if (!gst_v4l_get_capabilities (v4lelement)) {
    close (v4lelement->video_fd);
    v4lelement->video_fd = -1;
    return FALSE;
  }

  /* device type check: a v4lsrc requires a capture-capable device */
  if ((GST_IS_V4LSRC (v4lelement) &&
          !(v4lelement->vcap.type & VID_TYPE_CAPTURE))) {
    /* (GST_IS_V4LMJPEGSRC (v4lelement) && */
    /* !(v4lelement->vcap.type & VID_TYPE_MJPEG_ENCODER)) || */
    /* (GST_IS_V4LMJPEGSINK (v4lelement) && */
    /* !(v4lelement->vcap.type & VID_TYPE_MJPEG_DECODER))) { */
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
        ("Device opened, but wrong type (0x%x)", v4lelement->vcap.type));
    close (v4lelement->video_fd);
    v4lelement->video_fd = -1;
    return FALSE;
  }

  GST_INFO_OBJECT (v4lelement, "Opened device \'%s\' (\'%s\') successfully",
      v4lelement->vcap.name, v4lelement->videodev);

  /* norms + inputs, for the tuner interface; index 1 (NTSC in norm_name)
   * gets the 30000/1001 framerate, all others 25/1 */
  for (num = 0; norm_name[num] != NULL; num++) {
    GstV4lTunerNorm *v4lnorm = g_object_new (GST_TYPE_V4L_TUNER_NORM,
        NULL);
    GstTunerNorm *norm = GST_TUNER_NORM (v4lnorm);

    norm->label = g_strdup (norm_name[num]);
    if (num == 1)
      gst_value_set_fraction (&norm->framerate, 30000, 1001);
    else
      gst_value_set_fraction (&norm->framerate, 25, 1);
    v4lnorm->index = num;
    v4lelement->norms = g_list_append (v4lelement->norms, (gpointer) norm);
  }
  v4lelement->channels = gst_v4l_get_chan_names (v4lelement);

  /* color-balance channels, one per picture property, range 0-65535 */
  for (num = 0; picture_name[num] != NULL; num++) {
    GstV4lColorBalanceChannel *v4lchannel =
        g_object_new (GST_TYPE_V4L_COLOR_BALANCE_CHANNEL, NULL);
    GstColorBalanceChannel *channel = GST_COLOR_BALANCE_CHANNEL (v4lchannel);

    channel->label = g_strdup (picture_name[num]);
    channel->min_value = 0;
    channel->max_value = 65535;
    v4lchannel->index = num;
    v4lelement->colors = g_list_append (v4lelement->colors, channel);
  }

  GST_DEBUG_OBJECT (v4lelement, "Setting default norm/input");
  gst_v4l_set_chan_norm (v4lelement, 0, 0);

  return TRUE;
}
/******************************************************
 * gst_v4l_close():
 *   close the video device (v4lelement->video_fd) and release the
 *   norm/channel/color lists built by gst_v4l_open()
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l_close (GstV4lElement * v4lelement)
{
  GList *walk;

  GST_DEBUG_OBJECT (v4lelement, "closing device");
  GST_V4L_CHECK_OPEN (v4lelement);
  GST_V4L_CHECK_NOT_ACTIVE (v4lelement);

  close (v4lelement->video_fd);
  v4lelement->video_fd = -1;

  /* drop our references on the tuner channels */
  for (walk = v4lelement->channels; walk != NULL; walk = walk->next)
    g_object_unref (walk->data);
  g_list_free (v4lelement->channels);
  v4lelement->channels = NULL;

  /* ... the tuner norms ... */
  for (walk = v4lelement->norms; walk != NULL; walk = walk->next)
    g_object_unref (walk->data);
  g_list_free (v4lelement->norms);
  v4lelement->norms = NULL;

  /* ... and the color-balance channels */
  for (walk = v4lelement->colors; walk != NULL; walk = walk->next)
    g_object_unref (walk->data);
  g_list_free (v4lelement->colors);
  v4lelement->colors = NULL;

  return TRUE;
}
/******************************************************
 * gst_v4l_get_num_chans()
 * return value: the number of video input channels
 ******************************************************/
static gint
gst_v4l_get_num_chans (GstV4lElement * v4lelement)
{
  GST_DEBUG_OBJECT (v4lelement, "getting number of channels");
  /* NOTE(review): GST_V4L_CHECK_OPEN expands to `return FALSE' (0) in this
   * gint function, so a closed device simply reports 0 channels */
  GST_V4L_CHECK_OPEN (v4lelement);

  return v4lelement->vcap.channels;
}
/******************************************************
 * gst_v4l_get_chan_names()
 *   enumerate the device's inputs and wrap each one in a
 *   GstV4lTunerChannel, attaching tuner frequency range and
 *   audio input information where available
 * return value: a GList containing the channel names
 ******************************************************/
GList *
gst_v4l_get_chan_names (GstV4lElement * v4lelement)
{
  struct video_channel vchan = { 0 };
  GList *list = NULL;
  gint i;

  GST_DEBUG_OBJECT (v4lelement, "getting channel names");

  if (!GST_V4L_IS_OPEN (v4lelement))
    return NULL;

  for (i = 0; i < gst_v4l_get_num_chans (v4lelement); i++) {
    GstV4lTunerChannel *v4lchannel;
    GstTunerChannel *channel;

    vchan.channel = i;
    if (ioctl (v4lelement->video_fd, VIDIOCGCHAN, &vchan) < 0) {
      /* Skip this channel */
      continue;
    }
    v4lchannel = g_object_new (GST_TYPE_V4L_TUNER_CHANNEL, NULL);
    v4lchannel->index = i;
    channel = GST_TUNER_CHANNEL (v4lchannel);
    channel->label = g_strdup (vchan.name);
    channel->flags = GST_TUNER_CHANNEL_INPUT;
    /* if the input has a tuner, find the matching tuner (by name) and
     * copy over its frequency range */
    if (vchan.flags & VIDEO_VC_TUNER) {
      struct video_tuner vtun;
      gint n;

      for (n = 0; n < vchan.tuners; n++) {
        vtun.tuner = n;
        if (ioctl (v4lelement->video_fd, VIDIOCGTUNER, &vtun) < 0)
          continue;             /* no more tuners */
        if (strcmp (vtun.name, vchan.name) != 0) {
          continue;             /* not this one */
        }
        v4lchannel->tuner = n;
        channel->flags |= GST_TUNER_CHANNEL_FREQUENCY;
        /* presumably VIDEO_TUNER_LOW selects 62.5 Hz units vs 62.5 kHz
         * units — TODO confirm against the V4L1 spec */
        channel->freq_multiplicator =
            62.5 * ((vtun.flags & VIDEO_TUNER_LOW) ? 1 : 1000);
        channel->min_frequency = vtun.rangelow * channel->freq_multiplicator;
        channel->max_frequency = vtun.rangehigh * channel->freq_multiplicator;
        channel->min_signal = 0;
        channel->max_signal = 0xffff;
        break;
      }
    }
    /* likewise, match an audio input by name */
    if (vchan.flags & VIDEO_VC_AUDIO) {
      struct video_audio vaud;
      gint n;

      for (n = 0; n < v4lelement->vcap.audios; n++) {
        vaud.audio = n;
        if (ioctl (v4lelement->video_fd, VIDIOCGAUDIO, &vaud) < 0)
          continue;
        if (!strcmp (vaud.name, vchan.name)) {
          v4lchannel->audio = n;
          channel->flags |= GST_TUNER_CHANNEL_AUDIO;
          break;
        }
      }
    }
    /* prepend + final reverse keeps device order without O(n^2) appends */
    list = g_list_prepend (list, (gpointer) channel);
  }

  return g_list_reverse (list);
}
/******************************************************
 * gst_v4l_get_chan_norm():
 *   get the currently active video-channel and it's
 *   norm (VIDEO_MODE_{PAL|NTSC|SECAM|AUTO})
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l_get_chan_norm (GstV4lElement * v4lelement, gint * channel, gint * norm)
{
  const struct video_channel *cur = &(v4lelement->vchan);

  GST_DEBUG_OBJECT (v4lelement, "getting current channel and norm");
  GST_V4L_CHECK_OPEN (v4lelement);

  /* both output parameters are optional; only fill in what was asked for */
  if (channel != NULL)
    *channel = cur->channel;
  if (norm != NULL)
    *norm = cur->norm;

  return TRUE;
}
/******************************************************
 * gst_v4l_set_chan_norm():
 *   set a new active channel and it's norm
 *   (VIDEO_MODE_{PAL|NTSC|SECAM|AUTO})
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l_set_chan_norm (GstV4lElement * v4lelement, gint channel, gint norm)
{
  GST_DEBUG_OBJECT (v4lelement, "setting channel = %d, norm = %d (%s)",
      channel, norm, norm_name[norm]);
  GST_V4L_CHECK_OPEN (v4lelement);
  /* deliberately allowed while streaming (check disabled) */
  //GST_V4L_CHECK_NOT_ACTIVE (v4lelement);

  v4lelement->vchan.channel = channel;
  v4lelement->vchan.norm = norm;

  if (ioctl (v4lelement->video_fd, VIDIOCSCHAN, &(v4lelement->vchan)) < 0) {
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
        ("Error setting the channel/norm settings: %s", g_strerror (errno)));
    return FALSE;
  }

  /* read the settings back so vchan reflects what the driver really uses */
  if (ioctl (v4lelement->video_fd, VIDIOCGCHAN, &(v4lelement->vchan)) < 0) {
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
        ("Error getting the channel/norm settings: %s", g_strerror (errno)));
    return FALSE;
  }

  return TRUE;
}
/******************************************************
 * gst_v4l_get_signal():
 *   query the signal strength of the given tuner
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l_get_signal (GstV4lElement * v4lelement, gint tunernum, guint * signal)
{
  struct video_tuner vtun = { 0, };

  GST_DEBUG_OBJECT (v4lelement, "getting tuner signal");
  GST_V4L_CHECK_OPEN (v4lelement);

  vtun.tuner = tunernum;
  if (ioctl (v4lelement->video_fd, VIDIOCGTUNER, &vtun) < 0) {
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
        ("Error getting tuner signal: %s", g_strerror (errno)));
    return FALSE;
  }

  *signal = vtun.signal;
  return TRUE;
}
/******************************************************
 * gst_v4l_get_frequency():
 *   get the current frequency
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l_get_frequency (GstV4lElement * v4lelement,
    gint tunernum, gulong * frequency)
{
  struct video_tuner vtun;
  GstTunerChannel *channel;

  GST_DEBUG_OBJECT (v4lelement, "getting tuner frequency");
  GST_V4L_CHECK_OPEN (v4lelement);

  channel = gst_tuner_get_channel (GST_TUNER (v4lelement));
  /* fix: guard against no current channel; freq_multiplicator would
   * otherwise be read through a NULL pointer below */
  if (channel == NULL)
    return FALSE;

  /* check that this is the current input */
  vtun.tuner = tunernum;
  if (ioctl (v4lelement->video_fd, VIDIOCGTUNER, &vtun) < 0)
    return FALSE;
  if (strcmp (vtun.name, v4lelement->vchan.name))
    return FALSE;

  if (ioctl (v4lelement->video_fd, VIDIOCGFREQ, frequency) < 0) {
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
        ("Error getting tuner frequency: %s", g_strerror (errno)));
    return FALSE;
  }

  /* scale from device units using the channel's multiplicator */
  *frequency = *frequency * channel->freq_multiplicator;

  return TRUE;
}
/******************************************************
 * gst_v4l_set_frequency():
 *   set frequency
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l_set_frequency (GstV4lElement * v4lelement,
    gint tunernum, gulong frequency)
{
  struct video_tuner vtun;
  GstTunerChannel *channel;

  GST_DEBUG_OBJECT (v4lelement, "setting tuner frequency to %lu", frequency);
  GST_V4L_CHECK_OPEN (v4lelement);

  channel = gst_tuner_get_channel (GST_TUNER (v4lelement));
  /* fix: guard against no current channel; freq_multiplicator would
   * otherwise be read through a NULL pointer below */
  if (channel == NULL)
    return FALSE;

  /* check that this is the current input */
  vtun.tuner = tunernum;
  if (ioctl (v4lelement->video_fd, VIDIOCGTUNER, &vtun) < 0)
    return FALSE;
  if (strcmp (vtun.name, v4lelement->vchan.name))
    return FALSE;

  /* scale to device units using the channel's multiplicator */
  frequency = frequency / channel->freq_multiplicator;

  if (ioctl (v4lelement->video_fd, VIDIOCSFREQ, &frequency) < 0) {
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
        ("Error setting tuner frequency: %s", g_strerror (errno)));
    return FALSE;
  }

  return TRUE;
}
/******************************************************
 * gst_v4l_get_picture():
 *   get a picture value
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l_get_picture (GstV4lElement * v4lelement,
    GstV4lPictureType type, gint * value)
{
  struct video_picture vpic;

  GST_DEBUG_OBJECT (v4lelement, "getting picture property type %d (%s)", type,
      picture_name[type]);
  GST_V4L_CHECK_OPEN (v4lelement);

  /* fetch the whole picture record, then pick out the requested field */
  if (ioctl (v4lelement->video_fd, VIDIOCGPICT, &vpic) < 0) {
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
        ("Error getting picture parameters: %s", g_strerror (errno)));
    return FALSE;
  }

  switch (type) {
    case V4L_PICTURE_HUE:
      *value = vpic.hue;
      break;
    case V4L_PICTURE_BRIGHTNESS:
      *value = vpic.brightness;
      break;
    case V4L_PICTURE_CONTRAST:
      *value = vpic.contrast;
      break;
    case V4L_PICTURE_SATURATION:
      /* V4L1 calls saturation `colour' */
      *value = vpic.colour;
      break;
    default:
      GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
          ("Error getting picture parameters: unknown type %d", type));
      return FALSE;
  }

  return TRUE;
}
/******************************************************
 * gst_v4l_set_picture():
 *   set a picture value
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l_set_picture (GstV4lElement * v4lelement,
    GstV4lPictureType type, gint value)
{
  struct video_picture vpic;

  GST_DEBUG_OBJECT (v4lelement, "setting picture type %d (%s) to value %d",
      type, picture_name[type], value);
  GST_V4L_CHECK_OPEN (v4lelement);

  /* read-modify-write: the ioctl only takes the full record, so fetch it,
   * patch the one field, and write it back */
  if (ioctl (v4lelement->video_fd, VIDIOCGPICT, &vpic) < 0) {
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
        ("Error getting picture parameters: %s", g_strerror (errno)));
    return FALSE;
  }

  switch (type) {
    case V4L_PICTURE_HUE:
      vpic.hue = value;
      break;
    case V4L_PICTURE_BRIGHTNESS:
      vpic.brightness = value;
      break;
    case V4L_PICTURE_CONTRAST:
      vpic.contrast = value;
      break;
    case V4L_PICTURE_SATURATION:
      /* V4L1 calls saturation `colour' */
      vpic.colour = value;
      break;
    default:
      GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
          ("Error setting picture parameters: unknown type %d", type));
      return FALSE;
  }

  if (ioctl (v4lelement->video_fd, VIDIOCSPICT, &vpic) < 0) {
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
        ("Error setting picture parameters: %s", g_strerror (errno)));
    return FALSE;
  }

  return TRUE;
}
/******************************************************
 * gst_v4l_get_audio():
 *   get some audio value
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l_get_audio (GstV4lElement * v4lelement,
    gint audionum, GstV4lAudioType type, gint * value)
{
  struct video_audio vau;

  GST_DEBUG_OBJECT (v4lelement, "getting audio parameter type %d (%s)", type,
      audio_name[type]);
  GST_V4L_CHECK_OPEN (v4lelement);

  vau.audio = audionum;
  if (ioctl (v4lelement->video_fd, VIDIOCGAUDIO, &vau) < 0) {
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
        ("Error getting audio parameters: %s", g_strerror (errno)));
    return FALSE;
  }

  switch (type) {
    case V4L_AUDIO_MUTE:
      /* note: this is the raw flag bit, i.e. non-zero when muted, not a
       * normalized 0/1 boolean */
      *value = (vau.flags & VIDEO_AUDIO_MUTE);
      break;
    case V4L_AUDIO_VOLUME:
      *value = vau.volume;
      break;
    case V4L_AUDIO_MODE:
      *value = vau.mode;
      break;
    default:
      GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
          ("Error getting audio parameters: unknown type %d", type));
      return FALSE;
  }

  return TRUE;
}
/******************************************************
 * gst_v4l_set_audio():
 *   set some audio value
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4l_set_audio (GstV4lElement * v4lelement,
    gint audionum, GstV4lAudioType type, gint value)
{
  struct video_audio vau;

  GST_DEBUG_OBJECT (v4lelement,
      "setting audio parameter type %d (%s) to value %d", type,
      audio_name[type], value);
  GST_V4L_CHECK_OPEN (v4lelement);

  /* read-modify-write, as with the picture parameters */
  vau.audio = audionum;
  if (ioctl (v4lelement->video_fd, VIDIOCGAUDIO, &vau) < 0) {
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
        ("Error getting audio parameters: %s", g_strerror (errno)));
    return FALSE;
  }

  switch (type) {
    case V4L_AUDIO_MUTE:
      /* only attempt if the device reports the mute capability flag */
      if (!(vau.flags & VIDEO_AUDIO_MUTABLE)) {
        GST_ELEMENT_ERROR (v4lelement, CORE, NOT_IMPLEMENTED, (NULL),
            ("Error setting audio mute: (un)setting mute is not supported"));
        return FALSE;
      }
      if (value)
        vau.flags |= VIDEO_AUDIO_MUTE;
      else
        vau.flags &= ~VIDEO_AUDIO_MUTE;
      break;
    case V4L_AUDIO_VOLUME:
      /* only attempt if the device reports the volume capability flag */
      if (!(vau.flags & VIDEO_AUDIO_VOLUME)) {
        GST_ELEMENT_ERROR (v4lelement, CORE, NOT_IMPLEMENTED, (NULL),
            ("Error setting audio volume: setting volume is not supported"));
        return FALSE;
      }
      vau.volume = value;
      break;
    case V4L_AUDIO_MODE:
      vau.mode = value;
      break;
    default:
      GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
          ("Error setting audio parameters: unknown type %d", type));
      return FALSE;
  }

  if (ioctl (v4lelement->video_fd, VIDIOCSAUDIO, &vau) < 0) {
    GST_ELEMENT_ERROR (v4lelement, RESOURCE, SETTINGS, (NULL),
        ("Error setting audio parameters: %s", g_strerror (errno)));
    return FALSE;
  }

  return TRUE;
}

View file

@ -1,154 +0,0 @@
/* GStreamer
*
* v4l_calls.h: header for generic V4L calls
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __V4L_CALLS_H__
#define __V4L_CALLS_H__
#include "gstv4lelement.h"
#include "gst/gst-i18n-plugin.h"
G_BEGIN_DECLS
/* simple check whether the device is open
 * NOTE(review): fd > 0, so a valid fd of 0 would be reported as closed */
#define GST_V4L_IS_OPEN(element) \
  (GST_V4LELEMENT (element)->video_fd > 0)

/* check whether the device is 'active' (a capture buffer is set) */
#define GST_V4L_IS_ACTIVE(element) \
  (GST_V4LELEMENT (element)->buffer != NULL)

/* check whether the device advertises overlay capability */
#define GST_V4L_IS_OVERLAY(element) \
  (GST_V4LELEMENT (element)->vcap.type & VID_TYPE_OVERLAY)

/* checks whether the current v4lelement has already been open()'ed or not;
 * on failure posts an element error and does `return FALSE' in the caller */
#define GST_V4L_CHECK_OPEN(element) \
  if (!GST_V4L_IS_OPEN (element)) \
  { \
    GST_ELEMENT_ERROR (element, RESOURCE, TOO_LAZY, \
        (_("Device is not open.")), (NULL)); \
    return FALSE; \
  }

/* checks whether the current v4lelement is close()'ed or whether it is still open */
#define GST_V4L_CHECK_NOT_OPEN(element) \
  if (GST_V4L_IS_OPEN (element)) \
  { \
    GST_ELEMENT_ERROR (element, RESOURCE, TOO_LAZY, \
        (_("Device is open.")), (NULL)); \
    return FALSE; \
  }

/* checks whether the current v4lelement does video overlay */
#define GST_V4L_CHECK_OVERLAY(element) \
  if (!(element->vcap.type & VID_TYPE_OVERLAY)) \
  { \
    GST_ELEMENT_ERROR (element, RESOURCE, TOO_LAZY, \
        (NULL), ("Device cannot handle overlay")); \
    return FALSE; \
  }

/* checks whether we're in capture mode or not */
#define GST_V4L_CHECK_ACTIVE(element) \
  if (!GST_V4L_IS_ACTIVE (element)) \
  { \
    GST_ELEMENT_ERROR (element, RESOURCE, SETTINGS, \
        (NULL), ("Device is not in streaming mode")); \
    return FALSE; \
  }

/* checks whether we're out of capture mode or not */
#define GST_V4L_CHECK_NOT_ACTIVE(element) \
  if (GST_V4L_IS_ACTIVE (element)) \
  { \
    GST_ELEMENT_ERROR (element, RESOURCE, SETTINGS, \
        (NULL), ("Device is in streaming mode")); \
    return FALSE; \
  }

/* picture properties; the order matches the picture_name[] label table
 * in v4l_calls.c */
typedef enum {
  V4L_PICTURE_HUE = 0,
  V4L_PICTURE_BRIGHTNESS,
  V4L_PICTURE_CONTRAST,
  V4L_PICTURE_SATURATION,
} GstV4lPictureType;

/* audio properties; the order matches the audio_name[] label table
 * in v4l_calls.c */
typedef enum {
  V4L_AUDIO_VOLUME = 0,
  V4L_AUDIO_MUTE,
  V4L_AUDIO_MODE,               /* stereo, mono, ... (see videodev.h) */
} GstV4lAudioType;
/* open/close the device */
gboolean gst_v4l_open (GstV4lElement *v4lelement);
gboolean gst_v4l_close (GstV4lElement *v4lelement);

/* norm control (norm = VIDEO_MODE_{PAL|NTSC|SECAM|AUTO}) */
gboolean gst_v4l_get_chan_norm (GstV4lElement *v4lelement,
    gint *channel, gint *norm);
gboolean gst_v4l_set_chan_norm (GstV4lElement *v4lelement,
    gint channel, gint norm);
/* returns a GList of GstTunerChannel; caller owns list and refs */
GList *gst_v4l_get_chan_names (GstV4lElement *v4lelement);

/* frequency control */
gboolean gst_v4l_get_signal (GstV4lElement *v4lelement,
    gint tunernum, guint *signal);
gboolean gst_v4l_get_frequency (GstV4lElement *v4lelement,
    gint tunernum, gulong *frequency);
gboolean gst_v4l_set_frequency (GstV4lElement *v4lelement,
    gint tunernum, gulong frequency);

/* picture control */
gboolean gst_v4l_get_picture (GstV4lElement *v4lelement,
    GstV4lPictureType type, gint *value);
gboolean gst_v4l_set_picture (GstV4lElement *v4lelement,
    GstV4lPictureType type, gint value);

/* audio control */
gboolean gst_v4l_get_audio (GstV4lElement *v4lelement,
    gint audionum, GstV4lAudioType type, gint *value);
gboolean gst_v4l_set_audio (GstV4lElement *v4lelement,
    gint audionum, GstV4lAudioType type, gint value);

/* functions that v4lsrc needs */
gboolean gst_v4l_set_window_properties (GstV4lElement * v4lelement);
gboolean gst_v4l_get_capabilities (GstV4lElement * v4lelement);

G_END_DECLS

#endif /* __V4L_CALLS_H__ */

View file

@ -1,525 +0,0 @@
/* GStreamer
*
* v4lmjpegsink_calls.c: functions for hardware MJPEG video sink
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <stdlib.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <string.h>
#include <errno.h>
#include "v4lmjpegsink_calls.h"
/* On some systems MAP_FAILED seems to be missing */
#ifndef MAP_FAILED
#define MAP_FAILED ( (caddr_t) -1 )
#endif
GST_DEBUG_CATEGORY_EXTERN (v4lmjpegsink_debug);
#define GST_CAT_DEFAULT v4lmjpegsink_debug
/******************************************************
 * gst_v4lmjpegsink_sync_thread()
 *   thread keeps track of played frames: waits for each buffer in
 *   round-robin order to be queued, MJPIOC_SYNCs on it, and marks
 *   it free again.  Frame states in isqueued_queued_frames[]:
 *   0 = free, 1 = queued, -1 = error/shutdown.
 ******************************************************/
static void *
gst_v4lmjpegsink_sync_thread (void *arg)
{
  GstV4lMjpegSink *v4lmjpegsink = GST_V4LMJPEGSINK (arg);
  gint frame = 0;               /* frame that we're currently syncing on */

  GST_DEBUG_OBJECT (v4lmjpegsink, "starting sync thread");

#if 0
  /* Allow easy shutting down by other processes... */
  pthread_setcancelstate (PTHREAD_CANCEL_ENABLE, NULL);
  pthread_setcanceltype (PTHREAD_CANCEL_ASYNCHRONOUS, NULL);
#endif

  while (1) {
    /* wait until frame `frame' is queued; any state other than 1 after
     * the wait means shutdown or error, so exit the thread.
     * NOTE(review): g_cond_wait is guarded by `if', not `while' — a
     * spurious wakeup with the flag still 0 would make this exit */
    g_mutex_lock (v4lmjpegsink->mutex_queued_frames);
    if (!v4lmjpegsink->isqueued_queued_frames[frame]) {
      g_cond_wait (v4lmjpegsink->cond_queued_frames[frame],
          v4lmjpegsink->mutex_queued_frames);
    }
    if (v4lmjpegsink->isqueued_queued_frames[frame] != 1) {
      g_mutex_unlock (v4lmjpegsink->mutex_queued_frames);
      goto end;
    }
    g_mutex_unlock (v4lmjpegsink->mutex_queued_frames);

    GST_DEBUG_OBJECT (v4lmjpegsink, "thread-syncing on next frame");
    if (ioctl (GST_V4LELEMENT (v4lmjpegsink)->video_fd, MJPIOC_SYNC,
            &(v4lmjpegsink->bsync)) < 0) {
      GST_ELEMENT_ERROR (v4lmjpegsink, RESOURCE, SYNC, (NULL),
          ("Failed to sync on frame %d: %s", frame, g_strerror (errno)));
      /* flag the frame as failed (-1) and wake up anyone waiting on it */
      g_mutex_lock (v4lmjpegsink->mutex_queued_frames);
      v4lmjpegsink->isqueued_queued_frames[frame] = -1;
      g_cond_broadcast (v4lmjpegsink->cond_queued_frames[frame]);
      g_mutex_unlock (v4lmjpegsink->mutex_queued_frames);
      goto end;
    } else {
      /* be sure that we're not confusing */
      if (frame != v4lmjpegsink->bsync.frame) {
        GST_ELEMENT_ERROR (v4lmjpegsink, CORE, TOO_LAZY, (NULL),
            ("Internal error: frame number confusion"));
        goto end;
      }
      /* frame has been played out: mark free (0) and wake up waiters */
      g_mutex_lock (v4lmjpegsink->mutex_queued_frames);
      v4lmjpegsink->isqueued_queued_frames[frame] = 0;
      g_cond_broadcast (v4lmjpegsink->cond_queued_frames[frame]);
      g_mutex_unlock (v4lmjpegsink->mutex_queued_frames);
    }
    frame = (frame + 1) % v4lmjpegsink->breq.count;
  }

end:
  GST_DEBUG_OBJECT (v4lmjpegsink, "Sync thread got signalled to exit");
  g_thread_exit (NULL);
  return NULL;
}
/******************************************************
 * gst_v4lmjpegsink_queue_frame()
 *   queue frame @num for playback and mark it as queued (1) so
 *   the sync thread picks it up
 * return value: TRUE on success, FALSE on error
 ******************************************************/
static gboolean
gst_v4lmjpegsink_queue_frame (GstV4lMjpegSink * v4lmjpegsink, gint num)
{
  gint fd = GST_V4LELEMENT (v4lmjpegsink)->video_fd;

  GST_DEBUG_OBJECT (v4lmjpegsink, "queueing frame %d", num);

  /* hand the buffer to the driver for playback */
  if (ioctl (fd, MJPIOC_QBUF_PLAY, &num) < 0) {
    GST_ELEMENT_ERROR (v4lmjpegsink, RESOURCE, WRITE, (NULL),
        ("Failed to queue frame %d: %s", num, g_strerror (errno)));
    return FALSE;
  }

  /* flag the frame queued and wake up the sync thread */
  g_mutex_lock (v4lmjpegsink->mutex_queued_frames);
  v4lmjpegsink->isqueued_queued_frames[num] = 1;
  g_cond_broadcast (v4lmjpegsink->cond_queued_frames[num]);
  g_mutex_unlock (v4lmjpegsink->mutex_queued_frames);

  return TRUE;
}
/******************************************************
 * gst_v4lmjpegsink_sync_frame()
 *   wait for a frame to be finished playing; advances
 *   current_frame round-robin and returns its index in *num
 * return value: TRUE on success, FALSE on error
 ******************************************************/
static gboolean
gst_v4lmjpegsink_sync_frame (GstV4lMjpegSink * v4lmjpegsink, gint * num)
{
  GST_DEBUG_OBJECT (v4lmjpegsink, "syncing on next frame");

  /* calculate next frame */
  v4lmjpegsink->current_frame =
      (v4lmjpegsink->current_frame + 1) % v4lmjpegsink->breq.count;
  *num = v4lmjpegsink->current_frame;

  /* wait while the frame is still queued (1); the sync thread sets it to
   * 0 when played or -1 on error.
   * NOTE(review): g_cond_wait is guarded by `if', not `while' — a spurious
   * wakeup with the flag still 1 would fall through to the error path */
  g_mutex_lock (v4lmjpegsink->mutex_queued_frames);
  if (v4lmjpegsink->isqueued_queued_frames[*num] == 1) {
    g_cond_wait (v4lmjpegsink->cond_queued_frames[*num],
        v4lmjpegsink->mutex_queued_frames);
  }
  if (v4lmjpegsink->isqueued_queued_frames[*num] != 0) {
    g_mutex_unlock (v4lmjpegsink->mutex_queued_frames);
    return FALSE;
  } else
    g_mutex_unlock (v4lmjpegsink->mutex_queued_frames);

  return TRUE;
}
/******************************************************
 * gst_v4lmjpegsink_set_buffer()
 *   store the requested buffer layout in breq; the actual
 *   allocation happens later via MJPIOC_REQBUFS
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4lmjpegsink_set_buffer (GstV4lMjpegSink * v4lmjpegsink,
    gint numbufs, gint bufsize)
{
  GstV4lElement *v4lelement = GST_V4LELEMENT (v4lmjpegsink);

  GST_DEBUG_OBJECT (v4lmjpegsink,
      "setting buffer info to numbufs = %d, bufsize = %d KB", numbufs, bufsize);
  GST_V4L_CHECK_OPEN (v4lelement);
  GST_V4L_CHECK_NOT_ACTIVE (v4lelement);

  /* bufsize is given in KB */
  v4lmjpegsink->breq.count = numbufs;
  v4lmjpegsink->breq.size = bufsize * 1024;

  return TRUE;
}
/******************************************************
 * gst_v4lmjpegsink_set_playback()
 *   set playback options (video, interlacing, etc.): derives the
 *   zoran decimation factors (HorDcm/VerDcm/TmpDcm) and on-device
 *   image placement from the requested size/offset, then commits
 *   them with MJPIOC_S_PARAMS
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4lmjpegsink_set_playback (GstV4lMjpegSink * v4lmjpegsink,
    gint width,
    gint height, gint x_offset, gint y_offset, gint norm, gint interlacing)
{
  gint mw, mh;
  struct mjpeg_params bparm;

  GST_DEBUG_OBJECT (v4lmjpegsink,
      "setting size=%dx%d, X/Y offsets=%d/%d, norm=%d, interlacing=%d\n",
      width, height, x_offset, y_offset, norm, interlacing);
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lmjpegsink));
  /*GST_V4L_CHECK_NOT_ACTIVE(GST_V4LELEMENT(v4lmjpegsink)); */

  /* start from the driver's current parameters and modify them */
  if (ioctl (GST_V4LELEMENT (v4lmjpegsink)->video_fd, MJPIOC_G_PARAMS,
          &bparm) < 0) {
    GST_ELEMENT_ERROR (v4lmjpegsink, RESOURCE, SETTINGS, (NULL),
        GST_ERROR_SYSTEM);
    return FALSE;
  }

  bparm.input = 0;
  bparm.norm = norm;
  bparm.decimation = 0;         /* we'll set proper values later on */

  /* maxwidth is broken on marvel cards */
  mw = GST_V4LELEMENT (v4lmjpegsink)->vcap.maxwidth;
  if (mw != 768 && mw != 640)
    mw = 720;
  mh = (norm == VIDEO_MODE_NTSC ? 480 : 576);

  if (width > mw || height > mh) {
    GST_ELEMENT_ERROR (v4lmjpegsink, RESOURCE, TOO_LAZY, (NULL),
        ("Video dimensions (%dx%d) are larger than device max (%dx%d)",
            width, height, mw, mh));
    return FALSE;
  }

  /* horizontal decimation: pick the largest factor that still fits */
  if (width <= mw / 4)
    bparm.HorDcm = 4;
  else if (width <= mw / 2)
    bparm.HorDcm = 2;
  else
    bparm.HorDcm = 1;

  /* TODO: add proper interlacing handling */
#if 0
  if (interlacing != INTERLACING_NOT_INTERLACED) {
    bparm.field_per_buff = 2;
    bparm.TmpDcm = 1;
    if (height <= mh / 2)
      bparm.VerDcm = 2;
    else
      bparm.VerDcm = 1;
  } else
#endif
  {
    /* non-interlaced: one field per buffer, so only half the full
     * height is available */
    if (height > mh / 2) {
      GST_ELEMENT_ERROR (v4lmjpegsink, RESOURCE, TOO_LAZY, (NULL),
          ("Video dimensions (%dx%d) too large for non-interlaced playback (%dx%d)",
              width, height, mw, mh / 2));
      return FALSE;
    }
    bparm.field_per_buff = 1;
    bparm.TmpDcm = 2;
    if (height <= mh / 4)
      bparm.VerDcm = 2;
    else
      bparm.VerDcm = 1;
  }

  /* TODO: add proper interlacing handling */
#if 0
  bparm.odd_even = (interlacing == INTERLACING_TOP_FIRST);
#endif

  bparm.quality = 100;
  bparm.img_width = bparm.HorDcm * width;
  bparm.img_height = bparm.VerDcm * height / bparm.field_per_buff;

  /* image X/Y offset on device; negative offset means "center",
   * otherwise clamp so the image stays on-screen */
  if (x_offset < 0)
    bparm.img_x = (mw - bparm.img_width) / 2;
  else {
    if (x_offset + bparm.img_width > mw)
      bparm.img_x = mw - bparm.img_width;
    else
      bparm.img_x = x_offset;
  }

  if (y_offset < 0)
    bparm.img_y = (mh / 2 - bparm.img_height) / 2;
  else {
    if (y_offset + bparm.img_height * 2 > mh)
      bparm.img_y = mh / 2 - bparm.img_height;
    else
      bparm.img_y = y_offset / 2;
  }

  /* commit the modified parameters */
  if (ioctl (GST_V4LELEMENT (v4lmjpegsink)->video_fd, MJPIOC_S_PARAMS,
          &bparm) < 0) {
    GST_ELEMENT_ERROR (v4lmjpegsink, RESOURCE, SETTINGS, (NULL),
        GST_ERROR_SYSTEM);
    return FALSE;
  }

  return TRUE;
}
/******************************************************
* gst_v4lmjpegsink_playback_init()
* initialize playback system, set up buffer, etc.
* return value: TRUE on success, FALSE on error
******************************************************/
/* Initialize the playback subsystem: request the buffers configured by
 * gst_v4lmjpegsink_set_buffer(), mmap() them, and allocate the per-frame
 * queue-tracking state (mutex, state array, one GCond per buffer).
 * Returns TRUE on success, FALSE on error (an element error is posted).
 *
 * Fix: the original leaked the mutex, earlier allocations and the mmap'd
 * buffer when one of the malloc() calls failed; all failure paths now
 * release what was acquired before them. */
gboolean
gst_v4lmjpegsink_playback_init (GstV4lMjpegSink * v4lmjpegsink)
{
  gint n;

  GST_DEBUG_OBJECT (v4lmjpegsink, "initting playback subsystem");
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lmjpegsink));
  GST_V4L_CHECK_NOT_ACTIVE (GST_V4LELEMENT (v4lmjpegsink));

  /* Request buffers */
  if (ioctl (GST_V4LELEMENT (v4lmjpegsink)->video_fd, MJPIOC_REQBUFS,
          &(v4lmjpegsink->breq)) < 0) {
    GST_ELEMENT_ERROR (v4lmjpegsink, RESOURCE, READ, (NULL), GST_ERROR_SYSTEM);
    return FALSE;
  }

  GST_INFO_OBJECT (v4lmjpegsink, "Got %ld buffers of size %ld KB",
      v4lmjpegsink->breq.count, v4lmjpegsink->breq.size / 1024);

  /* Map the buffers */
  GST_V4LELEMENT (v4lmjpegsink)->buffer = mmap (0,
      v4lmjpegsink->breq.count * v4lmjpegsink->breq.size,
      PROT_READ | PROT_WRITE, MAP_SHARED,
      GST_V4LELEMENT (v4lmjpegsink)->video_fd, 0);
  if (GST_V4LELEMENT (v4lmjpegsink)->buffer == MAP_FAILED) {
    GST_ELEMENT_ERROR (v4lmjpegsink, RESOURCE, TOO_LAZY, (NULL),
        ("Error mapping video buffers: %s", g_strerror (errno)));
    GST_V4LELEMENT (v4lmjpegsink)->buffer = NULL;
    return FALSE;
  }

  /* allocate/init the GThread thingies */
  v4lmjpegsink->mutex_queued_frames = g_mutex_new ();
  v4lmjpegsink->isqueued_queued_frames = (gint8 *)
      malloc (sizeof (gint8) * v4lmjpegsink->breq.count);
  if (!v4lmjpegsink->isqueued_queued_frames) {
    GST_ELEMENT_ERROR (v4lmjpegsink, RESOURCE, TOO_LAZY, (NULL),
        ("Failed to create queue tracker: %s", g_strerror (errno)));
    goto alloc_failed;
  }
  v4lmjpegsink->cond_queued_frames = (GCond **)
      malloc (sizeof (GCond *) * v4lmjpegsink->breq.count);
  if (!v4lmjpegsink->cond_queued_frames) {
    GST_ELEMENT_ERROR (v4lmjpegsink, RESOURCE, TOO_LAZY, (NULL),
        ("Failed to create queue condition holders: %s", g_strerror (errno)));
    goto alloc_failed;
  }
  for (n = 0; n < v4lmjpegsink->breq.count; n++)
    v4lmjpegsink->cond_queued_frames[n] = g_cond_new ();

  return TRUE;

  /* ERRORS */
alloc_failed:
  {
    /* release everything acquired above so a failed init doesn't leak */
    free (v4lmjpegsink->isqueued_queued_frames);
    v4lmjpegsink->isqueued_queued_frames = NULL;
    g_mutex_free (v4lmjpegsink->mutex_queued_frames);
    v4lmjpegsink->mutex_queued_frames = NULL;
    munmap (GST_V4LELEMENT (v4lmjpegsink)->buffer,
        v4lmjpegsink->breq.size * v4lmjpegsink->breq.count);
    GST_V4LELEMENT (v4lmjpegsink)->buffer = NULL;
    return FALSE;
  }
}
/******************************************************
* gst_v4lmjpegsink_playback_start()
* start playback system
* return value: TRUE on success, FALSE on error
******************************************************/
/* Start the playback subsystem: mark every buffer unqueued and spawn the
 * sync thread. Returns TRUE on success, FALSE on error.
 *
 * Fix: the GError pointer handed to g_thread_create() was uninitialized;
 * GLib requires the location a GError** points at to contain NULL, so a
 * garbage value here is undefined behavior. Also free the error after
 * reporting it instead of leaking it. */
gboolean
gst_v4lmjpegsink_playback_start (GstV4lMjpegSink * v4lmjpegsink)
{
  GError *error = NULL;
  gint n;

  GST_DEBUG_OBJECT (v4lmjpegsink, "starting playback");
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lmjpegsink));
  GST_V4L_CHECK_ACTIVE (GST_V4LELEMENT (v4lmjpegsink));

  /* mark all buffers as unqueued */
  for (n = 0; n < v4lmjpegsink->breq.count; n++)
    v4lmjpegsink->isqueued_queued_frames[n] = 0;
  v4lmjpegsink->current_frame = -1;

  /* create sync() thread */
  v4lmjpegsink->thread_queued_frames =
      g_thread_create (gst_v4lmjpegsink_sync_thread, (void *) v4lmjpegsink,
      TRUE, &error);
  if (!v4lmjpegsink->thread_queued_frames) {
    GST_ELEMENT_ERROR (v4lmjpegsink, RESOURCE, TOO_LAZY, (NULL),
        ("Failed to create sync thread: %s", error->message));
    g_error_free (error);
    return FALSE;
  }

  return TRUE;
}
/******************************************************
* gst_v4lmjpegsink_get_buffer()
* get address of a buffer
* return value: buffer's address or NULL
******************************************************/
/* Return the mmap'd address of playback buffer 'num', or NULL when the
 * element is not open/active or 'num' is out of range. */
guint8 *
gst_v4lmjpegsink_get_buffer (GstV4lMjpegSink * v4lmjpegsink, gint num)
{
  /*GST_DEBUG_OBJECT (v4lmjpegsink, gst_v4lmjpegsink_get_buffer(), num = %d", num); */
  if (num < 0 || num >= v4lmjpegsink->breq.count)
    return NULL;
  if (!GST_V4L_IS_OPEN (GST_V4LELEMENT (v4lmjpegsink)) ||
      !GST_V4L_IS_ACTIVE (GST_V4LELEMENT (v4lmjpegsink)))
    return NULL;

  /* buffers live back-to-back in one mapping, breq.size bytes apart */
  return GST_V4LELEMENT (v4lmjpegsink)->buffer +
      num * v4lmjpegsink->breq.size;
}
/******************************************************
* gst_v4lmjpegsink_play_frame()
* queue a new buffer
* return value: TRUE on success, FALSE on error
******************************************************/
/* Queue buffer 'num' on the device for playback.
 * Returns TRUE on success, FALSE on error. */
gboolean
gst_v4lmjpegsink_play_frame (GstV4lMjpegSink * v4lmjpegsink, gint num)
{
  GST_DEBUG_OBJECT (v4lmjpegsink, "playing frame %d", num);
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lmjpegsink));
  GST_V4L_CHECK_ACTIVE (GST_V4LELEMENT (v4lmjpegsink));

  /* thin wrapper: the actual ioctl work happens in queue_frame() */
  return gst_v4lmjpegsink_queue_frame (v4lmjpegsink, num);
}
/******************************************************
* gst_v4lmjpegsink_wait_frame()
* wait for buffer to be actually played
* return value: TRUE on success, FALSE on error
******************************************************/
/* Block until the next queued buffer has been played; its index is
 * stored in *num. Returns TRUE on success, FALSE on error. */
gboolean
gst_v4lmjpegsink_wait_frame (GstV4lMjpegSink * v4lmjpegsink, gint * num)
{
  GST_DEBUG_OBJECT (v4lmjpegsink,
      "waiting for next frame to be finished playing");
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lmjpegsink));
  GST_V4L_CHECK_ACTIVE (GST_V4LELEMENT (v4lmjpegsink));

  /* delegates to the sync helper, which fills in *num */
  return gst_v4lmjpegsink_sync_frame (v4lmjpegsink, num);
}
/******************************************************
* gst_v4lmjpegsink_playback_stop()
* stop playback system and sync on remaining frames
* return value: TRUE on success, FALSE on error
******************************************************/
/* Stop the playback subsystem: sync on one finished frame, requeue it so
 * the sync thread has a final buffer to wait on, then join that thread.
 * Returns TRUE on success, FALSE on error. */
gboolean
gst_v4lmjpegsink_playback_stop (GstV4lMjpegSink * v4lmjpegsink)
{
  gint num;

  GST_DEBUG_OBJECT (v4lmjpegsink, "stopping playback");
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lmjpegsink));
  GST_V4L_CHECK_ACTIVE (GST_V4LELEMENT (v4lmjpegsink));

  /* mark next buffer as wrong */
  if (!gst_v4lmjpegsink_sync_frame (v4lmjpegsink, &num) ||
      !gst_v4lmjpegsink_queue_frame (v4lmjpegsink, num)) {
    return FALSE;
  }

  /* .. and wait for all buffers to be queued on */
  g_thread_join (v4lmjpegsink->thread_queued_frames);

  return TRUE;
}
/******************************************************
* gst_v4lmjpegsink_playback_deinit()
* deinitialize the playback system and unmap buffer
* return value: TRUE on success, FALSE on error
******************************************************/
/* Tear down the playback subsystem: free the queue-tracking state
 * allocated in playback_init() and unmap the buffers.
 * Returns TRUE on success, FALSE on error. */
gboolean
gst_v4lmjpegsink_playback_deinit (GstV4lMjpegSink * v4lmjpegsink)
{
  int i;

  GST_DEBUG_OBJECT (v4lmjpegsink, "quitting playback subsystem");
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lmjpegsink));
  GST_V4L_CHECK_ACTIVE (GST_V4LELEMENT (v4lmjpegsink));

  /* release the GThread state created in playback_init() */
  for (i = 0; i < v4lmjpegsink->breq.count; i++)
    g_cond_free (v4lmjpegsink->cond_queued_frames[i]);
  free (v4lmjpegsink->cond_queued_frames);
  free (v4lmjpegsink->isqueued_queued_frames);
  g_mutex_free (v4lmjpegsink->mutex_queued_frames);

  /* unmap the buffer */
  munmap (GST_V4LELEMENT (v4lmjpegsink)->buffer,
      v4lmjpegsink->breq.size * v4lmjpegsink->breq.count);
  GST_V4LELEMENT (v4lmjpegsink)->buffer = NULL;

  return TRUE;
}

View file

@ -1,62 +0,0 @@
/* GStreamer
*
* v4lmjpegsink_calls.c: functions for hardware MJPEG video sink
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __V4L_MJPEG_SINK_CALLS_H__
#define __V4L_MJPEG_SINK_CALLS_H__

#include "gstv4lmjpegsink.h"
#include "v4l_calls.h"

#ifdef __cplusplus
extern "C" {
#endif /* __cplusplus */

/* frame playback on device */

/* store buffer count/size (KB) to request later; device must be open,
 * not active */
gboolean gst_v4lmjpegsink_set_buffer (GstV4lMjpegSink *v4lmjpegsink,
    gint numbufs,
    gint bufsize);
/* configure playback geometry via MJPIOC_G_PARAMS/MJPIOC_S_PARAMS */
gboolean gst_v4lmjpegsink_set_playback (GstV4lMjpegSink *v4lmjpegsink,
    gint width,
    gint height,
    gint x_offset,
    gint y_offset,
    gint norm,
    gint interlacing);
/* request + mmap buffers, allocate queue tracking state */
gboolean gst_v4lmjpegsink_playback_init (GstV4lMjpegSink *v4lmjpegsink);
/* spawn the sync thread and begin playback */
gboolean gst_v4lmjpegsink_playback_start (GstV4lMjpegSink *v4lmjpegsink);
/* address of mmap'd buffer 'num', or NULL */
guint8 * gst_v4lmjpegsink_get_buffer (GstV4lMjpegSink *v4lmjpegsink,
    gint num);
/* queue buffer 'num' for playback */
gboolean gst_v4lmjpegsink_play_frame (GstV4lMjpegSink *v4lmjpegsink,
    gint num);
/* wait for the next buffer to finish playing; index returned in *num */
gboolean gst_v4lmjpegsink_wait_frame (GstV4lMjpegSink *v4lmjpegsink,
    gint *num);
/* stop playback and join the sync thread */
gboolean gst_v4lmjpegsink_playback_stop (GstV4lMjpegSink *v4lmjpegsink);
/* free tracking state and unmap buffers */
gboolean gst_v4lmjpegsink_playback_deinit (GstV4lMjpegSink *v4lmjpegsink);

#ifdef __cplusplus
}
#endif /* __cplusplus */

#endif /* __V4L_MJPEG_SINK_CALLS_H__ */

View file

@ -1,577 +0,0 @@
/* GStreamer
*
* v4lmjpegsrc_calls.c: functions for hardware MJPEG video source
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <string.h>
#include <errno.h>
#include "v4lmjpegsrc_calls.h"
/* On some systems MAP_FAILED seems to be missing */
#ifndef MAP_FAILED
#define MAP_FAILED ( (caddr_t) -1 )
#endif
#define MIN_BUFFERS_QUEUED 2
GST_DEBUG_CATEGORY_EXTERN (v4lmjpegsrc_debug);
#define GST_CAT_DEFAULT v4lmjpegsrc_debug
/* Per-buffer capture queue states tracked in frame_queue_state[]. */
enum
{
  QUEUE_STATE_ERROR = -1,       /* sync failed on this buffer */
  QUEUE_STATE_READY_FOR_QUEUE,  /* may be handed to the driver */
  QUEUE_STATE_QUEUED,           /* driver owns it (MJPIOC_QBUF_CAPT) */
  QUEUE_STATE_SYNCED,           /* captured, app owns it until requeue */
};
/******************************************************
* gst_v4lmjpegsrc_queue_frame():
* queue a frame for capturing
* return value: TRUE on success, FALSE on error
******************************************************/
/******************************************************
 * gst_v4lmjpegsrc_queue_frame():
 *   hand buffer 'num' to the driver for capturing
 * return value: TRUE on success, FALSE on error
 ******************************************************/
static gboolean
gst_v4lmjpegsrc_queue_frame (GstV4lMjpegSrc * v4lmjpegsrc, gint num)
{
  GST_DEBUG_OBJECT (v4lmjpegsrc, "queueing frame %d", num);

  /* only a buffer we own may be handed back to the driver */
  if (v4lmjpegsrc->frame_queue_state[num] != QUEUE_STATE_READY_FOR_QUEUE)
    return FALSE;

  if (ioctl (GST_V4LELEMENT (v4lmjpegsrc)->video_fd, MJPIOC_QBUF_CAPT,
          &num) < 0) {
    GST_ELEMENT_ERROR (v4lmjpegsrc, RESOURCE, READ, (NULL),
        ("Error queueing a buffer (%d): %s", num, g_strerror (errno)));
    return FALSE;
  }

  v4lmjpegsrc->frame_queue_state[num] = QUEUE_STATE_QUEUED;
  v4lmjpegsrc->num_queued++;
  return TRUE;
}
/******************************************************
* gst_v4lmjpegsrc_sync_next_frame():
* sync on the next frame for capturing
* return value: TRUE on success, FALSE on error
******************************************************/
/* Wait (MJPIOC_SYNC) for the oldest queued buffer to be captured; the
 * buffer's index is stored in *num and its state set to SYNCED.
 * Returns TRUE on success, FALSE on error or if nothing is queued. */
static gboolean
gst_v4lmjpegsrc_sync_next_frame (GstV4lMjpegSrc * v4lmjpegsrc, gint * num)
{
  GST_DEBUG_OBJECT (v4lmjpegsrc, "syncing on next frame");

  if (v4lmjpegsrc->num_queued <= 0) {
    return FALSE;
  }

  /* retry the ioctl if it was interrupted by a signal */
  while (ioctl (GST_V4LELEMENT (v4lmjpegsrc)->video_fd,
          MJPIOC_SYNC, &(v4lmjpegsrc->bsync)) < 0) {
    if (errno != EINTR) {
      GST_ELEMENT_ERROR (v4lmjpegsrc, RESOURCE, SYNC, (NULL), GST_ERROR_SYSTEM);
      return FALSE;
    }
    GST_DEBUG_OBJECT (v4lmjpegsrc, "Sync got interrupted");
  }

  /* the driver reports which frame completed in bsync.frame */
  *num = v4lmjpegsrc->bsync.frame;
  v4lmjpegsrc->frame_queue_state[*num] = QUEUE_STATE_SYNCED;
  v4lmjpegsrc->num_queued--;

  return TRUE;
}
/******************************************************
* gst_v4lmjpegsrc_set_buffer():
* set buffer parameters (size/count)
* return value: TRUE on success, FALSE on error
******************************************************/
/* Record the requested capture buffer count and per-buffer size (in KB);
 * the buffers are actually requested in capture_init().
 * Only legal while the device is open but not yet active. */
gboolean
gst_v4lmjpegsrc_set_buffer (GstV4lMjpegSrc * v4lmjpegsrc,
    gint numbufs, gint bufsize)
{
  GST_DEBUG_OBJECT (v4lmjpegsrc,
      "setting buffer info to numbufs = %d, bufsize = %d KB", numbufs, bufsize);
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lmjpegsrc));
  GST_V4L_CHECK_NOT_ACTIVE (GST_V4LELEMENT (v4lmjpegsrc));

  /* breq.size is in bytes, the argument is in kilobytes */
  v4lmjpegsrc->breq.count = numbufs;
  v4lmjpegsrc->breq.size = bufsize * 1024;

  return TRUE;
}
/******************************************************
* gst_v4lmjpegsrc_set_capture():
* set capture parameters (simple)
* return value: TRUE on success, FALSE on error
******************************************************/
/* Configure simple MJPEG capture: a single decimation factor applied to
 * the full frame, plus JPEG quality. Derives end_width/end_height from
 * the device's maximum size and the current norm.
 * Returns TRUE on success, FALSE on error. */
gboolean
gst_v4lmjpegsrc_set_capture (GstV4lMjpegSrc * v4lmjpegsrc,
    gint decimation, gint quality)
{
  int norm, input, mw;
  struct mjpeg_params bparm;

  GST_DEBUG_OBJECT (v4lmjpegsrc, "setting decimation = %d, quality = %d",
      decimation, quality);
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lmjpegsrc));
  GST_V4L_CHECK_NOT_ACTIVE (GST_V4LELEMENT (v4lmjpegsrc));

  gst_v4l_get_chan_norm (GST_V4LELEMENT (v4lmjpegsrc), &input, &norm);

  /* Query params for capture */
  if (ioctl (GST_V4LELEMENT (v4lmjpegsrc)->video_fd, MJPIOC_G_PARAMS,
          &bparm) < 0) {
    GST_ELEMENT_ERROR (v4lmjpegsrc, RESOURCE, SETTINGS, (NULL),
        GST_ERROR_SYSTEM);
    return FALSE;
  }

  bparm.decimation = decimation;
  bparm.quality = quality;
  bparm.norm = norm;
  bparm.input = input;
  bparm.APP_len = 0;            /* no JPEG markers - TODO: this is definately not right for decimation==1 */

  /* cards reporting odd max widths get the standard 720/704 treatment */
  mw = GST_V4LELEMENT (v4lmjpegsrc)->vcap.maxwidth;
  if (mw != 768 && mw != 640) {
    if (decimation == 1)
      mw = 720;
    else
      mw = 704;
  }
  /* resulting capture size after decimation */
  v4lmjpegsrc->end_width = mw / decimation;
  v4lmjpegsrc->end_height = (norm == VIDEO_MODE_NTSC ? 480 : 576) / decimation;

  /* TODO: interlacing */

  /* Set params for capture */
  if (ioctl (GST_V4LELEMENT (v4lmjpegsrc)->video_fd, MJPIOC_S_PARAMS,
          &bparm) < 0) {
    GST_ELEMENT_ERROR (v4lmjpegsrc, RESOURCE, SETTINGS, (NULL),
        GST_ERROR_SYSTEM);
    return FALSE;
  }

  return TRUE;
}
/******************************************************
* gst_v4lmjpegsrc_set_capture_m():
* set capture parameters (advanced)
* return value: TRUE on success, FALSE on error
******************************************************/
/* Configure advanced MJPEG capture: explicit geometry (offsets, size) and
 * separate horizontal/vertical decimation. Negative offsets mean "center";
 * non-positive width/height mean "use the full frame".
 * Returns TRUE on success, FALSE on error.
 *
 * Fixes over the original:
 *  - end_height was assigned to end_width a second time, leaving
 *    end_height unset;
 *  - `(norm == NTSC) ? 480 : 576 - 2 * y_offset` applied the offset only
 *    to the PAL branch because ?: binds tighter than intended — the norm
 *    height is now computed once and parenthesized correctly;
 *  - the width/height modulo checks used HorDcm/VerDcm as reported by
 *    MJPIOC_G_PARAMS instead of the decimation actually being requested;
 *    the decimation fields are now filled in before validation. */
gboolean
gst_v4lmjpegsrc_set_capture_m (GstV4lMjpegSrc * v4lmjpegsrc,
    gint x_offset,
    gint y_offset,
    gint width, gint height, gint h_decimation, gint v_decimation, gint quality)
{
  gint norm, input;
  gint maxwidth, maxheight;
  struct mjpeg_params bparm;

  GST_DEBUG_OBJECT (v4lmjpegsrc, "setting x_offset = %d, y_offset = %d, "
      "width = %d, height = %d, h_decimation = %d, v_decimation = %d, quality = %d\n",
      x_offset, y_offset, width, height, h_decimation, v_decimation, quality);
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lmjpegsrc));
  GST_V4L_CHECK_NOT_ACTIVE (GST_V4LELEMENT (v4lmjpegsrc));

  gst_v4l_get_chan_norm (GST_V4LELEMENT (v4lmjpegsrc), &input, &norm);

  /* maxwidth is unreliable on some cards; fall back to 720 */
  if (GST_V4LELEMENT (v4lmjpegsrc)->vcap.maxwidth != 768 &&
      GST_V4LELEMENT (v4lmjpegsrc)->vcap.maxwidth != 640)
    maxwidth = 720;
  else
    maxwidth = GST_V4LELEMENT (v4lmjpegsrc)->vcap.maxwidth;
  /* full-frame height follows the TV norm */
  maxheight = (norm == VIDEO_MODE_NTSC) ? 480 : 576;

  /* Query params for capture */
  if (ioctl (GST_V4LELEMENT (v4lmjpegsrc)->video_fd, MJPIOC_G_PARAMS,
          &bparm) < 0) {
    GST_ELEMENT_ERROR (v4lmjpegsrc, RESOURCE, SETTINGS, (NULL),
        GST_ERROR_SYSTEM);
    return FALSE;
  }

  bparm.decimation = 0;
  bparm.quality = quality;
  bparm.norm = norm;
  bparm.input = input;
  bparm.APP_len = 0;            /* no JPEG markers - TODO: this is definately
                                 * not right for decimation==1 */
  /* fill in the decimation-derived fields now so the validation below
   * checks against the requested decimation, not stale driver values */
  bparm.HorDcm = h_decimation;
  bparm.VerDcm = (v_decimation == 4) ? 2 : 1;
  bparm.TmpDcm = (v_decimation == 1) ? 1 : 2;
  bparm.field_per_buff = (v_decimation == 1) ? 2 : 1;

  /* default/center geometry when width/height or offsets are unset */
  if (width <= 0) {
    if (x_offset < 0)
      x_offset = 0;
    width = (maxwidth == 720
        && h_decimation != 1) ? 704 : maxwidth - 2 * x_offset;
  } else {
    if (x_offset < 0)
      x_offset = (maxwidth - width) / 2;
  }
  if (height <= 0) {
    if (y_offset < 0)
      y_offset = 0;
    height = maxheight - 2 * y_offset;
  } else {
    if (y_offset < 0)
      y_offset = (maxheight - height) / 2;
  }

  if (width + x_offset > maxwidth) {
    GST_ELEMENT_ERROR (v4lmjpegsrc, RESOURCE, TOO_LAZY, (NULL),
        ("Image width+offset (%d) bigger than maximum (%d)",
            width + x_offset, maxwidth));
    return FALSE;
  }
  if ((width % (bparm.HorDcm * 16)) != 0) {
    GST_ELEMENT_ERROR (v4lmjpegsrc, STREAM, FORMAT, (NULL),
        ("Image width (%d) not multiple of %d (required for JPEG)",
            width, bparm.HorDcm * 16));
    return FALSE;
  }
  if (height + y_offset > maxheight) {
    GST_ELEMENT_ERROR (v4lmjpegsrc, RESOURCE, TOO_LAZY, (NULL),
        ("Image height+offset (%d) bigger than maximum (%d)",
            height + y_offset, maxheight));
    return FALSE;
  }
  /* RJ: Image height must only be a multiple of 8, but geom_height
   * is double the field height
   */
  if ((height % (bparm.VerDcm * 16)) != 0) {
    GST_ELEMENT_ERROR (v4lmjpegsrc, STREAM, FORMAT, (NULL),
        ("Image height (%d) not multiple of %d (required for JPEG)",
            height, bparm.VerDcm * 16));
    return FALSE;
  }

  bparm.img_x = x_offset;
  bparm.img_width = width;
  bparm.img_y = y_offset;
  bparm.img_height = height;

  /* resulting capture size after decimation */
  v4lmjpegsrc->end_width = width / h_decimation;
  v4lmjpegsrc->end_height = height / v_decimation;

  /* TODO: interlacing */

  /* Set params for capture */
  if (ioctl (GST_V4LELEMENT (v4lmjpegsrc)->video_fd, MJPIOC_S_PARAMS,
          &bparm) < 0) {
    GST_ELEMENT_ERROR (v4lmjpegsrc, RESOURCE, SETTINGS, (NULL),
        GST_ERROR_SYSTEM);
    return FALSE;
  }

  return TRUE;
}
/******************************************************
* gst_v4lmjpegsrc_capture_init():
* initialize the capture system
* return value: TRUE on success, FALSE on error
******************************************************/
/* Initialize the capture subsystem: request buffers (MJPIOC_REQBUFS),
 * allocate the queue-tracking state, and mmap() the buffers.
 * Returns TRUE on success, FALSE on error.
 * NOTE(review): if the mmap() fails, the mutex/cond/arrays allocated just
 * above are not released here — confirm whether the caller invokes
 * capture_deinit() on this failure path. */
gboolean
gst_v4lmjpegsrc_capture_init (GstV4lMjpegSrc * v4lmjpegsrc)
{
  GST_DEBUG_OBJECT (v4lmjpegsrc, "initting capture subsystem");
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lmjpegsrc));
  GST_V4L_CHECK_NOT_ACTIVE (GST_V4LELEMENT (v4lmjpegsrc));

  /* Request buffers */
  if (ioctl (GST_V4LELEMENT (v4lmjpegsrc)->video_fd,
          MJPIOC_REQBUFS, &(v4lmjpegsrc->breq)) < 0) {
    GST_ELEMENT_ERROR (v4lmjpegsrc, RESOURCE, READ, (NULL), GST_ERROR_SYSTEM);
    return FALSE;
  }

  /* streaming capture needs a minimum number of in-flight buffers */
  if (v4lmjpegsrc->breq.count < MIN_BUFFERS_QUEUED) {
    GST_ELEMENT_ERROR (v4lmjpegsrc, RESOURCE, READ, (NULL),
        ("Too little buffers. We got %ld, we want at least %d",
            v4lmjpegsrc->breq.count, MIN_BUFFERS_QUEUED));
    return FALSE;
  }

  GST_INFO_OBJECT (v4lmjpegsrc, "Got %ld buffers of size %ld KB",
      v4lmjpegsrc->breq.count, v4lmjpegsrc->breq.size / 1024);

  /* keep track of queued buffers */
  v4lmjpegsrc->frame_queue_state = (gint8 *)
      g_malloc (sizeof (gint8) * v4lmjpegsrc->breq.count);

  /* track how often to use each frame */
  v4lmjpegsrc->use_num_times = (gint *)
      g_malloc (sizeof (gint) * v4lmjpegsrc->breq.count);

  /* lock for the frame_state */
  v4lmjpegsrc->mutex_queue_state = g_mutex_new ();
  v4lmjpegsrc->cond_queue_state = g_cond_new ();

  /* Map the buffers */
  GST_V4LELEMENT (v4lmjpegsrc)->buffer = mmap (0,
      v4lmjpegsrc->breq.count * v4lmjpegsrc->breq.size,
      PROT_READ | PROT_WRITE, MAP_SHARED,
      GST_V4LELEMENT (v4lmjpegsrc)->video_fd, 0);
  if (GST_V4LELEMENT (v4lmjpegsrc)->buffer == MAP_FAILED) {
    GST_ELEMENT_ERROR (v4lmjpegsrc, RESOURCE, TOO_LAZY, (NULL),
        ("Error mapping video buffers: %s", g_strerror (errno)));
    GST_V4LELEMENT (v4lmjpegsrc)->buffer = NULL;
    return FALSE;
  }

  return TRUE;
}
/******************************************************
* gst_v4lmjpegsrc_capture_start():
* start streaming capture
* return value: TRUE on success, FALSE on error
******************************************************/
/* Start streaming capture: reset the queue bookkeeping and hand every
 * buffer to the driver. Returns TRUE on success, FALSE on error. */
gboolean
gst_v4lmjpegsrc_capture_start (GstV4lMjpegSrc * v4lmjpegsrc)
{
  int i;

  GST_DEBUG_OBJECT (v4lmjpegsrc, "starting capture");
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lmjpegsrc));
  GST_V4L_CHECK_ACTIVE (GST_V4LELEMENT (v4lmjpegsrc));

  g_mutex_lock (v4lmjpegsrc->mutex_queue_state);

  v4lmjpegsrc->queue_frame = 0;
  v4lmjpegsrc->num_queued = 0;
  v4lmjpegsrc->quit = FALSE;

  /* queueing every buffer kicks off streaming capture in the driver */
  for (i = 0; i < v4lmjpegsrc->breq.count; i++) {
    v4lmjpegsrc->frame_queue_state[i] = QUEUE_STATE_READY_FOR_QUEUE;
    if (!gst_v4lmjpegsrc_queue_frame (v4lmjpegsrc, i)) {
      g_mutex_unlock (v4lmjpegsrc->mutex_queue_state);
      gst_v4lmjpegsrc_capture_stop (v4lmjpegsrc);
      return FALSE;
    }
  }

  v4lmjpegsrc->is_capturing = TRUE;
  g_mutex_unlock (v4lmjpegsrc->mutex_queue_state);

  return TRUE;
}
/******************************************************
* gst_v4lmjpegsrc_grab_frame():
* grab one frame during streaming capture
* return value: TRUE on success, FALSE on error
******************************************************/
/* Grab one captured frame during streaming: keep enough buffers queued,
 * then sync on the next finished one. On success *num is the buffer index
 * and *size the number of valid bytes in it.
 * Returns TRUE on success, FALSE on error.
 *
 * Fix: the original returned FALSE on a sync failure while still holding
 * mutex_queue_state, deadlocking every later caller; the mutex is now
 * released on that path. */
gboolean
gst_v4lmjpegsrc_grab_frame (GstV4lMjpegSrc * v4lmjpegsrc,
    gint * num, gint * size)
{
  GST_DEBUG_OBJECT (v4lmjpegsrc, "grabbing frame");
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lmjpegsrc));
  GST_V4L_CHECK_ACTIVE (GST_V4LELEMENT (v4lmjpegsrc));

  g_mutex_lock (v4lmjpegsrc->mutex_queue_state);

  /* do we have enough frames? */
  while (v4lmjpegsrc->num_queued < MIN_BUFFERS_QUEUED ||
      v4lmjpegsrc->frame_queue_state[v4lmjpegsrc->queue_frame] ==
      QUEUE_STATE_READY_FOR_QUEUE) {
    /* wait until the next buffer in ring order is returned to us */
    while (v4lmjpegsrc->frame_queue_state[v4lmjpegsrc->queue_frame] !=
        QUEUE_STATE_READY_FOR_QUEUE && !v4lmjpegsrc->quit) {
      GST_DEBUG_OBJECT (v4lmjpegsrc,
          "Waiting for frames to become available (%d < %d)",
          v4lmjpegsrc->num_queued, MIN_BUFFERS_QUEUED);
      g_cond_wait (v4lmjpegsrc->cond_queue_state,
          v4lmjpegsrc->mutex_queue_state);
    }
    if (v4lmjpegsrc->quit) {
      g_mutex_unlock (v4lmjpegsrc->mutex_queue_state);
      return TRUE;              /* it won't get through anyway */
    }
    if (!gst_v4lmjpegsrc_queue_frame (v4lmjpegsrc, v4lmjpegsrc->queue_frame)) {
      g_mutex_unlock (v4lmjpegsrc->mutex_queue_state);
      return FALSE;
    }
    v4lmjpegsrc->queue_frame =
        (v4lmjpegsrc->queue_frame + 1) % v4lmjpegsrc->breq.count;
  }

  /* syncing on the buffer grabs it */
  if (!gst_v4lmjpegsrc_sync_next_frame (v4lmjpegsrc, num)) {
    g_mutex_unlock (v4lmjpegsrc->mutex_queue_state);
    return FALSE;
  }

  *size = v4lmjpegsrc->bsync.length;

  g_mutex_unlock (v4lmjpegsrc->mutex_queue_state);

  return TRUE;
}
/******************************************************
* gst_v4lmjpegsrc_get_buffer():
* get the memory address of a single buffer
* return value: TRUE on success, FALSE on error
******************************************************/
/* Return the mmap'd address of capture buffer 'num', or NULL when the
 * element is not open/active or 'num' is out of range. */
guint8 *
gst_v4lmjpegsrc_get_buffer (GstV4lMjpegSrc * v4lmjpegsrc, gint num)
{
  /*DEBUG("gst_v4lmjpegsrc_get_buffer(), num = %d", num); */
  if (num < 0 || num >= v4lmjpegsrc->breq.count)
    return NULL;
  if (!GST_V4L_IS_OPEN (GST_V4LELEMENT (v4lmjpegsrc)) ||
      !GST_V4L_IS_ACTIVE (GST_V4LELEMENT (v4lmjpegsrc)))
    return NULL;

  /* the buffers are laid out contiguously, breq.size bytes apart */
  return GST_V4LELEMENT (v4lmjpegsrc)->buffer + num * v4lmjpegsrc->breq.size;
}
/******************************************************
* gst_v4lmjpegsrc_requeue_frame():
* requeue a frame for capturing
* return value: TRUE on success, FALSE on error
******************************************************/
/* Mark a synced buffer as reusable and wake any thread waiting in
 * grab_frame(). Returns TRUE on success, FALSE on error.
 *
 * Fix: the invalid-state error path returned FALSE while still holding
 * mutex_queue_state; the mutex is now released before returning. */
gboolean
gst_v4lmjpegsrc_requeue_frame (GstV4lMjpegSrc * v4lmjpegsrc, gint num)
{
  GST_DEBUG_OBJECT (v4lmjpegsrc, "requeueing frame %d", num);
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lmjpegsrc));
  GST_V4L_CHECK_ACTIVE (GST_V4LELEMENT (v4lmjpegsrc));

  /* mark frame as 'ready to requeue' */
  g_mutex_lock (v4lmjpegsrc->mutex_queue_state);

  if (v4lmjpegsrc->frame_queue_state[num] != QUEUE_STATE_SYNCED) {
    GST_ELEMENT_ERROR (v4lmjpegsrc, RESOURCE, TOO_LAZY, (NULL),
        ("Invalid state %d (expected %d), can't requeue",
            v4lmjpegsrc->frame_queue_state[num], QUEUE_STATE_SYNCED));
    g_mutex_unlock (v4lmjpegsrc->mutex_queue_state);
    return FALSE;
  }

  v4lmjpegsrc->frame_queue_state[num] = QUEUE_STATE_READY_FOR_QUEUE;

  /* let an optional wait know */
  g_cond_broadcast (v4lmjpegsrc->cond_queue_state);

  g_mutex_unlock (v4lmjpegsrc->mutex_queue_state);

  return TRUE;
}
/******************************************************
* gst_v4lmjpegsrc_capture_stop():
* stop streaming capture
* return value: TRUE on success, FALSE on error
******************************************************/
/* Stop streaming capture: wake any waiter, then drain every buffer
 * still queued in the driver. Returns TRUE on success, FALSE on error. */
gboolean
gst_v4lmjpegsrc_capture_stop (GstV4lMjpegSrc * v4lmjpegsrc)
{
  int frame;

  GST_DEBUG_OBJECT (v4lmjpegsrc, "stopping capture");
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lmjpegsrc));
  GST_V4L_CHECK_ACTIVE (GST_V4LELEMENT (v4lmjpegsrc));

  g_mutex_lock (v4lmjpegsrc->mutex_queue_state);

  /* make an optional pending wait stop */
  v4lmjpegsrc->quit = TRUE;
  g_cond_broadcast (v4lmjpegsrc->cond_queue_state);

  /* sync on remaining frames */
  while (v4lmjpegsrc->num_queued > 0)
    gst_v4lmjpegsrc_sync_next_frame (v4lmjpegsrc, &frame);

  v4lmjpegsrc->is_capturing = FALSE;
  g_mutex_unlock (v4lmjpegsrc->mutex_queue_state);

  return TRUE;
}
/******************************************************
* gst_v4lmjpegsrc_capture_deinit():
* deinitialize the capture system
* return value: TRUE on success, FALSE on error
******************************************************/
/* Tear down the capture subsystem: unmap the buffers and free the
 * queue-tracking state allocated in capture_init().
 * Returns TRUE on success, FALSE on error. */
gboolean
gst_v4lmjpegsrc_capture_deinit (GstV4lMjpegSrc * v4lmjpegsrc)
{
  GST_DEBUG_OBJECT (v4lmjpegsrc, "quitting capture subsystem");
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lmjpegsrc));
  GST_V4L_CHECK_ACTIVE (GST_V4LELEMENT (v4lmjpegsrc));

  /* unmap the buffer */
  munmap (GST_V4LELEMENT (v4lmjpegsrc)->buffer,
      v4lmjpegsrc->breq.size * v4lmjpegsrc->breq.count);
  GST_V4LELEMENT (v4lmjpegsrc)->buffer = NULL;

  /* free buffer tracker */
  g_free (v4lmjpegsrc->use_num_times);
  g_free (v4lmjpegsrc->frame_queue_state);
  g_cond_free (v4lmjpegsrc->cond_queue_state);
  g_mutex_free (v4lmjpegsrc->mutex_queue_state);

  return TRUE;
}

View file

@ -1,66 +0,0 @@
/* GStreamer
*
* v4lmjpegsrc_calls.h: functions for hardware MJPEG video source
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef __V4L_MJPEG_SRC_CALLS_H__
#define __V4L_MJPEG_SRC_CALLS_H__

#include "gstv4lmjpegsrc.h"
#include "v4l_calls.h"

#ifdef __cplusplus
extern "C" {
#endif /* __cplusplus */

/* frame grabbing/capture */

/* store buffer count/size (KB) to request later; device must be open,
 * not active */
gboolean gst_v4lmjpegsrc_set_buffer (GstV4lMjpegSrc *v4lmjpegsrc,
    gint numbufs,
    gint bufsize);
/* simple capture setup: one decimation factor + JPEG quality */
gboolean gst_v4lmjpegsrc_set_capture (GstV4lMjpegSrc *v4lmjpegsrc,
    gint decimation,
    gint quality);
/* advanced capture setup: explicit geometry and per-axis decimation;
 * negative offsets center, non-positive width/height use the full frame */
gboolean gst_v4lmjpegsrc_set_capture_m (GstV4lMjpegSrc *v4lmjpegsrc,
    gint x_offset,
    gint y_offset,
    gint width,
    gint height,
    gint h_decimation,
    gint v_decimation,
    gint quality);
/* request + mmap buffers, allocate queue tracking state */
gboolean gst_v4lmjpegsrc_capture_init (GstV4lMjpegSrc *v4lmjpegsrc);
/* queue all buffers, starting streaming capture */
gboolean gst_v4lmjpegsrc_capture_start (GstV4lMjpegSrc *v4lmjpegsrc);
/* sync on the next captured frame; fills *num (buffer) and *size (bytes) */
gboolean gst_v4lmjpegsrc_grab_frame (GstV4lMjpegSrc *v4lmjpegsrc,
    gint *num,
    gint *size);
/* address of mmap'd buffer 'num', or NULL */
guint8 * gst_v4lmjpegsrc_get_buffer (GstV4lMjpegSrc *v4lmjpegsrc,
    gint num);
/* hand a synced buffer back for reuse */
gboolean gst_v4lmjpegsrc_requeue_frame (GstV4lMjpegSrc *v4lmjpegsrc,
    gint num);
/* drain queued buffers and stop capturing */
gboolean gst_v4lmjpegsrc_capture_stop (GstV4lMjpegSrc *v4lmjpegsrc);
/* unmap buffers and free tracking state */
gboolean gst_v4lmjpegsrc_capture_deinit (GstV4lMjpegSrc *v4lmjpegsrc);

#ifdef __cplusplus
}
#endif /* __cplusplus */

#endif /* __V4L_MJPEG_SRC_CALLS_H__ */

View file

@ -1,731 +0,0 @@
/* GStreamer
*
* v4lsrc_calls.c: generic V4L source functions
*
* Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include <config.h>
#endif
#include <stdlib.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <string.h>
#include <errno.h>
#include "v4lsrc_calls.h"
#include <sys/time.h>
/* number of buffers to be queued *at least* before syncing */
#define MIN_BUFFERS_QUEUED 2
/* On some systems MAP_FAILED seems to be missing */
#ifndef MAP_FAILED
#define MAP_FAILED ( (caddr_t) -1 )
#endif
GST_DEBUG_CATEGORY_EXTERN (v4l_debug);
#define GST_CAT_DEFAULT v4l_debug
#ifndef GST_DISABLE_GST_DEBUG
/* palette names */
/* Human-readable names indexed by the VIDEO_PALETTE_* value, used only
 * in debug output. Order must match the V4L1 palette constants. */
static const char *v4l_palette_name[] = {
  "",                           /* 0 */
  "grayscale",                  /* VIDEO_PALETTE_GREY */
  "Hi-420",                     /* VIDEO_PALETTE_HI420 */
  "16-bit RGB (RGB-565)",       /* VIDEO_PALETTE_RGB565 (was misspelled RB565) */
  "24-bit RGB",                 /* VIDEO_PALETTE_RGB24 */
  "32-bit RGB",                 /* VIDEO_PALETTE_RGB32 */
  "15-bit RGB (RGB-555)",       /* VIDEO_PALETTE_RGB555 */
  "YUV-4:2:2 (packed)",         /* VIDEO_PALETTE_YUV422 */
  "YUYV",                       /* VIDEO_PALETTE_YUYV */
  "UYVY",                       /* VIDEO_PALETTE_UYVY */
  "YUV-4:2:0 (packed)",         /* VIDEO_PALETTE_YUV420 */
  "YUV-4:1:1 (packed)",         /* VIDEO_PALETTE_YUV411 */
  "Raw",                        /* VIDEO_PALETTE_RAW */
  "YUV-4:2:2 (planar)",         /* VIDEO_PALETTE_YUV422P */
  "YUV-4:1:1 (planar)",         /* VIDEO_PALETTE_YUV411P */
  "YUV-4:2:0 (planar)/I420",    /* VIDEO_PALETTE_YUV420P */
  "YUV-4:1:0 (planar)"          /* VIDEO_PALETTE_YUV410P */
};
#endif
/******************************************************
* gst_v4lsrc_queue_frame():
* queue a frame for capturing
* (ie. instruct the hardware to start capture)
* Requires queue_state lock to be held!
* return value: TRUE on success, FALSE on error
******************************************************/
/******************************************************
 * gst_v4lsrc_queue_frame():
 *   queue a frame for capturing
 *   (ie. instruct the hardware to start capture)
 *   Requires queue_state lock to be held!
 * return value: TRUE on success, FALSE on error
 ******************************************************/
static gboolean
gst_v4lsrc_queue_frame (GstV4lSrc * v4lsrc, gint num)
{
  GST_LOG_OBJECT (v4lsrc, "queueing frame %d", num);

  if (v4lsrc->frame_queue_state[num] != QUEUE_STATE_READY_FOR_QUEUE)
    return FALSE;

  /* instruct the driver to prepare capture using buffer frame num */
  v4lsrc->mmap.frame = num;
  if (ioctl (GST_V4LELEMENT (v4lsrc)->video_fd,
          VIDIOCMCAPTURE, &(v4lsrc->mmap)) < 0) {
    GST_ELEMENT_ERROR (v4lsrc, RESOURCE, WRITE, (NULL),
        ("Error queueing a buffer (%d): %s", num, g_strerror (errno)));
    return FALSE;
  }

  v4lsrc->frame_queue_state[num] = QUEUE_STATE_QUEUED;
  v4lsrc->num_queued++;
  return TRUE;
}
/******************************************************
 * gst_v4lsrc_sync_frame():
 *   sync a frame and set the timestamp correctly
 *   Requires queue_state lock to be held
 * return value: TRUE on success, FALSE on error
 *****************************************************/
static gboolean
gst_v4lsrc_sync_frame (GstV4lSrc * v4lsrc, gint num)
{
  /* fix: the log messages used to misspell the ioctl as "VIOIOCSYNC" */
  GST_LOG_OBJECT (v4lsrc, "VIDIOCSYNC on frame %d", num);

  /* only frames that were actually queued in the driver can be synced on */
  if (v4lsrc->frame_queue_state[num] != QUEUE_STATE_QUEUED) {
    return FALSE;
  }

  /* VIDIOCSYNC blocks until the driver is done capturing this frame */
  while (ioctl (GST_V4LELEMENT (v4lsrc)->video_fd, VIDIOCSYNC, &num) < 0) {
    /* if the sync() got interrupted, we can retry */
    if (errno != EINTR) {
      v4lsrc->frame_queue_state[num] = QUEUE_STATE_ERROR;
      GST_ELEMENT_ERROR (v4lsrc, RESOURCE, SYNC, (NULL), GST_ERROR_SYSTEM);
      return FALSE;
    }
    GST_DEBUG_OBJECT (v4lsrc, "Sync got interrupted");
  }
  GST_LOG_OBJECT (v4lsrc, "VIDIOCSYNC on frame %d done", num);

  v4lsrc->frame_queue_state[num] = QUEUE_STATE_SYNCED;
  v4lsrc->num_queued--;

  return TRUE;
}
/******************************************************
 * gst_v4lsrc_set_capture():
 *   Record the capture parameters (size and VIDEO_PALETTE_* format) that
 *   subsequent mmap captures will use.  Nothing is sent to the driver here;
 *   the values are only stored in the element's video_mmap request.
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4lsrc_set_capture (GstV4lSrc * v4lsrc,
    gint width, gint height, gint palette)
{
  struct video_mmap *req = &v4lsrc->mmap;

  GST_DEBUG_OBJECT (v4lsrc,
      "capture properties set to %dx%d, palette %d", width, height, palette);

  req->width = width;
  req->height = height;
  req->format = palette;

  return TRUE;
}
/******************************************************
 * gst_v4lsrc_capture_init():
 *   initialize the capture system: query the driver's mmap layout, allocate
 *   the per-frame queue-state tracking, and map the capture buffers.
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4lsrc_capture_init (GstV4lSrc * v4lsrc)
{
  GST_DEBUG_OBJECT (v4lsrc, "initting capture subsystem");
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lsrc));
  GST_V4L_CHECK_NOT_ACTIVE (GST_V4LELEMENT (v4lsrc));

  /* request the mmap buffer info:
   * total size of mmap buffer, number of frames, offsets of frames */
  if (ioctl (GST_V4LELEMENT (v4lsrc)->video_fd, VIDIOCGMBUF,
          &(v4lsrc->mbuf)) < 0) {
    GST_ELEMENT_ERROR (v4lsrc, RESOURCE, READ, (NULL),
        ("Error getting buffer information: %s", g_strerror (errno)));
    return FALSE;
  }

  if (v4lsrc->mbuf.frames < MIN_BUFFERS_QUEUED) {
    GST_ELEMENT_ERROR (v4lsrc, RESOURCE, READ, (NULL),
        ("Not enough buffers. We got %d, we want at least %d",
            v4lsrc->mbuf.frames, MIN_BUFFERS_QUEUED));
    return FALSE;
  }

  /* fix: mbuf.size is the total size of the mmap area in bytes; the old
   * code divided by frames as well and thus logged the per-buffer size
   * while claiming "total size" */
  GST_INFO_OBJECT (v4lsrc, "Got %d buffers (\'%s\') with total size %d KB",
      v4lsrc->mbuf.frames, v4l_palette_name[v4lsrc->mmap.format],
      v4lsrc->mbuf.size / 1024);

  /* keep track of queued buffers */
  v4lsrc->frame_queue_state = (gint8 *)
      g_malloc (sizeof (gint8) * v4lsrc->mbuf.frames);

  /* lock for the frame_state */
  v4lsrc->mutex_queue_state = g_mutex_new ();
  v4lsrc->cond_queue_state = g_cond_new ();

  /* Map the buffers */
  GST_V4LELEMENT (v4lsrc)->buffer = mmap (NULL, v4lsrc->mbuf.size,
      PROT_READ | PROT_WRITE, MAP_SHARED, GST_V4LELEMENT (v4lsrc)->video_fd, 0);

  if (GST_V4LELEMENT (v4lsrc)->buffer == MAP_FAILED) {
    GST_ELEMENT_ERROR (v4lsrc, RESOURCE, OPEN_READ_WRITE, (NULL),
        ("Error mapping video buffers: %s", g_strerror (errno)));
    GST_V4LELEMENT (v4lsrc)->buffer = NULL;
    /* fix: don't leak the queue tracking state; when init fails the caller
     * will never reach _capture_deinit() */
    g_mutex_free (v4lsrc->mutex_queue_state);
    v4lsrc->mutex_queue_state = NULL;
    g_cond_free (v4lsrc->cond_queue_state);
    v4lsrc->cond_queue_state = NULL;
    g_free (v4lsrc->frame_queue_state);
    v4lsrc->frame_queue_state = NULL;
    return FALSE;
  }

  return TRUE;
}
/******************************************************
 * gst_v4lsrc_capture_start():
 *   start streaming capture: mark every mmap frame as queueable and hand
 *   them all to the driver with VIDIOCMCAPTURE.
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4lsrc_capture_start (GstV4lSrc * v4lsrc)
{
  gint idx;
  gboolean queued_ok = TRUE;

  GST_DEBUG_OBJECT (v4lsrc, "starting capture");
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lsrc));
  GST_V4L_CHECK_ACTIVE (GST_V4LELEMENT (v4lsrc));

  g_mutex_lock (v4lsrc->mutex_queue_state);

  /* reset the bookkeeping for a fresh capture run */
  v4lsrc->quit = FALSE;
  v4lsrc->num_queued = 0;
  v4lsrc->sync_frame = 0;
  v4lsrc->queue_frame = 0;

  /* set all buffers ready to queue, and queue captures to the device.
   * This starts streaming capture */
  for (idx = 0; queued_ok && idx < v4lsrc->mbuf.frames; idx++) {
    v4lsrc->frame_queue_state[idx] = QUEUE_STATE_READY_FOR_QUEUE;
    queued_ok = gst_v4lsrc_queue_frame (v4lsrc, idx);
  }

  if (!queued_ok) {
    /* release the lock before stopping, _capture_stop() takes it itself */
    g_mutex_unlock (v4lsrc->mutex_queue_state);
    gst_v4lsrc_capture_stop (v4lsrc);
    return FALSE;
  }

  v4lsrc->is_capturing = TRUE;
  g_mutex_unlock (v4lsrc->mutex_queue_state);

  return TRUE;
}
/******************************************************
 * gst_v4lsrc_grab_frame():
 *   capture one frame during streaming capture.
 *   Blocks until a queued frame is ready; on success *num holds the
 *   index of the grabbed (synced) mmap frame.
 * return value: TRUE on success, FALSE on error/shutdown
 ******************************************************/
gboolean
gst_v4lsrc_grab_frame (GstV4lSrc * v4lsrc, gint * num)
{
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lsrc));
  GST_V4L_CHECK_ACTIVE (GST_V4LELEMENT (v4lsrc));

  GST_LOG_OBJECT (v4lsrc, "grabbing frame");

  g_mutex_lock (v4lsrc->mutex_queue_state);

  /* do we have enough frames? keep re-queueing released frames until the
   * driver has at least MIN_BUFFERS_QUEUED outstanding */
  while (v4lsrc->num_queued < MIN_BUFFERS_QUEUED ||
      v4lsrc->frame_queue_state[v4lsrc->queue_frame] ==
      QUEUE_STATE_READY_FOR_QUEUE) {
    /* wait until downstream releases the next frame slot back to us;
     * _requeue_frame() broadcasts cond_queue_state, _capture_stop() sets
     * ->quit to abort this wait */
    while (v4lsrc->frame_queue_state[v4lsrc->queue_frame] !=
        QUEUE_STATE_READY_FOR_QUEUE && !v4lsrc->quit) {
      GST_DEBUG_OBJECT (v4lsrc,
          "Waiting for frames to become available (queued %d < minimum %d)",
          v4lsrc->num_queued, MIN_BUFFERS_QUEUED);
      g_cond_wait (v4lsrc->cond_queue_state, v4lsrc->mutex_queue_state);
    }
    /* shutdown requested while we were waiting */
    if (v4lsrc->quit) {
      g_mutex_unlock (v4lsrc->mutex_queue_state);
      return FALSE;
    }
    if (!gst_v4lsrc_queue_frame (v4lsrc, v4lsrc->queue_frame)) {
      g_mutex_unlock (v4lsrc->mutex_queue_state);
      return FALSE;
    }
    /* queue_frame cycles over the mmap frames in order */
    v4lsrc->queue_frame = (v4lsrc->queue_frame + 1) % v4lsrc->mbuf.frames;
  }

  /* syncing on the buffer grabs it */
  *num = v4lsrc->sync_frame;
  if (!gst_v4lsrc_sync_frame (v4lsrc, *num)) {
    g_mutex_unlock (v4lsrc->mutex_queue_state);
    return FALSE;
  }

  v4lsrc->sync_frame = (v4lsrc->sync_frame + 1) % v4lsrc->mbuf.frames;

  g_mutex_unlock (v4lsrc->mutex_queue_state);

  GST_LOG_OBJECT (v4lsrc, "grabbed frame %d", *num);

  return TRUE;
}
/******************************************************
 * gst_v4lsrc_get_buffer():
 *   get the address of the given frame number in the mmap'd buffer
 * return value: the buffer's address or NULL
 ******************************************************/
guint8 *
gst_v4lsrc_get_buffer (GstV4lSrc * v4lsrc, gint num)
{
  GstV4lElement *v4l = GST_V4LELEMENT (v4lsrc);

  /* the mmap area only exists while the device is open and active */
  if (!GST_V4L_IS_ACTIVE (v4l) || !GST_V4L_IS_OPEN (v4l))
    return NULL;

  /* reject out-of-range frame numbers */
  if (num < 0 || num >= v4lsrc->mbuf.frames)
    return NULL;

  return v4l->buffer + v4lsrc->mbuf.offsets[num];
}
/******************************************************
 * gst_v4lsrc_requeue_frame():
 *   re-queue a frame after we're done with the buffer: mark a SYNCED frame
 *   READY_FOR_QUEUE again and wake up a possibly waiting _grab_frame().
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4lsrc_requeue_frame (GstV4lSrc * v4lsrc, gint num)
{
  GST_LOG_OBJECT (v4lsrc, "requeueing frame %d", num);
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lsrc));
  GST_V4L_CHECK_ACTIVE (GST_V4LELEMENT (v4lsrc));

  /* mark frame as 'ready to requeue' */
  g_mutex_lock (v4lsrc->mutex_queue_state);

  if (v4lsrc->frame_queue_state[num] != QUEUE_STATE_SYNCED) {
    GST_ELEMENT_ERROR (v4lsrc, RESOURCE, TOO_LAZY, (NULL),
        ("Invalid state %d (expected %d), can't requeue",
            v4lsrc->frame_queue_state[num], QUEUE_STATE_SYNCED));
    /* fix: the error path used to return with mutex_queue_state still
     * locked, deadlocking the next caller that takes the lock */
    g_mutex_unlock (v4lsrc->mutex_queue_state);
    return FALSE;
  }

  v4lsrc->frame_queue_state[num] = QUEUE_STATE_READY_FOR_QUEUE;

  /* let an optional wait know */
  g_cond_broadcast (v4lsrc->cond_queue_state);

  g_mutex_unlock (v4lsrc->mutex_queue_state);

  return TRUE;
}
/******************************************************
 * gst_v4lsrc_capture_stop():
 *   stop streaming capture: abort any pending grab and drain the frames
 *   still queued in the driver.
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4lsrc_capture_stop (GstV4lSrc * v4lsrc)
{
  GST_DEBUG_OBJECT (v4lsrc, "stopping capture");
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lsrc));
  GST_V4L_CHECK_ACTIVE (GST_V4LELEMENT (v4lsrc));

  g_mutex_lock (v4lsrc->mutex_queue_state);
  v4lsrc->is_capturing = FALSE;

  /* make an optional pending wait stop */
  v4lsrc->quit = TRUE;
  g_cond_broadcast (v4lsrc->cond_queue_state);

  /* sync on remaining frames: walk the queued frames in driver order
   * until the next frame is no longer in the QUEUED state */
  while (v4lsrc->frame_queue_state[v4lsrc->sync_frame] == QUEUE_STATE_QUEUED) {
    gst_v4lsrc_sync_frame (v4lsrc, v4lsrc->sync_frame);
    v4lsrc->sync_frame = (v4lsrc->sync_frame + 1) % v4lsrc->mbuf.frames;
  }

  g_mutex_unlock (v4lsrc->mutex_queue_state);

  return TRUE;
}
/******************************************************
 * gst_v4lsrc_capture_deinit():
 *   deinitialize the capture system: free the queue-state tracking and
 *   unmap the capture buffers.  Counterpart of _capture_init().
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4lsrc_capture_deinit (GstV4lSrc * v4lsrc)
{
  GST_DEBUG_OBJECT (v4lsrc, "quitting capture subsystem");
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lsrc));
  GST_V4L_CHECK_ACTIVE (GST_V4LELEMENT (v4lsrc));

  /* free buffer tracker */
  g_mutex_free (v4lsrc->mutex_queue_state);
  v4lsrc->mutex_queue_state = NULL;
  g_cond_free (v4lsrc->cond_queue_state);
  v4lsrc->cond_queue_state = NULL;
  g_free (v4lsrc->frame_queue_state);
  v4lsrc->frame_queue_state = NULL;

  /* unmap the buffer */
  /* NOTE(review): on munmap failure we return FALSE with ->buffer still
   * set but the tracking state already freed — presumably callers treat
   * this as fatal; confirm before reuse */
  if (munmap (GST_V4LELEMENT (v4lsrc)->buffer, v4lsrc->mbuf.size) == -1) {
    GST_ELEMENT_ERROR (v4lsrc, RESOURCE, CLOSE, (NULL),
        ("error munmap'ing capture buffer: %s", g_strerror (errno)));
    return FALSE;
  }
  GST_V4LELEMENT (v4lsrc)->buffer = NULL;

  return TRUE;
}
/******************************************************
 * gst_v4lsrc_try_capture():
 *   try out a capture on the device
 *   This has to be done before initializing the
 *   actual capture system, to make sure we don't
 *   mess up anything. So we need to mini-mmap()
 *   a buffer here, queue and sync on one buffer,
 *   and unmap it.
 *   This is ugly, yes, I know - but it's a major
 *   design flaw of v4l1 that you don't know in
 *   advance which formats will be supported...
 *   This is better than "just assuming that it'll
 *   work"...
 * return value: TRUE on success, FALSE on error
 ******************************************************/
gboolean
gst_v4lsrc_try_capture (GstV4lSrc * v4lsrc, gint width, gint height,
    gint palette)
{
  /* so, we need a buffer and some more stuff */
  int frame = 0;
  guint8 *buffer;
  struct video_mbuf vmbuf;
  struct video_mmap vmmap;

  GST_DEBUG_OBJECT (v4lsrc, "try out %dx%d, palette format %d (%s)",
      width, height, palette, v4l_palette_name[palette]);
  GST_V4L_CHECK_OPEN (GST_V4LELEMENT (v4lsrc));
  GST_V4L_CHECK_NOT_ACTIVE (GST_V4LELEMENT (v4lsrc));

  /* let's start by requesting a buffer and mmap()'ing it */
  if (ioctl (GST_V4LELEMENT (v4lsrc)->video_fd, VIDIOCGMBUF, &vmbuf) < 0) {
    GST_ELEMENT_ERROR (v4lsrc, RESOURCE, READ, (NULL),
        ("Error getting buffer information: %s", g_strerror (errno)));
    return FALSE;
  }

  /* Map the buffers */
  buffer = mmap (NULL, vmbuf.size, PROT_READ | PROT_WRITE,
      MAP_SHARED, GST_V4LELEMENT (v4lsrc)->video_fd, 0);
  if (buffer == MAP_FAILED) {
    GST_ELEMENT_ERROR (v4lsrc, RESOURCE, OPEN_READ_WRITE, (NULL),
        ("Error mapping our try-out buffer: %s", g_strerror (errno)));
    return FALSE;
  }

  /* now that we have a buffer, let's try out our format */
  vmmap.width = width;
  vmmap.height = height;
  vmmap.format = palette;
  vmmap.frame = frame;
  if (ioctl (GST_V4LELEMENT (v4lsrc)->video_fd, VIDIOCMCAPTURE, &vmmap) < 0) {
    if (errno != EINVAL)        /* our format failed! */
      GST_ERROR_OBJECT (v4lsrc,
          "Error queueing our try-out buffer: %s", g_strerror (errno));
    goto mapped_error;
  }

  if (ioctl (GST_V4LELEMENT (v4lsrc)->video_fd, VIDIOCSYNC, &frame) < 0) {
    GST_ELEMENT_ERROR (v4lsrc, RESOURCE, SYNC, (NULL), GST_ERROR_SYSTEM);
    goto mapped_error;
  }

  munmap (buffer, vmbuf.size);

  /* if we got here, it worked! woohoo, the format is supported! */
  return TRUE;

  /* single cleanup point for all failures after the buffer was mapped,
   * instead of repeating munmap() on every error path */
mapped_error:
  munmap (buffer, vmbuf.size);
  return FALSE;
}
#ifndef GST_DISABLE_GST_DEBUG
/* Map a VIDEO_PALETTE_* code to its human-readable name (debug builds only).
 * No bounds checking: callers must pass a valid palette index. */
const char *
gst_v4lsrc_palette_name (int i)
{
  return v4l_palette_name[i];
}
#endif
/* Determine the capture framerate as a fraction.
 * First tries the webcam extension (framerate index encoded in bits 16-21
 * of the video_window flags); otherwise falls back to the TV norm
 * (NTSC -> 30000/1001, anything else -> 25/1).
 * Returns TRUE on success; fps_n/fps_d may each be NULL if not wanted. */
gboolean
gst_v4lsrc_get_fps (GstV4lSrc * v4lsrc, gint * fps_n, gint * fps_d)
{
  gint norm;
  gint fps_index;
  struct video_window *vwin = &GST_V4LELEMENT (v4lsrc)->vwin;

  /* check if we have vwin window properties giving a framerate,
   * as is done for webcams
   * See http://www.smcc.demon.nl/webcam/api.html
   * which is used for the Philips and qce-ga drivers */
  fps_index = (vwin->flags >> 16) & 0x3F;       /* 6 bit index for framerate */

  /* webcams have a non-zero fps_index */
  if (fps_index != 0) {
    /* index of 16 corresponds to 15 fps; i.e. fps = index * 15 / 16 */
    GST_DEBUG_OBJECT (v4lsrc, "device reports fps of %d/%d (%.4f)",
        fps_index * 15, 16, fps_index * 15.0 / 16);

    if (fps_n)
      *fps_n = fps_index * 15;
    if (fps_d)
      *fps_d = 16;

    return TRUE;
  }

  /* removed fps estimation code here */

  /* if that failed ... */

  if (!GST_V4L_IS_OPEN (GST_V4LELEMENT (v4lsrc)))
    return FALSE;

  if (!gst_v4l_get_chan_norm (GST_V4LELEMENT (v4lsrc), NULL, &norm))
    return FALSE;

  /* derive the framerate from the TV norm */
  if (norm == VIDEO_MODE_NTSC) {
    if (fps_n)
      *fps_n = 30000;
    if (fps_d)
      *fps_d = 1001;
  } else {
    if (fps_n)
      *fps_n = 25;
    if (fps_d)
      *fps_d = 1;
  }

  return TRUE;
}
/* get a list of possible framerates
 * this is only done for webcams;
 * other devices return NULL here.
 * this function takes a LONG time to execute.
 * Returns a newly-allocated GST_TYPE_LIST GValue of fractions (caller owns),
 * or NULL when the device doesn't use the webcam framerate extension.
 */
GValue *
gst_v4lsrc_get_fps_list (GstV4lSrc * v4lsrc)
{
  gint fps_index;
  struct video_window *vwin = &GST_V4LELEMENT (v4lsrc)->vwin;
  GstV4lElement *v4lelement = GST_V4LELEMENT (v4lsrc);

  /* check if we have vwin window properties giving a framerate,
   * as is done for webcams
   * See http://www.smcc.demon.nl/webcam/api.html
   * which is used for the Philips and qce-ga drivers */
  fps_index = (vwin->flags >> 16) & 0x3F;       /* 6 bit index for framerate */

  /* webcams have a non-zero fps_index */
  if (fps_index == 0) {
    GST_DEBUG_OBJECT (v4lsrc, "fps_index is 0, no webcam");
    return NULL;
  }
  GST_DEBUG_OBJECT (v4lsrc, "fps_index is %d, so webcam", fps_index);

  {
    int i;
    GValue *list = NULL;
    GValue value = { 0 };

    /* webcam detected, so try all framerates and return a list */
    list = g_new0 (GValue, 1);
    g_value_init (list, GST_TYPE_LIST);

    /* index of 16 corresponds to 15 fps */
    GST_DEBUG_OBJECT (v4lsrc, "device reports fps of %d/%d (%.4f)",
        fps_index * 15, 16, fps_index * 15.0 / 16);

    for (i = 0; i < 63; ++i) {
      /* set bits 16 to 21 to 0
       * fix: the old mask (0x3F00 - 1) == 0x3EFF did not match this intent:
       * it also cleared the unrelated flag bits 8, 14-15 and 22-31 */
      vwin->flags &= ~(0x3F << 16);
      /* set bits 16 to 21 to the index */
      vwin->flags |= i << 16;
      if (gst_v4l_set_window_properties (v4lelement)) {
        /* setting it succeeded. FIXME: get it and check. */
        g_value_init (&value, GST_TYPE_FRACTION);
        gst_value_set_fraction (&value, i * 15, 16);
        gst_value_list_append_value (list, &value);
        g_value_unset (&value);
      }
    }
    /* restore the original fps_index on the device */
    vwin->flags &= ~(0x3F << 16);
    vwin->flags |= fps_index << 16;
    gst_v4l_set_window_properties (v4lelement);
    return list;
  }
}
/* Private buffer metadata attached by gst_v4lsrc_buffer_new(): remembers
 * which mmap frame a buffer wraps and which element owns it, so the frame
 * can be requeued when the buffer is freed (see meta_v4lsrc_free()). */
typedef struct _GstMetaV4lSrc
{
  GstMeta meta;                 /* parent meta */
  GstV4lSrc *v4lsrc;            /* owning element, reffed while attached */
  gint num;                     /* mmap frame number wrapped by the buffer */
} GstMetaV4lSrc;
/* lookup / attach helpers for the private metadata */
#define GST_META_V4LSRC_GET(buf) ((GstMetaV4lSrc *)gst_buffer_get_meta(buf,gst_meta_v4lsrc_get_info()))
#define GST_META_V4LSRC_ADD(buf) ((GstMetaV4lSrc *)gst_buffer_add_meta(buf,gst_meta_v4lsrc_get_info(), NULL))
/* GstMetaFreeFunction for GstMetaV4lSrc: when the wrapping buffer is
 * destroyed, hand the mmap frame back to the capture queue (if the mmap
 * area still exists) and drop the ref on the element. */
static void
meta_v4lsrc_free (GstMetaV4lSrc * meta, GstBuffer * buffer)
{
  GstV4lSrc *v4lsrc;
  gint num;

  v4lsrc = meta->v4lsrc;
  num = meta->num;

  GST_LOG_OBJECT (v4lsrc, "freeing buffer %p for frame %d", buffer, num);

  /* only requeue if we still have an mmap buffer */
  if (GST_V4LELEMENT (v4lsrc)->buffer) {
    GST_LOG_OBJECT (v4lsrc, "requeueing frame %d", num);
    gst_v4lsrc_requeue_frame (v4lsrc, num);
  }
  /* balances the gst_object_ref() taken in gst_v4lsrc_buffer_new() */
  gst_object_unref (v4lsrc);
}
/* Lazily register and return the GstMetaInfo for GstMetaV4lSrc.
 * NOTE(review): the lazy init has no once/atomic guard, so it is not
 * thread-safe; presumably only reached from a single streaming thread —
 * confirm before calling from elsewhere. */
static const GstMetaInfo *
gst_meta_v4lsrc_get_info (void)
{
  static const GstMetaInfo *meta_v4lsrc_info = NULL;

  if (meta_v4lsrc_info == NULL) {
    /* no init/copy/transform needed: the meta only carries two fields,
     * set directly by gst_v4lsrc_buffer_new() */
    meta_v4lsrc_info = gst_meta_register ("GstMetaV4lSrc", "GstMetaV4lSrc",
        sizeof (GstMetaV4lSrc),
        (GstMetaInitFunction) NULL,
        (GstMetaFreeFunction) meta_v4lsrc_free,
        (GstMetaCopyFunction) NULL, (GstMetaTransformFunction) NULL);
  }
  return meta_v4lsrc_info;
}
/* Create a V4lSrc buffer from our mmap'd data area.
 * The buffer wraps the mmap memory read-only (no copy); private metadata
 * remembers the frame number so it is requeued when the buffer is freed.
 * Timestamp is taken from the element clock, adjusted for one frame of
 * latency.  Returns NULL if no framerate can be determined. */
GstBuffer *
gst_v4lsrc_buffer_new (GstV4lSrc * v4lsrc, gint num)
{
  GstMetaV4lSrc *meta;
  GstClockTime duration, timestamp, latency;
  GstBuffer *buf;
  GstClock *clock;
  gint fps_n, fps_d;

  GST_DEBUG_OBJECT (v4lsrc, "creating buffer for frame %d", num);

  if (!(gst_v4lsrc_get_fps (v4lsrc, &fps_n, &fps_d)))
    return NULL;

  buf = gst_buffer_new ();

  /* attach private metadata with the frame num and v4lsrc element */
  meta = GST_META_V4LSRC_ADD (buf);
  meta->num = num;
  /* reffed here, unreffed in meta_v4lsrc_free() */
  meta->v4lsrc = gst_object_ref (v4lsrc);

  /* wrap the mmap frame data without copying it */
  gst_buffer_take_memory (buf,
      gst_memory_new_wrapped (GST_MEMORY_FLAG_READONLY,
          gst_v4lsrc_get_buffer (v4lsrc, num), NULL, v4lsrc->buffer_size,
          0, v4lsrc->buffer_size));

  /* running frame counter becomes the buffer offset */
  GST_BUFFER_OFFSET (buf) = v4lsrc->offset++;
  GST_BUFFER_OFFSET_END (buf) = v4lsrc->offset;

  /* timestamps, LOCK to get clock and base time. */
  GST_OBJECT_LOCK (v4lsrc);
  if ((clock = GST_ELEMENT_CLOCK (v4lsrc))) {
    /* we have a clock, get base time and ref clock */
    timestamp = GST_ELEMENT_CAST (v4lsrc)->base_time;
    gst_object_ref (clock);
  } else {
    /* no clock, can't set timestamps */
    timestamp = GST_CLOCK_TIME_NONE;
  }
  GST_OBJECT_UNLOCK (v4lsrc);

  /* duration is the difference of the ideal timestamps of this frame and
   * the previous one, so rounding errors don't accumulate over time */
  duration =
      gst_util_uint64_scale_int (GST_SECOND, fps_d * v4lsrc->offset, fps_n) -
      gst_util_uint64_scale_int (GST_SECOND, fps_d * (v4lsrc->offset - 1),
      fps_n);

  latency = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);

  if (clock) {
    /* the time now is the time of the clock minus the base time */
    timestamp = gst_clock_get_time (clock) - timestamp;
    gst_object_unref (clock);

    /* adjust timestamp for frame latency (we assume we have a framerate) */
    if (timestamp > latency)
      timestamp -= latency;
    else
      timestamp = 0;
  }

  GST_BUFFER_TIMESTAMP (buf) = timestamp;
  GST_BUFFER_DURATION (buf) = duration;

  return buf;
}

View file

@ -1,58 +0,0 @@
/* GStreamer
 *
 * v4lsrc_calls.h: functions for V4L video source
 *
 * Copyright (C) 2001-2002 Ronald Bultje <rbultje@ronald.bitfreak.net>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
 */

#ifndef __V4L_SRC_CALLS_H__
#define __V4L_SRC_CALLS_H__

#include "gstv4lsrc.h"
#include "v4l_calls.h"

G_BEGIN_DECLS

/* frame grabbing/capture (palette = VIDEO_PALETTE_* - see videodev.h).
 * Typical call order: set_capture -> capture_init -> capture_start ->
 * (grab_frame / get_buffer / requeue_frame)* -> capture_stop ->
 * capture_deinit. */
gboolean gst_v4lsrc_set_capture (GstV4lSrc *v4lsrc, gint width, gint height, gint palette);
gboolean gst_v4lsrc_capture_init (GstV4lSrc *v4lsrc);
gboolean gst_v4lsrc_capture_start (GstV4lSrc *v4lsrc);
gboolean gst_v4lsrc_grab_frame (GstV4lSrc *v4lsrc, gint *num);
guint8 * gst_v4lsrc_get_buffer (GstV4lSrc *v4lsrc, gint num);
gboolean gst_v4lsrc_requeue_frame (GstV4lSrc *v4lsrc, gint num);
gboolean gst_v4lsrc_capture_stop (GstV4lSrc *v4lsrc);
gboolean gst_v4lsrc_capture_deinit (GstV4lSrc *v4lsrc);

/* framerate detection (webcam extension or TV norm fallback) */
gboolean gst_v4lsrc_get_fps (GstV4lSrc * v4lsrc, gint *fps_n, gint *fps_d);
GValue * gst_v4lsrc_get_fps_list (GstV4lSrc * v4lsrc);
GstBuffer *gst_v4lsrc_buffer_new (GstV4lSrc * v4lsrc, gint num);

/* "the ugliest hack ever, now available at your local mirror" */
gboolean gst_v4lsrc_try_capture (GstV4lSrc *v4lsrc, gint width, gint height, gint palette);

/* For debug purposes, share the palette names */
#ifndef GST_DISABLE_GST_DEBUG
const char *gst_v4lsrc_palette_name (int i);
#endif

G_END_DECLS

#endif /* __V4L_SRC_CALLS_H__ */

View file

@ -1,123 +0,0 @@
/* These are the MJPEG API extensions for the Video4Linux API,
   first introduced by the Iomega Buz driver by Rainer Johanni
   <rainer@johanni.de>

   NOTE: these are driver-private V4L1 ioctls (built on
   BASE_VIDIOCPRIVATE); they are only understood by zoran/buz-family
   MJPEG capture drivers, not by V4L in general.
*/

#ifndef __VIDEODEV_MJPEG_H__
#define __VIDEODEV_MJPEG_H__

/* This is identical with the mgavideo internal params struct,
   please tell me if you change this struct here ! <gz@lysator.liu.se) */
struct mjpeg_params
{
  /* The following parameters can only be queried */

  int major_version;            /* Major version number of driver */
  int minor_version;            /* Minor version number of driver */

  /* Main control parameters */

  int input;                    /* Input channel: 0 = Composite, 1 = S-VHS */
  int norm;                     /* Norm: VIDEO_MODE_PAL or VIDEO_MODE_NTSC */
  int decimation;               /* decimation of captured video,
                                   enlargement of video played back.
                                   Valid values are 1, 2, 4 or 0.
                                   0 is a special value where the user
                                   has full control over video scaling */

  /* The following parameters only have to be set if decimation==0,
     for other values of decimation they provide the data how the image is captured */

  int HorDcm;                   /* Horizontal decimation: 1, 2 or 4 */
  int VerDcm;                   /* Vertical decimation: 1 or 2 */
  int TmpDcm;                   /* Temporal decimation: 1 or 2,
                                   if TmpDcm==2 in capture every second frame is dropped,
                                   in playback every frame is played twice */
  int field_per_buff;           /* Number of fields per buffer: 1 or 2 */
  int img_x;                    /* start of image in x direction */
  int img_y;                    /* start of image in y direction */
  int img_width;                /* image width BEFORE decimation,
                                   must be a multiple of HorDcm*16 */
  int img_height;               /* image height BEFORE decimation,
                                   must be a multiple of VerDcm*8 */

  /* --- End of parameters for decimation==0 only --- */

  /* JPEG control parameters */

  int quality;                  /* Measure for quality of compressed images.
                                   Scales linearly with the size of the compressed images.
                                   Must be beetween 0 and 100, 100 is a compression
                                   ratio of 1:4 */

  int odd_even;                 /* Which field should come first ???
                                   This is more aptly named "top_first",
                                   i.e. (odd_even==1) --> top-field-first */

  int APPn;                     /* Number of APP segment to be written, must be 0..15 */
  int APP_len;                  /* Length of data in JPEG APPn segment */
  char APP_data[60];            /* Data in the JPEG APPn segment. */

  int COM_len;                  /* Length of data in JPEG COM segment */
  char COM_data[60];            /* Data in JPEG COM segment */

  unsigned long jpeg_markers;   /* Which markers should go into the JPEG output.
                                   Unless you exactly know what you do, leave them untouched.
                                   Inluding less markers will make the resulting code
                                   smaller, but there will be fewer applications
                                   which can read it.
                                   The presence of the APP and COM marker is
                                   influenced by APP0_len and COM_len ONLY! */
#define JPEG_MARKER_DHT (1<<3)  /* Define Huffman Tables */
#define JPEG_MARKER_DQT (1<<4)  /* Define Quantization Tables */
#define JPEG_MARKER_DRI (1<<5)  /* Define Restart Interval */
#define JPEG_MARKER_COM (1<<6)  /* Comment segment */
#define JPEG_MARKER_APP (1<<7)  /* App segment, driver will allways use APP0 */

  int VFIFO_FB;                 /* Flag for enabling Video Fifo Feedback.
                                   If this flag is turned on and JPEG decompressing
                                   is going to the screen, the decompress process
                                   is stopped every time the Video Fifo is full.
                                   This enables a smooth decompress to the screen
                                   but the video output signal will get scrambled */

  /* Misc */

  char reserved[312];           /* Makes 512 bytes for this structure */
};

struct mjpeg_requestbuffers
{
  unsigned long count;          /* Number of buffers for MJPEG grabbing */
  unsigned long size;           /* Size PER BUFFER in bytes */
};

struct mjpeg_sync
{
  unsigned long frame;          /* Frame (0 - n) for double buffer */
  unsigned long length;         /* number of code bytes in buffer (capture only) */
  unsigned long seq;            /* frame sequence number */
  struct timeval timestamp;     /* timestamp */
};

struct mjpeg_status
{
  int input;                    /* Input channel, has to be set prior to BUZIOC_G_STATUS */
  int signal;                   /* Returned: 1 if valid video signal detected */
  int norm;                     /* Returned: VIDEO_MODE_PAL or VIDEO_MODE_NTSC */
  int color;                    /* Returned: 1 if color signal detected */
};

/*
   Private IOCTL to set up for displaying MJPEG
 */
#define MJPIOC_G_PARAMS    _IOR ('v', BASE_VIDIOCPRIVATE+0, struct mjpeg_params)
#define MJPIOC_S_PARAMS    _IOWR('v', BASE_VIDIOCPRIVATE+1, struct mjpeg_params)
#define MJPIOC_REQBUFS     _IOWR('v', BASE_VIDIOCPRIVATE+2, struct mjpeg_requestbuffers)
#define MJPIOC_QBUF_CAPT   _IOW ('v', BASE_VIDIOCPRIVATE+3, int)
#define MJPIOC_QBUF_PLAY   _IOW ('v', BASE_VIDIOCPRIVATE+4, int)
#define MJPIOC_SYNC        _IOR ('v', BASE_VIDIOCPRIVATE+5, struct mjpeg_sync)
#define MJPIOC_G_STATUS    _IOWR('v', BASE_VIDIOCPRIVATE+6, struct mjpeg_status)

#endif /* __VIDEODEV_MJPEG_H__ */

View file

@ -8,8 +8,8 @@ if USE_GIO
GIO_SUBDIRS = gio GIO_SUBDIRS = gio
endif endif
SUBDIRS = app audio dynamic $(FT2_SUBDIRS) $(GIO_SUBDIRS) overlay playrec volume v4l encoding SUBDIRS = app audio dynamic $(FT2_SUBDIRS) $(GIO_SUBDIRS) overlay playrec volume encoding
DIST_SUBDIRS = app dynamic gio overlay seek snapshot playrec volume v4l encoding DIST_SUBDIRS = app dynamic gio overlay seek snapshot playrec volume encoding
include $(top_srcdir)/common/parallel-subdirs.mak include $(top_srcdir)/common/parallel-subdirs.mak

View file

@ -1 +0,0 @@
probe

View file

@ -1,10 +0,0 @@
# Build the v4l device-probing example; not installed (noinst).
noinst_PROGRAMS = probe

probe_SOURCES = probe.c
probe_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) \
	$(GST_BASE_CFLAGS) $(GST_CFLAGS)
# links against libgstinterfaces for the GstPropertyProbe interface
probe_LDFLAGS = \
	$(top_builddir)/gst-libs/gst/interfaces/libgstinterfaces-@GST_MAJORMINOR@.la \
	$(GST_PLUGINS_BASE_LIBS) \
	$(GST_BASE_LIBS) $(GST_LIBS)

View file

@ -1,85 +0,0 @@
/* GStreamer
* Copyright (C) 2009 Filippo Argiolas <filippo.argiolas@gmail.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#include <stdlib.h>
#include <gst/gst.h>
#include <gst/interfaces/propertyprobe.h>
/* Example: enumerate v4lsrc devices via the GstPropertyProbe interface and
 * print each device's node, name and flags. */
int
main (int argc, char *argv[])
{
  GstElement *src, *sink;
  GstElement *bin;
  GstPropertyProbe *probe = NULL;
  const GParamSpec *pspec = NULL;
  GValueArray *array = NULL;
  guint i;                      /* fix: unsigned to match array->n_values */
  gint ret;
  GValue *value;
  const gchar *device;
  gchar *name;
  guint flags;

  gst_init (&argc, &argv);

  bin = gst_pipeline_new ("pipeline");
  g_assert (bin);

  src = gst_element_factory_make ("v4lsrc", "v4l_source");
  g_assert (src);
  sink = gst_element_factory_make ("fakesink", "fake_sink");
  g_assert (sink);

  /* add objects to the main pipeline */
  gst_bin_add_many (GST_BIN (bin), src, sink, NULL);
  /* link the elements */
  gst_element_link_many (src, sink, NULL);

  /* probe devices */
  g_print ("Probing devices with propertyprobe...\n");
  probe = GST_PROPERTY_PROBE (src);
  pspec = gst_property_probe_get_property (probe, "device");
  array = gst_property_probe_probe_and_get_values (probe, pspec);

  if (!array) {
    g_print ("No device found\n");
    gst_object_unref (bin);
    exit (1);
  }

  for (i = 0; i < array->n_values; i++) {
    value = g_value_array_get_nth (array, i);
    device = g_value_get_string (value);
    g_print ("Device: %s\n", device);
    g_object_set_property (G_OBJECT (src), "device", value);
    /* bring the source to READY so it opens the device */
    gst_element_set_state (bin, GST_STATE_READY);
    ret = gst_element_get_state (bin, NULL, NULL, 10 * GST_SECOND);
    if (ret != GST_STATE_CHANGE_SUCCESS) {
      g_print ("Couldn't set STATE_READY\n");
      continue;
    }
    g_object_get (G_OBJECT (src), "device-name", &name, NULL);
    g_print ("Name: %s\n", name);
    g_free (name);
    g_object_get (G_OBJECT (src), "flags", &flags, NULL);
    g_print ("Flags: 0x%08X\n", flags);
    gst_element_set_state (bin, GST_STATE_NULL);
    g_print ("\n");
  }

  /* fix: the probe results and the pipeline used to be leaked */
  g_value_array_free (array);
  gst_object_unref (bin);

  exit (0);
}