v4l2: Move configuration of framerate to _set_format

Move the configuration of the framerate to where we set the other format
parameters.
Remove the hack used to check whether the device is active.
Store the streamparm in the device info.
Use macros to access the current device configuration.
Remove duplicate fields from src and sink and use the device configuration
instead.
Wim Taymans 2011-07-13 11:19:28 +02:00
parent 100d46b5b9
commit 9f4cf3be58
8 changed files with 142 additions and 184 deletions
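
For context, the heart of this change is the V4L2 frame-rate handshake that moves into gst_v4l2_object_set_format() below. As a minimal, self-contained sketch of that handshake against the plain V4L2 ioctl interface (illustrative only, not code from this commit; the helper name and its return convention are invented here):

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* Sketch: request a capture frame rate of fps_n/fps_d on an open device fd.
 * V4L2 works with the frame *interval* (time per frame), so the rate is
 * stored with numerator and denominator swapped. */
static int
negotiate_framerate (int fd, unsigned fps_n, unsigned fps_d)
{
  struct v4l2_streamparm parm;

  memset (&parm, 0, sizeof (parm));
  parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  /* read the current frame interval */
  if (ioctl (fd, VIDIOC_G_PARM, &parm) < 0)
    return -1;

  /* some cheap USB cameras cannot change the frame rate at all */
  if (!(parm.parm.capture.capability & V4L2_CAP_TIMEPERFRAME))
    return 0;

  parm.parm.capture.timeperframe.numerator = fps_d;
  parm.parm.capture.timeperframe.denominator = fps_n;

  /* the driver may refuse or clamp the request; the effective interval is
   * returned in parm.parm.capture.timeperframe */
  if (ioctl (fd, VIDIOC_S_PARM, &parm) < 0)
    return -1;

  return 0;
}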


@@ -478,7 +478,7 @@ gst_v4l2_object_new (GstElement * element,
v4l2object->video_fd = -1;
v4l2object->poll = gst_poll_new (TRUE);
v4l2object->buffer = NULL;
v4l2object->active = FALSE;
v4l2object->videodev = g_strdup (default_device);
v4l2object->norms = NULL;
@@ -2058,12 +2058,18 @@ gst_v4l2_object_get_nearest_size (GstV4l2Object * v4l2object,
return TRUE;
}
/* Note about fraction simplification
* n1/d1 == n2/d2 is also written as n1 == ( n2 * d1 ) / d2
*/
#define fractions_are_equal(n1,d1,n2,d2) ((n1) == gst_util_uint64_scale_int((n2), (d1), (d2)))
gboolean
gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps)
{
gint fd = v4l2object->video_fd;
struct v4l2_format *format;
struct v4l2_streamparm *streamparm;
enum v4l2_field field;
guint32 pixelformat;
gint width;
@@ -2077,11 +2083,7 @@ gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps)
&fmtdesc, &width, &height, &interlaced, &fps_n, &fps_d, &size))
goto invalid_caps;
v4l2object->fps_n = fps_n;
v4l2object->fps_d = fps_d;
v4l2object->size = size;
v4l2object->width = width;
v4l2object->height = height;
pixelformat = fmtdesc->pixelformat;
@@ -2096,7 +2098,7 @@ gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps)
field = V4L2_FIELD_NONE;
}
GST_DEBUG_OBJECT (v4l2object->element, "Setting format to %dx%d, format "
GST_DEBUG_OBJECT (v4l2object->element, "Desired format %dx%d, format "
"%" GST_FOURCC_FORMAT, width, height, GST_FOURCC_ARGS (pixelformat));
GST_V4L2_CHECK_OPEN (v4l2object);
@@ -2105,7 +2107,7 @@ gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps)
/* Only unconditionally accept mpegts for sources */
if ((v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) &&
(pixelformat == GST_MAKE_FOURCC ('M', 'P', 'E', 'G')))
return TRUE;
goto done;
format = &v4l2object->format;
@@ -2116,28 +2118,16 @@ gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps)
goto get_fmt_failed;
GST_DEBUG_OBJECT (v4l2object->element, "Got format to %dx%d, format "
"%" GST_FOURCC_FORMAT " stride %d", format->fmt.pix.width,
"%" GST_FOURCC_FORMAT " bytesperline %d", format->fmt.pix.width,
format->fmt.pix.height, GST_FOURCC_ARGS (format->fmt.pix.pixelformat),
format->fmt.pix.bytesperline);
if (format->type == v4l2object->type &&
format->fmt.pix.width == width &&
format->fmt.pix.height == height &&
format->fmt.pix.pixelformat == pixelformat &&
format->fmt.pix.field == field) {
GST_DEBUG_OBJECT (v4l2object->element, "format was good");
/* Nothing to do. We want to succeed immediately
* here because setting the same format back
* can still fail due to EBUSY. By short-circuiting
* here, we allow pausing and re-playing pipelines
* with changed caps, as long as the changed caps
* do not change the webcam's format. Otherwise,
* any caps change would require us to go to NULL
* state to close the device and set format.
*/
return TRUE;
}
if (format->type != v4l2object->type ||
GST_V4L2_WIDTH (v4l2object) != width ||
GST_V4L2_HEIGHT (v4l2object) != height ||
GST_V4L2_PIXELFORMAT (v4l2object) != pixelformat ||
GST_V4L2_FIELD (v4l2object) != field) {
/* something different, set the format */
GST_DEBUG_OBJECT (v4l2object->element, "Setting format to %dx%d, format "
"%" GST_FOURCC_FORMAT, width, height, GST_FOURCC_ARGS (pixelformat));
@@ -2160,6 +2150,60 @@ gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps)
if (format->fmt.pix.pixelformat != pixelformat)
goto invalid_pixelformat;
}
/* Is there a reason we require the caller to always specify a framerate? */
GST_DEBUG_OBJECT (v4l2object->element, "Desired framerate: %u/%u", fps_n,
fps_d);
streamparm = &v4l2object->streamparm;
memset (streamparm, 0x00, sizeof (struct v4l2_streamparm));
streamparm->type = v4l2object->type;
if (v4l2_ioctl (fd, VIDIOC_G_PARM, streamparm) < 0)
goto get_parm_failed;
GST_DEBUG_OBJECT (v4l2object->element, "Got framerate: %u/%u",
streamparm->parm.capture.timeperframe.denominator,
streamparm->parm.capture.timeperframe.numerator);
/* Note: V4L2 provides the frame interval, we have the frame rate */
if (!fractions_are_equal (streamparm->parm.capture.timeperframe.numerator,
streamparm->parm.capture.timeperframe.denominator, fps_d, fps_n)) {
GST_LOG_OBJECT (v4l2object->element, "Setting framerate to %u/%u", fps_n,
fps_d);
/* We want to change the frame rate, so check whether we can. Some cheap USB
* cameras don't have the capability */
if ((streamparm->parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
GST_DEBUG_OBJECT (v4l2object->element,
"Not setting framerate (not supported)");
goto done;
}
/* Note: V4L2 wants the frame interval, we have the frame rate */
streamparm->parm.capture.timeperframe.numerator = fps_d;
streamparm->parm.capture.timeperframe.denominator = fps_n;
/* some cheap USB cams won't accept any change */
if (v4l2_ioctl (fd, VIDIOC_S_PARM, streamparm) < 0)
goto set_parm_failed;
/* get new values */
fps_d = streamparm->parm.capture.timeperframe.numerator;
fps_n = streamparm->parm.capture.timeperframe.denominator;
GST_INFO_OBJECT (v4l2object->element, "Set framerate to %u/%u", fps_n,
fps_d);
}
done:
/* if we have a framerate pre-calculate duration */
if (fps_n > 0 && fps_d > 0) {
v4l2object->duration = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
} else {
v4l2object->duration = GST_CLOCK_TIME_NONE;
}
return TRUE;
@@ -2207,6 +2251,20 @@ invalid_pixelformat:
GST_FOURCC_ARGS (format->fmt.pix.pixelformat)));
return FALSE;
}
get_parm_failed:
{
GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
(_("Could not get parameters on device '%s'"),
v4l2object->videodev), GST_ERROR_SYSTEM);
goto done;
}
set_parm_failed:
{
GST_ELEMENT_WARNING (v4l2object->element, RESOURCE, SETTINGS,
(_("Video input device did not accept new frame rate setting.")),
GST_ERROR_SYSTEM);
goto done;
}
}
gboolean
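
The fractions_are_equal macro introduced above avoids floating point by rewriting n1/d1 == n2/d2 as n1 == (n2 * d1) / d2 and evaluating the right-hand side with 64-bit scaling. A small worked example (a sketch, not part of the commit; the function name is invented):

#include <gst/gst.h>

static void
framerate_comparison_examples (void)
{
  /* 15/1 equals 30/2: 15 == (30 * 1) / 2 */
  g_assert (15 == gst_util_uint64_scale_int (30, 1, 2));

  /* 30000/1001 (NTSC) is not 30/1: (30 * 1001) / 1 is 30030, not 30000 */
  g_assert (30000 != gst_util_uint64_scale_int (30, 1001, 1));
}

gst_v4l2_object_set_format() passes fps_d/fps_n as the second fraction because timeperframe holds the frame interval, the reciprocal of the frame rate.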


@@ -75,9 +75,26 @@ typedef gboolean (*GstV4l2GetInOutFunction) (GstV4l2Object * v4l2object, gint
typedef gboolean (*GstV4l2SetInOutFunction) (GstV4l2Object * v4l2object, gint input);
typedef gboolean (*GstV4l2UpdateFpsFunction) (GstV4l2Object * v4l2object);
#define GST_V4L2_WIDTH(o) ((o)->format.fmt.pix.width)
#define GST_V4L2_HEIGHT(o) ((o)->format.fmt.pix.height)
#define GST_V4L2_PIXELFORMAT(o) ((o)->format.fmt.pix.pixelformat)
#define GST_V4L2_FIELD(o) ((o)->format.fmt.pix.field)
#define GST_V4L2_FPS_N(o) ((o)->streamparm.parm.capture.timeperframe.denominator)
#define GST_V4L2_FPS_D(o) ((o)->streamparm.parm.capture.timeperframe.numerator)
/* simple check whether the device is open */
#define GST_V4L2_IS_OPEN(o) ((o)->video_fd > 0)
/* check whether the device is 'active' */
#define GST_V4L2_IS_ACTIVE(o) ((o)->active)
#define GST_V4L2_SET_ACTIVE(o) ((o)->active = TRUE)
#define GST_V4L2_SET_INACTIVE(o) ((o)->active = FALSE)
struct _GstV4l2Object {
GstElement * element;
enum v4l2_buf_type type; /* V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_BUF_TYPE_VIDEO_OUTPUT */
/* the video device */
char *videodev;
@@ -86,16 +103,13 @@ struct _GstV4l2Object {
GstPoll * poll;
gboolean can_poll_device;
/* the video buffer (mmap()'ed) */
guint8 **buffer;
enum v4l2_buf_type type; /* V4L2_BUF_TYPE_VIDEO_CAPTURE, V4L2_BUF_TYPE_VIDEO_OUTPUT */
gboolean active;
/* the current format */
struct v4l2_format format;
guint width, height;
guint fps_n, fps_d;
struct v4l2_streamparm streamparm;
guint size;
GstClockTime duration;
/* the video device's capabilities */
struct v4l2_capability vcap;
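
With the streamparm stored on the object, the frame rate is read back through the macros above; GST_V4L2_FPS_N deliberately maps to the timeperframe denominator (and GST_V4L2_FPS_D to the numerator) because V4L2 stores the frame interval rather than the rate. A usage sketch (not from the commit, assuming the object header above is included; the helper name is invented):

/* Per-frame duration derived from the stored streamparm, mirroring the
 * calculation done after the done: label in gst_v4l2_object_set_format(). */
static GstClockTime
v4l2_frame_duration (GstV4l2Object * v4l2object)
{
  guint fps_n = GST_V4L2_FPS_N (v4l2object);  /* timeperframe.denominator */
  guint fps_d = GST_V4L2_FPS_D (v4l2object);  /* timeperframe.numerator */

  if (fps_n == 0 || fps_d == 0)
    return GST_CLOCK_TIME_NONE;

  return gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
}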


@@ -677,8 +677,8 @@ gst_v4l2sink_set_caps (GstBaseSink * bsink, GstCaps * caps)
g_object_notify (G_OBJECT (v4l2sink), "queue-size");
}
v4l2sink->video_width = v4l2sink->v4l2object->width;
v4l2sink->video_height = v4l2sink->v4l2object->height;
v4l2sink->video_width = GST_V4L2_WIDTH (v4l2sink->v4l2object);
v4l2sink->video_height = GST_V4L2_HEIGHT (v4l2sink->v4l2object);
/* TODO: videosink width/height should be scaled according to
* pixel-aspect-ratio


@@ -224,12 +224,8 @@ gst_v4l2src_init (GstV4l2Src * v4l2src)
gst_base_src_set_format (GST_BASE_SRC (v4l2src), GST_FORMAT_TIME);
gst_base_src_set_live (GST_BASE_SRC (v4l2src), TRUE);
v4l2src->fps_d = 0;
v4l2src->fps_n = 0;
}
static void
gst_v4l2src_dispose (GObject * object)
{
@@ -531,7 +527,7 @@ gst_v4l2src_set_caps (GstBaseSrc * src, GstCaps * caps)
return FALSE;
}
if (!gst_v4l2src_set_capture (v4l2src, caps))
if (!gst_v4l2_object_set_format (v4l2src->v4l2object, caps))
/* error already posted */
return FALSE;
@@ -565,6 +561,7 @@ gst_v4l2src_query (GstBaseSrc * bsrc, GstQuery * query)
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_LATENCY:{
GstClockTime min_latency, max_latency;
guint32 fps_n, fps_d;
/* device must be open */
if (!GST_V4L2_IS_OPEN (src->v4l2object)) {
@@ -573,16 +570,18 @@ gst_v4l2src_query (GstBaseSrc * bsrc, GstQuery * query)
goto done;
}
fps_n = GST_V4L2_FPS_N (src->v4l2object);
fps_d = GST_V4L2_FPS_D (src->v4l2object);
/* we must have a framerate */
if (src->fps_n <= 0 || src->fps_d <= 0) {
if (fps_n <= 0 || fps_d <= 0) {
GST_WARNING_OBJECT (src,
"Can't give latency since framerate isn't fixated !");
goto done;
}
/* min latency is the time to capture one frame */
min_latency =
gst_util_uint64_scale_int (GST_SECOND, src->fps_d, src->fps_n);
min_latency = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
/* max latency is total duration of the frame buffer */
max_latency = src->num_buffers * min_latency;
@@ -653,18 +652,12 @@ gst_v4l2src_stop (GstBaseSrc * src)
{
GstV4l2Src *v4l2src = GST_V4L2SRC (src);
if (GST_V4L2_IS_ACTIVE (v4l2src->v4l2object)
&& !gst_v4l2src_capture_stop (v4l2src))
if (GST_V4L2_IS_ACTIVE (v4l2src->v4l2object)) {
if (!gst_v4l2src_capture_stop (v4l2src))
return FALSE;
if (v4l2src->v4l2object->buffer != NULL) {
if (!gst_v4l2src_capture_deinit (v4l2src))
return FALSE;
}
v4l2src->fps_d = 0;
v4l2src->fps_n = 0;
return TRUE;
}
@@ -854,7 +847,7 @@ gst_v4l2src_create (GstPushSrc * src, GstBuffer ** buf)
/* set buffer metadata */
if (G_LIKELY (ret == GST_FLOW_OK && *buf)) {
GstClock *clock;
GstClockTime timestamp;
GstClockTime timestamp, duration;
GST_BUFFER_OFFSET (*buf) = v4l2src->offset++;
GST_BUFFER_OFFSET_END (*buf) = v4l2src->offset;
@@ -872,23 +865,25 @@ gst_v4l2src_create (GstPushSrc * src, GstBuffer ** buf)
}
GST_OBJECT_UNLOCK (v4l2src);
duration = v4l2src->v4l2object->duration;
if (G_LIKELY (clock)) {
/* the time now is the time of the clock minus the base time */
timestamp = gst_clock_get_time (clock) - timestamp;
gst_object_unref (clock);
/* if we have a framerate adjust timestamp for frame latency */
if (GST_CLOCK_TIME_IS_VALID (v4l2src->duration)) {
if (timestamp > v4l2src->duration)
timestamp -= v4l2src->duration;
if (GST_CLOCK_TIME_IS_VALID (duration)) {
if (timestamp > duration)
timestamp -= duration;
else
timestamp = 0;
}
}
/* activate settings for next frame */
if (GST_CLOCK_TIME_IS_VALID (v4l2src->duration)) {
v4l2src->ctrl_time += v4l2src->duration;
if (GST_CLOCK_TIME_IS_VALID (duration)) {
v4l2src->ctrl_time += duration;
} else {
/* this is not very good (as it should be the next timestamp),
* still good enough for linear fades (as long as it is not -1)
@@ -901,7 +896,7 @@ gst_v4l2src_create (GstPushSrc * src, GstBuffer ** buf)
/* FIXME: use the timestamp from the buffer itself! */
GST_BUFFER_TIMESTAMP (*buf) = timestamp;
GST_BUFFER_DURATION (*buf) = v4l2src->duration;
GST_BUFFER_DURATION (*buf) = duration;
}
return ret;
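
The latency query above boils down to: the minimum latency is the duration of one frame (fps_d/fps_n seconds) and the maximum latency is that duration times the number of driver buffers. A standalone sketch of answering GST_QUERY_LATENCY with those values (illustrative, not from the commit; the function name and the num_buffers parameter are invented):

#include <gst/gst.h>

static gboolean
answer_capture_latency (GstQuery * query, guint fps_n, guint fps_d,
    guint num_buffers)
{
  GstClockTime min_latency, max_latency;

  /* without a fixated framerate the latency is unknown */
  if (fps_n == 0 || fps_d == 0)
    return FALSE;

  /* at least one full frame must be captured before it can be pushed */
  min_latency = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);

  /* at worst every queued driver buffer holds a captured frame */
  max_latency = num_buffers * min_latency;

  gst_query_set_latency (query, TRUE, min_latency, max_latency);
  return TRUE;
}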


@@ -80,9 +80,6 @@ struct _GstV4l2Src
guint64 offset;
gint fps_d, fps_n; /* framerate if device is open */
GstClockTime duration; /* duration of one frame */
GstClockTime ctrl_time;
GstV4l2SrcGetFunc get_frame;


@@ -40,14 +40,6 @@
# define v4l2_munmap munmap
#endif
/* simple check whether the device is open */
#define GST_V4L2_IS_OPEN(v4l2object) \
(v4l2object->video_fd > 0)
/* check whether the device is 'active' */
#define GST_V4L2_IS_ACTIVE(v4l2object) \
(v4l2object->buffer != NULL)
#define GST_V4L2_IS_OVERLAY(v4l2object) \
(v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_OVERLAY)
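
The GST_V4L2_IS_ACTIVE macro removed here keyed off the mmap buffer pointer, which v4l2src had to fake with GINT_TO_POINTER (-1) (the "lalala" hack removed a few hunks below); the object now carries an explicit active flag driven by the GST_V4L2_SET_ACTIVE / GST_V4L2_SET_INACTIVE macros added earlier. A short sketch of the intended pattern (the exact call sites are an assumption, not shown in this commit's hunks; the helper name is invented):

/* Drive the explicit flag instead of abusing the buffer pointer. */
static gboolean
stop_capture_if_active (GstV4l2Src * v4l2src)
{
  if (GST_V4L2_IS_ACTIVE (v4l2src->v4l2object)) {
    if (!gst_v4l2src_capture_stop (v4l2src))
      return FALSE;
    GST_V4L2_SET_INACTIVE (v4l2src->v4l2object);
  }
  return TRUE;
}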


@@ -50,16 +50,6 @@
#define GST_CAT_DEFAULT v4l2src_debug
GST_DEBUG_CATEGORY_EXTERN (GST_CAT_PERFORMANCE);
/* lalala... */
#define GST_V4L2_SET_ACTIVE(element) (element)->buffer = GINT_TO_POINTER (-1)
#define GST_V4L2_SET_INACTIVE(element) (element)->buffer = NULL
/* On some systems MAP_FAILED seems to be missing */
#ifndef MAP_FAILED
#define MAP_FAILED ((caddr_t) -1)
#endif
/* Local functions */
static gboolean
@@ -202,90 +192,6 @@ too_many_trials:
}
}
/* Note about fraction simplification
* n1/d1 == n2/d2 is also written as n1 == ( n2 * d1 ) / d2
*/
#define fractions_are_equal(n1,d1,n2,d2) ((n1) == gst_util_uint64_scale_int((n2), (d1), (d2)))
/******************************************************
* gst_v4l2src_set_capture():
* set capture parameters
* return value: TRUE on success, FALSE on error
******************************************************/
gboolean
gst_v4l2src_set_capture (GstV4l2Src * v4l2src, GstCaps * caps)
{
gint fd = v4l2src->v4l2object->video_fd;
struct v4l2_streamparm stream;
guint fps_n, fps_d;
if (!gst_v4l2_object_set_format (v4l2src->v4l2object, caps))
/* error already reported */
return FALSE;
fps_n = v4l2src->v4l2object->fps_n;
fps_d = v4l2src->v4l2object->fps_d;
/* Is there a reason we require the caller to always specify a framerate? */
GST_DEBUG_OBJECT (v4l2src, "Desired framerate: %u/%u", fps_n, fps_d);
memset (&stream, 0x00, sizeof (struct v4l2_streamparm));
stream.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (v4l2_ioctl (fd, VIDIOC_G_PARM, &stream) < 0) {
GST_ELEMENT_WARNING (v4l2src, RESOURCE, SETTINGS,
(_("Could not get parameters on device '%s'"),
v4l2src->v4l2object->videodev), GST_ERROR_SYSTEM);
goto done;
}
/* Note: V4L2 provides the frame interval, we have the frame rate */
if (fractions_are_equal (stream.parm.capture.timeperframe.numerator,
stream.parm.capture.timeperframe.denominator, fps_d, fps_n)) {
GST_DEBUG_OBJECT (v4l2src, "Desired framerate already set");
goto already_set;
}
/* We want to change the frame rate, so check whether we can. Some cheap USB
* cameras don't have the capability */
if ((stream.parm.capture.capability & V4L2_CAP_TIMEPERFRAME) == 0) {
GST_DEBUG_OBJECT (v4l2src, "Not setting framerate (not supported)");
goto done;
}
GST_LOG_OBJECT (v4l2src, "Setting framerate to %u/%u", fps_n, fps_d);
/* Note: V4L2 wants the frame interval, we have the frame rate */
stream.parm.capture.timeperframe.numerator = fps_d;
stream.parm.capture.timeperframe.denominator = fps_n;
/* some cheap USB cams won't accept any change */
if (v4l2_ioctl (fd, VIDIOC_S_PARM, &stream) < 0) {
GST_ELEMENT_WARNING (v4l2src, RESOURCE, SETTINGS,
(_("Video input device did not accept new frame rate setting.")),
GST_ERROR_SYSTEM);
goto done;
}
already_set:
v4l2src->fps_n = fps_n;
v4l2src->fps_d = fps_d;
/* if we have a framerate pre-calculate duration */
if (fps_n > 0 && fps_d > 0) {
v4l2src->duration = gst_util_uint64_scale_int (GST_SECOND, fps_d, fps_n);
} else {
v4l2src->duration = GST_CLOCK_TIME_NONE;
}
GST_INFO_OBJECT (v4l2src,
"Set framerate to %u/%u and duration to %" GST_TIME_FORMAT, fps_n, fps_d,
GST_TIME_ARGS (v4l2src->duration));
done:
return TRUE;
}
/******************************************************
* gst_v4l2src_capture_init():
* initialize the capture system
@@ -386,12 +292,10 @@ gst_v4l2src_capture_stop (GstV4l2Src * v4l2src)
{
GST_DEBUG_OBJECT (v4l2src, "stopping capturing");
if (!GST_V4L2_IS_OPEN (v4l2src->v4l2object)) {
if (!GST_V4L2_IS_OPEN (v4l2src->v4l2object))
goto done;
}
if (!GST_V4L2_IS_ACTIVE (v4l2src->v4l2object)) {
if (!GST_V4L2_IS_ACTIVE (v4l2src->v4l2object))
goto done;
}
if (v4l2src->use_mmap) {
/* we actually need to sync on all queued buffers but not


@@ -27,8 +27,6 @@
#include "gstv4l2src.h"
#include "v4l2_calls.h"
gboolean gst_v4l2src_set_capture (GstV4l2Src * v4l2src, GstCaps *caps);
gboolean gst_v4l2src_capture_init (GstV4l2Src * v4l2src);
gboolean gst_v4l2src_capture_start (GstV4l2Src * v4l2src);