
Original commit message from CVS:
Some final fixes for the v4lsrc elements.

* remove software sync thread (use GST_ELEMENT_THREAD_SUGGESTED instead)
* make all src elements threadsafe
* fix num_buffer argument setting in v4l2src (VIDIOC_S_PARM)
* re-add bufsize (RO) for v4lmjpegsrc
* fix the A/V sync calculation in all elements (spvf=GST_SECOND/fps, not GST_SECOND*fps)
* probably some more crap....

With all this, it actually works quite well. The TODO file describes the
next steps in order to make a full-featured video recorder based on these
elements and GStreamer (bottom). Making a simple recorder should be fairly
easy now, btw.
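
To make the spvf fix concrete: seconds-per-video-frame is GST_SECOND divided by the frame rate, so after N handled frames the stream should sit at N*spvf nanoseconds. A minimal sketch of the corrected drop/dup decision follows; the function and variable names are illustrative, not the elements' actual fields:

#include <gst/gst.h>

/* A/V sync decision: compare the ideal running time of the frames written
 * so far against the captured timestamp. spvf = GST_SECOND / fps,
 * *not* GST_SECOND * fps (the bug this commit fixes). */
static gint
av_sync_adjust (gdouble fps, guint64 handled, GstClockTime captured_time)
{
  gdouble spvf = (gdouble) GST_SECOND / fps;
  gdouble ahead = (gdouble) handled * spvf - (gdouble) captured_time;

  if (ahead > 1.5 * spvf)
    return -1;          /* written too many frames: drop one */
  if (ahead < -1.5 * spvf)
    return 1;           /* lagging behind: duplicate one */
  return 0;             /* within tolerance: write the frame once */
}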
Ronald S. Bultje 2003-05-21 06:33:18 +00:00
parent e5d05269ff
commit 6ba3abb3a5
7 changed files with 331 additions and 291 deletions
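
The bulk of the diff below replaces the per-element software-sync thread with a per-frame state machine guarded by a single mutex/condition pair (mutex_queue_state / cond_queue_state). A compact restatement of the lifecycle both v4lsrc and v4lmjpegsrc now share, taken from the enum and helper functions introduced further down:

/* Buffer lifecycle shared by v4lsrc and v4lmjpegsrc after this commit.
 * All transitions happen with mutex_queue_state held; waiters are woken
 * via g_cond_broadcast(cond_queue_state). QUEUE_STATE_ERROR marks a
 * failed sync.
 *
 *   READY_FOR_QUEUE --queue_frame()--> QUEUED --sync(_next)_frame()--> SYNCED
 *          ^                                                             |
 *          +--------------------- requeue_frame() ----------------------+
 */
enum {
  QUEUE_STATE_ERROR = -1,
  QUEUE_STATE_READY_FOR_QUEUE, /* free to be handed back to the driver */
  QUEUE_STATE_QUEUED,          /* queued (VIDIOCMCAPTURE / MJPIOC_QBUF_CAPT) */
  QUEUE_STATE_SYNCED,          /* synced on (VIDIOCSYNC / MJPIOC_SYNC), owned by the app */
};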


@ -1,9 +1,6 @@
TODO list (short term):
=======================
* fix clocking issues by using gst clock for v4lsrc and by using it
as a correction for v4lmjpegsrc/v4l2src (pause brokenness etc.)
* v4lsrc/v4lmjpegsrc/v4l2src: fix interlacing (not handled at all...)
* v4lsrc - add a property for capture formats?
TODO list (long term):
======================
@ -23,3 +20,23 @@ Linux/V4L2 API: /usr/include/linux/videodev2.h or
(kernel patches: http://bytesex.org/patches/)
BSD/Meteor API: /usr/include/machine/ioctl_meteor.h
mjpegtools : http://www.sourceforge.net/projects/mjpeg
Capturing:
==========
* sound is the master clock
* it's probably a good idea to create an audiosource element:
- autodetect alsa/oss
- first try alsa, then oss... they work the same internally
* same for videosource:
- autodetect v4l/v4l2 + mjpeg capabilities
- for this, just open device using v4l2element. On success:
+ use v4l2
- on failure:
+ use v4lelement and query for MJPEG capabilities
+ if that's available, combine caps of v4lmjpegsrc and v4lsrc
* both sources run in their own GstThread with a high priority
* an encoder element takes care of encoding + muxing. A toplevel element
(reverse of spider) is probably a good idea here. How? Don't know...
* format negotiation via filtered caps
* statistics via listening to the frame_{lost,inserted,deleted,captures}
signals and GST_PAD_QUERY_POSITION (gst_pad_query())
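
A hedged sketch of the statistics idea from the list above: connect to the frame signals on the video source and count them. The handler signature and the exact signal names are assumptions based on the TODO entry and on the SIGNAL_FRAME_DROP emissions visible in the diff; position reporting would additionally go through gst_pad_query() on the source pad.

#include <gst/gst.h>

/* assumed handler signature; the real signals may carry extra arguments */
static void
on_frame_event (GstElement *src, gpointer user_data)
{
  guint *counter = user_data;
  (*counter)++;
}

/* signal names follow the frame_{lost,inserted,...} naming from the TODO */
static void
hook_up_statistics (GstElement *videosrc, guint *lost, guint *inserted)
{
  g_signal_connect (G_OBJECT (videosrc), "frame_lost",
                    G_CALLBACK (on_frame_event), lost);
  g_signal_connect (G_OBJECT (videosrc), "frame_inserted",
                    G_CALLBACK (on_frame_event), inserted);
}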


@ -57,6 +57,7 @@ enum {
#endif
ARG_QUALITY,
ARG_NUMBUFS,
ARG_BUFSIZE,
ARG_USE_FIXED_FPS
};
@ -166,8 +167,11 @@ gst_v4lmjpegsrc_class_init (GstV4lMjpegSrcClass *klass)
G_MININT,G_MAXINT,0,G_PARAM_WRITABLE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_NUMBUFS,
g_param_spec_int("num_buffers","num_buffers","num_buffers",
g_param_spec_int("num_buffers","Num Buffers","Number of Buffers",
G_MININT,G_MAXINT,0,G_PARAM_READWRITE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_BUFSIZE,
g_param_spec_int("buffer_size", "Buffer Size", "Size of buffers",
0, G_MAXINT, 0, G_PARAM_READABLE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_USE_FIXED_FPS,
g_param_spec_boolean("use_fixed_fps", "Use Fixed FPS",
@ -209,6 +213,8 @@ gst_v4lmjpegsrc_class_init (GstV4lMjpegSrcClass *klass)
static void
gst_v4lmjpegsrc_init (GstV4lMjpegSrc *v4lmjpegsrc)
{
GST_FLAG_SET(GST_ELEMENT(v4lmjpegsrc), GST_ELEMENT_THREAD_SUGGESTED);
v4lmjpegsrc->srcpad = gst_pad_new_from_template (src_template, "src");
gst_element_add_pad(GST_ELEMENT(v4lmjpegsrc), v4lmjpegsrc->srcpad);
@ -387,7 +393,7 @@ gst_v4lmjpegsrc_srcconnect (GstPad *pad,
gst_caps_get_int(caps, "width", &w);
} else {
int max;
gst_caps_get_int_range(caps, "width", &w, &max);
gst_caps_get_int_range(caps, "width", &max, &w);
}
}
if (gst_caps_has_property(caps, "height")) {
@ -395,7 +401,7 @@ gst_v4lmjpegsrc_srcconnect (GstPad *pad,
gst_caps_get_int(caps, "height", &h);
} else {
int max;
gst_caps_get_int_range(caps, "height", &h, &max);
gst_caps_get_int_range(caps, "height", &max, &h);
}
}
@ -543,12 +549,12 @@ gst_v4lmjpegsrc_get (GstPad *pad)
* timeframe. This means that if time - begin_time = X sec,
* we want to have written X*fps frames. If we've written
* more - drop, if we've written less - dup... */
if (v4lmjpegsrc->handled * fps * GST_SECOND - time > 1.5 * fps * GST_SECOND) {
if (v4lmjpegsrc->handled * (GST_SECOND/fps) - time > 1.5 * (GST_SECOND/fps)) {
/* yo dude, we've got too many frames here! Drop! DROP! */
v4lmjpegsrc->need_writes--; /* -= (v4lmjpegsrc->handled - (time / fps)); */
g_signal_emit(G_OBJECT(v4lmjpegsrc),
gst_v4lmjpegsrc_signals[SIGNAL_FRAME_DROP], 0);
} else if (v4lmjpegsrc->handled * fps * GST_SECOND - time < - 1.5 * fps * GST_SECOND) {
} else if (v4lmjpegsrc->handled * (GST_SECOND/fps) - time < - 1.5 * (GST_SECOND/fps)) {
/* this means we're lagging far behind */
v4lmjpegsrc->need_writes++; /* += ((time / fps) - v4lmjpegsrc->handled); */
g_signal_emit(G_OBJECT(v4lmjpegsrc),
@ -687,6 +693,9 @@ gst_v4lmjpegsrc_get_property (GObject *object,
case ARG_NUMBUFS:
g_value_set_int(value, v4lmjpegsrc->breq.count);
break;
case ARG_BUFSIZE:
g_value_set_int(value, v4lmjpegsrc->breq.size);
break;
case ARG_USE_FIXED_FPS:
g_value_set_boolean(value, v4lmjpegsrc->use_fixed_fps);
break;
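
For completeness, a sketch of how an application might drive the properties touched in this file. The factory name "v4lmjpegsrc" is an assumption; the property names "num_buffers" (now read/write) and "buffer_size" (read-only) come from the hunks above, and buffer_size only reflects the driver's value (breq.size) once capture has been initialised.

#include <gst/gst.h>

static void
configure_mjpeg_source (void)
{
  GstElement *src;
  gint bufsize = 0;

  src = gst_element_factory_make ("v4lmjpegsrc", "source");

  /* request 64 capture buffers; the driver may clamp this */
  g_object_set (G_OBJECT (src), "num_buffers", 64, NULL);

  /* read back the per-buffer size the driver reported */
  g_object_get (G_OBJECT (src), "buffer_size", &bufsize, NULL);
  g_print ("driver reports %d bytes per buffer\n", bufsize);
}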


@ -55,6 +55,17 @@ struct _GstV4lMjpegSrc {
struct mjpeg_sync bsync;
struct mjpeg_requestbuffers breq;
/* num of queued frames and some GThread stuff
* to wait if there's not enough */
gint8 *frame_queue_state;
GMutex *mutex_queue_state;
GCond *cond_queue_state;
gint num_queued;
gint queue_frame;
/* True if we want to stop */
gboolean quit;
/* A/V sync... frame counter and internal cache */
gulong handled;
gint last_frame;


@ -178,6 +178,8 @@ gst_v4lsrc_class_init (GstV4lSrcClass *klass)
static void
gst_v4lsrc_init (GstV4lSrc *v4lsrc)
{
GST_FLAG_SET(GST_ELEMENT(v4lsrc), GST_ELEMENT_THREAD_SUGGESTED);
v4lsrc->srcpad = gst_pad_new_from_template (src_template, "src");
gst_element_add_pad(GST_ELEMENT(v4lsrc), v4lsrc->srcpad);
@ -421,16 +423,16 @@ gst_v4lsrc_srcconnect (GstPad *pad,
if (gst_caps_has_fixed_property(caps, "width")) {
gst_caps_get_int(caps, "width", &w);
} else {
int max;
gst_caps_get_int_range(caps, "width", &w, &max);
int min;
gst_caps_get_int_range(caps, "width", &min, &w);
}
}
if (gst_caps_has_property(caps, "height")) {
if (gst_caps_has_fixed_property(caps, "height")) {
gst_caps_get_int(caps, "height", &h);
} else {
int max;
gst_caps_get_int_range(caps, "height", &h, &max);
int min;
gst_caps_get_int_range(caps, "height", &min, &h);
}
}
@ -589,7 +591,7 @@ gst_v4lsrc_get (GstPad *pad)
return NULL;
v4lsrc->last_frame = num;
time = v4lsrc->timestamp_soft_sync[num] - v4lsrc->substract_time;
time = v4lsrc->timestamp_sync - v4lsrc->substract_time;
/* decide how often we're going to write the frame - set
* v4lsrc->need_writes to (that-1) and have_frame to TRUE
@ -601,12 +603,12 @@ gst_v4lsrc_get (GstPad *pad)
* timeframe. This means that if time - begin_time = X sec,
* we want to have written X*fps frames. If we've written
* more - drop, if we've written less - dup... */
if (v4lsrc->handled * fps * GST_SECOND - time > 1.5 * fps * GST_SECOND) {
if (v4lsrc->handled * (GST_SECOND/fps) - time > 1.5 * (GST_SECOND/fps)) {
/* yo dude, we've got too many frames here! Drop! DROP! */
v4lsrc->need_writes--; /* -= (v4lsrc->handled - (time / fps)); */
g_signal_emit(G_OBJECT(v4lsrc),
gst_v4lsrc_signals[SIGNAL_FRAME_DROP], 0);
} else if (v4lsrc->handled * fps * GST_SECOND - time < - 1.5 * fps * GST_SECOND) {
} else if (v4lsrc->handled * (GST_SECOND/fps) - time < - 1.5 * (GST_SECOND/fps)) {
/* this means we're lagging far behind */
v4lsrc->need_writes++; /* += ((time / fps) - v4lsrc->handled); */
g_signal_emit(G_OBJECT(v4lsrc),
@ -634,7 +636,7 @@ gst_v4lsrc_get (GstPad *pad)
if (v4lsrc->use_fixed_fps)
GST_BUFFER_TIMESTAMP(buf) = v4lsrc->handled * GST_SECOND / fps;
else /* calculate time based on our own clock */
GST_BUFFER_TIMESTAMP(buf) = v4lsrc->timestamp_soft_sync[num] - v4lsrc->substract_time;
GST_BUFFER_TIMESTAMP(buf) = v4lsrc->timestamp_sync - v4lsrc->substract_time;
v4lsrc->handled++;
g_signal_emit(G_OBJECT(v4lsrc),


@ -52,21 +52,18 @@ struct _GstV4lSrc {
/* capture/buffer info */
struct video_mmap mmap;
struct video_mbuf mbuf;
gint sync_frame;
gint8 *frame_queued;
guint buffer_size;
GstClockTime timestamp_sync;
/* a seperate GThread for the sync() thread (improves correctness of timestamps) */
gint8 *isready_soft_sync; /* 1 = ok, 0 = waiting, -1 = error */
GstClockTime *timestamp_soft_sync;
GThread * thread_soft_sync;
GMutex * mutex_soft_sync;
GCond ** cond_soft_sync;
/* num of queued frames and some GThread stuff
* to wait if there's not enough */
gint8 *frame_queue_state;
GMutex *mutex_queue_state;
GCond *cond_queue_state;
gint num_queued;
gint sync_frame, queue_frame;
/* num of queued frames and some GThread stuff to wait if there's not enough */
GMutex * mutex_queued_frames;
GCond * cond_queued_frames;
/* True if we want the soft sync thread to stop */
/* True if we want to stop */
gboolean quit;
/* A/V sync... frame counter and internal cache */
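
The new timestamp_sync field replaces the old per-frame timestamp_soft_sync array: a single timestamp is taken at sync time, either from the element's clock or from the wall clock. A condensed sketch of that choice, matching the gst_v4lsrc_sync_frame() code further below:

#include <gst/gst.h>

/* take the capture timestamp for the frame that was just synced on */
static GstClockTime
take_sync_timestamp (GstClock *clock)
{
  if (clock != NULL)
    return gst_clock_get_time (clock);
  else {
    GTimeVal now;

    g_get_current_time (&now);
    return GST_TIMEVAL_TO_TIME (now);
  }
}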


@ -35,6 +35,8 @@
#define MAP_FAILED ( (caddr_t) -1 )
#endif
#define MIN_BUFFERS_QUEUED 2
#define DEBUG(format, args...) \
GST_DEBUG_ELEMENT(GST_CAT_PLUGIN_INFO, \
GST_ELEMENT(v4lmjpegsrc), \
@ -43,6 +45,12 @@
char *input_name[] = { "Composite", "S-Video", "TV-Tuner", "Autodetect" };
enum {
QUEUE_STATE_ERROR = -1,
QUEUE_STATE_READY_FOR_QUEUE,
QUEUE_STATE_QUEUED,
QUEUE_STATE_SYNCED,
};
/******************************************************
* gst_v4lmjpegsrc_queue_frame():
@ -56,6 +64,10 @@ gst_v4lmjpegsrc_queue_frame (GstV4lMjpegSrc *v4lmjpegsrc,
{
DEBUG("queueing frame %d", num);
if (v4lmjpegsrc->frame_queue_state[num] != QUEUE_STATE_READY_FOR_QUEUE) {
return FALSE;
}
if (ioctl(GST_V4LELEMENT(v4lmjpegsrc)->video_fd, MJPIOC_QBUF_CAPT, &num) < 0)
{
gst_element_error(GST_ELEMENT(v4lmjpegsrc),
@ -64,6 +76,9 @@ gst_v4lmjpegsrc_queue_frame (GstV4lMjpegSrc *v4lmjpegsrc,
return FALSE;
}
v4lmjpegsrc->frame_queue_state[num] = QUEUE_STATE_QUEUED;
v4lmjpegsrc->num_queued++;
return TRUE;
}
@ -80,16 +95,26 @@ gst_v4lmjpegsrc_sync_next_frame (GstV4lMjpegSrc *v4lmjpegsrc,
{
DEBUG("syncing on next frame");
if (ioctl(GST_V4LELEMENT(v4lmjpegsrc)->video_fd, MJPIOC_SYNC, &(v4lmjpegsrc->bsync)) < 0)
{
gst_element_error(GST_ELEMENT(v4lmjpegsrc),
"Error syncing on a buffer (%ld): %s",
v4lmjpegsrc->bsync.frame, g_strerror(errno));
if (v4lmjpegsrc->num_queued <= 0) {
return FALSE;
}
while (ioctl(GST_V4LELEMENT(v4lmjpegsrc)->video_fd,
MJPIOC_SYNC, &(v4lmjpegsrc->bsync)) < 0) {
if (errno != EINTR) {
gst_element_error(GST_ELEMENT(v4lmjpegsrc),
"Error syncing on a buffer: %s",
g_strerror(errno));
return FALSE;
}
DEBUG("Sync got interrupted");
}
*num = v4lmjpegsrc->bsync.frame;
v4lmjpegsrc->frame_queue_state[*num] = QUEUE_STATE_SYNCED;
v4lmjpegsrc->num_queued--;
return TRUE;
}
@ -318,7 +343,8 @@ gboolean gst_v4lmjpegsrc_set_capture_m (GstV4lMjpegSrc *v4lmjpegsrc,
bparm.quality = quality;
bparm.norm = norm;
bparm.input = input;
bparm.APP_len = 0; /* no JPEG markers - TODO: this is definately not right for decimation==1 */
bparm.APP_len = 0; /* no JPEG markers - TODO: this is definately
* not right for decimation==1 */
if (width <= 0)
{
@ -415,7 +441,8 @@ gst_v4lmjpegsrc_capture_init (GstV4lMjpegSrc *v4lmjpegsrc)
GST_V4L_CHECK_NOT_ACTIVE(GST_V4LELEMENT(v4lmjpegsrc));
/* Request buffers */
if (ioctl(GST_V4LELEMENT(v4lmjpegsrc)->video_fd, MJPIOC_REQBUFS, &(v4lmjpegsrc->breq)) < 0)
if (ioctl(GST_V4LELEMENT(v4lmjpegsrc)->video_fd,
MJPIOC_REQBUFS, &(v4lmjpegsrc->breq)) < 0)
{
gst_element_error(GST_ELEMENT(v4lmjpegsrc),
"Error requesting video buffers: %s",
@ -423,17 +450,28 @@ gst_v4lmjpegsrc_capture_init (GstV4lMjpegSrc *v4lmjpegsrc)
return FALSE;
}
if (v4lmjpegsrc->breq.count < MIN_BUFFERS_QUEUED)
{
gst_element_error(GST_ELEMENT(v4lmjpegsrc),
"Too little buffers. We got %d, we want at least %d",
v4lmjpegsrc->breq.count, MIN_BUFFERS_QUEUED);
return FALSE;
}
gst_info("Got %ld buffers of size %ld KB\n",
v4lmjpegsrc->breq.count, v4lmjpegsrc->breq.size/1024);
v4lmjpegsrc->use_num_times = (gint *) malloc(sizeof(gint) * v4lmjpegsrc->breq.count);
if (!v4lmjpegsrc->use_num_times)
{
gst_element_error(GST_ELEMENT(v4lmjpegsrc),
"Error creating sync-use-time tracker: %s",
g_strerror(errno));
return FALSE;
}
/* keep track of queued buffers */
v4lmjpegsrc->frame_queue_state = (gint8 *)
g_malloc(sizeof(gint8) * v4lmjpegsrc->breq.count);
/* track how often to use each frame */
v4lmjpegsrc->use_num_times = (gint *)
g_malloc(sizeof(gint) * v4lmjpegsrc->breq.count);
/* lock for the frame_state */
v4lmjpegsrc->mutex_queue_state = g_mutex_new();
v4lmjpegsrc->cond_queue_state = g_cond_new();
/* Map the buffers */
GST_V4LELEMENT(v4lmjpegsrc)->buffer = mmap(0,
@ -467,10 +505,23 @@ gst_v4lmjpegsrc_capture_start (GstV4lMjpegSrc *v4lmjpegsrc)
GST_V4L_CHECK_OPEN(GST_V4LELEMENT(v4lmjpegsrc));
GST_V4L_CHECK_ACTIVE(GST_V4LELEMENT(v4lmjpegsrc));
/* queue'ing the buffers starts streaming capture */
for (n=0;n<v4lmjpegsrc->breq.count;n++)
if (!gst_v4lmjpegsrc_queue_frame(v4lmjpegsrc, n))
g_mutex_lock(v4lmjpegsrc->mutex_queue_state);
v4lmjpegsrc->quit = FALSE;
v4lmjpegsrc->num_queued = 0;
v4lmjpegsrc->queue_frame = 0;
/* set all buffers ready to queue , this starts streaming capture */
for (n=0;n<v4lmjpegsrc->breq.count;n++) {
v4lmjpegsrc->frame_queue_state[n] = QUEUE_STATE_READY_FOR_QUEUE;
if (!gst_v4lmjpegsrc_queue_frame(v4lmjpegsrc, n)) {
g_mutex_unlock(v4lmjpegsrc->mutex_queue_state);
gst_v4lmjpegsrc_capture_stop(v4lmjpegsrc);
return FALSE;
}
}
g_mutex_unlock(v4lmjpegsrc->mutex_queue_state);
return TRUE;
}
@ -491,12 +542,41 @@ gst_v4lmjpegsrc_grab_frame (GstV4lMjpegSrc *v4lmjpegsrc,
GST_V4L_CHECK_OPEN(GST_V4LELEMENT(v4lmjpegsrc));
GST_V4L_CHECK_ACTIVE(GST_V4LELEMENT(v4lmjpegsrc));
g_mutex_lock(v4lmjpegsrc->mutex_queue_state);
/* do we have enough frames? */
while (v4lmjpegsrc->num_queued < MIN_BUFFERS_QUEUED ||
v4lmjpegsrc->frame_queue_state[v4lmjpegsrc->queue_frame] ==
QUEUE_STATE_READY_FOR_QUEUE) {
while (v4lmjpegsrc->frame_queue_state[v4lmjpegsrc->queue_frame] !=
QUEUE_STATE_READY_FOR_QUEUE &&
!v4lmjpegsrc->quit) {
GST_DEBUG(GST_CAT_PLUGIN_INFO,
"Waiting for frames to become available (%d < %d)",
v4lmjpegsrc->num_queued, MIN_BUFFERS_QUEUED);
g_cond_wait(v4lmjpegsrc->cond_queue_state,
v4lmjpegsrc->mutex_queue_state);
}
if (v4lmjpegsrc->quit) {
g_mutex_unlock(v4lmjpegsrc->mutex_queue_state);
return TRUE; /* it won't get through anyway */
}
if (!gst_v4lmjpegsrc_queue_frame(v4lmjpegsrc, v4lmjpegsrc->queue_frame)) {
g_mutex_unlock(v4lmjpegsrc->mutex_queue_state);
return FALSE;
}
v4lmjpegsrc->queue_frame = (v4lmjpegsrc->queue_frame + 1) % v4lmjpegsrc->breq.count;
}
/* syncing on the buffer grabs it */
if (!gst_v4lmjpegsrc_sync_next_frame(v4lmjpegsrc, num))
if (!gst_v4lmjpegsrc_sync_next_frame(v4lmjpegsrc, num)) {
return FALSE;
}
*size = v4lmjpegsrc->bsync.length;
g_mutex_unlock(v4lmjpegsrc->mutex_queue_state);
return TRUE;
}
@ -538,8 +618,23 @@ gst_v4lmjpegsrc_requeue_frame (GstV4lMjpegSrc *v4lmjpegsrc,
GST_V4L_CHECK_OPEN(GST_V4LELEMENT(v4lmjpegsrc));
GST_V4L_CHECK_ACTIVE(GST_V4LELEMENT(v4lmjpegsrc));
if (!gst_v4lmjpegsrc_queue_frame(v4lmjpegsrc, num))
/* mark frame as 'ready to requeue' */
g_mutex_lock(v4lmjpegsrc->mutex_queue_state);
if (v4lmjpegsrc->frame_queue_state[num] != QUEUE_STATE_SYNCED) {
gst_element_error(GST_ELEMENT(v4lmjpegsrc),
"Invalid state %d (expected %d), can't requeue",
v4lmjpegsrc->frame_queue_state[num],
QUEUE_STATE_SYNCED);
return FALSE;
}
v4lmjpegsrc->frame_queue_state[num] = QUEUE_STATE_READY_FOR_QUEUE;
/* let an optional wait know */
g_cond_broadcast(v4lmjpegsrc->cond_queue_state);
g_mutex_unlock(v4lmjpegsrc->mutex_queue_state);
return TRUE;
}
@ -554,13 +649,24 @@ gst_v4lmjpegsrc_requeue_frame (GstV4lMjpegSrc *v4lmjpegsrc,
gboolean
gst_v4lmjpegsrc_capture_stop (GstV4lMjpegSrc *v4lmjpegsrc)
{
int n;
DEBUG("stopping capture");
GST_V4L_CHECK_OPEN(GST_V4LELEMENT(v4lmjpegsrc));
GST_V4L_CHECK_ACTIVE(GST_V4LELEMENT(v4lmjpegsrc));
/* unqueue the buffers */
if (!gst_v4lmjpegsrc_queue_frame(v4lmjpegsrc, -1))
return FALSE;
g_mutex_lock(v4lmjpegsrc->mutex_queue_state);
/* make an optional pending wait stop */
v4lmjpegsrc->quit = TRUE;
g_cond_broadcast(v4lmjpegsrc->cond_queue_state);
/* sync on remaining frames */
while (v4lmjpegsrc->num_queued > 0) {
gst_v4lmjpegsrc_sync_next_frame(v4lmjpegsrc, &n);
}
g_mutex_unlock(v4lmjpegsrc->mutex_queue_state);
return TRUE;
}
@ -583,7 +689,11 @@ gst_v4lmjpegsrc_capture_deinit (GstV4lMjpegSrc *v4lmjpegsrc)
munmap(GST_V4LELEMENT(v4lmjpegsrc)->buffer, v4lmjpegsrc->breq.size * v4lmjpegsrc->breq.count);
GST_V4LELEMENT(v4lmjpegsrc)->buffer = NULL;
free(v4lmjpegsrc->use_num_times);
/* free buffer tracker */
g_mutex_free(v4lmjpegsrc->mutex_queue_state);
g_cond_free(v4lmjpegsrc->cond_queue_state);
g_free(v4lmjpegsrc->frame_queue_state);
g_free(v4lmjpegsrc->use_num_times);
return TRUE;
}


@ -66,33 +66,34 @@ static const char *palette_name[] = {
"YUV-4:1:0 (planar)" /* VIDEO_PALETTE_YUV410P */
};
#define FRAME_QUEUE_READY -2
#define FRAME_ERROR -1
#define FRAME_DONE 0
/* FRAME_QUEUED is used in frame_queued array */
#define FRAME_QUEUED 1
/* FRAME_SOFTSYNCED is used in is_ready_soft array */
#define FRAME_SYNCED 1
enum {
QUEUE_STATE_ERROR = -1,
QUEUE_STATE_READY_FOR_QUEUE,
QUEUE_STATE_QUEUED,
QUEUE_STATE_SYNCED,
};
/******************************************************
* gst_v4lsrc_queue_frame():
* queue a frame for capturing
* Requires queue_state lock to be held!
* return value: TRUE on success, FALSE on error
******************************************************/
static gboolean
gst_v4lsrc_queue_frame (GstV4lSrc *v4lsrc,
gint num)
gint num)
{
DEBUG("queueing frame %d", num);
v4lsrc->mmap.frame = num;
if (v4lsrc->frame_queued[num] == FRAME_ERROR) {
return TRUE;
if (v4lsrc->frame_queue_state[num] != QUEUE_STATE_READY_FOR_QUEUE) {
return FALSE;
}
if (ioctl(GST_V4LELEMENT(v4lsrc)->video_fd, VIDIOCMCAPTURE, &(v4lsrc->mmap)) < 0)
v4lsrc->mmap.frame = num;
if (ioctl(GST_V4LELEMENT(v4lsrc)->video_fd,
VIDIOCMCAPTURE, &(v4lsrc->mmap)) < 0)
{
gst_element_error(GST_ELEMENT(v4lsrc),
"Error queueing a buffer (%d): %s",
@ -100,162 +101,53 @@ gst_v4lsrc_queue_frame (GstV4lSrc *v4lsrc,
return FALSE;
}
v4lsrc->frame_queued[num] = FRAME_QUEUED;
v4lsrc->frame_queue_state[num] = QUEUE_STATE_QUEUED;
v4lsrc->num_queued++;
return TRUE;
}
/******************************************************
* gst_v4lsrc_hard_sync_frame(GstV4lSrc *v4lsrc,gint num)
* sync a frame and set the timestamp correctly
* Requires queue_state lock to be held
*****************************************************/
static gboolean
gst_v4lsrc_hard_sync_frame(GstV4lSrc *v4lsrc,gint num) {
DEBUG("Hardware syncing frame %d",num);
while (ioctl(GST_V4LELEMENT(v4lsrc)->video_fd, VIDIOCSYNC, &num) < 0) {
/* if the sync() got interrupted, we can retry */
if (errno != EINTR) {
v4lsrc->isready_soft_sync[num] = FRAME_ERROR;
v4lsrc->frame_queued[num] = FRAME_ERROR;
gst_element_error(GST_ELEMENT(v4lsrc),
"Error syncing a buffer (%d): %s",
num, g_strerror(errno));
return FALSE;
}
DEBUG("Sync got interrupted");
}
v4lsrc->frame_queued[num] = FRAME_DONE;
g_mutex_lock(v4lsrc->mutex_soft_sync);
if (v4lsrc->clock) {
v4lsrc->timestamp_soft_sync[num] = gst_clock_get_time(v4lsrc->clock);
} else {
GTimeVal time;
g_get_current_time(&time);
v4lsrc->timestamp_soft_sync[num] = GST_TIMEVAL_TO_TIME(time);
}
v4lsrc->isready_soft_sync[num] = FRAME_SYNCED;
g_cond_broadcast(v4lsrc->cond_soft_sync[num]);
g_mutex_unlock(v4lsrc->mutex_soft_sync);
return TRUE;
}
/******************************************************
* gst_v4lsrc_soft_sync_cleanup()
* cleans up the v4lsrc structure after an error or
* exit request and exits the thread
******************************************************/
static void
gst_v4lsrc_soft_sync_cleanup(GstV4lSrc *v4lsrc) {
int n;
DEBUG("Software sync thread exiting");
/* sync all queued buffers */
for (n=0;n < v4lsrc->mbuf.frames; n++) {
if (v4lsrc->frame_queued[n] == FRAME_QUEUED) {
gst_v4lsrc_hard_sync_frame(v4lsrc,n);
g_mutex_lock(v4lsrc->mutex_soft_sync);
v4lsrc->isready_soft_sync[n] = FRAME_ERROR;
g_cond_broadcast(v4lsrc->cond_soft_sync[n]);
g_mutex_unlock(v4lsrc->mutex_soft_sync);
}
}
g_thread_exit(NULL);
}
/******************************************************
* gst_v4lsrc_soft_sync_thread()
* syncs on frames and signals the main thread
* purpose: actually get the correct frame timestamps
******************************************************/
static void *
gst_v4lsrc_soft_sync_thread (void *arg)
gst_v4lsrc_sync_frame (GstV4lSrc *v4lsrc, gint num)
{
GstV4lSrc *v4lsrc = GST_V4LSRC(arg);
gint frame = 0;
gint qframe = 0;
gint nqueued = 0;
DEBUG("Syncing on frame %d",num);
DEBUG("starting software sync thread");
for (;;) {
/* queue as many frames as we can */
while (v4lsrc->frame_queued[qframe] == FRAME_QUEUE_READY) {
if (v4lsrc->quit || !gst_v4lsrc_queue_frame(v4lsrc,qframe)) {
gst_v4lsrc_soft_sync_cleanup(v4lsrc);
}
qframe = (qframe + 1) % v4lsrc->mbuf.frames;
nqueued++;
}
if (nqueued < MIN_BUFFERS_QUEUED) {
/* not enough frames queued, wait for one to get ready and queue as much
* as we can again */
DEBUG("!enough buffers, waiting for frame %d",qframe);
g_mutex_lock(v4lsrc->mutex_queued_frames);
if (v4lsrc->quit) {
g_mutex_unlock(v4lsrc->mutex_queued_frames);
gst_v4lsrc_soft_sync_cleanup(v4lsrc);
}
if (!(v4lsrc->frame_queued[qframe] == FRAME_QUEUE_READY)) {
g_cond_wait(v4lsrc->cond_queued_frames,v4lsrc->mutex_queued_frames);
}
g_mutex_unlock(v4lsrc->mutex_queued_frames);
} else {
if (!gst_v4lsrc_hard_sync_frame(v4lsrc,frame))
gst_v4lsrc_soft_sync_cleanup(v4lsrc);
frame = (frame + 1) % v4lsrc->mbuf.frames;
nqueued--;
}
}
g_assert_not_reached();
}
/******************************************************
* gst_v4lsrc_sync_frame():
* sync on a frame for capturing
* return value: TRUE on success, FALSE on error
******************************************************/
static gboolean
gst_v4lsrc_sync_next_frame (GstV4lSrc *v4lsrc,
gint *num)
{
*num = v4lsrc->sync_frame;
DEBUG("syncing on next frame (%d)", *num);
/* "software sync()" on the frame */
g_mutex_lock(v4lsrc->mutex_soft_sync);
while (v4lsrc->isready_soft_sync[*num] == FRAME_DONE)
{
DEBUG("Waiting for frame %d to be synced on", *num);
g_cond_wait(v4lsrc->cond_soft_sync[*num], v4lsrc->mutex_soft_sync);
}
g_mutex_unlock(v4lsrc->mutex_soft_sync);
if (v4lsrc->isready_soft_sync[*num] != FRAME_SYNCED)
if (v4lsrc->frame_queue_state[num] != QUEUE_STATE_QUEUED) {
return FALSE;
v4lsrc->isready_soft_sync[*num] = FRAME_DONE;
}
v4lsrc->sync_frame = (v4lsrc->sync_frame + 1)%v4lsrc->mbuf.frames;
while (ioctl(GST_V4LELEMENT(v4lsrc)->video_fd, VIDIOCSYNC, &num) < 0) {
/* if the sync() got interrupted, we can retry */
if (errno != EINTR) {
v4lsrc->frame_queue_state[num] = QUEUE_STATE_ERROR;
gst_element_error(GST_ELEMENT(v4lsrc),
"Error syncing on a buffer (%d): %s",
num, g_strerror(errno));
return FALSE;
}
DEBUG("Sync got interrupted");
}
if (v4lsrc->clock) {
v4lsrc->timestamp_sync = gst_clock_get_time(v4lsrc->clock);
} else {
GTimeVal time;
g_get_current_time(&time);
v4lsrc->timestamp_sync = GST_TIMEVAL_TO_TIME(time);
}
v4lsrc->frame_queue_state[num] = QUEUE_STATE_SYNCED;
v4lsrc->num_queued--;
return TRUE;
}
/******************************************************
* gst_v4lsrc_set_capture():
* set capture parameters, palette = VIDEO_PALETTE_*
@ -291,8 +183,6 @@ gst_v4lsrc_set_capture (GstV4lSrc *v4lsrc,
gboolean
gst_v4lsrc_capture_init (GstV4lSrc *v4lsrc)
{
int n;
DEBUG("initting capture subsystem");
GST_V4L_CHECK_OPEN(GST_V4LELEMENT(v4lsrc));
GST_V4L_CHECK_NOT_ACTIVE(GST_V4LELEMENT(v4lsrc));
@ -319,55 +209,16 @@ gst_v4lsrc_capture_init (GstV4lSrc *v4lsrc)
v4lsrc->mbuf.size/(v4lsrc->mbuf.frames*1024));
/* keep track of queued buffers */
v4lsrc->frame_queued = (gint8 *) malloc(sizeof(gint8) * v4lsrc->mbuf.frames);
if (!v4lsrc->frame_queued)
{
gst_element_error(GST_ELEMENT(v4lsrc),
"Error creating buffer tracker: %s",
g_strerror(errno));
return FALSE;
}
v4lsrc->frame_queue_state = (gint8 *)
g_malloc(sizeof(gint8) * v4lsrc->mbuf.frames);
/* init the GThread stuff */
v4lsrc->mutex_soft_sync = g_mutex_new();
v4lsrc->isready_soft_sync = (gint8 *) malloc(sizeof(gint8) * v4lsrc->mbuf.frames);
if (!v4lsrc->isready_soft_sync)
{
gst_element_error(GST_ELEMENT(v4lsrc),
"Error creating software-sync buffer tracker: %s",
g_strerror(errno));
return FALSE;
}
v4lsrc->timestamp_soft_sync = (GstClockTime *)
malloc(sizeof(GstClockTime) * v4lsrc->mbuf.frames);
if (!v4lsrc->timestamp_soft_sync)
{
gst_element_error(GST_ELEMENT(v4lsrc),
"Error creating software-sync timestamp tracker: %s",
g_strerror(errno));
return FALSE;
}
v4lsrc->cond_soft_sync = (GCond **) malloc( sizeof(GCond *) * v4lsrc->mbuf.frames);
if (!v4lsrc->cond_soft_sync)
{
gst_element_error(GST_ELEMENT(v4lsrc),
"Error creating software-sync condition tracker: %s",
g_strerror(errno));
return FALSE;
}
for (n=0;n<v4lsrc->mbuf.frames;n++)
v4lsrc->cond_soft_sync[n] = g_cond_new();
v4lsrc->use_num_times = (gint *) malloc(sizeof(gint) * v4lsrc->mbuf.frames);
if (!v4lsrc->use_num_times)
{
gst_element_error(GST_ELEMENT(v4lsrc),
"Error creating sync-use-time tracker: %s",
g_strerror(errno));
return FALSE;
}
/* track how often to use each frame */
v4lsrc->use_num_times = (gint *)
g_malloc(sizeof(gint) * v4lsrc->mbuf.frames);
v4lsrc->mutex_queued_frames = g_mutex_new();
v4lsrc->cond_queued_frames = g_cond_new();
/* lock for the frame_state */
v4lsrc->mutex_queue_state = g_mutex_new();
v4lsrc->cond_queue_state = g_cond_new();
/* Map the buffers */
GST_V4LELEMENT(v4lsrc)->buffer = mmap(0, v4lsrc->mbuf.size,
@ -394,30 +245,30 @@ gst_v4lsrc_capture_init (GstV4lSrc *v4lsrc)
gboolean
gst_v4lsrc_capture_start (GstV4lSrc *v4lsrc)
{
GError *error = NULL;
int n;
DEBUG("starting capture");
GST_V4L_CHECK_OPEN(GST_V4LELEMENT(v4lsrc));
GST_V4L_CHECK_ACTIVE(GST_V4LELEMENT(v4lsrc));
g_mutex_lock(v4lsrc->mutex_queue_state);
v4lsrc->quit = FALSE;
v4lsrc->num_queued = 0;
v4lsrc->sync_frame = 0;
v4lsrc->queue_frame = 0;
/* set all buffers ready to queue , this starts streaming capture */
for (n=0;n<v4lsrc->mbuf.frames;n++) {
v4lsrc->isready_soft_sync[n] = FRAME_DONE;
v4lsrc->frame_queued[n] = FRAME_QUEUE_READY;
v4lsrc->frame_queue_state[n] = QUEUE_STATE_READY_FOR_QUEUE;
if (!gst_v4lsrc_queue_frame(v4lsrc, n)) {
g_mutex_unlock(v4lsrc->mutex_queue_state);
gst_v4lsrc_capture_stop(v4lsrc);
return FALSE;
}
}
v4lsrc->sync_frame = 0;
/* start the sync() thread (correct timestamps) */
v4lsrc->thread_soft_sync = g_thread_create(gst_v4lsrc_soft_sync_thread,
(void *) v4lsrc, TRUE, &error);
if (!v4lsrc->thread_soft_sync)
{
gst_element_error(GST_ELEMENT(v4lsrc),
"Failed to create software sync thread: %s",error->message);
return FALSE;
}
g_mutex_unlock(v4lsrc->mutex_queue_state);
return TRUE;
}
@ -436,9 +287,41 @@ gst_v4lsrc_grab_frame (GstV4lSrc *v4lsrc, gint *num)
GST_V4L_CHECK_OPEN(GST_V4LELEMENT(v4lsrc));
GST_V4L_CHECK_ACTIVE(GST_V4LELEMENT(v4lsrc));
g_mutex_lock(v4lsrc->mutex_queue_state);
/* do we have enough frames? */
while (v4lsrc->num_queued < MIN_BUFFERS_QUEUED ||
v4lsrc->frame_queue_state[v4lsrc->queue_frame] ==
QUEUE_STATE_READY_FOR_QUEUE) {
while (v4lsrc->frame_queue_state[v4lsrc->queue_frame] !=
QUEUE_STATE_READY_FOR_QUEUE &&
!v4lsrc->quit) {
GST_DEBUG(GST_CAT_PLUGIN_INFO,
"Waiting for frames to become available (%d < %d)",
v4lsrc->num_queued, MIN_BUFFERS_QUEUED);
g_cond_wait(v4lsrc->cond_queue_state,
v4lsrc->mutex_queue_state);
}
if (v4lsrc->quit) {
g_mutex_unlock(v4lsrc->mutex_queue_state);
return TRUE; /* it won't get through anyway */
}
if (!gst_v4lsrc_queue_frame(v4lsrc, v4lsrc->queue_frame)) {
g_mutex_unlock(v4lsrc->mutex_queue_state);
return FALSE;
}
v4lsrc->queue_frame = (v4lsrc->queue_frame + 1) % v4lsrc->mbuf.frames;
}
/* syncing on the buffer grabs it */
if (!gst_v4lsrc_sync_next_frame(v4lsrc, num))
*num = v4lsrc->sync_frame;
if (!gst_v4lsrc_sync_frame(v4lsrc, *num)) {
g_mutex_unlock(v4lsrc->mutex_queue_state);
return FALSE;
}
v4lsrc->sync_frame = (v4lsrc->sync_frame + 1) % v4lsrc->mbuf.frames;
g_mutex_unlock(v4lsrc->mutex_queue_state);
return TRUE;
}
@ -453,8 +336,6 @@ gst_v4lsrc_grab_frame (GstV4lSrc *v4lsrc, gint *num)
guint8 *
gst_v4lsrc_get_buffer (GstV4lSrc *v4lsrc, gint num)
{
/*DEBUG("gst_v4lsrc_get_buffer(), num = %d", num);*/
if (!GST_V4L_IS_ACTIVE(GST_V4LELEMENT(v4lsrc)) ||
!GST_V4L_IS_OPEN(GST_V4LELEMENT(v4lsrc)))
return NULL;
@ -480,12 +361,22 @@ gst_v4lsrc_requeue_frame (GstV4lSrc *v4lsrc, gint num)
GST_V4L_CHECK_ACTIVE(GST_V4LELEMENT(v4lsrc));
/* mark frame as 'ready to requeue' */
g_mutex_lock(v4lsrc->mutex_queued_frames);
g_mutex_lock(v4lsrc->mutex_queue_state);
v4lsrc->frame_queued[num] = FRAME_QUEUE_READY;
g_cond_broadcast(v4lsrc->cond_queued_frames);
if (v4lsrc->frame_queue_state[num] != QUEUE_STATE_SYNCED) {
gst_element_error(GST_ELEMENT(v4lsrc),
"Invalid state %d (expected %d), can't requeue",
v4lsrc->frame_queue_state[num],
QUEUE_STATE_SYNCED);
return FALSE;
}
g_mutex_unlock(v4lsrc->mutex_queued_frames);
v4lsrc->frame_queue_state[num] = QUEUE_STATE_READY_FOR_QUEUE;
/* let an optional wait know */
g_cond_broadcast(v4lsrc->cond_queue_state);
g_mutex_unlock(v4lsrc->mutex_queue_state);
return TRUE;
}
@ -504,14 +395,23 @@ gst_v4lsrc_capture_stop (GstV4lSrc *v4lsrc)
GST_V4L_CHECK_OPEN(GST_V4LELEMENT(v4lsrc));
GST_V4L_CHECK_ACTIVE(GST_V4LELEMENT(v4lsrc));
/* we actually need to sync on all queued buffers but
* not on the non-queued ones */
g_mutex_lock(v4lsrc->mutex_queued_frames);
v4lsrc->quit = TRUE;
g_cond_broadcast(v4lsrc->cond_queued_frames);
g_mutex_unlock(v4lsrc->mutex_queued_frames);
g_mutex_lock(v4lsrc->mutex_queue_state);
g_thread_join(v4lsrc->thread_soft_sync);
/* make an optional pending wait stop */
v4lsrc->quit = TRUE;
g_cond_broadcast(v4lsrc->cond_queue_state);
/* sync on remaining frames */
while (1) {
if (v4lsrc->frame_queue_state[v4lsrc->sync_frame] == QUEUE_STATE_QUEUED) {
gst_v4lsrc_sync_frame(v4lsrc, v4lsrc->sync_frame);
v4lsrc->sync_frame = (v4lsrc->sync_frame + 1) % v4lsrc->mbuf.frames;
} else {
break;
}
}
g_mutex_unlock(v4lsrc->mutex_queue_state);
return TRUE;
}
@ -526,21 +426,15 @@ gst_v4lsrc_capture_stop (GstV4lSrc *v4lsrc)
gboolean
gst_v4lsrc_capture_deinit (GstV4lSrc *v4lsrc)
{
int n;
DEBUG("quitting capture subsystem");
GST_V4L_CHECK_OPEN(GST_V4LELEMENT(v4lsrc));
GST_V4L_CHECK_ACTIVE(GST_V4LELEMENT(v4lsrc));
/* free buffer tracker */
g_mutex_free(v4lsrc->mutex_queued_frames);
for (n=0;n<v4lsrc->mbuf.frames;n++)
g_cond_free(v4lsrc->cond_soft_sync[n]);
free(v4lsrc->frame_queued);
free(v4lsrc->cond_soft_sync);
free(v4lsrc->isready_soft_sync);
free(v4lsrc->timestamp_soft_sync);
free(v4lsrc->use_num_times);
g_mutex_free(v4lsrc->mutex_queue_state);
g_cond_free(v4lsrc->cond_queue_state);
g_free(v4lsrc->frame_queue_state);
g_free(v4lsrc->use_num_times);
/* unmap the buffer */
munmap(GST_V4LELEMENT(v4lsrc)->buffer, v4lsrc->mbuf.size);