v4l2: set min_latency for output device according to required minimum number of buffers

Since we can query the minimum number of buffers an output device
needs in order to work, use it to set min_latency, which determines
how many buffers will be queued.

https://bugzilla.gnome.org/show_bug.cgi?id=736072
Author:     Aurélien Zanelli, 2014-09-04 18:35:46 +02:00
Committer:  Nicolas Dufresne
Parent:     d9a7954dc9
Commit:     3afec4dd01
3 changed files with 12 additions and 4 deletions
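
As the commit message notes, min_latency is a buffer count: each buffer
the device holds back adds one frame of latency. A quick illustration of
that relationship (the 4-buffer minimum and the 25 fps frame rate are
assumed values for illustration, not taken from this patch):

#include <stdio.h>

int main (void)
{
  /* e.g. the minimum reported by the driver through VIDIOC_G_CTRL */
  unsigned min_latency_buffers = 4;
  /* assumed 25 fps stream */
  double frame_duration_ms = 1000.0 / 25.0;

  /* each buffer held by the device delays output by one frame */
  printf ("minimum latency: %.0f ms\n",
      min_latency_buffers * frame_duration_ms);   /* prints 160 ms */
  return 0;
}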

sys/v4l2/gstv4l2bufferpool.c

@@ -623,8 +623,10 @@ gst_v4l2_buffer_pool_start (GstBufferPool * bpool)
           &max_buffers))
     goto wrong_config;
 
-  /* TODO Also consider min_buffers_for_output when implemented */
-  min_latency = MAX (GST_V4L2_MIN_BUFFERS, obj->min_buffers_for_capture);
+  if (V4L2_TYPE_IS_OUTPUT (obj->type))
+    min_latency = MAX (GST_V4L2_MIN_BUFFERS, obj->min_buffers_for_output);
+  else
+    min_latency = MAX (GST_V4L2_MIN_BUFFERS, obj->min_buffers_for_capture);
 
   switch (obj->mode) {
     case GST_V4L2_IO_RW:

sys/v4l2/gstv4l2object.c

@@ -3472,10 +3472,13 @@ gst_v4l2_object_propose_allocation (GstV4l2Object * obj, GstQuery * query)
 
   if (v4l2_ioctl (obj->video_fd, VIDIOC_G_CTRL, &ctl) >= 0) {
     GST_DEBUG_OBJECT (obj->element, "driver require a miminum of %d buffers",
         ctl.value);
-    min = MAX (ctl.value, GST_V4L2_MIN_BUFFERS);
+    obj->min_buffers_for_output = ctl.value;
+  } else {
+    obj->min_buffers_for_output = 0;
   }
 
+  min = MAX (obj->min_buffers_for_output, GST_V4L2_MIN_BUFFERS);
   gst_query_add_allocation_pool (query, pool, size, min, max);
 
   /* we also support various metadata */
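
The ioctl above reads the V4L2_CID_MIN_BUFFERS_FOR_OUTPUT control (the
control id is set up just before this hunk). A minimal standalone sketch
of the same query outside GStreamer; /dev/video0 is an assumed device
node, and the else branch mirrors the patch's fallback to 0:

#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main (void)
{
  struct v4l2_control ctl = { .id = V4L2_CID_MIN_BUFFERS_FOR_OUTPUT };
  int fd = open ("/dev/video0", O_RDWR);

  if (fd < 0)
    return 1;

  if (ioctl (fd, VIDIOC_G_CTRL, &ctl) >= 0)
    printf ("driver requires a minimum of %d output buffers\n", ctl.value);
  else
    printf ("control not supported, treating the minimum as 0\n");

  close (fd);
  return 0;
}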

sys/v4l2/gstv4l2object.h

@@ -121,6 +121,9 @@ struct _GstV4l2Object {
    * calculate the minimum latency of a m2m decoder. */
   guint32 min_buffers_for_capture;
 
+  /* This will be set if supported in propose allocation. */
+  guint32 min_buffers_for_output;
+
   /* wanted mode */
   GstV4l2IOMode req_mode;
 