v4l2: add support for multi-planar V4L2 API

This API has been in the Linux kernel since version 2.6.39
and is present in all 3.x versions.

The commit that adds the API to the master branch of the
Linux kernel source is:
f8f3914cf9

v4l2 doc: "Some devices require data for each input
or output video frame to be placed in discontiguous
memory buffers"

The API introduces the new structures 'struct v4l2_pix_format_mplane'
and 'struct v4l2_plane', so the pixel format is not set up with the
same API when using multi-planar.
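
Not part of the patch: a minimal sketch of how format negotiation differs
between the two modes. The resolution, pixel formats and the helper name
set_capture_format are placeholder values, and it assumes a kernel header
that defines V4L2_PIX_FMT_NV12M (>= 2.6.39).

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* sketch only: classic vs multi-planar VIDIOC_S_FMT */
static int
set_capture_format (int fd, int use_mplane)
{
  struct v4l2_format fmt;

  memset (&fmt, 0, sizeof (fmt));

  if (use_mplane) {
    /* multi-planar: geometry lives in fmt.fmt.pix_mp
     * (struct v4l2_pix_format_mplane) */
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    fmt.fmt.pix_mp.width = 640;
    fmt.fmt.pix_mp.height = 480;
    fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12M;  /* two planes */
    fmt.fmt.pix_mp.num_planes = 2;
  } else {
    /* single-planar: everything is described by fmt.fmt.pix */
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = 640;
    fmt.fmt.pix.height = 480;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_NV12;
  }

  return ioctl (fd, VIDIOC_S_FMT, &fmt);
}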

Also for gst-v4l2, one of the differences is that GstV4l2Meta
now holds one mem pointer for each mapped plane.
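
Condensed from the gstv4l2bufferpool.h hunk further below, the resulting
meta layout is roughly (it needs gst/gst.h, gst/video/video.h and
linux/videodev2.h):

struct _GstV4l2Meta {
  GstMeta meta;

  /* GST_V4L2_IO_MMAP only: mapping address of each v4l2 plane */
  gpointer mem[GST_VIDEO_MAX_PLANES];

  /* plane info for multi-planar buffers */
  struct v4l2_plane vplanes[GST_VIDEO_MAX_PLANES];

  /* video buffer info */
  struct v4l2_buffer vbuffer;
};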

When not using multi-planar, this commit keeps the same code path
as before, so the two cases live in separate blocks selected by
V4L2_TYPE_IS_MULTIPLANAR.
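
A rough sketch of how the two paths diverge when mapping a queried buffer,
mirroring the gstv4l2bufferpool.c hunks below; error handling and the
libv4l wrappers are omitted, and the helper name map_buffer is made up
for illustration.

#include <stddef.h>
#include <sys/mman.h>
#include <linux/videodev2.h>

/* 'vbuffer' was filled in by VIDIOC_QUERYBUF, 'mem' plays the role of
 * GstV4l2Meta::mem, 'fd' is the open video device */
static void
map_buffer (int fd, struct v4l2_buffer *vbuffer, void **mem)
{
  if (V4L2_TYPE_IS_MULTIPLANAR (vbuffer->type)) {
    unsigned int i;

    /* one mapping per v4l2 plane; here 'length' is the plane count */
    for (i = 0; i < vbuffer->length; i++)
      mem[i] = mmap (NULL, vbuffer->m.planes[i].length,
          PROT_READ | PROT_WRITE, MAP_SHARED, fd,
          vbuffer->m.planes[i].m.mem_offset);
  } else {
    /* one mapping covering the whole contiguous frame;
     * here 'length' is the buffer size in bytes */
    mem[0] = mmap (NULL, vbuffer->length,
        PROT_READ | PROT_WRITE, MAP_SHARED, fd, vbuffer->m.offset);
  }
}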

Fixes bug https://bugzilla.gnome.org/show_bug.cgi?id=712754
Author: Julien Isorce
Date:   2013-11-13 12:05:40 +00:00
parent 0d55724a2b
commit 61ae84b50d
5 changed files with 555 additions and 104 deletions

sys/v4l2/gstv4l2bufferpool.c

@ -122,11 +122,24 @@ gst_v4l2_buffer_pool_free_buffer (GstBufferPool * bpool, GstBuffer * buffer)
g_assert (meta != NULL);
index = meta->vbuffer.index;
GST_LOG_OBJECT (pool,
"unmap buffer %p idx %d (data %p, len %u)", buffer,
index, meta->mem, meta->vbuffer.length);
v4l2_munmap (meta->mem, meta->vbuffer.length);
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
gint i = 0;
for (i = 0; i < meta->vbuffer.length; i++) {
GST_LOG_OBJECT (pool,
"unmap multiplanar buffer %p idx %d (data %p, len %u, plane %u)",
buffer, index, meta->mem[i], meta->vbuffer.m.planes[i].length, i);
v4l2_munmap (meta->mem[i], meta->vbuffer.m.planes[i].length);
}
} else {
GST_LOG_OBJECT (pool,
"unmap buffer %p idx %d (data %p, len %u)", buffer,
index, meta->mem[0], meta->vbuffer.length);
v4l2_munmap (meta->mem[0], meta->vbuffer.length);
}
pool->buffers[index] = NULL;
break;
}
@ -191,34 +204,84 @@ gst_v4l2_buffer_pool_alloc_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
GST_LOG_OBJECT (pool, "creating buffer %u, %p", index, newbuf);
/* prepare the buffer */
memset (&meta->vbuffer, 0x0, sizeof (struct v4l2_buffer));
meta->vbuffer.index = index;
meta->vbuffer.type = obj->type;
meta->vbuffer.memory = V4L2_MEMORY_MMAP;
/* prepare the planes of the buffer */
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
/* length is the number of elements in the
* vplanes array */
meta->vbuffer.length = obj->n_v4l2_planes;
meta->vbuffer.m.planes = meta->vplanes;
}
/* the buffer is prepared, now fill in it with meaningful values */
if (v4l2_ioctl (pool->video_fd, VIDIOC_QUERYBUF, &meta->vbuffer) < 0)
goto querybuf_failed;
GST_LOG_OBJECT (pool, " index: %u", meta->vbuffer.index);
GST_LOG_OBJECT (pool, " type: %d", meta->vbuffer.type);
GST_LOG_OBJECT (pool, " bytesused: %u", meta->vbuffer.bytesused);
GST_LOG_OBJECT (pool, " flags: %08x", meta->vbuffer.flags);
GST_LOG_OBJECT (pool, " field: %d", meta->vbuffer.field);
GST_LOG_OBJECT (pool, " memory: %d", meta->vbuffer.memory);
if (meta->vbuffer.memory == V4L2_MEMORY_MMAP)
GST_LOG_OBJECT (pool, " MMAP offset: %u", meta->vbuffer.m.offset);
GST_LOG_OBJECT (pool, " planes: %d", obj->n_v4l2_planes);
#ifndef GST_DISABLE_GST_DEBUG
if (meta->vbuffer.memory == V4L2_MEMORY_MMAP) {
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
gint i = 0;
for (i = 0; i < meta->vbuffer.length; i++) {
GST_LOG_OBJECT (pool, " bytesused: %u, plane: %u",
meta->vbuffer.m.planes[i].bytesused, i);
GST_LOG_OBJECT (pool, " MMAP offset: %u, plane: %u",
meta->vbuffer.m.planes[i].m.mem_offset, i);
}
} else {
GST_LOG_OBJECT (pool, " bytesused: %u", meta->vbuffer.bytesused);
GST_LOG_OBJECT (pool, " MMAP offset: %u", meta->vbuffer.m.offset);
}
}
#endif
if (obj->mode == GST_V4L2_IO_MMAP) {
meta->mem = v4l2_mmap (0, meta->vbuffer.length,
PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
meta->vbuffer.m.offset);
if (meta->mem == MAP_FAILED)
goto mmap_failed;
gst_buffer_append_memory (newbuf,
gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
meta->mem, meta->vbuffer.length, 0, meta->vbuffer.length, NULL,
NULL));
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
/* append one gstmemory for each plane */
gint i = 0;
for (i = 0; i < meta->vbuffer.length; i++) {
meta->mem[i] = v4l2_mmap (0, meta->vbuffer.m.planes[i].length,
PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
meta->vbuffer.m.planes[i].m.mem_offset);
if (meta->mem[i] == MAP_FAILED)
goto mmap_failed;
GST_LOG_OBJECT (pool, " buffer length %d for plane %d",
meta->vbuffer.m.planes[i].length, i);
gst_buffer_append_memory (newbuf,
gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
meta->mem[i], meta->vbuffer.m.planes[i].length,
meta->vbuffer.m.planes[i].data_offset,
meta->vbuffer.m.planes[i].length, NULL, NULL));
}
} else {
/* append one gstmemory that contains all the planes */
meta->mem[0] = v4l2_mmap (0, meta->vbuffer.length,
PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
meta->vbuffer.m.offset);
if (meta->mem[0] == MAP_FAILED)
goto mmap_failed;
GST_LOG_OBJECT (pool, " buffer length %d", meta->vbuffer.length);
gst_buffer_append_memory (newbuf,
gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
meta->mem[0], meta->vbuffer.length, 0, meta->vbuffer.length,
NULL, NULL));
}
}
#if HAVE_DECL_V4L2_MEMORY_DMABUF
if (obj->mode == GST_V4L2_IO_DMABUF) {
@ -241,17 +304,24 @@ gst_v4l2_buffer_pool_alloc_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
if (pool->add_videometa && info->finfo) {
const GstVideoFormatInfo *finfo = info->finfo;
gsize offset[GST_VIDEO_MAX_PLANES];
gint width, height, n_planes, offs, i, stride[GST_VIDEO_MAX_PLANES];
gint width, height, n_gst_planes, offs, i, stride[GST_VIDEO_MAX_PLANES];
width = GST_VIDEO_INFO_WIDTH (info);
height = GST_VIDEO_INFO_HEIGHT (info);
n_planes = GST_VIDEO_INFO_N_PLANES (info);
GST_DEBUG_OBJECT (pool, "adding video meta, bytesperline %d",
obj->bytesperline);
/* n_gst_planes is the number of gst planes
 * (RGB: 1, YUY2: 1, NV12: 2, I420: 3).
 * It is greater than or equal to the number of v4l2 planes. */
n_gst_planes = GST_VIDEO_INFO_N_PLANES (info);
/* the basics are common between MPLANE and non-MPLANE mode,
 * except for a special case inside the loop at the end
 */
offs = 0;
for (i = 0; i < n_planes; i++) {
for (i = 0; i < n_gst_planes; i++) {
GST_DEBUG_OBJECT (pool, "adding video meta, bytesperline %d",
obj->bytesperline[i]);
offset[i] = offs;
switch (info->finfo->format) {
@ -261,20 +331,31 @@ gst_v4l2_buffer_pool_alloc_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
case GST_VIDEO_FORMAT_NV24:
stride[i] =
(i == 0 ? 1 : 2) * GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo,
i, obj->bytesperline);
i, obj->bytesperline[i]);
break;
default:
stride[i] =
GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, i,
obj->bytesperline);
obj->bytesperline[i]);
break;
}
offs +=
stride[i] * GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i, height);
/* when using multiplanar mode and there is one v4l2 plane for
 * each gst plane (here meta->vbuffer.length is the number of planes)
 */
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type) && meta->vbuffer.length > 1)
/* non-contiguous case here, so we have to make sure that gst goes to the
 * next plane (using the default gstvideometa.c::default_map).
 * The next plane starts 'length' bytes after the previous one from
 * the gst buffer's point of view. */
offs += meta->vplanes[i].length;
else
offs +=
stride[i] * GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i,
height);
}
gst_buffer_add_video_meta_full (newbuf, GST_VIDEO_FRAME_FLAG_NONE,
GST_VIDEO_INFO_FORMAT (info), width, height, n_planes,
GST_VIDEO_INFO_FORMAT (info), width, height, n_gst_planes,
offset, stride);
}
break;
@ -360,17 +441,23 @@ gst_v4l2_buffer_pool_set_config (GstBufferPool * bpool, GstStructure * config)
if (!pool->add_videometa &&
GST_VIDEO_INFO_FORMAT (&obj->info) != GST_VIDEO_FORMAT_ENCODED) {
gint stride;
/* in non MPLANE mode, there is only one bytesperline field */
gint nb_checked_planes =
V4L2_TYPE_IS_MULTIPLANAR (obj->
type) ? GST_VIDEO_INFO_N_PLANES (&obj->info) : 1;
gint stride = 0;
gint i = 0;
for (i = 0; i < nb_checked_planes; i++) {
/* we don't have video metadata, and we are dealing with raw video,
* see if the strides are compatible */
stride = GST_VIDEO_INFO_PLANE_STRIDE (&obj->info, i);
/* we don't have video metadata, and we are not dealing with raw video,
* see if the strides are compatible */
stride = GST_VIDEO_INFO_PLANE_STRIDE (&obj->info, 0);
GST_DEBUG_OBJECT (pool, "no videometadata, checking strides %d and %u",
stride, obj->bytesperline[i]);
GST_DEBUG_OBJECT (pool, "no videometadata, checking strides %d and %u",
stride, obj->bytesperline);
if (stride != obj->bytesperline)
goto missing_video_api;
if (stride != obj->bytesperline[i])
goto missing_video_api;
}
}
/* parse the config and keep around */
@ -470,7 +557,7 @@ missing_video_api:
{
GST_ERROR_OBJECT (pool, "missing GstMetaVideo API in config, "
"default stride: %d, wanted stride %u",
GST_VIDEO_INFO_PLANE_STRIDE (&obj->info, 0), obj->bytesperline);
GST_VIDEO_INFO_PLANE_STRIDE (&obj->info, 0), obj->bytesperline[0]);
return FALSE;
}
wrong_config:
@ -542,7 +629,7 @@ gst_v4l2_buffer_pool_start (GstBufferPool * bpool)
/* we can start capturing now, we wait for the playback case until we queued
* the first buffer */
if (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
if (!V4L2_TYPE_IS_OUTPUT (obj->type))
if (!start_streaming (pool))
goto start_failed;
@ -673,6 +760,7 @@ static GstFlowReturn
gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool * pool, GstBuffer * buf)
{
GstV4l2Meta *meta;
GstV4l2Object *obj = NULL;
gint index;
meta = GST_V4L2_META_GET (buf);
@ -683,13 +771,29 @@ gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool * pool, GstBuffer * buf)
return GST_FLOW_OK;
}
obj = pool->obj;
index = meta->vbuffer.index;
/* this field is common to MPLANE and non-MPLANE */
meta->vbuffer.bytesused = gst_buffer_get_size (buf);
GST_LOG_OBJECT (pool,
"enqueue buffer %p, index:%d, queued:%d, flags:%08x mem:%p used:%d",
buf, index, pool->num_queued, meta->vbuffer.flags,
meta->mem, meta->vbuffer.bytesused);
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
gint i = 0;
for (i = 0; i < meta->vbuffer.length; i++) {
meta->vbuffer.m.planes[i].bytesused =
gst_buffer_get_sizes_range (buf, i, 1, NULL, NULL);
GST_LOG_OBJECT (pool,
"enqueue buffer %p, index:%d, queued:%d, flags:%08x mem:%p used:%d, plane:%d",
buf, index, pool->num_queued, meta->vbuffer.flags,
meta->mem[i], meta->vbuffer.m.planes[i].bytesused, i);
}
} else {
GST_LOG_OBJECT (pool,
"enqueue buffer %p, index:%d, queued:%d, flags:%08x mem:%p used:%d",
buf, index, pool->num_queued, meta->vbuffer.flags,
meta->mem[0], meta->vbuffer.bytesused);
}
if (pool->buffers[index] != NULL)
goto already_queued;
@ -723,13 +827,16 @@ gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool, GstBuffer ** buffer)
GstFlowReturn res;
GstBuffer *outbuf;
struct v4l2_buffer vbuffer;
struct v4l2_plane vplanes[GST_VIDEO_MAX_PLANES];
GstV4l2Object *obj = pool->obj;
GstClockTime timestamp;
GstV4l2Meta *meta;
gint i = 0;
if ((res = gst_v4l2_object_poll (obj)) != GST_FLOW_OK)
goto poll_error;
/* prepare the buffer */
memset (&vbuffer, 0x00, sizeof (vbuffer));
vbuffer.type = obj->type;
#if HAVE_DECL_V4L2_MEMORY_DMABUF
@ -739,6 +846,15 @@ gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool, GstBuffer ** buffer)
#endif
vbuffer.memory = V4L2_MEMORY_MMAP;
/* prepare the planes of the buffer */
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
/* length is the number of elements in the
* vplanes array */
vbuffer.length = obj->n_v4l2_planes;
vbuffer.m.planes = vplanes;
}
/* the buffer is prepared, now fill in it with meaningful values */
GST_LOG_OBJECT (pool, "doing DQBUF");
if (v4l2_ioctl (pool->video_fd, VIDIOC_DQBUF, &vbuffer) < 0)
goto error;
@ -759,11 +875,45 @@ gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool, GstBuffer ** buffer)
meta = GST_V4L2_META_GET (outbuf);
g_assert (meta != NULL);
GST_LOG_OBJECT (pool,
"dequeued buffer %p seq:%d (ix=%d), mem %p used %d, flags %08x, ts %"
GST_TIME_FORMAT ", pool-queued=%d, buffer=%p", outbuf, vbuffer.sequence,
vbuffer.index, meta->mem, vbuffer.bytesused, vbuffer.flags,
GST_TIME_ARGS (timestamp), pool->num_queued, outbuf);
/* The size can change at every frame, esp. with jpeg. The GstMemory
* inside the GstBuffer could have been changed by some other
* element. So update our meta */
if (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE
|| obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
/* this field is common to MPLANE and non-MPLANE */
meta->vbuffer.length = vbuffer.length;
meta->vbuffer.bytesused = vbuffer.bytesused;
if (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
for (i = 0; i < meta->vbuffer.length; i++) {
/* the following also update meta->vbuffer.m.planes[i].length */
meta->vplanes[i].length = vbuffer.m.planes[i].length;
/* the following also update meta->vbuffer.m.planes[i].bytesused */
meta->vplanes[i].bytesused = vbuffer.m.planes[i].bytesused;
/* the following also update meta->vbuffer.m.planes[i].data_offset */
meta->vplanes[i].data_offset = vbuffer.m.planes[i].data_offset;
}
}
}
#ifndef GST_DISABLE_GST_DEBUG
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
for (i = 0; i < meta->vbuffer.length; i++) {
GST_LOG_OBJECT (pool,
"dequeued buffer %p seq:%d (ix=%d), mem %p used %d, plane=%d, flags %08x, ts %"
GST_TIME_FORMAT ", pool-queued=%d, buffer=%p", outbuf,
vbuffer.sequence, vbuffer.index, meta->mem[i],
meta->vbuffer.m.planes[i].bytesused, i, vbuffer.flags,
GST_TIME_ARGS (timestamp), pool->num_queued, outbuf);
}
} else {
GST_LOG_OBJECT (pool,
"dequeued buffer %p seq:%d (ix=%d), mem %p used %d, flags %08x, ts %"
GST_TIME_FORMAT ", pool-queued=%d, buffer=%p", outbuf, vbuffer.sequence,
vbuffer.index, meta->mem[0], vbuffer.bytesused, vbuffer.flags,
GST_TIME_ARGS (timestamp), pool->num_queued, outbuf);
}
#endif
/* set top/bottom field first if v4l2_buffer has the information */
if (vbuffer.field == V4L2_FIELD_INTERLACED_TB) {
@ -782,7 +932,16 @@ gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool, GstBuffer ** buffer)
gst_buffer_remove_all_memory (outbuf);
gst_buffer_append_memory (outbuf,
gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
meta->mem, vbuffer.length, 0, vbuffer.bytesused, NULL, NULL));
meta->mem[0], vbuffer.length, 0, vbuffer.bytesused, NULL, NULL));
} else if (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
gst_buffer_remove_all_memory (outbuf);
for (i = 0; i < meta->vbuffer.length; i++) {
gst_buffer_append_memory (outbuf,
gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
meta->mem[i], vbuffer.m.planes[i].length,
vbuffer.m.planes[i].data_offset, vbuffer.m.planes[i].bytesused,
NULL, NULL));
}
}
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
@ -868,6 +1027,7 @@ gst_v4l2_buffer_pool_acquire_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
switch (obj->type) {
case V4L2_BUF_TYPE_VIDEO_CAPTURE:
case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
/* capture, This function should return a buffer with new captured data */
switch (obj->mode) {
case GST_V4L2_IO_RW:
@ -919,6 +1079,7 @@ gst_v4l2_buffer_pool_acquire_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
break;
case V4L2_BUF_TYPE_VIDEO_OUTPUT:
case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
/* playback, This function should return an empty buffer */
switch (obj->mode) {
case GST_V4L2_IO_RW:
@ -967,6 +1128,7 @@ gst_v4l2_buffer_pool_release_buffer (GstBufferPool * bpool, GstBuffer * buffer)
switch (obj->type) {
case V4L2_BUF_TYPE_VIDEO_CAPTURE:
case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
/* capture, put the buffer back in the queue so that we can refill it
* later. */
switch (obj->mode) {
@ -989,6 +1151,7 @@ gst_v4l2_buffer_pool_release_buffer (GstBufferPool * bpool, GstBuffer * buffer)
break;
case V4L2_BUF_TYPE_VIDEO_OUTPUT:
case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
switch (obj->mode) {
case GST_V4L2_IO_RW:
/* release back in the pool */
@ -1010,7 +1173,24 @@ gst_v4l2_buffer_pool_release_buffer (GstBufferPool * bpool, GstBuffer * buffer)
index);
/* reset to the full length, in case it was changed */
gst_buffer_resize (buffer, 0, meta->vbuffer.length);
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
gint i = 0;
gint total_length = 0;
for (i = 0; i < meta->vbuffer.length; i++)
total_length += meta->vbuffer.m.planes[i].length;
if (total_length != gst_buffer_get_size (buffer)) {
/* FIXME if the lengths have actually changed we may need
 * to restore the sizes of the individual memories and
 * re-add them */
GST_WARNING_OBJECT (pool,
"lengths changed, more work required");
}
gst_buffer_resize (buffer, 0, total_length);
} else {
gst_buffer_resize (buffer, 0, meta->vbuffer.length);
}
/* playback, put the buffer back in the queue to refill later. */
GST_BUFFER_POOL_CLASS (parent_class)->release_buffer (bpool,
@ -1201,6 +1381,7 @@ gst_v4l2_buffer_pool_process (GstV4l2BufferPool * pool, GstBuffer * buf)
switch (obj->type) {
case V4L2_BUF_TYPE_VIDEO_CAPTURE:
case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
/* capture */
switch (obj->mode) {
case GST_V4L2_IO_RW:
@ -1237,6 +1418,7 @@ gst_v4l2_buffer_pool_process (GstV4l2BufferPool * pool, GstBuffer * buf)
break;
case V4L2_BUF_TYPE_VIDEO_OUTPUT:
case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
/* playback */
switch (obj->mode) {
case GST_V4L2_IO_RW:

sys/v4l2/gstv4l2bufferpool.h

@ -74,7 +74,17 @@ struct _GstV4l2BufferPoolClass
struct _GstV4l2Meta {
GstMeta meta;
gpointer mem;
/* VIDEO_MAX_PLANES is defined to 8 in videodev2.h
 * whereas GST_VIDEO_MAX_PLANES is defined to 4 in
 * video-format.h, so let's use the minimum */
/* only useful in the GST_V4L2_IO_MMAP case.
 * it contains the address at which the mapping
 * was placed for each v4l2 plane */
gpointer mem[GST_VIDEO_MAX_PLANES];
/* plane info for multi-planar buffers */
struct v4l2_plane vplanes[GST_VIDEO_MAX_PLANES];
/* video buffer info */
struct v4l2_buffer vbuffer;
};

sys/v4l2/gstv4l2object.c

@ -55,6 +55,13 @@
#define V4L2_FIELD_INTERLACED_BT 9
#endif
#ifndef V4L2_PIX_FMT_NV12M
#define V4L2_PIX_FMT_NV12M GST_MAKE_FOURCC ('N', 'M', '1', '2')
#endif
#ifndef V4L2_PIX_FMT_NV21M
#define V4L2_PIX_FMT_NV21M GST_MAKE_FOURCC ('N', 'M', '2', '1')
#endif
GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
GST_DEBUG_CATEGORY_EXTERN (GST_CAT_PERFORMANCE);
#define GST_CAT_DEFAULT v4l2_debug
@ -561,6 +568,28 @@ gst_v4l2_object_new (GstElement * element,
v4l2object->keep_aspect = TRUE;
v4l2object->n_v4l2_planes = 0;
/*
 * this boolean only applies in v4l2-MPLANE mode.
 * TRUE: prefer several (non-contiguous) v4l2 planes.
 * For example if the device supports both NV12 and NV12M
 * in MPLANE mode, then it will prefer NV12M.
 * FALSE: prefer one v4l2 plane (which contains all gst planes,
 * as if it were working in non-v4l2-MPLANE mode).
 * For example if the device supports both NV12 and NV12M
 * in MPLANE mode, then it will prefer NV12.
 *
 * this boolean is also used to handle the case where the
 * device only supports MPLANE mode but does not support both
 * NV12 and NV12M. In that case we first try the prefered config
 * and fall back to the other one if it fails. For example in
 * MPLANE mode, if the device has NV12 but not NV12M, then even
 * with prefered_non_contiguous set to TRUE it will try NV12 as well.
 */
v4l2object->prefered_non_contiguous = TRUE;
return v4l2object;
}
@ -767,6 +796,12 @@ gst_v4l2_object_get_property_helper (GstV4l2Object * v4l2object,
V4L2_CAP_VIDEO_OVERLAY |
V4L2_CAP_VBI_CAPTURE |
V4L2_CAP_VBI_OUTPUT | V4L2_CAP_TUNER | V4L2_CAP_AUDIO);
if (v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
flags |= V4L2_CAP_VIDEO_CAPTURE;
if (v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE)
flags |= V4L2_CAP_VIDEO_OUTPUT;
}
g_value_set_flags (value, flags);
break;
@ -936,7 +971,9 @@ static const GstV4L2FormatDesc gst_v4l2_formats[] = {
/* two planes -- one Y, one Cr + Cb interleaved */
{V4L2_PIX_FMT_NV12, TRUE},
{V4L2_PIX_FMT_NV12M, TRUE},
{V4L2_PIX_FMT_NV21, TRUE},
{V4L2_PIX_FMT_NV21M, TRUE},
/* The following formats are not defined in the V4L2 specification */
{V4L2_PIX_FMT_YUV410, TRUE},
@ -1091,7 +1128,9 @@ gst_v4l2_object_format_get_rank (const struct v4l2_fmtdesc *fmt)
break;
case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
case V4L2_PIX_FMT_NV12M: /* Same as NV12 */
case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
case V4L2_PIX_FMT_NV21M: /* Same as NV21 */
case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
rank = YUV_ODD_BASE_RANK;
@ -1192,7 +1231,8 @@ format_cmp_func (gconstpointer a, gconstpointer b)
* return value: TRUE on success, FALSE on error
******************************************************/
static gboolean
gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object)
gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object,
enum v4l2_buf_type type)
{
gint n;
struct v4l2_fmtdesc *format;
@ -1204,7 +1244,7 @@ gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object)
format = g_new0 (struct v4l2_fmtdesc, 1);
format->index = n;
format->type = v4l2object->type;
format->type = type;
if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0) {
if (errno == EINVAL) {
@ -1258,14 +1298,38 @@ failed:
}
/*
* Get the list of supported capture formats, a list of
* <code>struct v4l2_fmtdesc</code>.
*/
* Get the list of supported capture formats, a list of
* <code>struct v4l2_fmtdesc</code>.
*/
static GSList *
gst_v4l2_object_get_format_list (GstV4l2Object * v4l2object)
{
if (!v4l2object->formats)
gst_v4l2_object_fill_format_list (v4l2object);
if (!v4l2object->formats) {
/* check usual way */
gst_v4l2_object_fill_format_list (v4l2object, v4l2object->type);
/* if our driver supports multi-planar
 * and formats are still empty, then we can work around a driver bug
 * by also looking up formats as if our device did not support
 * multi-planar */
if (!v4l2object->formats) {
switch (v4l2object->type) {
case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
gst_v4l2_object_fill_format_list (v4l2object,
V4L2_BUF_TYPE_VIDEO_CAPTURE);
break;
case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
gst_v4l2_object_fill_format_list (v4l2object,
V4L2_BUF_TYPE_VIDEO_OUTPUT);
break;
default:
break;
}
}
}
return v4l2object->formats;
}
@ -1318,7 +1382,9 @@ gst_v4l2_object_v4l2fourcc_to_structure (guint32 fourcc)
case V4L2_PIX_FMT_RGB32:
case V4L2_PIX_FMT_BGR32:
case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
case V4L2_PIX_FMT_NV12M:
case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
case V4L2_PIX_FMT_NV21M:
case V4L2_PIX_FMT_YVU410:
case V4L2_PIX_FMT_YUV410:
case V4L2_PIX_FMT_YUV420: /* I420/IYUV */
@ -1358,9 +1424,11 @@ gst_v4l2_object_v4l2fourcc_to_structure (guint32 fourcc)
format = GST_VIDEO_FORMAT_BGRx;
break;
case V4L2_PIX_FMT_NV12:
case V4L2_PIX_FMT_NV12M:
format = GST_VIDEO_FORMAT_NV12;
break;
case V4L2_PIX_FMT_NV21:
case V4L2_PIX_FMT_NV21M:
format = GST_VIDEO_FORMAT_NV21;
break;
case V4L2_PIX_FMT_YVU410:
@ -1402,8 +1470,9 @@ gst_v4l2_object_v4l2fourcc_to_structure (guint32 fourcc)
g_assert_not_reached ();
break;
}
structure = gst_structure_new ("video/x-raw",
"format", G_TYPE_STRING, gst_video_format_to_string (format), NULL);
if (format != GST_VIDEO_FORMAT_UNKNOWN)
structure = gst_structure_new ("video/x-raw",
"format", G_TYPE_STRING, gst_video_format_to_string (format), NULL);
break;
}
case V4L2_PIX_FMT_DV:
@ -1477,6 +1546,31 @@ gst_v4l2_object_get_all_caps (void)
}
/* if the device actually supports multi-planar,
 * we also allow using planes in a contiguous manner
 * if the device can do that, through prefered_non_contiguous */
static gboolean
gst_v4l2_object_has_mplane (GstV4l2Object * obj)
{
gboolean ret = FALSE;
switch (obj->type) {
case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
ret = (obj->vcap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) != 0
&& obj->prefered_non_contiguous;
break;
case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
ret = (obj->vcap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE) != 0
&& obj->prefered_non_contiguous;
break;
default:
ret = FALSE;
break;
}
return ret;
}
/* collect data for the given caps
* @caps: given input caps
* @format: location for the v4l format
@ -1529,10 +1623,14 @@ gst_v4l2_object_get_caps_info (GstV4l2Object * v4l2object, GstCaps * caps,
fourcc = V4L2_PIX_FMT_YUV422P;
break;
case GST_VIDEO_FORMAT_NV12:
fourcc = V4L2_PIX_FMT_NV12;
fourcc =
gst_v4l2_object_has_mplane (v4l2object) ? V4L2_PIX_FMT_NV12M :
V4L2_PIX_FMT_NV12;
break;
case GST_VIDEO_FORMAT_NV21:
fourcc = V4L2_PIX_FMT_NV21;
fourcc =
gst_v4l2_object_has_mplane (v4l2object) ? V4L2_PIX_FMT_NV21M :
V4L2_PIX_FMT_NV21;
break;
#ifdef V4L2_PIX_FMT_YVYU
case GST_VIDEO_FORMAT_YVYU:
@ -2320,9 +2418,30 @@ gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps)
struct v4l2_fmtdesc *fmtdesc;
GstVideoInfo info;
gint width, height, fps_n, fps_d, stride;
gint i = 0;
if (!gst_v4l2_object_get_caps_info (v4l2object, caps, &fmtdesc, &info))
goto invalid_caps;
if (V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type)) {
/* gst does not distinguish GST_VIDEO_FORMAT_NV21 from GST_VIDEO_FORMAT_NV21M, so
 * we have to check it blindly
 */
if (!gst_v4l2_object_get_caps_info (v4l2object, caps, &fmtdesc, &info)) {
/* for example if the device supports NV21 and not NV21M, try the
 * non-prefered variant when prefered_non_contiguous was initially
 * set to TRUE.
 */
GST_DEBUG_OBJECT (v4l2object->element,
"prefered multiplanar does not exist, try the other one");
v4l2object->prefered_non_contiguous =
!v4l2object->prefered_non_contiguous;
if (!gst_v4l2_object_get_caps_info (v4l2object, caps, &fmtdesc, &info))
goto invalid_caps;
}
} else {
if (!gst_v4l2_object_get_caps_info (v4l2object, caps, &fmtdesc, &info))
goto invalid_caps;
}
pixelformat = fmtdesc->pixelformat;
width = GST_VIDEO_INFO_WIDTH (&info);
@ -2331,6 +2450,10 @@ gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps)
fps_d = GST_VIDEO_INFO_FPS_D (&info);
stride = GST_VIDEO_INFO_PLANE_STRIDE (&info, 0);
/* get bytesperline for each plane */
for (i = 0; i < GST_VIDEO_INFO_N_PLANES (&info); i++)
v4l2object->bytesperline[i] = GST_VIDEO_INFO_PLANE_STRIDE (&info, i);
if (GST_VIDEO_INFO_IS_INTERLACED (&info)) {
GST_DEBUG_OBJECT (v4l2object->element, "interlaced video");
/* ideally we would differentiate between types of interlaced video
@ -2366,49 +2489,133 @@ gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps)
if (v4l2_ioctl (fd, VIDIOC_G_FMT, &format) < 0)
goto get_fmt_failed;
GST_DEBUG_OBJECT (v4l2object->element, "Got format to %dx%d, format "
"%" GST_FOURCC_FORMAT " bytesperline %d, colorspace %d",
format.fmt.pix.width, format.fmt.pix.height,
GST_FOURCC_ARGS (format.fmt.pix.pixelformat), format.fmt.pix.bytesperline,
format.fmt.pix.colorspace);
if (format.type != v4l2object->type ||
format.fmt.pix.width != width ||
format.fmt.pix.height != height ||
format.fmt.pix.pixelformat != pixelformat ||
format.fmt.pix.field != field || format.fmt.pix.bytesperline != stride) {
/* something different, set the format */
GST_DEBUG_OBJECT (v4l2object->element, "Setting format to %dx%d, format "
"%" GST_FOURCC_FORMAT " bytesperline %d", width, height,
GST_FOURCC_ARGS (pixelformat), stride);
format.type = v4l2object->type;
format.fmt.pix.width = width;
format.fmt.pix.height = height;
format.fmt.pix.pixelformat = pixelformat;
format.fmt.pix.field = field;
/* try to ask our prefered stride */
format.fmt.pix.bytesperline = stride;
if (v4l2_ioctl (fd, VIDIOC_S_FMT, &format) < 0)
goto set_fmt_failed;
if (V4L2_TYPE_IS_MULTIPLANAR (v4l2object->type)) {
/* even in v4l2 multiplanar mode we can work in contiguous mode
* if the device supports it */
gint n_v4l_planes =
v4l2object->prefered_non_contiguous ? GST_VIDEO_INFO_N_PLANES (&info) :
1;
GST_DEBUG_OBJECT (v4l2object->element, "Got format to %dx%d, format "
"%" GST_FOURCC_FORMAT " stride %d", format.fmt.pix.width,
format.fmt.pix.height, GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
format.fmt.pix.bytesperline);
"%" GST_FOURCC_FORMAT " colorspace %d, nb planes %d",
format.fmt.pix_mp.width, format.fmt.pix_mp.height,
GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
format.fmt.pix_mp.colorspace, format.fmt.pix_mp.num_planes);
if (format.fmt.pix.width != width || format.fmt.pix.height != height)
goto invalid_dimensions;
if (format.type != v4l2object->type ||
format.fmt.pix_mp.width != width ||
format.fmt.pix_mp.height != height ||
format.fmt.pix_mp.pixelformat != pixelformat ||
format.fmt.pix_mp.field != field ||
format.fmt.pix_mp.num_planes != n_v4l_planes) {
/* something different, set the format */
GST_DEBUG_OBJECT (v4l2object->element, "Setting format to %dx%d, format "
"%" GST_FOURCC_FORMAT, width, height, GST_FOURCC_ARGS (pixelformat));
if (format.fmt.pix.pixelformat != pixelformat)
goto invalid_pixelformat;
format.type = v4l2object->type;
format.fmt.pix_mp.pixelformat = pixelformat;
format.fmt.pix_mp.width = width;
format.fmt.pix_mp.height = height;
format.fmt.pix_mp.field = field;
format.fmt.pix_mp.num_planes = n_v4l_planes;
/* try to ask our prefered stride but it's not a failure
* if not accepted */
for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
format.fmt.pix_mp.plane_fmt[i].bytesperline =
v4l2object->bytesperline[i];
if (v4l2_ioctl (fd, VIDIOC_S_FMT, &format) < 0)
goto set_fmt_failed;
GST_DEBUG_OBJECT (v4l2object->element, "Got format to %dx%d, format "
"%" GST_FOURCC_FORMAT ", nb planes %d", format.fmt.pix.width,
format.fmt.pix_mp.height,
GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
format.fmt.pix_mp.num_planes);
#ifndef GST_DISABLE_GST_DEBUG
for (i = 0; i < format.fmt.pix_mp.num_planes; i++)
GST_DEBUG_OBJECT (v4l2object->element, " stride %d",
format.fmt.pix_mp.plane_fmt[i].bytesperline);
#endif
if (format.fmt.pix_mp.width != width
|| format.fmt.pix_mp.height != height)
goto invalid_dimensions;
if (format.fmt.pix_mp.pixelformat != pixelformat)
goto invalid_pixelformat;
if (format.fmt.pix_mp.num_planes != n_v4l_planes)
goto invalid_planes;
}
/* figure out the frame layout */
v4l2object->n_v4l2_planes = format.fmt.pix_mp.num_planes;
v4l2object->sizeimage = 0;
for (i = 0; i < format.fmt.pix_mp.num_planes; i++) {
/* For compatibility reasons with the non-v4l2-multiplanar mode
* we have to use the bytesperline of the first v4l plane
* See plane_fmt[0] instead of plane_fmt[i] in next line */
v4l2object->bytesperline[i] = format.fmt.pix_mp.plane_fmt[0].bytesperline;
v4l2object->sizeimage += format.fmt.pix_mp.plane_fmt[i].sizeimage;
}
} else {
GST_DEBUG_OBJECT (v4l2object->element, "Got format to %dx%d, format "
"%" GST_FOURCC_FORMAT " bytesperline %d, colorspace %d",
format.fmt.pix.width, format.fmt.pix.height,
GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
format.fmt.pix.bytesperline, format.fmt.pix.colorspace);
if (format.type != v4l2object->type ||
format.fmt.pix.width != width ||
format.fmt.pix.height != height ||
format.fmt.pix.pixelformat != pixelformat ||
format.fmt.pix.field != field
|| format.fmt.pix.bytesperline != stride) {
/* something different, set the format */
GST_DEBUG_OBJECT (v4l2object->element, "Setting format to %dx%d, format "
"%" GST_FOURCC_FORMAT " bytesperline %d", width, height,
GST_FOURCC_ARGS (pixelformat), stride);
format.type = v4l2object->type;
format.fmt.pix.width = width;
format.fmt.pix.height = height;
format.fmt.pix.pixelformat = pixelformat;
format.fmt.pix.field = field;
/* try to ask our prefered stride */
format.fmt.pix.bytesperline = stride;
if (v4l2_ioctl (fd, VIDIOC_S_FMT, &format) < 0)
goto set_fmt_failed;
GST_DEBUG_OBJECT (v4l2object->element, "Got format to %dx%d, format "
"%" GST_FOURCC_FORMAT " stride %d", format.fmt.pix.width,
format.fmt.pix.height, GST_FOURCC_ARGS (format.fmt.pix.pixelformat),
format.fmt.pix.bytesperline);
if (format.fmt.pix.width != width || format.fmt.pix.height != height)
goto invalid_dimensions;
if (format.fmt.pix.pixelformat != pixelformat)
goto invalid_pixelformat;
}
/* only one plane in non-MPLANE mode */
v4l2object->n_v4l2_planes = 1;
/* figure out the frame layout */
for (i = 0; i < GST_VIDEO_MAX_PLANES; i++) {
/* In non-multiplanar mode, there is only one bytesperline field.
 * Just set it everywhere so the code stays factorized with the
 * multiplanar case in gstv4l2bufferpool.c::alloc_buffer
 */
v4l2object->bytesperline[i] = format.fmt.pix.bytesperline;
}
v4l2object->sizeimage = format.fmt.pix.sizeimage;
}
/* figure out the frame layout */
v4l2object->bytesperline = format.fmt.pix.bytesperline;
v4l2object->sizeimage = format.fmt.pix.sizeimage;
GST_DEBUG_OBJECT (v4l2object->element, "Got sizeimage %u",
v4l2object->sizeimage);
@ -2426,7 +2633,8 @@ gst_v4l2_object_set_format (GstV4l2Object * v4l2object, GstCaps * caps)
streamparm.parm.capture.timeperframe.denominator;
GST_VIDEO_INFO_FPS_D (&info) = streamparm.parm.capture.timeperframe.numerator;
if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE) {
if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE
|| v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
GST_DEBUG_OBJECT (v4l2object->element, "Got framerate: %u/%u",
streamparm.parm.capture.timeperframe.denominator,
streamparm.parm.capture.timeperframe.numerator);
@ -2534,6 +2742,14 @@ invalid_pixelformat:
GST_FOURCC_ARGS (format.fmt.pix.pixelformat)));
return FALSE;
}
invalid_planes:
{
GST_ELEMENT_ERROR (v4l2object->element, RESOURCE, SETTINGS,
(_("Device '%s' does support non-contiguous planes"),
v4l2object->videodev),
("Device wants %d planes", format.fmt.pix_mp.num_planes));
return FALSE;
}
get_parm_failed:
{
/* it's possible that this call is not supported */

sys/v4l2/gstv4l2object.h

@ -118,10 +118,22 @@ struct _GstV4l2Object {
struct v4l2_fmtdesc *fmtdesc;
GstVideoInfo info;
guint32 bytesperline;
/* only used if the device supports MPLANE.
 * the number of planes here means v4l2 planes;
 * the gstreamer equivalent is gst_buffer_n_memory
 */
gint n_v4l2_planes;
guint32 bytesperline[GST_VIDEO_MAX_PLANES];
guint32 sizeimage;
GstClockTime duration;
/* if the MPLANE device supports both contiguous and non-contiguous
 * planes, this allows selecting which one we want. By default we
 * prefer non-contiguous mode.
 */
gboolean prefered_non_contiguous;
/* wanted mode */
GstV4l2IOMode req_mode;

sys/v4l2/v4l2_calls.c

@ -525,13 +525,43 @@ gst_v4l2_open (GstV4l2Object * v4l2object)
/* do we need to be a capture device? */
if (GST_IS_V4L2SRC (v4l2object->element) &&
!(v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
!(v4l2object->vcap.capabilities & (V4L2_CAP_VIDEO_CAPTURE |
V4L2_CAP_VIDEO_CAPTURE_MPLANE)))
goto not_capture;
if (GST_IS_V4L2SINK (v4l2object->element) &&
!(v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_OUTPUT))
!(v4l2object->vcap.capabilities & (V4L2_CAP_VIDEO_OUTPUT |
V4L2_CAP_VIDEO_OUTPUT_MPLANE)))
goto not_output;
/* when calling gst_v4l2_object_new the user decides the initial type,
 * so adjust it if multi-planar is supported.
 * the driver should make it exclusive, i.e. it should not
 * support both MPLANE and non-MPLANE,
 * because even when using MPLANE it is still possible to use it
 * in a contiguous manner. In that case the first v4l2 plane
 * contains all the gst planes.
 */
switch (v4l2object->type) {
case V4L2_BUF_TYPE_VIDEO_OUTPUT:
if (v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE) {
GST_DEBUG ("adjust type to multi-planar output");
v4l2object->type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
}
break;
case V4L2_BUF_TYPE_VIDEO_CAPTURE:
if (v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
/* FIXME: for now it's an untested case so just put a warning */
GST_WARNING ("untested V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE");
GST_DEBUG ("adjust type to multi-planar capture");
v4l2object->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
}
break;
default:
break;
}
/* create enumerations, posts errors. */
if (!gst_v4l2_fill_lists (v4l2object))
goto error;
@ -542,7 +572,8 @@ gst_v4l2_open (GstV4l2Object * v4l2object)
pollfd.fd = v4l2object->video_fd;
gst_poll_add_fd (v4l2object->poll, &pollfd);
if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
if (v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE
|| v4l2object->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
gst_poll_fd_ctl_read (v4l2object->poll, &pollfd, TRUE);
else
gst_poll_fd_ctl_write (v4l2object->poll, &pollfd, TRUE);