Commit 61ae84b5 authored by Julien Isorce

v4l2: add support for multi-planar V4L2 API

This API has been in the Linux kernel since version 2.6.39
and is present in all 3.x kernels.

The commit that added the API to the master branch of the
Linux kernel is:
https://github.com/torvalds/linux/commit/f8f3914cf922f5f9e1d60e9e10f6fb92742907ad

From the V4L2 documentation: "Some devices require data for each input
or output video frame to be placed in discontiguous
memory buffers"

The API introduces the new structures 'struct v4l2_pix_format_mplane'
and 'struct v4l2_plane', so the pixel format is not set up through
the same API when using multi-planar.
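
Below is a minimal sketch (not part of this commit) of how the format
is set up through the multi-planar API; the fd, the resolution and the
NV12M pixel format are illustrative assumptions:

  /* Sketch: negotiate a 2-plane NV12M format with the multi-planar API.
   * 'fd' is assumed to be an already opened, MPLANE-capable video device. */
  #include <string.h>
  #include <sys/ioctl.h>
  #include <linux/videodev2.h>

  static int
  set_format_mplane (int fd)
  {
    struct v4l2_format fmt;

    memset (&fmt, 0, sizeof (fmt));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    /* struct v4l2_pix_format_mplane replaces struct v4l2_pix_format */
    fmt.fmt.pix_mp.width = 1280;
    fmt.fmt.pix_mp.height = 720;
    fmt.fmt.pix_mp.pixelformat = V4L2_PIX_FMT_NV12M;
    fmt.fmt.pix_mp.field = V4L2_FIELD_NONE;
    fmt.fmt.pix_mp.num_planes = 2;      /* Y plane + interleaved CbCr plane */

    return ioctl (fd, VIDIOC_S_FMT, &fmt);
  }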

Also, for gst-v4l2 one of the differences is that GstV4l2Meta now
holds one mem pointer for each mapped plane.
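
A minimal sketch (not taken from this commit) of why one pointer per
plane is needed: with the multi-planar API, VIDIOC_QUERYBUF reports a
separate mmap offset for every plane, so each plane is mapped on its
own. The fd and the buffer index are illustrative assumptions:

  /* Sketch: query one MMAP buffer and map each of its planes separately. */
  #include <string.h>
  #include <sys/ioctl.h>
  #include <sys/mman.h>
  #include <linux/videodev2.h>

  static int
  map_planes (int fd, unsigned int index, void *mem[VIDEO_MAX_PLANES])
  {
    struct v4l2_buffer buf;
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    unsigned int i;

    memset (&buf, 0, sizeof (buf));
    memset (planes, 0, sizeof (planes));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = index;
    buf.length = VIDEO_MAX_PLANES;  /* number of elements in the planes array */
    buf.m.planes = planes;

    if (ioctl (fd, VIDIOC_QUERYBUF, &buf) < 0)
      return -1;

    /* after QUERYBUF, buf.length holds the actual number of planes */
    for (i = 0; i < buf.length; i++) {
      mem[i] = mmap (NULL, planes[i].length, PROT_READ | PROT_WRITE,
          MAP_SHARED, fd, planes[i].m.mem_offset);
      if (mem[i] == MAP_FAILED)
        return -1;
    }
    return 0;
  }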

When not using multi-planar, this commit keeps the same code path
as before: the two cases live in two separate blocks selected by
V4L2_TYPE_IS_MULTIPLANAR (reproduced below for reference).
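
For reference, the macro that selects between the two code paths comes
from linux/videodev2.h and is defined roughly as:

  #define V4L2_TYPE_IS_MULTIPLANAR(type) \
    ((type) == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE || \
     (type) == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE)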

Fixes bug https://bugzilla.gnome.org/show_bug.cgi?id=712754
parent 0d55724a
......@@ -122,11 +122,24 @@ gst_v4l2_buffer_pool_free_buffer (GstBufferPool * bpool, GstBuffer * buffer)
g_assert (meta != NULL);
index = meta->vbuffer.index;
GST_LOG_OBJECT (pool,
"unmap buffer %p idx %d (data %p, len %u)", buffer,
index, meta->mem, meta->vbuffer.length);
v4l2_munmap (meta->mem, meta->vbuffer.length);
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
gint i = 0;
for (i = 0; i < meta->vbuffer.length; i++) {
GST_LOG_OBJECT (pool,
"unmap multiplanar buffer %p idx %d (data %p, len %u, plane %u)",
buffer, index, meta->mem[i], meta->vbuffer.m.planes[i].length, i);
v4l2_munmap (meta->mem[i], meta->vbuffer.m.planes[i].length);
}
} else {
GST_LOG_OBJECT (pool,
"unmap buffer %p idx %d (data %p, len %u)", buffer,
index, meta->mem[0], meta->vbuffer.length);
v4l2_munmap (meta->mem[0], meta->vbuffer.length);
}
pool->buffers[index] = NULL;
break;
}
......@@ -191,34 +204,84 @@ gst_v4l2_buffer_pool_alloc_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
GST_LOG_OBJECT (pool, "creating buffer %u, %p", index, newbuf);
/* prepare the buffer */
memset (&meta->vbuffer, 0x0, sizeof (struct v4l2_buffer));
meta->vbuffer.index = index;
meta->vbuffer.type = obj->type;
meta->vbuffer.memory = V4L2_MEMORY_MMAP;
/* prepare the planes of the buffer */
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
/* length is the number of elements in the
* vplanes array */
meta->vbuffer.length = obj->n_v4l2_planes;
meta->vbuffer.m.planes = meta->vplanes;
}
/* the buffer is prepared, now fill it in with meaningful values */
if (v4l2_ioctl (pool->video_fd, VIDIOC_QUERYBUF, &meta->vbuffer) < 0)
goto querybuf_failed;
GST_LOG_OBJECT (pool, " index: %u", meta->vbuffer.index);
GST_LOG_OBJECT (pool, " type: %d", meta->vbuffer.type);
GST_LOG_OBJECT (pool, " bytesused: %u", meta->vbuffer.bytesused);
GST_LOG_OBJECT (pool, " flags: %08x", meta->vbuffer.flags);
GST_LOG_OBJECT (pool, " field: %d", meta->vbuffer.field);
GST_LOG_OBJECT (pool, " memory: %d", meta->vbuffer.memory);
if (meta->vbuffer.memory == V4L2_MEMORY_MMAP)
GST_LOG_OBJECT (pool, " MMAP offset: %u", meta->vbuffer.m.offset);
GST_LOG_OBJECT (pool, " planes: %d", obj->n_v4l2_planes);
#ifndef GST_DISABLE_GST_DEBUG
if (meta->vbuffer.memory == V4L2_MEMORY_MMAP) {
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
gint i = 0;
for (i = 0; i < meta->vbuffer.length; i++) {
GST_LOG_OBJECT (pool, " bytesused: %u, plane: %u",
meta->vbuffer.m.planes[i].bytesused, i);
GST_LOG_OBJECT (pool, " MMAP offset: %u, plane: %u",
meta->vbuffer.m.planes[i].m.mem_offset, i);
}
} else {
GST_LOG_OBJECT (pool, " bytesused: %u", meta->vbuffer.bytesused);
GST_LOG_OBJECT (pool, " MMAP offset: %u", meta->vbuffer.m.offset);
}
}
#endif
if (obj->mode == GST_V4L2_IO_MMAP) {
meta->mem = v4l2_mmap (0, meta->vbuffer.length,
PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
meta->vbuffer.m.offset);
if (meta->mem == MAP_FAILED)
goto mmap_failed;
gst_buffer_append_memory (newbuf,
gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
meta->mem, meta->vbuffer.length, 0, meta->vbuffer.length, NULL,
NULL));
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
/* append one gstmemory for each plane */
gint i = 0;
for (i = 0; i < meta->vbuffer.length; i++) {
meta->mem[i] = v4l2_mmap (0, meta->vbuffer.m.planes[i].length,
PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
meta->vbuffer.m.planes[i].m.mem_offset);
if (meta->mem[i] == MAP_FAILED)
goto mmap_failed;
GST_LOG_OBJECT (pool, " buffer length %d for plane %d",
meta->vbuffer.m.planes[i].length, i);
gst_buffer_append_memory (newbuf,
gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
meta->mem[i], meta->vbuffer.m.planes[i].length,
meta->vbuffer.m.planes[i].data_offset,
meta->vbuffer.m.planes[i].length, NULL, NULL));
}
} else {
/* append one gstmemory that contains all the planes */
meta->mem[0] = v4l2_mmap (0, meta->vbuffer.length,
PROT_READ | PROT_WRITE, MAP_SHARED, pool->video_fd,
meta->vbuffer.m.offset);
if (meta->mem[0] == MAP_FAILED)
goto mmap_failed;
GST_LOG_OBJECT (pool, " buffer length %d", meta->vbuffer.length);
gst_buffer_append_memory (newbuf,
gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
meta->mem[0], meta->vbuffer.length, 0, meta->vbuffer.length,
NULL, NULL));
}
}
#if HAVE_DECL_V4L2_MEMORY_DMABUF
if (obj->mode == GST_V4L2_IO_DMABUF) {
......@@ -241,17 +304,24 @@ gst_v4l2_buffer_pool_alloc_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
if (pool->add_videometa && info->finfo) {
const GstVideoFormatInfo *finfo = info->finfo;
gsize offset[GST_VIDEO_MAX_PLANES];
gint width, height, n_planes, offs, i, stride[GST_VIDEO_MAX_PLANES];
gint width, height, n_gst_planes, offs, i, stride[GST_VIDEO_MAX_PLANES];
width = GST_VIDEO_INFO_WIDTH (info);
height = GST_VIDEO_INFO_HEIGHT (info);
n_planes = GST_VIDEO_INFO_N_PLANES (info);
GST_DEBUG_OBJECT (pool, "adding video meta, bytesperline %d",
obj->bytesperline);
/* n_gst_planes is the number of gst planes
* (RGB: 1, YUY2: 1, NV12: 2, I420: 3).
* It is greater than or equal to the number of v4l2 planes. */
n_gst_planes = GST_VIDEO_INFO_N_PLANES (info);
/* the basics are common between MPLANE mode and non-MPLANE mode,
* except for a special case inside the loop at the end
*/
offs = 0;
for (i = 0; i < n_planes; i++) {
for (i = 0; i < n_gst_planes; i++) {
GST_DEBUG_OBJECT (pool, "adding video meta, bytesperline %d",
obj->bytesperline[i]);
offset[i] = offs;
switch (info->finfo->format) {
......@@ -261,20 +331,31 @@ gst_v4l2_buffer_pool_alloc_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
case GST_VIDEO_FORMAT_NV24:
stride[i] =
(i == 0 ? 1 : 2) * GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo,
i, obj->bytesperline);
i, obj->bytesperline[i]);
break;
default:
stride[i] =
GST_VIDEO_FORMAT_INFO_SCALE_WIDTH (finfo, i,
obj->bytesperline);
obj->bytesperline[i]);
break;
}
offs +=
stride[i] * GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i, height);
/* when using multi-planar mode and there is one v4l2 plane for
* each gst plane (here meta->vbuffer.length is the number of v4l2 planes)
*/
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type) && meta->vbuffer.length > 1)
/* non-contiguous case: make sure that gst moves to the
* next plane (using the default gstvideometa.c::default_map).
* From the gst buffer point of view, the next plane starts
* 'length' bytes after the previous one. */
offs += meta->vplanes[i].length;
else
offs +=
stride[i] * GST_VIDEO_FORMAT_INFO_SCALE_HEIGHT (finfo, i,
height);
}
gst_buffer_add_video_meta_full (newbuf, GST_VIDEO_FRAME_FLAG_NONE,
GST_VIDEO_INFO_FORMAT (info), width, height, n_planes,
GST_VIDEO_INFO_FORMAT (info), width, height, n_gst_planes,
offset, stride);
}
break;
......@@ -360,17 +441,23 @@ gst_v4l2_buffer_pool_set_config (GstBufferPool * bpool, GstStructure * config)
if (!pool->add_videometa &&
GST_VIDEO_INFO_FORMAT (&obj->info) != GST_VIDEO_FORMAT_ENCODED) {
gint stride;
/* we don't have video metadata, and we are not dealing with raw video,
* see if the strides are compatible */
stride = GST_VIDEO_INFO_PLANE_STRIDE (&obj->info, 0);
GST_DEBUG_OBJECT (pool, "no videometadata, checking strides %d and %u",
stride, obj->bytesperline);
if (stride != obj->bytesperline)
goto missing_video_api;
/* in non MPLANE mode, there is only one bytesperline field */
gint nb_checked_planes =
V4L2_TYPE_IS_MULTIPLANAR (obj->
type) ? GST_VIDEO_INFO_N_PLANES (&obj->info) : 1;
gint stride = 0;
gint i = 0;
for (i = 0; i < nb_checked_planes; i++) {
/* we don't have video metadata, and we are dealing with raw video,
* see if the strides are compatible */
stride = GST_VIDEO_INFO_PLANE_STRIDE (&obj->info, i);
GST_DEBUG_OBJECT (pool, "no videometadata, checking strides %d and %u",
stride, obj->bytesperline[i]);
if (stride != obj->bytesperline[i])
goto missing_video_api;
}
}
/* parse the config and keep around */
......@@ -470,7 +557,7 @@ missing_video_api:
{
GST_ERROR_OBJECT (pool, "missing GstMetaVideo API in config, "
"default stride: %d, wanted stride %u",
GST_VIDEO_INFO_PLANE_STRIDE (&obj->info, 0), obj->bytesperline);
GST_VIDEO_INFO_PLANE_STRIDE (&obj->info, 0), obj->bytesperline[0]);
return FALSE;
}
wrong_config:
......@@ -542,7 +629,7 @@ gst_v4l2_buffer_pool_start (GstBufferPool * bpool)
/* we can start capturing now, we wait for the playback case until we queued
* the first buffer */
if (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE)
if (!V4L2_TYPE_IS_OUTPUT (obj->type))
if (!start_streaming (pool))
goto start_failed;
......@@ -673,6 +760,7 @@ static GstFlowReturn
gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool * pool, GstBuffer * buf)
{
GstV4l2Meta *meta;
GstV4l2Object *obj = NULL;
gint index;
meta = GST_V4L2_META_GET (buf);
......@@ -683,13 +771,29 @@ gst_v4l2_buffer_pool_qbuf (GstV4l2BufferPool * pool, GstBuffer * buf)
return GST_FLOW_OK;
}
obj = pool->obj;
index = meta->vbuffer.index;
/* this field is common to MPLANE and non-MPLANE */
meta->vbuffer.bytesused = gst_buffer_get_size (buf);
GST_LOG_OBJECT (pool,
"enqueue buffer %p, index:%d, queued:%d, flags:%08x mem:%p used:%d",
buf, index, pool->num_queued, meta->vbuffer.flags,
meta->mem, meta->vbuffer.bytesused);
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
gint i = 0;
for (i = 0; i < meta->vbuffer.length; i++) {
meta->vbuffer.m.planes[i].bytesused =
gst_buffer_get_sizes_range (buf, i, 1, NULL, NULL);
GST_LOG_OBJECT (pool,
"enqueue buffer %p, index:%d, queued:%d, flags:%08x mem:%p used:%d, plane:%d",
buf, index, pool->num_queued, meta->vbuffer.flags,
meta->mem[i], meta->vbuffer.m.planes[i].bytesused, i);
}
} else {
GST_LOG_OBJECT (pool,
"enqueue buffer %p, index:%d, queued:%d, flags:%08x mem:%p used:%d",
buf, index, pool->num_queued, meta->vbuffer.flags,
meta->mem[0], meta->vbuffer.bytesused);
}
if (pool->buffers[index] != NULL)
goto already_queued;
......@@ -723,13 +827,16 @@ gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool, GstBuffer ** buffer)
GstFlowReturn res;
GstBuffer *outbuf;
struct v4l2_buffer vbuffer;
struct v4l2_plane vplanes[GST_VIDEO_MAX_PLANES];
GstV4l2Object *obj = pool->obj;
GstClockTime timestamp;
GstV4l2Meta *meta;
gint i = 0;
if ((res = gst_v4l2_object_poll (obj)) != GST_FLOW_OK)
goto poll_error;
/* prepare the buffer */
memset (&vbuffer, 0x00, sizeof (vbuffer));
vbuffer.type = obj->type;
#if HAVE_DECL_V4L2_MEMORY_DMABUF
......@@ -739,6 +846,15 @@ gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool, GstBuffer ** buffer)
#endif
vbuffer.memory = V4L2_MEMORY_MMAP;
/* prepare the planes of the buffer */
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
/* length is the number of elements in the
* vplanes array */
vbuffer.length = obj->n_v4l2_planes;
vbuffer.m.planes = vplanes;
}
/* the buffer is prepared, now fill it in with meaningful values */
GST_LOG_OBJECT (pool, "doing DQBUF");
if (v4l2_ioctl (pool->video_fd, VIDIOC_DQBUF, &vbuffer) < 0)
goto error;
......@@ -759,11 +875,45 @@ gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool, GstBuffer ** buffer)
meta = GST_V4L2_META_GET (outbuf);
g_assert (meta != NULL);
GST_LOG_OBJECT (pool,
"dequeued buffer %p seq:%d (ix=%d), mem %p used %d, flags %08x, ts %"
GST_TIME_FORMAT ", pool-queued=%d, buffer=%p", outbuf, vbuffer.sequence,
vbuffer.index, meta->mem, vbuffer.bytesused, vbuffer.flags,
GST_TIME_ARGS (timestamp), pool->num_queued, outbuf);
/* The size can change at every frame, esp. with jpeg. The GstMemory
* inside the GstBuffer could have been changed by some other
* element. So update our meta */
if (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE
|| obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
/* this field is common to MPLANE and non-MPLANE */
meta->vbuffer.length = vbuffer.length;
meta->vbuffer.bytesused = vbuffer.bytesused;
if (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
for (i = 0; i < meta->vbuffer.length; i++) {
/* the following also updates meta->vbuffer.m.planes[i].length */
meta->vplanes[i].length = vbuffer.m.planes[i].length;
/* the following also updates meta->vbuffer.m.planes[i].bytesused */
meta->vplanes[i].bytesused = vbuffer.m.planes[i].bytesused;
/* the following also updates meta->vbuffer.m.planes[i].data_offset */
meta->vplanes[i].data_offset = vbuffer.m.planes[i].data_offset;
}
}
}
#ifndef GST_DISABLE_GST_DEBUG
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
for (i = 0; i < meta->vbuffer.length; i++) {
GST_LOG_OBJECT (pool,
"dequeued buffer %p seq:%d (ix=%d), mem %p used %d, plane=%d, flags %08x, ts %"
GST_TIME_FORMAT ", pool-queued=%d, buffer=%p", outbuf,
vbuffer.sequence, vbuffer.index, meta->mem[i],
meta->vbuffer.m.planes[i].bytesused, i, vbuffer.flags,
GST_TIME_ARGS (timestamp), pool->num_queued, outbuf);
}
} else {
GST_LOG_OBJECT (pool,
"dequeued buffer %p seq:%d (ix=%d), mem %p used %d, flags %08x, ts %"
GST_TIME_FORMAT ", pool-queued=%d, buffer=%p", outbuf, vbuffer.sequence,
vbuffer.index, meta->mem[0], vbuffer.bytesused, vbuffer.flags,
GST_TIME_ARGS (timestamp), pool->num_queued, outbuf);
}
#endif
/* set top/bottom field first if v4l2_buffer has the information */
if (vbuffer.field == V4L2_FIELD_INTERLACED_TB) {
......@@ -782,7 +932,16 @@ gst_v4l2_buffer_pool_dqbuf (GstV4l2BufferPool * pool, GstBuffer ** buffer)
gst_buffer_remove_all_memory (outbuf);
gst_buffer_append_memory (outbuf,
gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
meta->mem, vbuffer.length, 0, vbuffer.bytesused, NULL, NULL));
meta->mem[0], vbuffer.length, 0, vbuffer.bytesused, NULL, NULL));
} else if (obj->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
gst_buffer_remove_all_memory (outbuf);
for (i = 0; i < meta->vbuffer.length; i++) {
gst_buffer_append_memory (outbuf,
gst_memory_new_wrapped (GST_MEMORY_FLAG_NO_SHARE,
meta->mem[i], vbuffer.m.planes[i].length,
vbuffer.m.planes[i].data_offset, vbuffer.m.planes[i].bytesused,
NULL, NULL));
}
}
GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
......@@ -868,6 +1027,7 @@ gst_v4l2_buffer_pool_acquire_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
switch (obj->type) {
case V4L2_BUF_TYPE_VIDEO_CAPTURE:
case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
/* capture, This function should return a buffer with new captured data */
switch (obj->mode) {
case GST_V4L2_IO_RW:
......@@ -919,6 +1079,7 @@ gst_v4l2_buffer_pool_acquire_buffer (GstBufferPool * bpool, GstBuffer ** buffer,
break;
case V4L2_BUF_TYPE_VIDEO_OUTPUT:
case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
/* playback, This function should return an empty buffer */
switch (obj->mode) {
case GST_V4L2_IO_RW:
......@@ -967,6 +1128,7 @@ gst_v4l2_buffer_pool_release_buffer (GstBufferPool * bpool, GstBuffer * buffer)
switch (obj->type) {
case V4L2_BUF_TYPE_VIDEO_CAPTURE:
case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
/* capture, put the buffer back in the queue so that we can refill it
* later. */
switch (obj->mode) {
......@@ -989,6 +1151,7 @@ gst_v4l2_buffer_pool_release_buffer (GstBufferPool * bpool, GstBuffer * buffer)
break;
case V4L2_BUF_TYPE_VIDEO_OUTPUT:
case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
switch (obj->mode) {
case GST_V4L2_IO_RW:
/* release back in the pool */
......@@ -1010,7 +1173,24 @@ gst_v4l2_buffer_pool_release_buffer (GstBufferPool * bpool, GstBuffer * buffer)
index);
/* reset to the full length, in case it was changed */
gst_buffer_resize (buffer, 0, meta->vbuffer.length);
if (V4L2_TYPE_IS_MULTIPLANAR (obj->type)) {
gint i = 0;
gint total_length = 0;
for (i = 0; i < meta->vbuffer.length; i++)
total_length += meta->vbuffer.m.planes[i].length;
if (total_length != gst_buffer_get_size (buffer)) {
/* FIXME: if the lengths have actually changed, it may be necessary
* to restore the sizes of the individual memories and
* re-add them */
GST_WARNING_OBJECT (pool,
"lengths changed, more work required");
}
gst_buffer_resize (buffer, 0, total_length);
} else {
gst_buffer_resize (buffer, 0, meta->vbuffer.length);
}
/* playback, put the buffer back in the queue to refill later. */
GST_BUFFER_POOL_CLASS (parent_class)->release_buffer (bpool,
......@@ -1201,6 +1381,7 @@ gst_v4l2_buffer_pool_process (GstV4l2BufferPool * pool, GstBuffer * buf)
switch (obj->type) {
case V4L2_BUF_TYPE_VIDEO_CAPTURE:
case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
/* capture */
switch (obj->mode) {
case GST_V4L2_IO_RW:
......@@ -1237,6 +1418,7 @@ gst_v4l2_buffer_pool_process (GstV4l2BufferPool * pool, GstBuffer * buf)
break;
case V4L2_BUF_TYPE_VIDEO_OUTPUT:
case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
/* playback */
switch (obj->mode) {
case GST_V4L2_IO_RW:
......
......@@ -74,7 +74,17 @@ struct _GstV4l2BufferPoolClass
struct _GstV4l2Meta {
GstMeta meta;
gpointer mem;
/* VIDEO_MAX_PLANES is defined as 8 in videodev2.h
* whereas GST_VIDEO_MAX_PLANES is defined as 4 in
* video-format.h, so let's use the minimum */
/* only used in the GST_V4L2_IO_MMAP case.
* it contains the address at which the mapping
* was placed for each v4l2 plane */
gpointer mem[GST_VIDEO_MAX_PLANES];
/* plane info for multi-planar buffers */
struct v4l2_plane vplanes[GST_VIDEO_MAX_PLANES];
/* video buffer info */
struct v4l2_buffer vbuffer;
};
......
......@@ -55,6 +55,13 @@
#define V4L2_FIELD_INTERLACED_BT 9
#endif
#ifndef V4L2_PIX_FMT_NV12M
#define V4L2_PIX_FMT_NV12M GST_MAKE_FOURCC ('N', 'M', '1', '2')
#endif
#ifndef V4L2_PIX_FMT_NV21M
#define V4L2_PIX_FMT_NV21M GST_MAKE_FOURCC ('N', 'M', '2', '1')
#endif
GST_DEBUG_CATEGORY_EXTERN (v4l2_debug);
GST_DEBUG_CATEGORY_EXTERN (GST_CAT_PERFORMANCE);
#define GST_CAT_DEFAULT v4l2_debug
......@@ -561,6 +568,28 @@ gst_v4l2_object_new (GstElement * element,
v4l2object->keep_aspect = TRUE;
v4l2object->n_v4l2_planes = 0;
/*
* this boolean only applies in v4l2-MPLANE mode.
* TRUE: prefer several (non-contiguous) v4l2 planes.
* For example, if the device supports both NV12 and NV12M
* in MPLANE mode, it will prefer NV12M.
* FALSE: prefer one v4l2 plane (which contains all gst planes,
* as if it was working in non-v4l2-MPLANE mode).
* For example, if the device supports both NV12 and NV12M
* in MPLANE mode, it will prefer NV12.
*
* this boolean is also used to manage the case where the
* device only supports MPLANE mode but does not support both
* NV12 and NV12M. In that case we first try the preferred
* config and fall back to the other one if it fails.
* For example, in MPLANE mode, if the device has NV12 but not
* NV12M, then even with prefered_non_contiguous set to TRUE it
* will try NV12 as well.
*/
v4l2object->prefered_non_contiguous = TRUE;
return v4l2object;
}
......@@ -767,6 +796,12 @@ gst_v4l2_object_get_property_helper (GstV4l2Object * v4l2object,
V4L2_CAP_VIDEO_OVERLAY |
V4L2_CAP_VBI_CAPTURE |
V4L2_CAP_VBI_OUTPUT | V4L2_CAP_TUNER | V4L2_CAP_AUDIO);
if (v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE)
flags |= V4L2_CAP_VIDEO_CAPTURE;
if (v4l2object->vcap.capabilities & V4L2_CAP_VIDEO_OUTPUT_MPLANE)
flags |= V4L2_CAP_VIDEO_OUTPUT;
}
g_value_set_flags (value, flags);
break;
......@@ -936,7 +971,9 @@ static const GstV4L2FormatDesc gst_v4l2_formats[] = {
/* two planes -- one Y, one Cr + Cb interleaved */
{V4L2_PIX_FMT_NV12, TRUE},
{V4L2_PIX_FMT_NV12M, TRUE},
{V4L2_PIX_FMT_NV21, TRUE},
{V4L2_PIX_FMT_NV21M, TRUE},
/* The following formats are not defined in the V4L2 specification */
{V4L2_PIX_FMT_YUV410, TRUE},
......@@ -1091,7 +1128,9 @@ gst_v4l2_object_format_get_rank (const struct v4l2_fmtdesc *fmt)
break;
case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
case V4L2_PIX_FMT_NV12M: /* Same as NV12 */
case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
case V4L2_PIX_FMT_NV21M: /* Same as NV21 */
case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
rank = YUV_ODD_BASE_RANK;
......@@ -1192,7 +1231,8 @@ format_cmp_func (gconstpointer a, gconstpointer b)
* return value: TRUE on success, FALSE on error
******************************************************/
static gboolean
gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object)
gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object,
enum v4l2_buf_type type)
{
gint n;
struct v4l2_fmtdesc *format;
......@@ -1204,7 +1244,7 @@ gst_v4l2_object_fill_format_list (GstV4l2Object * v4l2object)
format = g_new0 (struct v4l2_fmtdesc, 1);
format->index = n;
format->type = v4l2object->type;
format->type = type;
if (v4l2_ioctl (v4l2object->video_fd, VIDIOC_ENUM_FMT, format) < 0) {
if (errno == EINVAL) {
......@@ -1258,14 +1298,38 @@ failed:
}
/*
* Get the list of supported capture formats, a list of
* <code>struct v4l2_fmtdesc</code>.
*/
* Get the list of supported capture formats, a list of
* <code>struct v4l2_fmtdesc</code>.
*/
static GSList *
gst_v4l2_object_get_format_list (GstV4l2Object * v4l2object)
{
if (!v4l2object->formats)
gst_v4l2_object_fill_format_list (v4l2object);
if (!v4l2object->formats) {
/* check usual way */
gst_v4l2_object_fill_format_list (v4l2object, v4l2object->type);
/* if our driver supports multi-planar
* and formats are still empty, then we can work around a driver bug
* by also looking up formats as if our device did not support
* multi-planar */
if (!v4l2object->formats) {
switch (v4l2object->type) {
case V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE:
gst_v4l2_object_fill_format_list (v4l2object,
V4L2_BUF_TYPE_VIDEO_CAPTURE);
break;
case V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE:
gst_v4l2_object_fill_format_list (v4l2object,
V4L2_BUF_TYPE_VIDEO_OUTPUT);
break;
default:
break;
}
}
}
return v4l2object->formats;
}
......@@ -1318,7 +1382,9 @@ gst_v4l2_object_v4l2fourcc_to_structure (guint32 fourcc)