Diffstat (limited to 'sys/v4l2/v4l2src_calls.c')
-rw-r--r--  sys/v4l2/v4l2src_calls.c  612
1 files changed, 244 insertions, 368 deletions
diff --git a/sys/v4l2/v4l2src_calls.c b/sys/v4l2/v4l2src_calls.c
index 576b0c0b..3a55a231 100644
--- a/sys/v4l2/v4l2src_calls.c
+++ b/sys/v4l2/v4l2src_calls.c
@@ -31,26 +31,24 @@
#include <errno.h>
#include "v4l2src_calls.h"
#include <sys/time.h>
+#include <unistd.h>
+
+#define GST_CAT_DEFAULT v4l2src_debug
+
+/* lalala... */
+#define GST_V4L2_SET_ACTIVE(element) (element)->buffer = GINT_TO_POINTER (-1)
+#define GST_V4L2_SET_INACTIVE(element) (element)->buffer = NULL
#define DEBUG(format, args...) \
- GST_DEBUG_OBJECT (\
- GST_ELEMENT(v4l2src), \
+ GST_CAT_DEBUG_OBJECT (\
+ v4l2src_debug, v4l2src, \
"V4L2SRC: " format, ##args)
-#define MIN_BUFFERS_QUEUED 2
-
/* On some systems MAP_FAILED seems to be missing */
#ifndef MAP_FAILED
#define MAP_FAILED ( (caddr_t) -1 )
#endif
-enum {
- QUEUE_STATE_ERROR = -1,
- QUEUE_STATE_READY_FOR_QUEUE,
- QUEUE_STATE_QUEUED,
- QUEUE_STATE_SYNCED,
-};
-
/******************************************************
* gst_v4l2src_fill_format_list():
* create list of supported capture formats
@@ -60,54 +58,49 @@ enum {
gboolean
gst_v4l2src_fill_format_list (GstV4l2Src *v4l2src)
{
- gint n;
-
- DEBUG("getting src format enumerations");
-
- /* format enumeration */
- for (n=0;;n++) {
- struct v4l2_fmtdesc format, *fmtptr;
- format.index = n;
- format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_ENUM_FMT, &format) < 0) {
- if (errno == EINVAL)
- break; /* end of enumeration */
- else {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Failed to get no. %d in pixelformat enumeration for %s: %s",
- n, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
- return FALSE;
- }
- }
- fmtptr = g_malloc(sizeof(format));
- memcpy(fmtptr, &format, sizeof(format));
- v4l2src->formats = g_list_append(v4l2src->formats, fmtptr);
-
- v4l2src->format_list = g_list_append(v4l2src->format_list, fmtptr->description);
- }
-
- return TRUE;
+ gint n;
+ struct v4l2_fmtdesc *format;
+
+ GST_DEBUG_OBJECT (v4l2src, "getting src format enumerations");
+
+ /* format enumeration */
+ for (n=0;;n++) {
+ format = g_new (struct v4l2_fmtdesc, 1);
+ format->index = n;
+ format->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_ENUM_FMT, format) < 0) {
+ if (errno == EINVAL) {
+ break; /* end of enumeration */
+ } else {
+ gst_element_error(GST_ELEMENT(v4l2src),
+ "Failed to get no. %d in pixelformat enumeration for %s: %s",
+ n, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
+ g_free (format);
+ return FALSE;
+ }
+ }
+ GST_LOG_OBJECT (v4l2src, "got format"GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (format->pixelformat));
+ v4l2src->formats = g_slist_prepend (v4l2src->formats, format);
+ }
+
+ return TRUE;
}
/******************************************************
- * gst_v4l2src_empty_format_list():
+ * gst_v4l2src_clear_format_list():
* free list of supported capture formats
* return value: TRUE on success, FALSE on error
******************************************************/
gboolean
-gst_v4l2src_empty_format_list (GstV4l2Src *v4l2src)
+gst_v4l2src_clear_format_list (GstV4l2Src *v4l2src)
{
- while (g_list_length(v4l2src->formats) > 0) {
- gpointer data = g_list_nth_data(v4l2src->formats, 0);
- v4l2src->formats = g_list_remove(v4l2src->formats, data);
- g_free(data);
- }
- g_list_free(v4l2src->format_list);
- v4l2src->format_list = NULL;
+ g_slist_foreach (v4l2src->formats, (GFunc) g_free, NULL);
+ g_slist_free (v4l2src->formats);
- return TRUE;
+ return TRUE;
}
@@ -117,65 +110,47 @@ gst_v4l2src_empty_format_list (GstV4l2Src *v4l2src)
* return value: TRUE on success, FALSE on error
******************************************************/
-static gboolean
+gboolean
gst_v4l2src_queue_frame (GstV4l2Src *v4l2src,
- gint num)
+ guint i)
{
- DEBUG("queueing frame %d", num);
+ GST_LOG_OBJECT (v4l2src, "queueing frame %u", i);
- if (v4l2src->frame_queue_state[num] != QUEUE_STATE_READY_FOR_QUEUE) {
- return FALSE;
- }
+ if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_QBUF, &v4l2src->pool->buffers[i].buffer) < 0) {
+ gst_element_error(GST_ELEMENT(v4l2src), "Error queueing buffer %u on device %s: %s",
+ i, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
+ return FALSE;
+ }
- v4l2src->bufsettings.index = num;
- if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd,
- VIDIOC_QBUF, &v4l2src->bufsettings) < 0) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Error queueing buffer %d on device %s: %s",
- num, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
- return FALSE;
- }
-
- v4l2src->frame_queue_state[num] = QUEUE_STATE_QUEUED;
- v4l2src->num_queued++;
-
- return TRUE;
+ return TRUE;
}
/******************************************************
- * gst_v4l2src_sync_next_frame():
- * sync on a frame for capturing
+ * gst_v4l2src_grab_frame ():
+ * grab a frame for capturing
* return value: TRUE on success, FALSE on error
******************************************************/
-static gboolean
-gst_v4l2src_sync_next_frame (GstV4l2Src *v4l2src,
- gint *num)
+gint
+gst_v4l2src_grab_frame (GstV4l2Src *v4l2src)
{
- if (v4l2src->num_queued <= 0) {
- return FALSE;
- }
-
- while (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd,
- VIDIOC_DQBUF, &v4l2src->bufsettings) < 0) {
- /* if the sync() got interrupted, we can retry */
- if (errno != EINTR) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Error syncing on a buffer on device %s: %s",
- GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
- return FALSE;
- }
- DEBUG("Sync got interrupted");
- }
-
- DEBUG("synced on frame %d", v4l2src->bufsettings.index);
- *num = v4l2src->bufsettings.index;
-
- v4l2src->frame_queue_state[*num] = QUEUE_STATE_SYNCED;
- v4l2src->num_queued--;
-
- return TRUE;
+ struct v4l2_buffer buffer;
+
+ buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ while (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_DQBUF, &buffer) < 0) {
+ /* if the sync() got interrupted, we can retry */
+ if (errno != EINTR) {
+ gst_element_error(GST_ELEMENT(v4l2src), "Error syncing on a buffer on device %s: %s",
+ GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
+ return -1;
+ }
+ GST_DEBUG_OBJECT (v4l2src, "grab got interrupted");
+ }
+
+ GST_LOG_OBJECT (v4l2src, "grabbed frame %d", buffer.index);
+
+ return buffer.index;
}
@@ -226,6 +201,7 @@ gst_v4l2src_set_capture (GstV4l2Src *v4l2src,
v4l2src->format.fmt.pix.width = width;
v4l2src->format.fmt.pix.height = height;
v4l2src->format.fmt.pix.pixelformat = fmt->pixelformat;
+ v4l2src->format.fmt.pix.field = V4L2_FIELD_INTERLACED;
v4l2src->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_S_FMT, &v4l2src->format) < 0) {
@@ -250,88 +226,81 @@ gst_v4l2src_set_capture (GstV4l2Src *v4l2src,
gboolean
gst_v4l2src_capture_init (GstV4l2Src *v4l2src)
{
- gint n;
- gchar *desc = NULL;
- struct v4l2_buffer buf;
-
- DEBUG("initting the capture system");
-
- GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
- GST_V4L2_CHECK_NOT_ACTIVE(GST_V4L2ELEMENT(v4l2src));
-
- /* request buffer info */
- if (v4l2src->breq.count < MIN_BUFFERS_QUEUED) {
- v4l2src->breq.count = MIN_BUFFERS_QUEUED;
- }
- v4l2src->breq.type = v4l2src->format.type;
- v4l2src->breq.memory = V4L2_MEMORY_MMAP;
- if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd,
- VIDIOC_REQBUFS, &v4l2src->breq) < 0) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Error requesting buffers (%d) for %s: %s",
- v4l2src->breq.count, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
- return FALSE;
- }
-
- if (v4l2src->breq.count < MIN_BUFFERS_QUEUED) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Too little buffers. We got %d, we want at least %d",
- v4l2src->breq.count, MIN_BUFFERS_QUEUED);
- return FALSE;
- }
- v4l2src->bufsettings.type = v4l2src->format.type;
-
- for (n=0;n<g_list_length(v4l2src->formats);n++) {
- struct v4l2_fmtdesc *fmt = (struct v4l2_fmtdesc *) g_list_nth_data(v4l2src->formats, n);
- if (v4l2src->format.fmt.pix.pixelformat == fmt->pixelformat) {
- desc = fmt->description;
- break;
- }
- }
- gst_info("Got %d buffers (%s) of size %d KB\n",
- v4l2src->breq.count, desc, v4l2src->format.fmt.pix.sizeimage/1024);
-
- /* keep track of queued buffers */
- v4l2src->frame_queue_state = (gint8 *)
- g_malloc(sizeof(gint8) * v4l2src->breq.count);
-
- /* track how often to use each frame */
- v4l2src->use_num_times = (gint *)
- g_malloc(sizeof(gint) * v4l2src->breq.count);
-
- /* lock for the frame_state */
- v4l2src->mutex_queue_state = g_mutex_new();
- v4l2src->cond_queue_state = g_cond_new();
-
- /* Map the buffers */
- GST_V4L2ELEMENT(v4l2src)->buffer = (guint8 **)
- g_malloc(sizeof(guint8 *) * v4l2src->breq.count);
- for (n=0;n<v4l2src->breq.count;n++) {
- buf.index = n;
- buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd,
- VIDIOC_QUERYBUF, &buf) < 0) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Failed to get buffer (%d) properties: %s",
- n, g_strerror(errno));
- gst_v4l2src_capture_deinit(v4l2src);
- return FALSE;
- }
- GST_V4L2ELEMENT(v4l2src)->buffer[n] = mmap(0,
- buf.length, PROT_READ|PROT_WRITE, MAP_SHARED,
- GST_V4L2ELEMENT(v4l2src)->video_fd, buf.m.offset);
- if (GST_V4L2ELEMENT(v4l2src)->buffer[n] == MAP_FAILED) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Error mapping video buffer (%d) on device %s: %s",
- n, GST_V4L2ELEMENT(v4l2src)->device,
- g_strerror(errno));
- GST_V4L2ELEMENT(v4l2src)->buffer[n] = NULL;
- gst_v4l2src_capture_deinit(v4l2src);
- return FALSE;
- }
- }
-
- return TRUE;
+ gint n;
+ guint buffers;
+
+ GST_DEBUG_OBJECT (v4l2src, "initting the capture system");
+
+ GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
+ GST_V4L2_CHECK_NOT_ACTIVE(GST_V4L2ELEMENT(v4l2src));
+
+ /* request buffer info */
+ buffers = v4l2src->breq.count;
+ if (v4l2src->breq.count > GST_V4L2_MAX_BUFFERS) {
+ v4l2src->breq.count = GST_V4L2_MAX_BUFFERS;
+ }
+ if (v4l2src->breq.count < GST_V4L2_MIN_BUFFERS) {
+ v4l2src->breq.count = GST_V4L2_MIN_BUFFERS;
+ }
+ v4l2src->breq.type = v4l2src->format.type;
+ v4l2src->breq.memory = V4L2_MEMORY_MMAP;
+ if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_REQBUFS, &v4l2src->breq) < 0) {
+ gst_element_error(GST_ELEMENT(v4l2src), "Error requesting buffers (%d) for %s: %s",
+ v4l2src->breq.count, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
+ return FALSE;
+ }
+
+ if (v4l2src->breq.count < GST_V4L2_MIN_BUFFERS) {
+ gst_element_error(GST_ELEMENT(v4l2src), "Too little buffers. We got %d, we want at least %d",
+ v4l2src->breq.count, GST_V4L2_MIN_BUFFERS);
+ v4l2src->breq.count = buffers;
+ return FALSE;
+ }
+ if (v4l2src->breq.count != buffers)
+ g_object_notify (G_OBJECT (v4l2src), "num_buffers");
+
+ GST_INFO_OBJECT (v4l2src, "Got %d buffers ("GST_FOURCC_FORMAT") of size %d KB\n",
+ v4l2src->breq.count, GST_FOURCC_ARGS (v4l2src->format.fmt.pix.pixelformat),
+ v4l2src->format.fmt.pix.sizeimage / 1024);
+
+ /* Map the buffers */
+ v4l2src->pool = g_new (GstV4l2BufferPool, 1);
+ gst_atomic_int_init (&v4l2src->pool->refcount, 1);
+ v4l2src->pool->video_fd = GST_V4L2ELEMENT (v4l2src)->video_fd;
+ v4l2src->pool->buffer_count = v4l2src->breq.count;
+ v4l2src->pool->buffers = g_new0 (GstV4l2Buffer, v4l2src->breq.count);
+
+ for (n = 0; n < v4l2src->breq.count; n++) {
+ GstV4l2Buffer *buffer = &v4l2src->pool->buffers[n];
+
+ gst_atomic_int_init (&buffer->refcount, 1);
+ buffer->pool = v4l2src->pool;
+ buffer->buffer.index = n;
+ buffer->buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_QUERYBUF, &buffer->buffer) < 0) {
+ gst_element_error(GST_ELEMENT(v4l2src), "Failed to get buffer (%d) properties: %s",
+ n, g_strerror(errno));
+ gst_v4l2src_capture_deinit(v4l2src);
+ return FALSE;
+ }
+ buffer->start = mmap (0, buffer->buffer.length, PROT_READ|PROT_WRITE, MAP_SHARED,
+ GST_V4L2ELEMENT(v4l2src)->video_fd, buffer->buffer.m.offset);
+ if (buffer->start == MAP_FAILED) {
+ gst_element_error(GST_ELEMENT(v4l2src), "Error mapping video buffer (%d) on device %s: %s",
+ n, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
+ buffer->start = 0;
+ gst_v4l2src_capture_deinit (v4l2src);
+ return FALSE;
+ }
+ buffer->length = buffer->buffer.length;
+ if (!gst_v4l2src_queue_frame(v4l2src, n)) {
+ gst_v4l2src_capture_deinit (v4l2src);
+ return FALSE;
+ }
+ }
+
+ GST_V4L2_SET_ACTIVE(GST_V4L2ELEMENT (v4l2src));
+ return TRUE;
}
@@ -344,147 +313,25 @@ gst_v4l2src_capture_init (GstV4l2Src *v4l2src)
gboolean
gst_v4l2src_capture_start (GstV4l2Src *v4l2src)
{
- gint n;
-
- DEBUG("starting the capturing");
- GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
- GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
-
- g_mutex_lock(v4l2src->mutex_queue_state);
-
- v4l2src->quit = FALSE;
- v4l2src->num_queued = 0;
- v4l2src->queue_frame = 0;
-
- /* set all buffers ready to queue , this starts streaming capture */
- for (n=0;n<v4l2src->breq.count;n++) {
- v4l2src->frame_queue_state[n] = QUEUE_STATE_READY_FOR_QUEUE;
- if (!gst_v4l2src_queue_frame(v4l2src, n)) {
- g_mutex_unlock(v4l2src->mutex_queue_state);
- gst_v4l2src_capture_stop(v4l2src);
- return FALSE;
- }
- }
-
- n = 1;
- if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_STREAMON, &n) < 0) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Error starting streaming capture for %s: %s",
- GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
- return FALSE;
- }
-
- g_mutex_unlock(v4l2src->mutex_queue_state);
-
- return TRUE;
-}
-
-
-/******************************************************
- * gst_v4l2src_grab_frame():
- * capture one frame during streaming capture
- * return value: TRUE on success, FALSE on error
- ******************************************************/
-
-gboolean
-gst_v4l2src_grab_frame (GstV4l2Src *v4l2src,
- gint *num)
-{
- DEBUG("syncing on the next frame");
+ gint type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
- GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
-
- g_mutex_lock(v4l2src->mutex_queue_state);
-
- /* do we have enough frames? */
- while (v4l2src->num_queued < MIN_BUFFERS_QUEUED ||
- v4l2src->frame_queue_state[v4l2src->queue_frame] ==
- QUEUE_STATE_READY_FOR_QUEUE) {
- while (v4l2src->frame_queue_state[v4l2src->queue_frame] !=
- QUEUE_STATE_READY_FOR_QUEUE &&
- !v4l2src->quit) {
- GST_DEBUG (
- "Waiting for frames to become available (%d < %d)",
- v4l2src->num_queued, MIN_BUFFERS_QUEUED);
- g_cond_wait(v4l2src->cond_queue_state,
- v4l2src->mutex_queue_state);
- }
- if (v4l2src->quit) {
- g_mutex_unlock(v4l2src->mutex_queue_state);
- return TRUE; /* it won't get through anyway */
- }
- if (!gst_v4l2src_queue_frame(v4l2src, v4l2src->queue_frame)) {
- g_mutex_unlock(v4l2src->mutex_queue_state);
- return FALSE;
- }
- v4l2src->queue_frame = (v4l2src->queue_frame + 1) % v4l2src->breq.count;
- }
+ GST_DEBUG_OBJECT (v4l2src, "starting the capturing");
+
+ GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
+ if (!GST_V4L2_IS_ACTIVE (GST_V4L2ELEMENT(v4l2src))) {
+ gst_pad_renegotiate (v4l2src->srcpad);
+ }
+ GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
- /* syncing on the buffer grabs it */
- if (!gst_v4l2src_sync_next_frame(v4l2src, num)) {
- g_mutex_unlock(v4l2src->mutex_queue_state);
- return FALSE;
- }
+ v4l2src->quit = FALSE;
- g_mutex_unlock(v4l2src->mutex_queue_state);
+ if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_STREAMON, &type) < 0) {
+ gst_element_error(GST_ELEMENT(v4l2src), "Error starting streaming capture for %s: %s",
+ GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
+ return FALSE;
+ }
- return TRUE;
-}
-
-
-/******************************************************
- *
- ******************************************************/
-
-guint8 *
-gst_v4l2src_get_buffer (GstV4l2Src *v4l2src,
- gint num)
-{
- if (!GST_V4L2_IS_ACTIVE(GST_V4L2ELEMENT(v4l2src)) ||
- !GST_V4L2_IS_OPEN(GST_V4L2ELEMENT(v4l2src)))
- return NULL;
-
- if (num < 0 || num >= v4l2src->breq.count)
- return NULL;
-
- return GST_V4L2ELEMENT(v4l2src)->buffer[num];
-}
-
-
-/******************************************************
- * gst_v4l2src_requeue_frame():
- * re-queue a frame after we're done with the buffer
- * return value: TRUE on success, FALSE on error
- ******************************************************/
-
-gboolean
-gst_v4l2src_requeue_frame (GstV4l2Src *v4l2src,
- gint num)
-{
- DEBUG("requeueing frame %d", num);
- GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
- GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
-
- /* mark frame as 'ready to requeue' */
- g_mutex_lock(v4l2src->mutex_queue_state);
-
- if (v4l2src->frame_queue_state[num] != QUEUE_STATE_SYNCED) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Invalid state %d (expected %d), can't requeue",
- v4l2src->frame_queue_state[num],
- QUEUE_STATE_SYNCED);
- return FALSE;
- }
-
- v4l2src->frame_queue_state[num] = QUEUE_STATE_READY_FOR_QUEUE;
-
- /* let an optional wait know */
- g_cond_broadcast(v4l2src->cond_queue_state);
-
- g_mutex_unlock(v4l2src->mutex_queue_state);
-
- return TRUE;
+ return TRUE;
}
@@ -497,37 +344,60 @@ gst_v4l2src_requeue_frame (GstV4l2Src *v4l2src,
gboolean
gst_v4l2src_capture_stop (GstV4l2Src *v4l2src)
{
- gint n = 0;
-
- DEBUG("stopping capturing");
- GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
- GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
-
- g_mutex_lock(v4l2src->mutex_queue_state);
-
- /* we actually need to sync on all queued buffers but not
- * on the non-queued ones */
- if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_STREAMOFF, &n) < 0) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Error stopping streaming capture for %s: %s",
- GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
- return FALSE;
- }
-
- /* make an optional pending wait stop */
- v4l2src->quit = TRUE;
- g_cond_broadcast(v4l2src->cond_queue_state);
+ gint type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ GST_DEBUG_OBJECT (v4l2src, "stopping capturing");
+ GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
+ GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
+
+ /* we actually need to sync on all queued buffers but not
+ * on the non-queued ones */
+ if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_STREAMOFF, &type) < 0) {
+ gst_element_error (GST_ELEMENT(v4l2src), "Error stopping streaming capture for %s: %s",
+ GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
+ return FALSE;
+ }
+
+ /* make an optional pending wait stop */
+ v4l2src->quit = TRUE;
- /* sync on remaining frames */
- while (v4l2src->num_queued > 0) {
- gst_v4l2src_sync_next_frame(v4l2src, &n);
- }
-
- g_mutex_unlock(v4l2src->mutex_queue_state);
+ return TRUE;
+}
- return TRUE;
+static void
+gst_v4l2src_buffer_pool_free (GstV4l2BufferPool *pool, gboolean do_close)
+{
+ guint i;
+
+ for (i = 0; i < pool->buffer_count; i++) {
+ gst_atomic_int_destroy (&pool->buffers[i].refcount);
+ munmap (pool->buffers[i].start, pool->buffers[i].length);
+ }
+ g_free (pool->buffers);
+ gst_atomic_int_destroy (&pool->refcount);
+ if (do_close)
+ close (pool->video_fd);
+ g_free (pool);
}
+void
+gst_v4l2src_free_buffer (GstBuffer *buffer)
+{
+ GstV4l2Buffer *buf = (GstV4l2Buffer *) GST_BUFFER_PRIVATE (buffer);
+
+ GST_LOG ("freeing buffer %p (nr. %d)", buffer, buf->buffer.index);
+
+ if (!gst_atomic_int_dec_and_test (&buf->refcount)) {
+ /* we're still in use, add to queue again
+ note: this might fail because the device is already stopped (race) */
+ if (ioctl(buf->pool->video_fd, VIDIOC_QBUF, &buf->buffer) < 0)
+ GST_INFO ("readding to queue failed, assuming video device is stopped");
+ }
+ if (gst_atomic_int_dec_and_test (&buf->pool->refcount)) {
+ /* we're last thing that used all this */
+ gst_v4l2src_buffer_pool_free (buf->pool, TRUE);
+ }
+}
/******************************************************
* gst_v4l2src_capture_deinit():
@@ -538,31 +408,32 @@ gst_v4l2src_capture_stop (GstV4l2Src *v4l2src)
gboolean
gst_v4l2src_capture_deinit (GstV4l2Src *v4l2src)
{
- int n;
-
- DEBUG("deinitting capture system");
- GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
- GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
-
- /* unmap the buffer */
- for (n=0;n<v4l2src->breq.count;n++) {
- if (!GST_V4L2ELEMENT(v4l2src)->buffer[n]) {
- break;
- }
- munmap(GST_V4L2ELEMENT(v4l2src)->buffer[n],
- v4l2src->format.fmt.pix.sizeimage);
- GST_V4L2ELEMENT(v4l2src)->buffer[n] = NULL;
- }
-
- /* free buffer tracker */
- g_free(GST_V4L2ELEMENT(v4l2src)->buffer);
- GST_V4L2ELEMENT(v4l2src)->buffer = NULL;
- g_mutex_free(v4l2src->mutex_queue_state);
- g_cond_free(v4l2src->cond_queue_state);
- g_free(v4l2src->frame_queue_state);
- g_free(v4l2src->use_num_times);
-
- return TRUE;
+ gint i, dequeue = 0;
+
+ GST_DEBUG_OBJECT (v4l2src, "deinitting capture system");
+
+ GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
+ GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
+
+ /* free the buffers */
+ for (i = 0; i < v4l2src->breq.count; i++) {
+ if (gst_atomic_int_dec_and_test (&v4l2src->pool->buffers[i].refcount))
+ dequeue++;
+ }
+ for (i = 0; i < dequeue; i++) {
+ struct v4l2_buffer buffer;
+ buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_DQBUF, &buffer) < 0)
+ GST_WARNING_OBJECT (v4l2src, "Could not dequeue buffer on uninitialization");
+ }
+ if (gst_atomic_int_dec_and_test (&v4l2src->pool->refcount)) {
+ /* we're last thing that used all this */
+ gst_v4l2src_buffer_pool_free (v4l2src->pool, FALSE);
+ }
+ v4l2src->pool = NULL;
+
+ GST_V4L2_SET_INACTIVE (GST_V4L2ELEMENT (v4l2src));
+ return TRUE;
}
@@ -578,13 +449,16 @@ gst_v4l2src_get_size_limits (GstV4l2Src *v4l2src,
{
struct v4l2_format fmt;
+ GST_LOG_OBJECT (v4l2src, "getting size limits with format " GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (format->pixelformat));
+
/* get size delimiters */
memset(&fmt, 0, sizeof(fmt));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = 0;
fmt.fmt.pix.height = 0;
fmt.fmt.pix.pixelformat = format->pixelformat;
- fmt.fmt.pix.field = V4L2_FIELD_ANY;
+ fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd,
VIDIOC_TRY_FMT, &fmt) < 0) {
return FALSE;
@@ -594,9 +468,10 @@ gst_v4l2src_get_size_limits (GstV4l2Src *v4l2src,
*min_w = fmt.fmt.pix.width;
if (min_h)
*min_h = fmt.fmt.pix.height;
+ GST_LOG_OBJECT (v4l2src, "got min size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
fmt.fmt.pix.width = G_MAXINT;
- fmt.fmt.pix.height = G_MAXINT;
+ fmt.fmt.pix.height = 576;
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd,
VIDIOC_TRY_FMT, &fmt) < 0) {
return FALSE;
@@ -606,6 +481,7 @@ gst_v4l2src_get_size_limits (GstV4l2Src *v4l2src,
*max_w = fmt.fmt.pix.width;
if (max_h)
*max_h = fmt.fmt.pix.height;
+ GST_LOG_OBJECT (v4l2src, "got max size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
return TRUE;
}
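
For readers less familiar with V4L2 streaming I/O, the sequence the rewritten functions above follow (VIDIOC_REQBUFS, VIDIOC_QUERYBUF + mmap, VIDIOC_QBUF, VIDIOC_STREAMON, then VIDIOC_DQBUF per frame and VIDIOC_QBUF to return it) looks roughly like the standalone sketch below. This is not part of the commit; the device path, buffer count, and omitted error checking are illustrative assumptions only.

/* Illustrative sketch (not from this commit): the plain V4L2 streaming-I/O
 * sequence that capture_init/capture_start/grab_frame implement above.
 * Device path and buffer count are assumptions; error handling is omitted. */
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <linux/videodev2.h>

int main (void)
{
  int fd = open ("/dev/video0", O_RDWR);
  struct v4l2_requestbuffers req;
  struct v4l2_buffer buf;
  void *start[4];
  int type = V4L2_BUF_TYPE_VIDEO_CAPTURE, i;

  /* VIDIOC_REQBUFS: ask the driver for mmap'able capture buffers */
  memset (&req, 0, sizeof (req));
  req.count = 4;
  req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  req.memory = V4L2_MEMORY_MMAP;
  ioctl (fd, VIDIOC_REQBUFS, &req);

  /* VIDIOC_QUERYBUF + mmap + VIDIOC_QBUF per buffer, as done by
   * gst_v4l2src_capture_init() and gst_v4l2src_queue_frame() */
  for (i = 0; i < (int) req.count; i++) {
    memset (&buf, 0, sizeof (buf));
    buf.index = i;
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    ioctl (fd, VIDIOC_QUERYBUF, &buf);
    start[i] = mmap (NULL, buf.length, PROT_READ | PROT_WRITE,
        MAP_SHARED, fd, buf.m.offset);
    ioctl (fd, VIDIOC_QBUF, &buf);
  }

  /* VIDIOC_STREAMON starts capture (gst_v4l2src_capture_start()) */
  ioctl (fd, VIDIOC_STREAMON, &type);

  /* VIDIOC_DQBUF hands back one filled buffer (gst_v4l2src_grab_frame());
   * after use it is requeued with VIDIOC_QBUF, which is what
   * gst_v4l2src_free_buffer() does once the GstBuffer is released */
  memset (&buf, 0, sizeof (buf));
  buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  buf.memory = V4L2_MEMORY_MMAP;
  ioctl (fd, VIDIOC_DQBUF, &buf);
  printf ("got frame %u, %u bytes at %p\n", buf.index, buf.bytesused,
      start[buf.index]);
  ioctl (fd, VIDIOC_QBUF, &buf);

  ioctl (fd, VIDIOC_STREAMOFF, &type);
  close (fd);
  return 0;
}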