author     Benjamin Otte <otte@gnome.org>    2004-01-12 02:19:57 +0000
committer  Benjamin Otte <otte@gnome.org>    2004-01-12 02:19:57 +0000
commit     1adb86a5d6b2f1bb96568f6135e1fdb4d451e4cd (patch)
tree       c6a5591aa3d71d3cb1f58fba000e966970d6853d
parent     09984b518a5f6bbb91cec7568a8d2d28132439a6 (diff)
sys/v4l2/: add norm, channel and frequency properties.
Original commit message from CVS:

2004-01-12  Benjamin Otte  <in7y118@public.uni-hamburg.de>

	* sys/v4l2/gstv4l2element.c: (gst_v4l2element_class_init),
	(gst_v4l2element_dispose), (gst_v4l2element_set_property),
	(gst_v4l2element_get_property):
	* sys/v4l2/v4l2_calls.c: (gst_v4l2_set_defaults), (gst_v4l2_open):
	add norm, channel and frequency properties.
	* sys/v4l2/gstv4l2tuner.c:
	fixes for tuner interface changes
	* sys/v4l2/gstv4l2element.h:
	* sys/v4l2/gstv4l2src.c:
	* sys/v4l2/gstv4l2src.h:
	* sys/v4l2/v4l2src_calls.c:
	* sys/v4l2/v4l2src_calls.h:
	rework v4l2src to work with saa1734 cards and allow mmaped buffers.
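Note: the norm, channel and frequency properties installed in gst_v4l2element_class_init() below are regular GObject properties on the element. The following is a minimal, illustrative sketch only (GStreamer 0.8-era core API): the element and property names come from this commit, but the device path and the norm/channel/frequency values are made-up examples, not taken from the patch.

#include <gst/gst.h>

/* Illustrative sketch: configure the new tuner-related properties on v4l2src.
 * Property names ("device", "norm", "channel", "frequency") match the
 * g_param_spec_* calls installed in gst_v4l2element_class_init(); the
 * concrete values below are hypothetical. */
int
main (int argc, char *argv[])
{
  GstElement *src;

  gst_init (&argc, &argv);

  src = gst_element_factory_make ("v4l2src", "source");
  if (src == NULL)
    return 1;

  g_object_set (G_OBJECT (src),
                "device", "/dev/video0",
                "norm", "PAL",            /* matched by name against the device's norms */
                "channel", "Television",  /* input (or output) name reported by the device */
                "frequency", 217250000UL, /* tuner frequency; unit depends on the tuner */
                NULL);

  gst_object_unref (GST_OBJECT (src));
  return 0;
}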
-rw-r--r--  ChangeLog                     16
-rw-r--r--  sys/v4l2/gstv4l2element.c    188
-rw-r--r--  sys/v4l2/gstv4l2element.h      5
-rw-r--r--  sys/v4l2/gstv4l2src.c       1248
-rw-r--r--  sys/v4l2/gstv4l2src.h         45
-rw-r--r--  sys/v4l2/gstv4l2tuner.c       32
-rw-r--r--  sys/v4l2/v4l2_calls.c         48
-rw-r--r--  sys/v4l2/v4l2src_calls.c     612
-rw-r--r--  sys/v4l2/v4l2src_calls.h      17
9 files changed, 1114 insertions(+), 1097 deletions(-)
diff --git a/ChangeLog b/ChangeLog
index e2ab4e28..ffc35c48 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,21 @@
2004-01-12 Benjamin Otte <in7y118@public.uni-hamburg.de>
+ * sys/v4l2/gstv4l2element.c: (gst_v4l2element_class_init),
+ (gst_v4l2element_dispose), (gst_v4l2element_set_property),
+ (gst_v4l2element_get_property):
+ * sys/v4l2/v4l2_calls.c: (gst_v4l2_set_defaults), (gst_v4l2_open):
+ add norm, channel and frequency properties.
+ * sys/v4l2/gstv4l2tuner.c:
+ fixes for tuner interface changes
+ * sys/v4l2/gstv4l2element.h:
+ * sys/v4l2/gstv4l2src.c:
+ * sys/v4l2/gstv4l2src.h:
+ * sys/v4l2/v4l2src_calls.c:
+ * sys/v4l2/v4l2src_calls.h:
+ rework v4l2src to work with saa1734 cards and allow mmaped buffers.
+
+2004-01-12 Benjamin Otte <in7y118@public.uni-hamburg.de>
+
* gst-libs/gst/tuner/tuner.c: (gst_tuner_class_init),
(gst_tuner_find_norm_by_name), (gst_v4l2_find_channel_by_name),
(gst_tuner_channel_changed), (gst_tuner_norm_changed),
diff --git a/sys/v4l2/gstv4l2element.c b/sys/v4l2/gstv4l2element.c
index d99f6d6d..eb12501d 100644
--- a/sys/v4l2/gstv4l2element.c
+++ b/sys/v4l2/gstv4l2element.c
@@ -43,17 +43,20 @@ static GstElementDetails gst_v4l2element_details = {
/* V4l2Element signals and args */
enum {
- /* FILL ME */
- SIGNAL_OPEN,
- SIGNAL_CLOSE,
- LAST_SIGNAL
+ /* FILL ME */
+ SIGNAL_OPEN,
+ SIGNAL_CLOSE,
+ LAST_SIGNAL
};
enum {
- ARG_0,
- ARG_DEVICE,
- ARG_DEVICE_NAME,
- ARG_FLAGS
+ ARG_0,
+ ARG_DEVICE,
+ ARG_DEVICE_NAME,
+ ARG_NORM,
+ ARG_CHANNEL,
+ ARG_FREQUENCY,
+ ARG_FLAGS
};
@@ -378,41 +381,47 @@ gst_v4l2element_base_init (GstV4l2ElementClass *klass)
static void
gst_v4l2element_class_init (GstV4l2ElementClass *klass)
{
- GObjectClass *gobject_class;
- GstElementClass *gstelement_class;
-
- gobject_class = (GObjectClass*)klass;
- gstelement_class = (GstElementClass*)klass;
-
- parent_class = g_type_class_ref(GST_TYPE_ELEMENT);
-
- g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_DEVICE,
- g_param_spec_string("device", "Device", "Device location",
- NULL, G_PARAM_READWRITE));
- g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_DEVICE_NAME,
- g_param_spec_string("device_name", "Device name",
- "Name of the device", NULL, G_PARAM_READABLE));
- g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_FLAGS,
- g_param_spec_flags("flags", "Flags", "Device type flags",
- GST_TYPE_V4L2_DEVICE_FLAGS, 0, G_PARAM_READABLE));
-
- /* signals */
- gst_v4l2element_signals[SIGNAL_OPEN] =
- g_signal_new("open", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST,
- G_STRUCT_OFFSET(GstV4l2ElementClass, open),
- NULL, NULL, g_cclosure_marshal_VOID__STRING,
- G_TYPE_NONE, 1, G_TYPE_STRING);
- gst_v4l2element_signals[SIGNAL_CLOSE] =
- g_signal_new("close", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST,
- G_STRUCT_OFFSET(GstV4l2ElementClass, close),
- NULL, NULL, g_cclosure_marshal_VOID__STRING,
- G_TYPE_NONE, 1, G_TYPE_STRING);
-
- gobject_class->set_property = gst_v4l2element_set_property;
- gobject_class->get_property = gst_v4l2element_get_property;
- gobject_class->dispose = gst_v4l2element_dispose;
-
- gstelement_class->change_state = gst_v4l2element_change_state;
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ g_object_class_install_property(gobject_class, ARG_DEVICE,
+ g_param_spec_string("device", "Device", "Device location",
+ NULL, G_PARAM_READWRITE));
+ g_object_class_install_property(gobject_class, ARG_DEVICE_NAME,
+ g_param_spec_string("device_name", "Device name",
+ "Name of the device", NULL, G_PARAM_READABLE));
+ g_object_class_install_property(gobject_class, ARG_FLAGS,
+ g_param_spec_flags("flags", "Flags", "Device type flags",
+ GST_TYPE_V4L2_DEVICE_FLAGS, 0, G_PARAM_READABLE));
+ g_object_class_install_property(gobject_class, ARG_NORM,
+ g_param_spec_string("norm", "norm",
+ "Norm to use", NULL, G_PARAM_READWRITE));
+ g_object_class_install_property(gobject_class, ARG_CHANNEL,
+ g_param_spec_string("channel", "channel",
+ "input/output to switch to", NULL, G_PARAM_READWRITE));
+ g_object_class_install_property(gobject_class, ARG_FREQUENCY,
+ g_param_spec_ulong ("frequency", "frequency",
+ "frequency to tune to", 0, G_MAXULONG, 0, G_PARAM_READWRITE));
+
+ /* signals */
+ gst_v4l2element_signals[SIGNAL_OPEN] =
+ g_signal_new("open", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET(GstV4l2ElementClass, open),
+ NULL, NULL, g_cclosure_marshal_VOID__STRING,
+ G_TYPE_NONE, 1, G_TYPE_STRING);
+ gst_v4l2element_signals[SIGNAL_CLOSE] =
+ g_signal_new("close", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET(GstV4l2ElementClass, close),
+ NULL, NULL, g_cclosure_marshal_VOID__STRING,
+ G_TYPE_NONE, 1, G_TYPE_STRING);
+
+ gobject_class->set_property = gst_v4l2element_set_property;
+ gobject_class->get_property = gst_v4l2element_get_property;
+ gobject_class->dispose = gst_v4l2element_dispose;
+
+ gstelement_class->change_state = gst_v4l2element_change_state;
}
@@ -446,10 +455,13 @@ gst_v4l2element_dispose (GObject *object)
g_free (v4l2element->display);
}
- if (v4l2element->device) {
- g_free (v4l2element->device);
- }
-
+ g_free (v4l2element->device);
+ v4l2element->device = NULL;
+ g_free (v4l2element->norm);
+ v4l2element->norm = NULL;
+ g_free (v4l2element->channel);
+ v4l2element->channel = NULL;
+
if (((GObjectClass *) parent_class)->dispose)
((GObjectClass *) parent_class)->dispose(object);
}
@@ -460,24 +472,63 @@ gst_v4l2element_set_property (GObject *object,
const GValue *value,
GParamSpec *pspec)
{
- GstV4l2Element *v4l2element;
-
- /* it's not null if we got it, but it might not be ours */
- g_return_if_fail(GST_IS_V4L2ELEMENT(object));
- v4l2element = GST_V4L2ELEMENT(object);
-
- switch (prop_id) {
- case ARG_DEVICE:
- if (!GST_V4L2_IS_OPEN(v4l2element)) {
- if (v4l2element->device)
- g_free(v4l2element->device);
- v4l2element->device = g_strdup(g_value_get_string(value));
- }
- break;
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
+ GstV4l2Element *v4l2element;
+ GstTuner *tuner;
+
+ /* it's not null if we got it, but it might not be ours */
+ g_return_if_fail (GST_IS_V4L2ELEMENT (object));
+ v4l2element = GST_V4L2ELEMENT (object);
+ /* stupid GstInterface */
+ tuner = (GstTuner *) object;
+
+ switch (prop_id) {
+ case ARG_DEVICE:
+ if (!GST_V4L2_IS_OPEN(v4l2element)) {
+ if (v4l2element->device)
+ g_free(v4l2element->device);
+ v4l2element->device = g_value_dup_string(value);
+ }
+ break;
+ case ARG_NORM:
+ if (GST_V4L2_IS_OPEN(v4l2element)) {
+ GstTunerNorm *norm = gst_tuner_get_norm (tuner);
+ if (norm) {
+ gst_tuner_set_norm (tuner, norm);
}
+ } else {
+ g_free (v4l2element->norm);
+ v4l2element->norm = g_value_dup_string (value);
+ g_object_notify (object, "norm");
+ }
+ break;
+ case ARG_CHANNEL:
+ if (GST_V4L2_IS_OPEN(v4l2element)) {
+ GstTunerChannel *channel = gst_tuner_get_channel (tuner);
+ if (channel) {
+ gst_tuner_set_channel (tuner, channel);
+ }
+ } else {
+ g_free (v4l2element->channel);
+ v4l2element->channel = g_value_dup_string (value);
+ g_object_notify (object, "channel");
+ }
+ break;
+ case ARG_FREQUENCY:
+ if (GST_V4L2_IS_OPEN(v4l2element)) {
+ GstTunerChannel *channel;
+ if (!v4l2element->channel) return;
+ channel = gst_tuner_get_channel (tuner);
+ g_assert (channel);
+ gst_tuner_set_frequency (tuner, channel, g_value_get_ulong (value));
+ } else {
+ v4l2element->frequency = g_value_get_ulong (value);
+ g_object_notify (object, "frequency");
+ }
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
}
@@ -513,6 +564,15 @@ gst_v4l2element_get_property (GObject *object,
g_value_set_flags(value, flags);
break;
}
+ case ARG_NORM:
+ g_value_set_string (value, v4l2element->norm);
+ break;
+ case ARG_CHANNEL:
+ g_value_set_string (value, v4l2element->channel);
+ break;
+ case ARG_FREQUENCY:
+ g_value_set_ulong (value, v4l2element->frequency);
+ break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec);
break;
diff --git a/sys/v4l2/gstv4l2element.h b/sys/v4l2/gstv4l2element.h
index a57f66f0..e46b2440 100644
--- a/sys/v4l2/gstv4l2element.h
+++ b/sys/v4l2/gstv4l2element.h
@@ -82,6 +82,11 @@ struct _GstV4l2Element {
GstXWindowListener *overlay;
XID xwindow_id;
+ /* properties */
+ gchar *norm;
+ gchar *channel;
+ gulong frequency;
+
/* caching values */
gchar *display;
};
diff --git a/sys/v4l2/gstv4l2src.c b/sys/v4l2/gstv4l2src.c
index 0c275b2c..edce2091 100644
--- a/sys/v4l2/gstv4l2src.c
+++ b/sys/v4l2/gstv4l2src.c
@@ -26,6 +26,9 @@
#include "v4l2src_calls.h"
#include "gstv4l2tuner.h"
+GST_DEBUG_CATEGORY (v4l2src_debug);
+#define GST_CAT_DEFAULT v4l2src_debug
+
/* elementfactory details */
static GstElementDetails gst_v4l2src_details = {
"Video (video4linux2) Source",
@@ -51,15 +54,50 @@ enum {
ARG_USE_FIXED_FPS
};
+guint32 gst_v4l2_formats[] = {
+ /* from Linux 2.6.0 videodev2.h */
+ V4L2_PIX_FMT_RGB332, /* 8 RGB-3-3-2 */
+ V4L2_PIX_FMT_RGB555, /* 16 RGB-5-5-5 */
+ V4L2_PIX_FMT_RGB565, /* 16 RGB-5-6-5 */
+ V4L2_PIX_FMT_RGB555X, /* 16 RGB-5-5-5 BE */
+ V4L2_PIX_FMT_RGB565X, /* 16 RGB-5-6-5 BE */
+ V4L2_PIX_FMT_BGR24, /* 24 BGR-8-8-8 */
+ V4L2_PIX_FMT_RGB24, /* 24 RGB-8-8-8 */
+ V4L2_PIX_FMT_BGR32, /* 32 BGR-8-8-8-8 */
+ V4L2_PIX_FMT_RGB32, /* 32 RGB-8-8-8-8 */
+ V4L2_PIX_FMT_GREY, /* 8 Greyscale */
+ V4L2_PIX_FMT_YVU410, /* 9 YVU 4:1:0 */
+ V4L2_PIX_FMT_YVU420, /* 12 YVU 4:2:0 */
+ V4L2_PIX_FMT_YUYV, /* 16 YUV 4:2:2 */
+ V4L2_PIX_FMT_UYVY, /* 16 YUV 4:2:2 */
+ V4L2_PIX_FMT_YUV422P, /* 16 YVU422 planar */
+ V4L2_PIX_FMT_YUV411P, /* 16 YVU411 planar */
+ V4L2_PIX_FMT_Y41P, /* 12 YUV 4:1:1 */
+ V4L2_PIX_FMT_NV12, /* 12 Y/CbCr 4:2:0 */
+ V4L2_PIX_FMT_NV21, /* 12 Y/CrCb 4:2:0 */
+ V4L2_PIX_FMT_YUV410, /* 9 YUV 4:1:0 */
+ V4L2_PIX_FMT_YUV420, /* 12 YUV 4:2:0 */
+ V4L2_PIX_FMT_YYUV, /* 16 YUV 4:2:2 */
+ V4L2_PIX_FMT_HI240, /* 8 8-bit color */
+ V4L2_PIX_FMT_MJPEG, /* Motion-JPEG */
+ V4L2_PIX_FMT_JPEG, /* JFIF JPEG */
+ V4L2_PIX_FMT_DV, /* 1394 */
+ V4L2_PIX_FMT_MPEG, /* MPEG */
+ V4L2_PIX_FMT_WNVA /* Winnov hw compres */
+};
+#define GST_V4L2_FORMAT_COUNT (G_N_ELEMENTS (gst_v4l2_formats))
+
GST_FORMATS_FUNCTION (GstPad *, gst_v4l2src_get_formats,
GST_FORMAT_TIME, GST_FORMAT_DEFAULT);
GST_QUERY_TYPE_FUNCTION (GstPad *, gst_v4l2src_get_query_types,
GST_QUERY_POSITION);
/* init functions */
-static void gst_v4l2src_class_init (GstV4l2SrcClass *klass);
-static void gst_v4l2src_base_init (GstV4l2SrcClass *klass);
-static void gst_v4l2src_init (GstV4l2Src *v4l2src);
+static void gst_v4l2src_class_init (gpointer g_class,
+ gpointer class_data);
+static void gst_v4l2src_base_init (gpointer g_class);
+static void gst_v4l2src_init (GTypeInstance * instance,
+ gpointer g_class);
/* signal functions */
static void gst_v4l2src_open (GstElement *element,
@@ -68,9 +106,12 @@ static void gst_v4l2src_close (GstElement *element,
const gchar *device);
/* pad/buffer functions */
-static GstPadLinkReturn gst_v4l2src_srcconnect (GstPad *pad,
+static const GstCaps * gst_v4l2src_get_all_caps (void);
+static GstPadLinkReturn gst_v4l2src_link (GstPad *pad,
const GstCaps *caps);
static GstCaps * gst_v4l2src_getcaps (GstPad *pad);
+static GstCaps * gst_v4l2src_fixate (GstPad * pad,
+ const GstCaps * caps);
static GstData * gst_v4l2src_get (GstPad *pad);
static gboolean gst_v4l2src_src_convert (GstPad *pad,
GstFormat src_format,
@@ -100,8 +141,6 @@ static GstElementStateReturn
static void gst_v4l2src_set_clock (GstElement *element,
GstClock *clock);
-static GstPadTemplate *src_template;
-
static GstElementClass *parent_class = NULL;
static guint gst_v4l2src_signals[LAST_SIGNAL] = { 0 };
@@ -109,133 +148,127 @@ static guint gst_v4l2src_signals[LAST_SIGNAL] = { 0 };
GType
gst_v4l2src_get_type (void)
{
- static GType v4l2src_type = 0;
-
- if (!v4l2src_type) {
- static const GTypeInfo v4l2src_info = {
- sizeof(GstV4l2SrcClass),
- (GBaseInitFunc) gst_v4l2src_base_init,
- NULL,
- (GClassInitFunc) gst_v4l2src_class_init,
- NULL,
- NULL,
- sizeof(GstV4l2Src),
- 0,
- (GInstanceInitFunc) gst_v4l2src_init,
- NULL
- };
- v4l2src_type = g_type_register_static(GST_TYPE_V4L2ELEMENT,
- "GstV4l2Src", &v4l2src_info, 0);
- }
- return v4l2src_type;
+ static GType v4l2src_type = 0;
+
+ if (!v4l2src_type) {
+ static const GTypeInfo v4l2src_info = {
+ sizeof (GstV4l2SrcClass),
+ gst_v4l2src_base_init,
+ NULL,
+ gst_v4l2src_class_init,
+ NULL,
+ NULL,
+ sizeof (GstV4l2Src),
+ 0,
+ gst_v4l2src_init,
+ NULL
+ };
+ v4l2src_type = g_type_register_static(GST_TYPE_V4L2ELEMENT,
+ "GstV4l2Src", &v4l2src_info, 0);
+ GST_DEBUG_CATEGORY_INIT (v4l2src_debug, "v4l2src", 0, "v4l2src element");
+ }
+ return v4l2src_type;
}
static void
-gst_v4l2src_base_init (GstV4l2SrcClass *klass)
+gst_v4l2src_base_init (gpointer g_class)
{
- GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+ GstPadTemplate *template;
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
- gst_element_class_set_details (gstelement_class,
- &gst_v4l2src_details);
+ gst_element_class_set_details (gstelement_class, &gst_v4l2src_details);
- src_template = gst_pad_template_new ("src",
- GST_PAD_SRC,
- GST_PAD_ALWAYS,
- NULL);
+ template = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+ gst_caps_copy (gst_v4l2src_get_all_caps ()));
- gst_element_class_add_pad_template (gstelement_class, src_template);
+ gst_element_class_add_pad_template (gstelement_class, template);
}
static void
-gst_v4l2src_class_init (GstV4l2SrcClass *klass)
+gst_v4l2src_class_init (gpointer g_class, gpointer class_data)
{
- GObjectClass *gobject_class;
- GstElementClass *gstelement_class;
- GstV4l2ElementClass *v4l2_class;
-
- gobject_class = (GObjectClass*)klass;
- gstelement_class = (GstElementClass*)klass;
- v4l2_class = (GstV4l2ElementClass*)klass;
-
- parent_class = g_type_class_ref(GST_TYPE_V4L2ELEMENT);
-
- g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_NUMBUFS,
- g_param_spec_int("num_buffers","num_buffers","num_buffers",
- G_MININT,G_MAXINT,0,G_PARAM_READWRITE));
- g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_BUFSIZE,
- g_param_spec_int("buffer_size","buffer_size","buffer_size",
- G_MININT,G_MAXINT,0,G_PARAM_READABLE));
-
- g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_USE_FIXED_FPS,
- g_param_spec_boolean("use_fixed_fps", "Use Fixed FPS",
- "Drop/Insert frames to reach a certain FPS (TRUE) "
- "or adapt FPS to suit the number of frabbed frames",
- TRUE, G_PARAM_READWRITE));
-
- /* signals */
- gst_v4l2src_signals[SIGNAL_FRAME_CAPTURE] =
- g_signal_new("frame_capture", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST,
- G_STRUCT_OFFSET(GstV4l2SrcClass, frame_capture),
- NULL, NULL, g_cclosure_marshal_VOID__VOID,
- G_TYPE_NONE, 0);
- gst_v4l2src_signals[SIGNAL_FRAME_DROP] =
- g_signal_new("frame_drop", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST,
- G_STRUCT_OFFSET(GstV4l2SrcClass, frame_drop),
- NULL, NULL, g_cclosure_marshal_VOID__VOID,
- G_TYPE_NONE, 0);
- gst_v4l2src_signals[SIGNAL_FRAME_INSERT] =
- g_signal_new("frame_insert", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST,
- G_STRUCT_OFFSET(GstV4l2SrcClass, frame_insert),
- NULL, NULL, g_cclosure_marshal_VOID__VOID,
- G_TYPE_NONE, 0);
- gst_v4l2src_signals[SIGNAL_FRAME_LOST] =
- g_signal_new("frame_lost", G_TYPE_FROM_CLASS(klass), G_SIGNAL_RUN_LAST,
- G_STRUCT_OFFSET(GstV4l2SrcClass, frame_lost),
- NULL, NULL, g_cclosure_marshal_VOID__INT,
- G_TYPE_NONE, 1, G_TYPE_INT);
-
-
- gobject_class->set_property = gst_v4l2src_set_property;
- gobject_class->get_property = gst_v4l2src_get_property;
-
- gstelement_class->change_state = gst_v4l2src_change_state;
-
- v4l2_class->open = gst_v4l2src_open;
- v4l2_class->close = gst_v4l2src_close;
-
- gstelement_class->set_clock = gst_v4l2src_set_clock;
+ GObjectClass *gobject_class = G_OBJECT_CLASS (g_class);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
+ GstV4l2ElementClass *v4l2_class = GST_V4L2ELEMENT_CLASS (g_class);
+
+ parent_class = g_type_class_peek_parent (g_class);
+
+ gobject_class->set_property = gst_v4l2src_set_property;
+ gobject_class->get_property = gst_v4l2src_get_property;
+
+ g_object_class_install_property(gobject_class, ARG_NUMBUFS,
+ g_param_spec_int("num_buffers","num_buffers","num_buffers",
+ G_MININT,G_MAXINT,0,G_PARAM_READWRITE));
+ g_object_class_install_property(gobject_class, ARG_BUFSIZE,
+ g_param_spec_int("buffer_size","buffer_size","buffer_size",
+ G_MININT,G_MAXINT,0,G_PARAM_READABLE));
+
+ g_object_class_install_property(gobject_class, ARG_USE_FIXED_FPS,
+ g_param_spec_boolean("use_fixed_fps", "Use Fixed FPS",
+ "Drop/Insert frames to reach a certain FPS (TRUE) "
+ "or adapt FPS to suit the number of frabbed frames",
+ TRUE, G_PARAM_READWRITE));
+
+ /* signals */
+ gst_v4l2src_signals[SIGNAL_FRAME_CAPTURE] =
+ g_signal_new("frame_capture", G_TYPE_FROM_CLASS (g_class), G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET(GstV4l2SrcClass, frame_capture),
+ NULL, NULL, g_cclosure_marshal_VOID__VOID,
+ G_TYPE_NONE, 0);
+ gst_v4l2src_signals[SIGNAL_FRAME_DROP] =
+ g_signal_new("frame_drop", G_TYPE_FROM_CLASS (g_class), G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET(GstV4l2SrcClass, frame_drop),
+ NULL, NULL, g_cclosure_marshal_VOID__VOID,
+ G_TYPE_NONE, 0);
+ gst_v4l2src_signals[SIGNAL_FRAME_INSERT] =
+ g_signal_new("frame_insert", G_TYPE_FROM_CLASS (g_class), G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET(GstV4l2SrcClass, frame_insert),
+ NULL, NULL, g_cclosure_marshal_VOID__VOID,
+ G_TYPE_NONE, 0);
+ gst_v4l2src_signals[SIGNAL_FRAME_LOST] =
+ g_signal_new("frame_lost", G_TYPE_FROM_CLASS (g_class), G_SIGNAL_RUN_LAST,
+ G_STRUCT_OFFSET(GstV4l2SrcClass, frame_lost),
+ NULL, NULL, g_cclosure_marshal_VOID__INT,
+ G_TYPE_NONE, 1, G_TYPE_INT);
+
+ gstelement_class->change_state = gst_v4l2src_change_state;
+
+ v4l2_class->open = gst_v4l2src_open;
+ v4l2_class->close = gst_v4l2src_close;
+
+ gstelement_class->set_clock = gst_v4l2src_set_clock;
}
static void
-gst_v4l2src_init (GstV4l2Src *v4l2src)
+gst_v4l2src_init (GTypeInstance *instance, gpointer g_class)
{
- GST_FLAG_SET(GST_ELEMENT(v4l2src), GST_ELEMENT_THREAD_SUGGESTED);
+ GstV4l2Src *v4l2src = GST_V4L2SRC (instance);
+
+ GST_FLAG_SET(GST_ELEMENT(v4l2src), GST_ELEMENT_THREAD_SUGGESTED);
- v4l2src->srcpad = gst_pad_new_from_template(src_template, "src");
- gst_element_add_pad(GST_ELEMENT(v4l2src), v4l2src->srcpad);
+ v4l2src->srcpad = gst_pad_new_from_template(
+ gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (v4l2src), "src"), "src");
+ gst_element_add_pad(GST_ELEMENT(v4l2src), v4l2src->srcpad);
- gst_pad_set_get_function(v4l2src->srcpad, gst_v4l2src_get);
- gst_pad_set_link_function(v4l2src->srcpad, gst_v4l2src_srcconnect);
- gst_pad_set_getcaps_function (v4l2src->srcpad, gst_v4l2src_getcaps);
- gst_pad_set_convert_function (v4l2src->srcpad, gst_v4l2src_src_convert);
- gst_pad_set_formats_function (v4l2src->srcpad,
- gst_v4l2src_get_formats);
- gst_pad_set_query_function (v4l2src->srcpad,
- gst_v4l2src_src_query);
- gst_pad_set_query_type_function (v4l2src->srcpad,
- gst_v4l2src_get_query_types);
+ gst_pad_set_get_function(v4l2src->srcpad, gst_v4l2src_get);
+ gst_pad_set_link_function(v4l2src->srcpad, gst_v4l2src_link);
+ gst_pad_set_getcaps_function (v4l2src->srcpad, gst_v4l2src_getcaps);
+ gst_pad_set_fixate_function (v4l2src->srcpad, gst_v4l2src_fixate);
+ gst_pad_set_convert_function (v4l2src->srcpad, gst_v4l2src_src_convert);
+ gst_pad_set_formats_function (v4l2src->srcpad, gst_v4l2src_get_formats);
+ gst_pad_set_query_function (v4l2src->srcpad, gst_v4l2src_src_query);
+ gst_pad_set_query_type_function (v4l2src->srcpad, gst_v4l2src_get_query_types);
- v4l2src->breq.count = 0;
+ v4l2src->breq.count = 0;
- v4l2src->formats = NULL;
- v4l2src->format_list = NULL;
+ v4l2src->formats = NULL;
- /* no clock */
- v4l2src->clock = NULL;
+ /* no clock */
+ v4l2src->clock = NULL;
- /* fps */
- v4l2src->use_fixed_fps = TRUE;
+ /* fps */
+ v4l2src->use_fixed_fps = TRUE;
}
@@ -243,7 +276,7 @@ static void
gst_v4l2src_open (GstElement *element,
const gchar *device)
{
- gst_v4l2src_fill_format_list(GST_V4L2SRC(element));
+ gst_v4l2src_fill_format_list (GST_V4L2SRC (element));
}
@@ -251,7 +284,7 @@ static void
gst_v4l2src_close (GstElement *element,
const gchar *device)
{
- gst_v4l2src_empty_format_list(GST_V4L2SRC(element));
+ gst_v4l2src_clear_format_list (GST_V4L2SRC (element));
}
@@ -365,173 +398,154 @@ gst_v4l2src_src_query (GstPad *pad,
return res;
}
-
static GstStructure *
-gst_v4l2src_v4l2fourcc_to_caps (guint32 fourcc,
- gboolean compressed)
+gst_v4l2src_v4l2fourcc_to_caps (guint32 fourcc)
{
- GstStructure *structure;
+ GstStructure *structure = NULL;
- switch (fourcc) {
- case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */
- case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */
- structure = gst_structure_new ("video/x-jpeg", NULL);
- break;
+ switch (fourcc) {
+ case V4L2_PIX_FMT_MJPEG: /* Motion-JPEG */
+ case V4L2_PIX_FMT_JPEG: /* JFIF JPEG */
+ structure = gst_structure_new ("video/x-jpeg", NULL);
+ break;
+ case V4L2_PIX_FMT_RGB332:
+ case V4L2_PIX_FMT_RGB555:
+ case V4L2_PIX_FMT_RGB555X:
+ case V4L2_PIX_FMT_RGB565:
+ case V4L2_PIX_FMT_RGB565X:
+ case V4L2_PIX_FMT_RGB24:
+ case V4L2_PIX_FMT_BGR24:
+ case V4L2_PIX_FMT_RGB32:
+ case V4L2_PIX_FMT_BGR32: {
+ guint depth=0, bpp=0;
+ gint endianness = 0;
+ guint32 r_mask = 0, b_mask = 0, g_mask = 0;
+
+ switch (fourcc) {
case V4L2_PIX_FMT_RGB332:
+ bpp = depth = 8;
+ endianness = G_BYTE_ORDER; /* 'like, whatever' */
+ r_mask = 0xe0; g_mask = 0x1c; b_mask = 0x03;
+ break;
case V4L2_PIX_FMT_RGB555:
case V4L2_PIX_FMT_RGB555X:
+ bpp = 16; depth = 15;
+ endianness = fourcc == V4L2_PIX_FMT_RGB555X ? G_BIG_ENDIAN : G_LITTLE_ENDIAN;
+ r_mask = 0x7c00;
+ g_mask = 0x03e0;
+ b_mask = 0x001f;
+ break;
case V4L2_PIX_FMT_RGB565:
case V4L2_PIX_FMT_RGB565X:
+ bpp = depth = 16;
+ endianness = fourcc == V4L2_PIX_FMT_RGB565X ? G_BIG_ENDIAN : G_LITTLE_ENDIAN;
+ r_mask = 0xf800;
+ g_mask = 0x07e0;
+ b_mask = 0x001f;
case V4L2_PIX_FMT_RGB24:
+ bpp = depth = 24;
+ endianness = G_BIG_ENDIAN;
+ r_mask = 0xff0000;
+ g_mask = 0x00ff00;
+ b_mask = 0x0000ff;
+ break;
case V4L2_PIX_FMT_BGR24:
+ bpp = depth = 24;
+ endianness = G_BIG_ENDIAN;
+ r_mask = 0x0000ff;
+ g_mask = 0x00ff00;
+ b_mask = 0xff0000;
+ break;
case V4L2_PIX_FMT_RGB32:
- case V4L2_PIX_FMT_BGR32: {
- guint depth=0, bpp=0;
- gint endianness = 0;
- guint32 r_mask = 0, b_mask = 0, g_mask = 0;
-
- switch (fourcc) {
- case V4L2_PIX_FMT_RGB332:
- bpp = depth = 8;
- endianness = G_BYTE_ORDER; /* 'like, whatever' */
- r_mask = 0xe0; g_mask = 0x1c; b_mask = 0x03;
- break;
- case V4L2_PIX_FMT_RGB555:
- case V4L2_PIX_FMT_RGB555X:
- bpp = 16; depth = 15;
- endianness = G_BYTE_ORDER;
- if ((fourcc == V4L2_PIX_FMT_RGB555 &&
- G_BYTE_ORDER == G_LITTLE_ENDIAN) ||
- (fourcc == V4L2_PIX_FMT_RGB555X &&
- G_BYTE_ORDER == G_BIG_ENDIAN)) {
- r_mask = 0x7c00;
- g_mask = 0x03e0;
- b_mask = 0x001f;
- } else {
- r_mask = 0x007c;
- g_mask = 0xe003;
- b_mask = 0x1f00;
- }
- break;
- case V4L2_PIX_FMT_RGB565:
- case V4L2_PIX_FMT_RGB565X:
- bpp = depth = 16;
- endianness = G_BYTE_ORDER;
- if ((fourcc == V4L2_PIX_FMT_RGB565 &&
- G_BYTE_ORDER == G_LITTLE_ENDIAN) ||
- (fourcc == V4L2_PIX_FMT_RGB565X &&
- G_BYTE_ORDER == G_BIG_ENDIAN)) {
- r_mask = 0xf800;
- g_mask = 0x07e0;
- b_mask = 0x001f;
- } else {
- r_mask = 0x00f8;
- g_mask = 0xe007;
- b_mask = 0x1f00;
- }
- break;
- case V4L2_PIX_FMT_RGB24:
- case V4L2_PIX_FMT_BGR24:
- bpp = depth = 24;
- endianness = G_BIG_ENDIAN;
- if (fourcc == V4L2_PIX_FMT_RGB24) {
- r_mask = 0xff0000;
- g_mask = 0x00ff00;
- b_mask = 0x0000ff;
- } else {
- r_mask = 0x0000ff;
- g_mask = 0x00ff00;
- b_mask = 0xff0000;
- }
- break;
- case V4L2_PIX_FMT_RGB32:
- case V4L2_PIX_FMT_BGR32:
- bpp = depth = 32;
- endianness = G_BIG_ENDIAN;
- if (fourcc == V4L2_PIX_FMT_RGB32) {
- r_mask = 0xff000000;
- g_mask = 0x00ff0000;
- b_mask = 0x0000ff00;
- } else {
- r_mask = 0x000000ff;
- g_mask = 0x0000ff00;
- b_mask = 0x00ff0000;
- }
- break;
- default:
- g_assert_not_reached();
- break;
- }
-
- structure = gst_structure_new ("video/x-raw-rgb",
- "bpp", G_TYPE_INT, bpp,
- "depth", G_TYPE_INT, depth,
- "red_mask", G_TYPE_INT, r_mask,
- "green_mask", G_TYPE_INT, g_mask,
- "blue_mask", G_TYPE_INT, b_mask,
- "endianness", G_TYPE_INT, endianness,
- NULL);
- break;
- }
- case V4L2_PIX_FMT_YUV420: /* I420/IYUV */
+ bpp = depth = 32;
+ endianness = G_BIG_ENDIAN;
+ r_mask = 0xff000000;
+ g_mask = 0x00ff0000;
+ b_mask = 0x0000ff00;
+ break;
+ case V4L2_PIX_FMT_BGR32:
+ bpp = depth = 32;
+ endianness = G_BIG_ENDIAN;
+ r_mask = 0x000000ff;
+ g_mask = 0x0000ff00;
+ b_mask = 0x00ff0000;
+ break;
+ default:
+ g_assert_not_reached();
+ break;
+ }
+ structure = gst_structure_new ("video/x-raw-rgb",
+ "bpp", G_TYPE_INT, bpp,
+ "depth", G_TYPE_INT, depth,
+ "red_mask", G_TYPE_INT, r_mask,
+ "green_mask", G_TYPE_INT, g_mask,
+ "blue_mask", G_TYPE_INT, b_mask,
+ "endianness", G_TYPE_INT, endianness,
+ NULL);
+ break;
+ }
+ case V4L2_PIX_FMT_GREY: /* 8 Greyscale */
+ case V4L2_PIX_FMT_YUV422P: /* 16 YVU422 planar */
+ case V4L2_PIX_FMT_YUV411P: /* 16 YVU411 planar */
+ case V4L2_PIX_FMT_NV12: /* 12 Y/CbCr 4:2:0 */
+ case V4L2_PIX_FMT_NV21: /* 12 Y/CrCb 4:2:0 */
+ case V4L2_PIX_FMT_YYUV: /* 16 YUV 4:2:2 */
+ case V4L2_PIX_FMT_HI240: /* 8 8-bit color */
+ /* FIXME: get correct fourccs here */
+ break;
+ case V4L2_PIX_FMT_YVU410:
+ case V4L2_PIX_FMT_YUV410:
+ case V4L2_PIX_FMT_YUV420: /* I420/IYUV */
+ case V4L2_PIX_FMT_YUYV:
+ case V4L2_PIX_FMT_YVU420:
+ case V4L2_PIX_FMT_UYVY:
+ case V4L2_PIX_FMT_Y41P: {
+ guint32 fcc = 0;
+
+ switch (fourcc) {
+ case V4L2_PIX_FMT_YVU410:
+ fcc = GST_MAKE_FOURCC('Y','V','U','9');
+ break;
+ case V4L2_PIX_FMT_YUV410:
+ fcc = GST_MAKE_FOURCC('Y','U','V','9');
+ break;
+ case V4L2_PIX_FMT_YUV420:
+ fcc = GST_MAKE_FOURCC('I','4','2','0');
+ break;
case V4L2_PIX_FMT_YUYV:
+ fcc = GST_MAKE_FOURCC('Y','U','Y','2');
+ break;
case V4L2_PIX_FMT_YVU420:
+ fcc = GST_MAKE_FOURCC('Y','V','1','2');
+ break;
case V4L2_PIX_FMT_UYVY:
- case V4L2_PIX_FMT_Y41P: {
- guint32 fcc = 0;
-
- switch (fourcc) {
- case V4L2_PIX_FMT_YUV420:
- fcc = GST_MAKE_FOURCC('I','4','2','0');
- break;
- case V4L2_PIX_FMT_YUYV:
- fcc = GST_MAKE_FOURCC('Y','U','Y','2');
- break;
- case V4L2_PIX_FMT_YVU420:
- fcc = GST_MAKE_FOURCC('Y','V','1','2');
- break;
- case V4L2_PIX_FMT_UYVY:
- fcc = GST_MAKE_FOURCC('U','Y','V','Y');
- break;
- case V4L2_PIX_FMT_Y41P:
- fcc = GST_MAKE_FOURCC('Y','4','1','P');
- break;
- default:
- g_assert_not_reached();
- break;
- }
-
- structure = gst_structure_new ("video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, fcc,
- NULL);
- break;
- }
+ fcc = GST_MAKE_FOURCC('U','Y','V','Y');
+ break;
+ case V4L2_PIX_FMT_Y41P:
+ fcc = GST_MAKE_FOURCC('Y','4','1','B');
+ break;
default:
- GST_DEBUG (
- "Unknown fourcc 0x%08x " GST_FOURCC_FORMAT ", trying default",
+ g_assert_not_reached();
+ break;
+ }
+ structure = gst_structure_new ("video/x-raw-yuv",
+ "format", GST_TYPE_FOURCC, fcc,
+ NULL);
+ break;
+ }
+ case V4L2_PIX_FMT_DV:
+ structure = gst_structure_new ("video/x-dv", "systemstream", G_TYPE_BOOLEAN, TRUE, NULL);
+ break;
+ case V4L2_PIX_FMT_MPEG: /* MPEG */
+ /* someone figure out the MPEG format used... */
+ break;
+ case V4L2_PIX_FMT_WNVA: /* Winnov hw compres */
+ break;
+ default:
+ GST_DEBUG ("Unknown fourcc 0x%08x " GST_FOURCC_FORMAT,
fourcc, GST_FOURCC_ARGS(fourcc));
-
- /* add the standard one */
- if (compressed) {
- guint32 print_format = GUINT32_FROM_LE(fourcc);
- gchar *print_format_str = (gchar *) &print_format, *string_format;
- gint i;
-
- for (i=0;i<4;i++) {
- print_format_str[i] =
- g_ascii_tolower(print_format_str[i]);
- }
- string_format = g_strdup_printf("video/%4.4s",
- print_format_str);
- structure = gst_structure_new (string_format, NULL);
- g_free(string_format);
- } else {
- structure = gst_structure_new ("video/x-raw-yuv",
- "format", GST_TYPE_FOURCC, fourcc, NULL);
- }
- break;
- }
-
+ break;
+ }
#if 0
gst_caps_set_simple (caps,
"width", G_TYPE_INT, width,
@@ -539,351 +553,389 @@ gst_v4l2src_v4l2fourcc_to_caps (guint32 fourcc,
"framerate", G_TYPE_DOUBLE, fps,
NULL);
#endif
+ return structure;
+}
+
+static struct v4l2_fmtdesc *
+gst_v4l2src_get_format_from_fourcc (GstV4l2Src *v4l2src, guint32 fourcc)
+{
+ struct v4l2_fmtdesc *fmt;
+ GSList *walk;
+
+ if (fourcc == 0)
+ return NULL;
+
+ walk = v4l2src->formats;
+ while (walk) {
+ fmt = (struct v4l2_fmtdesc *) walk->data;
+ if (fmt->pixelformat == fourcc)
+ return fmt;
+ /* special case for jpeg */
+ if ((fmt->pixelformat == V4L2_PIX_FMT_MJPEG && fourcc == V4L2_PIX_FMT_JPEG) ||
+ (fmt->pixelformat == V4L2_PIX_FMT_JPEG && fourcc == V4L2_PIX_FMT_MJPEG)) {
+ return fmt;
+ }
+ walk = g_slist_next (walk);
+ }
- return structure;
+ return NULL;
}
-#define gst_v4l2src_v4l2fourcc_to_caps_fixed(f, width, height, fps, c) \
- gst_v4l2src_v4l2fourcc_to_caps(f, \
- gst_props_entry_new("width", \
- GST_PROPS_INT(width)), \
- gst_props_entry_new("height", \
- GST_PROPS_INT(height)), \
- gst_props_entry_new("framerate", \
- GST_PROPS_FLOAT(fps)), \
- c)
-
-#define gst_v4l2src_v4l2fourcc_to_caps_range(f, min_w, max_w, min_h, max_h, c) \
- gst_v4l2src_v4l2fourcc_to_caps(f, \
- gst_props_entry_new("width", \
- GST_PROPS_INT_RANGE(min_w, max_w)), \
- gst_props_entry_new("height", \
- GST_PROPS_INT_RANGE(min_h, max_h)), \
- gst_props_entry_new("framerate", \
- GST_PROPS_FLOAT_RANGE(0, G_MAXFLOAT)), \
- c)
+static guint32
+gst_v4l2_fourcc_from_structure (GstStructure *structure)
+{
+ guint32 fourcc = 0;
+ const gchar *mimetype = gst_structure_get_name (structure);
+
+ if (!strcmp(mimetype, "video/x-raw-yuv") ||
+ !strcmp(mimetype, "video/x-raw-rgb")) {
+ if (!strcmp(mimetype, "video/x-raw-rgb"))
+ fourcc = GST_MAKE_FOURCC('R','G','B',' ');
+ else
+ gst_structure_get_fourcc (structure, "format", &fourcc);
+
+ switch (fourcc) {
+ case GST_MAKE_FOURCC('I','4','2','0'):
+ case GST_MAKE_FOURCC('I','Y','U','V'):
+ fourcc = V4L2_PIX_FMT_YUV420;
+ break;
+ case GST_MAKE_FOURCC('Y','U','Y','2'):
+ fourcc = V4L2_PIX_FMT_YUYV;
+ break;
+ case GST_MAKE_FOURCC('Y','4','1','P'):
+ fourcc = V4L2_PIX_FMT_Y41P;
+ break;
+ case GST_MAKE_FOURCC('U','Y','V','Y'):
+ fourcc = V4L2_PIX_FMT_UYVY;
+ break;
+ case GST_MAKE_FOURCC('Y','V','1','2'):
+ fourcc = V4L2_PIX_FMT_YVU420;
+ break;
+ case GST_MAKE_FOURCC('R','G','B',' '): {
+ gint depth, endianness, r_mask;
+
+ gst_structure_get_int (structure, "depth", &depth);
+ gst_structure_get_int (structure, "endianness", &endianness);
+ gst_structure_get_int (structure, "red_mask", &r_mask);
+
+ switch (depth) {
+ case 8:
+ fourcc = V4L2_PIX_FMT_RGB332;
+ break;
+ case 15:
+ fourcc = (endianness == G_LITTLE_ENDIAN) ?
+ V4L2_PIX_FMT_RGB555 :
+ V4L2_PIX_FMT_RGB555X;
+ break;
+ case 16:
+ fourcc = (endianness == G_LITTLE_ENDIAN) ?
+ V4L2_PIX_FMT_RGB565 :
+ V4L2_PIX_FMT_RGB565X;
+ break;
+ case 24:
+ fourcc = (r_mask == 0xFF) ?
+ V4L2_PIX_FMT_BGR24 :
+ V4L2_PIX_FMT_RGB24;
+ break;
+ case 32:
+ fourcc = (r_mask == 0xFF) ?
+ V4L2_PIX_FMT_BGR32 :
+ V4L2_PIX_FMT_RGB32;
+ break;
+ }
+ default:
+ break;
+ }
+ }
+ } else if (strcmp (mimetype, "video/x-dv") == 0) {
+ fourcc = V4L2_PIX_FMT_DV;
+ } else if (strcmp (mimetype, "video/x-jpeg") == 0) {
+ fourcc = V4L2_PIX_FMT_JPEG;
+ }
+
+ return fourcc;
+}
static struct v4l2_fmtdesc *
-gst_v4l2_caps_to_v4l2fourcc (GstV4l2Src *v4l2src,
- GstStructure *structure)
+gst_v4l2_caps_to_v4l2fourcc (GstV4l2Src *v4l2src, GstStructure *structure)
{
- gint i;
- guint32 fourcc = 0;
- struct v4l2_fmtdesc *end_fmt = NULL;
- const gchar *format = gst_structure_get_name (structure);
-
- if (!strcmp(format, "video/x-raw-yuv") ||
- !strcmp(format, "video/x-raw-rgb")) {
- if (!strcmp(format, "video/x-raw-rgb"))
- fourcc = GST_MAKE_FOURCC('R','G','B',' ');
- else
- gst_structure_get_fourcc (structure, "format", &fourcc);
-
- switch (fourcc) {
- case GST_MAKE_FOURCC('I','4','2','0'):
- case GST_MAKE_FOURCC('I','Y','U','V'):
- fourcc = V4L2_PIX_FMT_YUV420;
- break;
- case GST_MAKE_FOURCC('Y','U','Y','2'):
- fourcc = V4L2_PIX_FMT_YUYV;
- break;
- case GST_MAKE_FOURCC('Y','4','1','P'):
- fourcc = V4L2_PIX_FMT_Y41P;
- break;
- case GST_MAKE_FOURCC('U','Y','V','Y'):
- fourcc = V4L2_PIX_FMT_UYVY;
- break;
- case GST_MAKE_FOURCC('Y','V','1','2'):
- fourcc = V4L2_PIX_FMT_YVU420;
- break;
- case GST_MAKE_FOURCC('R','G','B',' '): {
- gint depth, endianness;
-
- gst_structure_get_int (structure, "depth", &depth);
- gst_structure_get_int (structure, "endianness", &endianness);
-
- switch (depth) {
- case 8:
- fourcc = V4L2_PIX_FMT_RGB332;
- break;
- case 15:
- fourcc = (endianness == G_LITTLE_ENDIAN) ?
- V4L2_PIX_FMT_RGB555 :
- V4L2_PIX_FMT_RGB555X;
- break;
- case 16:
- fourcc = (endianness == G_LITTLE_ENDIAN) ?
- V4L2_PIX_FMT_RGB565 :
- V4L2_PIX_FMT_RGB565X;
- break;
- case 24:
- fourcc = (endianness == G_LITTLE_ENDIAN) ?
- V4L2_PIX_FMT_BGR24 :
- V4L2_PIX_FMT_RGB24;
- break;
- case 32:
- fourcc = (endianness == G_LITTLE_ENDIAN) ?
- V4L2_PIX_FMT_BGR32 :
- V4L2_PIX_FMT_RGB32;
- break;
- }
- }
- default:
- break;
- }
- for (i=0;i<g_list_length(v4l2src->formats);i++) {
- struct v4l2_fmtdesc *fmt;
- fmt = (struct v4l2_fmtdesc *)
- g_list_nth_data(v4l2src->formats, i);
- if (fmt->pixelformat == fourcc) {
- end_fmt = fmt;
- break;
- }
- }
- } else {
- /* compressed */
- if (strncmp(format, "video/", 6))
- return NULL;
- format = &format[6];
- if (strlen(format) != 4)
- return NULL;
- fourcc = GST_MAKE_FOURCC(g_ascii_toupper(format[0]),
- g_ascii_toupper(format[1]),
- g_ascii_toupper(format[2]),
- g_ascii_toupper(format[3]));
-
- switch (fourcc) {
- case GST_MAKE_FOURCC('J','P','E','G'): {
- struct v4l2_fmtdesc *fmt;
- for (i=0;i<g_list_length(v4l2src->formats);i++) {
- fmt = g_list_nth_data(v4l2src->formats, i);
- if (fmt->pixelformat == V4L2_PIX_FMT_MJPEG ||
- fmt->pixelformat == V4L2_PIX_FMT_JPEG) {
- end_fmt = fmt;
- break;
- }
- }
- break;
- }
- default: {
- /* FIXME: check for fourcc in list */
- struct v4l2_fmtdesc *fmt;
- for (i=0;i<g_list_length(v4l2src->formats);i++) {
- fmt = g_list_nth_data(v4l2src->formats, i);
- if (fourcc == fmt->pixelformat) {
- end_fmt = fmt;
- break;
- }
- }
- break;
- }
- }
- }
+ return gst_v4l2src_get_format_from_fourcc (v4l2src,
+ gst_v4l2_fourcc_from_structure (structure));
+}
+
+static const GstCaps *
+gst_v4l2src_get_all_caps (void)
+{
+ static GstCaps *caps = NULL;
+
+ if (caps == NULL) {
+ GstStructure *structure;
+ guint i;
+
+ caps = gst_caps_new_empty ();
+ for (i = 0; i < GST_V4L2_FORMAT_COUNT; i++) {
+ structure = gst_v4l2src_v4l2fourcc_to_caps (gst_v4l2_formats[i]);
+ if (structure) {
+ gst_structure_set (structure,
+ "width", GST_TYPE_INT_RANGE, 1, 4096,
+ "height", GST_TYPE_INT_RANGE, 1, 4096,
+ "framerate", GST_TYPE_DOUBLE_RANGE, (double) 0, G_MAXDOUBLE,
+ NULL);
+
+ gst_caps_append_structure (caps, structure);
+ }
+ }
+ }
- return end_fmt;
+ return caps;
}
-#define gst_caps_get_int_range(caps, name, min, max) \
- gst_props_entry_get_int_range(gst_props_get_entry((caps)->properties, \
- name), \
- min, max)
+static GstCaps *
+gst_v4l2src_fixate (GstPad *pad, const GstCaps *const_caps)
+{
+ gint i;
+ GstStructure *structure;
+ G_GNUC_UNUSED gchar *caps_str;
+ gboolean changed = FALSE;
+ GstCaps *caps = gst_caps_copy (const_caps);
+
+ caps_str = gst_caps_to_string (caps);
+ GST_DEBUG_OBJECT (gst_pad_get_parent (pad), "fixating caps %s", caps_str);
+ g_free (caps_str);
+
+ for (i = 0; i < gst_caps_get_size (caps); i++) {
+ structure = gst_caps_get_structure (caps, i);
+ changed |= gst_caps_structure_fixate_field_nearest_int (structure, "width", G_MAXINT);
+ }
+ if (changed) return caps;
+ for (i = 0; i < gst_caps_get_size (caps); i++) {
+ structure = gst_caps_get_structure (caps, i);
+ changed |= gst_caps_structure_fixate_field_nearest_int (structure, "height", G_MAXINT);
+ }
+ if (changed) return caps;
+ gst_caps_free (caps);
+ return NULL;
+}
static GstPadLinkReturn
-gst_v4l2src_srcconnect (GstPad *pad,
- const GstCaps *vscapslist)
+gst_v4l2src_link (GstPad *pad, const GstCaps *caps)
{
- GstV4l2Src *v4l2src;
- GstV4l2Element *v4l2element;
- struct v4l2_fmtdesc *format;
- int w, h;
- GstStructure *structure;
-
- v4l2src = GST_V4L2SRC(gst_pad_get_parent (pad));
- v4l2element = GST_V4L2ELEMENT(v4l2src);
-
- structure = gst_caps_get_structure (vscapslist, 0);
-
- /* clean up if we still haven't cleaned up our previous
- * capture session */
- if (GST_V4L2_IS_ACTIVE(v4l2element)) {
- if (!gst_v4l2src_capture_deinit(v4l2src))
- return GST_PAD_LINK_REFUSED;
- } else if (!GST_V4L2_IS_OPEN(v4l2element)) {
- return GST_PAD_LINK_DELAYED;
- }
+ GstV4l2Src *v4l2src;
+ GstV4l2Element *v4l2element;
+ struct v4l2_fmtdesc *format;
+ int w, h;
+ GstStructure *structure;
+
+ v4l2src = GST_V4L2SRC(gst_pad_get_parent (pad));
+ v4l2element = GST_V4L2ELEMENT(v4l2src);
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ /* clean up if we still haven't cleaned up our previous
+ * capture session */
+ if (GST_V4L2_IS_ACTIVE(v4l2element)) {
+ if (!gst_v4l2src_capture_deinit(v4l2src))
+ return GST_PAD_LINK_REFUSED;
+ } else if (!GST_V4L2_IS_OPEN(v4l2element)) {
+ return GST_PAD_LINK_DELAYED;
+ }
- /* we want our own v4l2 type of fourcc codes */
- if (!(format = gst_v4l2_caps_to_v4l2fourcc(v4l2src, structure))) {
- return GST_PAD_LINK_REFUSED;
- }
+ /* we want our own v4l2 type of fourcc codes */
+ if (!(format = gst_v4l2_caps_to_v4l2fourcc(v4l2src, structure))) {
+ return GST_PAD_LINK_REFUSED;
+ }
- gst_structure_get_int (structure, "width", &w);
- gst_structure_get_int (structure, "height", &h);
+ gst_structure_get_int (structure, "width", &w);
+ gst_structure_get_int (structure, "height", &h);
- /* we found the pixelformat! - try it out */
- if (gst_v4l2src_set_capture(v4l2src, format, w, h)) {
- if (gst_v4l2src_capture_init(v4l2src)) {
- return GST_PAD_LINK_OK;
- }
- }
+ /* we found the pixelformat! - try it out */
+ if (gst_v4l2src_set_capture(v4l2src, format, w, h)) {
+ if (gst_v4l2src_capture_init(v4l2src)) {
+ return GST_PAD_LINK_OK;
+ }
+ }
- return GST_PAD_LINK_REFUSED;
+ return GST_PAD_LINK_REFUSED;
}
static GstCaps *
gst_v4l2src_getcaps (GstPad *pad)
{
- GstV4l2Src *v4l2src = GST_V4L2SRC(gst_pad_get_parent (pad));
- GstCaps *caps;
- gint i;
- struct v4l2_fmtdesc *format;
- int min_w, max_w, min_h, max_h;
-
- if (!GST_V4L2_IS_OPEN(GST_V4L2ELEMENT(v4l2src))) {
- return gst_caps_new_any ();
- }
-
- /* build our own capslist */
- caps = gst_caps_new_empty();
- for (i=0;i<g_list_length(v4l2src->formats);i++) {
- GstStructure *structure;
-
- format = g_list_nth_data(v4l2src->formats, i);
-
- /* get size delimiters */
- if (!gst_v4l2src_get_size_limits(v4l2src, format,
- &min_w, &max_w,
- &min_h, &max_h)) {
- continue;
- }
-
- /* add to list */
- structure = gst_v4l2src_v4l2fourcc_to_caps (format->pixelformat,
- format->flags & V4L2_FMT_FLAG_COMPRESSED);
-
- gst_structure_set (structure,
- "width", GST_TYPE_INT_RANGE, min_w, max_w,
- "height", GST_TYPE_INT_RANGE, min_h, max_h,
- "framerate", GST_TYPE_DOUBLE_RANGE, 0, G_MAXDOUBLE,
- NULL);
+ GstV4l2Src *v4l2src = GST_V4L2SRC(gst_pad_get_parent (pad));
+ GstCaps *caps;
+ struct v4l2_fmtdesc *format;
+ int min_w, max_w, min_h, max_h;
+ GSList *walk;
+ GstStructure *structure;
+
+ if (!GST_V4L2_IS_OPEN(GST_V4L2ELEMENT(v4l2src))) {
+ return gst_caps_new_any ();
+ }
- gst_caps_append_structure (caps, structure);
- }
+ /* build our own capslist */
+ caps = gst_caps_new_empty();
+ walk = v4l2src->formats;
+ while (walk) {
+ format = (struct v4l2_fmtdesc *) walk->data;
+ walk = g_slist_next (walk);
+
+ /* get size delimiters */
+ if (!gst_v4l2src_get_size_limits(v4l2src, format,
+ &min_w, &max_w, &min_h, &max_h)) {
+ continue;
+ }
+
+ /* add to list */
+ structure = gst_v4l2src_v4l2fourcc_to_caps (format->pixelformat);
+
+ if (structure) {
+ gst_structure_set (structure,
+ "width", GST_TYPE_INT_RANGE, min_w, max_w,
+ "height", GST_TYPE_INT_RANGE, min_h, max_h,
+ "framerate", GST_TYPE_DOUBLE_RANGE, (double) 0, G_MAXDOUBLE,
+ NULL);
+
+ gst_caps_append_structure (caps, structure);
+ }
+ }
- return caps;
+ return caps;
}
-
static GstData*
gst_v4l2src_get (GstPad *pad)
{
- GstV4l2Src *v4l2src;
- GstBuffer *buf;
- gint num;
- gdouble fps = 0;
+ GstV4l2Src *v4l2src;
+ GstBuffer *buf;
+ gint i, num = -1;
+ gdouble fps = 0;
- g_return_val_if_fail (pad != NULL, NULL);
+ v4l2src = GST_V4L2SRC (gst_pad_get_parent (pad));
- v4l2src = GST_V4L2SRC(gst_pad_get_parent (pad));
+ if (v4l2src->use_fixed_fps &&
+ (fps = gst_v4l2src_get_fps(v4l2src)) == 0) {
+ gst_element_error (GST_ELEMENT (v4l2src), "Could not get frame rate for element.");
+ return NULL;
+ }
- if (v4l2src->use_fixed_fps &&
- (fps = gst_v4l2src_get_fps(v4l2src)) == 0)
- return NULL;
+ if (v4l2src->need_writes > 0) {
+ /* use last frame */
+ buf = v4l2src->cached_buffer;
+ v4l2src->need_writes--;
+ } else {
+ GstClockTime time;
+ /* grab a frame from the device */
+ num = gst_v4l2src_grab_frame(v4l2src);
+ if (num == -1)
+ return NULL;
+
+ /* to check if v4l2 sets the correct time */
+ time = GST_TIMEVAL_TO_TIME(v4l2src->pool->buffers[num].buffer.timestamp);
+ if (v4l2src->clock && v4l2src->use_fixed_fps && time != 0) {
+ gboolean have_frame = FALSE;
+
+ do {
+ /* FIXME: isn't this v4l2 timestamp its own clock?! */
+ /* by default, we use the frame once */
+ v4l2src->need_writes = 1;
+
+ g_assert (time >= v4l2src->substract_time);
+ time -= v4l2src->substract_time;
+
+ /* first check whether we lost any frames according to the device */
+ if (v4l2src->last_seq != 0) {
+ if (v4l2src->pool->buffers[num].buffer.sequence - v4l2src->last_seq > 1) {
+ v4l2src->need_writes = v4l2src->pool->buffers[num].buffer.sequence - v4l2src->last_seq;
+ g_signal_emit(G_OBJECT(v4l2src), gst_v4l2src_signals[SIGNAL_FRAME_LOST], 0,
+ v4l2src->need_writes - 1);
+ }
+ }
+ v4l2src->last_seq = v4l2src->pool->buffers[num].buffer.sequence;
+
+ /* decide how often we're going to write the frame - set
+ * v4lmjpegsrc->need_writes to (that-1) and have_frame to TRUE
+ * if we're going to write it - else, just continue.
+ *
+ * time is generally the system or audio clock. Let's
+ * say that we've written one second of audio, then we want
+ * to have written one second of video too, within the same
+ * timeframe. This means that if time - begin_time = X sec,
+ * we want to have written X*fps frames. If we've written
+ * more - drop, if we've written less - dup... */
+ if (v4l2src->handled * (GST_SECOND/fps) - time > 1.5 * (GST_SECOND/fps)) {
+ /* yo dude, we've got too many frames here! Drop! DROP! */
+ v4l2src->need_writes--; /* -= (v4l2src->handled - (time / fps)); */
+ g_signal_emit(G_OBJECT(v4l2src), gst_v4l2src_signals[SIGNAL_FRAME_DROP], 0);
+ } else if (v4l2src->handled * (GST_SECOND/fps) - time < -1.5 * (GST_SECOND/fps)) {
+ /* this means we're lagging far behind */
+ v4l2src->need_writes++; /* += ((time / fps) - v4l2src->handled); */
+ g_signal_emit(G_OBJECT(v4l2src), gst_v4l2src_signals[SIGNAL_FRAME_INSERT], 0);
+ }
if (v4l2src->need_writes > 0) {
- /* use last frame */
- num = v4l2src->last_frame;
- v4l2src->need_writes--;
- } else if (v4l2src->clock && v4l2src->use_fixed_fps) {
- GstClockTime time;
- gboolean have_frame = FALSE;
-
- do {
- /* by default, we use the frame once */
- v4l2src->need_writes = 1;
-
- /* grab a frame from the device */
- if (!gst_v4l2src_grab_frame(v4l2src, &num))
- return NULL;
-
- v4l2src->last_frame = num;
- time = GST_TIMEVAL_TO_TIME(v4l2src->bufsettings.timestamp) -
- v4l2src->substract_time;
-
- /* first check whether we lost any frames according to the device */
- if (v4l2src->last_seq != 0) {
- if (v4l2src->bufsettings.sequence - v4l2src->last_seq > 1) {
- v4l2src->need_writes = v4l2src->bufsettings.sequence -
- v4l2src->last_seq;
- g_signal_emit(G_OBJECT(v4l2src),
- gst_v4l2src_signals[SIGNAL_FRAME_LOST],
- 0,
- v4l2src->bufsettings.sequence -
- v4l2src->last_seq - 1);
- }
- }
- v4l2src->last_seq = v4l2src->bufsettings.sequence;
-
- /* decide how often we're going to write the frame - set
- * v4lmjpegsrc->need_writes to (that-1) and have_frame to TRUE
- * if we're going to write it - else, just continue.
- *
- * time is generally the system or audio clock. Let's
- * say that we've written one second of audio, then we want
- * to have written one second of video too, within the same
- * timeframe. This means that if time - begin_time = X sec,
- * we want to have written X*fps frames. If we've written
- * more - drop, if we've written less - dup... */
- if (v4l2src->handled * (GST_SECOND/fps) - time >
- 1.5 * (GST_SECOND/fps)) {
- /* yo dude, we've got too many frames here! Drop! DROP! */
- v4l2src->need_writes--; /* -= (v4l2src->handled - (time / fps)); */
- g_signal_emit(G_OBJECT(v4l2src),
- gst_v4l2src_signals[SIGNAL_FRAME_DROP], 0);
- } else if (v4l2src->handled * (GST_SECOND/fps) - time <
- -1.5 * (GST_SECOND/fps)) {
- /* this means we're lagging far behind */
- v4l2src->need_writes++; /* += ((time / fps) - v4l2src->handled); */
- g_signal_emit(G_OBJECT(v4l2src),
- gst_v4l2src_signals[SIGNAL_FRAME_INSERT], 0);
- }
-
- if (v4l2src->need_writes > 0) {
- have_frame = TRUE;
- v4l2src->use_num_times[num] = v4l2src->need_writes;
- v4l2src->need_writes--;
- } else {
- gst_v4l2src_requeue_frame(v4l2src, num);
- }
- } while (!have_frame);
+ have_frame = TRUE;
+ v4l2src->need_writes--;
} else {
- /* grab a frame from the device */
- if (!gst_v4l2src_grab_frame(v4l2src, &num))
- return NULL;
-
- v4l2src->use_num_times[num] = 1;
+ if (!gst_v4l2src_queue_frame(v4l2src, num))
+ return NULL;
+ num = gst_v4l2src_grab_frame(v4l2src);
+ if (num == -1)
+ return NULL;
}
+ } while (!have_frame);
+ }
+
+ g_assert (num != -1);
+ GST_LOG_OBJECT (v4l2src, "buffer %d needs %d writes", num, v4l2src->need_writes + 1);
+ i = v4l2src->pool->buffers[num].buffer.bytesused > 0 ?
+ v4l2src->pool->buffers[num].buffer.bytesused :
+ v4l2src->pool->buffers[num].length;
+ /* check if this is the last buffer in the queue. If so do a memcpy to put it back asap
+ to avoid framedrops and deadlocks because of stupid elements */
+ if (gst_atomic_int_read (&v4l2src->pool->refcount) == v4l2src->breq.count) {
+ GST_LOG_OBJECT (v4l2src, "using memcpy'd buffer");
+ buf = gst_buffer_new_and_alloc (i);
+ memcpy (GST_BUFFER_DATA (buf), v4l2src->pool->buffers[num].start, i);
+ if (!gst_v4l2src_queue_frame(v4l2src, num)) {
+ gst_data_unref (GST_DATA (buf));
+ return NULL;
+ }
+ } else {
+ GST_LOG_OBJECT (v4l2src, "using mmap'd buffer");
+ buf = gst_buffer_new ();
+ GST_BUFFER_DATA (buf) = v4l2src->pool->buffers[num].start;
+ GST_BUFFER_SIZE (buf) = i;
+ GST_BUFFER_FREE_DATA_FUNC (buf) = gst_v4l2src_free_buffer;
+ GST_BUFFER_PRIVATE (buf) = &v4l2src->pool->buffers[num];
+ /* no need to be careful here, both are > 0, because the element uses them */
+ gst_atomic_int_inc (&v4l2src->pool->buffers[num].refcount);
+ gst_atomic_int_inc (&v4l2src->pool->refcount);
+ }
+ GST_BUFFER_MAXSIZE (buf) = v4l2src->pool->buffers[num].length;
+ if (v4l2src->use_fixed_fps) {
+ GST_BUFFER_TIMESTAMP (buf) = v4l2src->handled * GST_SECOND / fps;
+ GST_BUFFER_DURATION (buf) = GST_SECOND / fps;
+ } else {
+ /* calculate time based on our own clock */
+ GST_BUFFER_TIMESTAMP(buf) = GST_TIMEVAL_TO_TIME(v4l2src->pool->buffers[num].buffer.timestamp) -
+ v4l2src->substract_time;
+ }
+ if (v4l2src->need_writes > 0) {
+ v4l2src->cached_buffer = buf;
+ for (i = 0; i < v4l2src->need_writes; i++) {
+ gst_data_ref (GST_DATA (buf));
+ }
+ }
+ }
- buf = gst_buffer_new ();
- GST_BUFFER_DATA(buf) = gst_v4l2src_get_buffer(v4l2src, num);
- GST_BUFFER_SIZE(buf) = v4l2src->bufsettings.bytesused;
- GST_BUFFER_FLAG_SET(buf, GST_BUFFER_READONLY);
- if (v4l2src->use_fixed_fps)
- GST_BUFFER_TIMESTAMP(buf) = v4l2src->handled * GST_SECOND / fps;
- else /* calculate time based on our own clock */
- GST_BUFFER_TIMESTAMP(buf) = GST_TIMEVAL_TO_TIME(v4l2src->bufsettings.timestamp) -
- v4l2src->substract_time;
-
- v4l2src->handled++;
- g_signal_emit(G_OBJECT(v4l2src),
- gst_v4l2src_signals[SIGNAL_FRAME_CAPTURE], 0);
-
- return GST_DATA (buf);
-}
+ v4l2src->handled++;
+ g_signal_emit(G_OBJECT(v4l2src), gst_v4l2src_signals[SIGNAL_FRAME_CAPTURE], 0);
+ return GST_DATA (buf);
+}
static void
gst_v4l2src_set_property (GObject *object,
@@ -972,7 +1024,6 @@ gst_v4l2src_change_state (GstElement *element)
case GST_STATE_READY_TO_PAUSED:
v4l2src->handled = 0;
v4l2src->need_writes = 0;
- v4l2src->last_frame = 0;
v4l2src->substract_time = 0;
/* buffer setup moved to capsnego */
break;
@@ -1013,60 +1064,3 @@ gst_v4l2src_set_clock (GstElement *element,
GST_V4L2SRC(element)->clock = clock;
}
-
-#if 0
-static GstBuffer*
-gst_v4l2src_buffer_new (GstBufferPool *pool,
- guint64 offset,
- guint size,
- gpointer user_data)
-{
- GstBuffer *buffer;
- GstV4l2Src *v4l2src = GST_V4L2SRC(user_data);
-
- if (!GST_V4L2_IS_ACTIVE(GST_V4L2ELEMENT(v4l2src)))
- return NULL;
-
- buffer = gst_buffer_new();
- if (!buffer)
- return NULL;
-
- /* TODO: add interlacing info to buffer as metadata
- * (height>288 or 240 = topfieldfirst, else noninterlaced) */
- GST_BUFFER_MAXSIZE(buffer) = v4l2src->bufsettings.length;
- GST_BUFFER_FLAG_SET(buffer, GST_BUFFER_DONTFREE);
-
- return buffer;
-}
-#endif
-
-#if 0
-static void
-gst_v4l2src_buffer_free (GstBufferPool *pool,
- GstBuffer *buf,
- gpointer user_data)
-{
- GstV4l2Src *v4l2src = GST_V4L2SRC(user_data);
- int n;
-
- if (gst_element_get_state(GST_ELEMENT(v4l2src)) != GST_STATE_PLAYING)
- return; /* we've already cleaned up ourselves */
-
- for (n=0;n<v4l2src->breq.count;n++)
- if (GST_BUFFER_DATA(buf) == gst_v4l2src_get_buffer(v4l2src, n)) {
- v4l2src->use_num_times[n]--;
- if (v4l2src->use_num_times[n] <= 0) {
- gst_v4l2src_requeue_frame(v4l2src, n);
- }
- break;
- }
-
- if (n == v4l2src->breq.count)
- gst_element_error(GST_ELEMENT(v4l2src),
- "Couldn\'t find the buffer");
-
- /* free the buffer itself */
- gst_buffer_default_free(buf);
-}
-#endif
-
diff --git a/sys/v4l2/gstv4l2src.h b/sys/v4l2/gstv4l2src.h
index bae60900..ca528d1e 100644
--- a/sys/v4l2/gstv4l2src.h
+++ b/sys/v4l2/gstv4l2src.h
@@ -22,6 +22,10 @@
#include <gstv4l2element.h>
+GST_DEBUG_CATEGORY_EXTERN (v4l2src_debug);
+
+#define GST_V4L2_MAX_BUFFERS 16
+#define GST_V4L2_MIN_BUFFERS 2
#define GST_TYPE_V4L2SRC \
(gst_v4l2src_get_type())
@@ -34,9 +38,26 @@
#define GST_IS_V4L2SRC_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_V4L2SRC))
+typedef struct _GstV4l2BufferPool GstV4l2BufferPool;
+typedef struct _GstV4l2Buffer GstV4l2Buffer;
+typedef struct _GstV4l2Src GstV4l2Src;
+typedef struct _GstV4l2SrcClass GstV4l2SrcClass;
+
+/* global info */
+struct _GstV4l2BufferPool {
+ GstAtomicInt refcount; /* number of users: 1 for every buffer, 1 for element */
+ gint video_fd;
+ guint buffer_count;
+ GstV4l2Buffer * buffers;
+};
-typedef struct _GstV4l2Src GstV4l2Src;
-typedef struct _GstV4l2SrcClass GstV4l2SrcClass;
+struct _GstV4l2Buffer {
+ struct v4l2_buffer buffer;
+ guint8 * start;
+ guint length;
+ GstAtomicInt refcount; /* add 1 if in use by element, add 1 if in use by GstBuffer */
+ GstV4l2BufferPool * pool;
+};
struct _GstV4l2Src {
GstV4l2Element v4l2element;
@@ -45,28 +66,21 @@ struct _GstV4l2Src {
GstPad *srcpad;
/* internal lists */
- GList /*v4l2_fmtdesc*/ *formats, *format_list; /* list of available capture formats */
+ GSList *formats; /* list of available capture formats */
+
+ /* buffers */
+ GstV4l2BufferPool *pool;
- /* buffer properties */
- struct v4l2_buffer bufsettings;
struct v4l2_requestbuffers breq;
struct v4l2_format format;
- /* num of queued frames and some GThread stuff
- * to wait if there's not enough */
- gint8 *frame_queue_state;
- GMutex *mutex_queue_state;
- GCond *cond_queue_state;
- gint num_queued;
- gint queue_frame;
-
/* True if we want to stop */
gboolean quit;
/* A/V sync... frame counter and internal cache */
gulong handled;
- gint last_frame;
gint need_writes;
+ GstBuffer *cached_buffer;
gulong last_seq;
/* clock */
@@ -75,9 +89,6 @@ struct _GstV4l2Src {
/* time to substract from clock time to get back to timestamp */
GstClockTime substract_time;
- /* how often are we going to use each frame? */
- gint *use_num_times;
-
/* how are we going to push buffers? */
gboolean use_fixed_fps;
};
diff --git a/sys/v4l2/gstv4l2tuner.c b/sys/v4l2/gstv4l2tuner.c
index 9cde6daa..f4f2cd64 100644
--- a/sys/v4l2/gstv4l2tuner.c
+++ b/sys/v4l2/gstv4l2tuner.c
@@ -40,14 +40,14 @@ static const GList *
gst_v4l2_tuner_list_channels (GstTuner *mixer);
static void gst_v4l2_tuner_set_channel (GstTuner *mixer,
GstTunerChannel *channel);
-static const GstTunerChannel *
+static GstTunerChannel *
gst_v4l2_tuner_get_channel (GstTuner *mixer);
static const GList *
gst_v4l2_tuner_list_norms (GstTuner *mixer);
static void gst_v4l2_tuner_set_norm (GstTuner *mixer,
GstTunerNorm *norm);
-static const GstTunerNorm *
+static GstTunerNorm *
gst_v4l2_tuner_get_norm (GstTuner *mixer);
static void gst_v4l2_tuner_set_frequency (GstTuner *mixer,
@@ -205,13 +205,15 @@ gst_v4l2_tuner_set_channel (GstTuner *mixer,
g_return_if_fail (gst_v4l2_tuner_contains_channel (v4l2element, v4l2channel));
/* ... or output, if we're a sink... */
- if (gst_v4l2_tuner_is_sink (v4l2element))
- gst_v4l2_set_output (v4l2element, v4l2channel->index);
- else
- gst_v4l2_set_input (v4l2element, v4l2channel->index);
+ if (gst_v4l2_tuner_is_sink (v4l2element) ?
+ gst_v4l2_set_output (v4l2element, v4l2channel->index) :
+ gst_v4l2_set_input (v4l2element, v4l2channel->index)) {
+ gst_tuner_channel_changed (mixer, channel);
+ g_object_notify (G_OBJECT (v4l2element), "channel");
+ }
}
-static const GstTunerChannel *
+static GstTunerChannel *
gst_v4l2_tuner_get_channel (GstTuner *mixer)
{
GstV4l2Element *v4l2element = GST_V4L2ELEMENT (mixer);
@@ -229,7 +231,7 @@ gst_v4l2_tuner_get_channel (GstTuner *mixer)
for (item = v4l2element->channels; item != NULL; item = item->next) {
if (channel == GST_V4L2_TUNER_CHANNEL (item->data)->index)
- return (const GstTunerChannel *) item->data;
+ return (GstTunerChannel *) item->data;
}
return NULL;
@@ -265,10 +267,13 @@ gst_v4l2_tuner_set_norm (GstTuner *mixer,
g_return_if_fail (GST_V4L2_IS_OPEN (v4l2element));
g_return_if_fail (gst_v4l2_tuner_contains_norm (v4l2element, v4l2norm));
- gst_v4l2_set_norm (v4l2element, v4l2norm->index);
+ if (gst_v4l2_set_norm (v4l2element, v4l2norm->index)) {
+ gst_tuner_norm_changed (mixer, norm);
+ g_object_notify (G_OBJECT (v4l2element), "norm");
+ }
}
-static const GstTunerNorm *
+static GstTunerNorm *
gst_v4l2_tuner_get_norm (GstTuner *mixer)
{
GstV4l2Element *v4l2element = GST_V4L2ELEMENT (mixer);
@@ -282,7 +287,7 @@ gst_v4l2_tuner_get_norm (GstTuner *mixer)
for (item = v4l2element->norms; item != NULL; item = item->next) {
if (norm == GST_V4L2_TUNER_NORM (item->data)->index)
- return (const GstTunerNorm *) item->data;
+ return (GstTunerNorm *) item->data;
}
return NULL;
@@ -306,7 +311,10 @@ gst_v4l2_tuner_set_frequency (GstTuner *mixer,
gst_v4l2_get_input (v4l2element, &chan);
if (chan == GST_V4L2_TUNER_CHANNEL (channel)->index &&
GST_TUNER_CHANNEL_HAS_FLAG (channel, GST_TUNER_CHANNEL_FREQUENCY)) {
- gst_v4l2_set_frequency (v4l2element, v4l2channel->tuner, frequency);
+ if (gst_v4l2_set_frequency (v4l2element, v4l2channel->tuner, frequency)) {
+ gst_tuner_frequency_changed (mixer, channel, frequency);
+ g_object_notify (G_OBJECT (v4l2element), "frequency");
+ }
}
}
diff --git a/sys/v4l2/v4l2_calls.c b/sys/v4l2/v4l2_calls.c
index ee6e00dd..8e53dae5 100644
--- a/sys/v4l2/v4l2_calls.c
+++ b/sys/v4l2/v4l2_calls.c
@@ -342,6 +342,49 @@ gst_v4l2_empty_lists (GstV4l2Element *v4l2element)
v4l2element->colors = NULL;
}
+/* FIXME: move this stuff to gstv4l2tuner.c? */
+
+static void
+gst_v4l2_set_defaults (GstV4l2Element *v4l2element)
+{
+ GstTunerNorm *norm = NULL;
+ GstTunerChannel *channel = NULL;
+ GstTuner *tuner = GST_TUNER (v4l2element);
+
+ if (v4l2element->norm)
+ norm = gst_tuner_find_norm_by_name (tuner, v4l2element->norm);
+ if (norm) {
+ gst_tuner_set_norm (tuner, norm);
+ } else {
+ norm = GST_TUNER_NORM (gst_tuner_get_norm (GST_TUNER (v4l2element)));
+ v4l2element->norm = g_strdup (norm->label);
+ gst_tuner_norm_changed (tuner, norm);
+ g_object_notify (G_OBJECT (v4l2element), "norm");
+ }
+
+ if (v4l2element->channel)
+ channel = gst_tuner_find_channel_by_name (tuner, v4l2element->channel);
+ if (channel) {
+ gst_tuner_set_channel (tuner, channel);
+ } else {
+ channel = GST_TUNER_CHANNEL (gst_tuner_get_channel (GST_TUNER (v4l2element)));
+ v4l2element->channel = g_strdup (channel->label);
+ gst_tuner_channel_changed (tuner, channel);
+ g_object_notify (G_OBJECT (v4l2element), "channel");
+ }
+ if (v4l2element->frequency != 0) {
+ gst_tuner_set_frequency (tuner, channel, v4l2element->frequency);
+ } else {
+ v4l2element->frequency = gst_tuner_get_frequency (tuner, channel);
+ if (v4l2element->frequency == 0) {
+ /* guess */
+ gst_tuner_set_frequency (tuner, channel, 1000);
+ } else {
+ g_object_notify (G_OBJECT (v4l2element), "frequency");
+ }
+ }
+}
+
/******************************************************
* gst_v4l2_open():
@@ -387,7 +430,10 @@ gst_v4l2_open (GstV4l2Element *v4l2element)
if (!gst_v4l2_fill_lists(v4l2element))
goto error;
- gst_info("Opened device '%s' (%s) successfully\n",
+ /* set defaults */
+ gst_v4l2_set_defaults (v4l2element);
+
+ GST_INFO_OBJECT (v4l2element, "Opened device '%s' (%s) successfully\n",
v4l2element->vcap.card, v4l2element->device);
return TRUE;
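
Since gst_v4l2_set_defaults() calls g_object_notify() for "norm", "channel" and "frequency", an application can simply watch the GObject notify signals. A minimal sketch, assuming the norm and channel properties are plain strings (they are stored with g_strdup() above); the property values used are illustrative only:

#include <gst/gst.h>

static void
norm_notify_cb (GObject *obj, GParamSpec *pspec, gpointer user_data)
{
  gchar *norm = NULL;

  g_object_get (obj, "norm", &norm, NULL);
  g_print ("norm is now %s\n", norm);
  g_free (norm);
}

static void
watch_tuner_props (GstElement *v4l2element)
{
  /* gst_v4l2_set_defaults() notifies these as soon as the device is opened */
  g_signal_connect (v4l2element, "notify::norm",
      G_CALLBACK (norm_notify_cb), NULL);
  /* the norm and channel names depend on the device's inputs and norms */
  g_object_set (v4l2element, "norm", "PAL", "channel", "Television", NULL);
}
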
diff --git a/sys/v4l2/v4l2src_calls.c b/sys/v4l2/v4l2src_calls.c
index 576b0c0b..3a55a231 100644
--- a/sys/v4l2/v4l2src_calls.c
+++ b/sys/v4l2/v4l2src_calls.c
@@ -31,26 +31,24 @@
#include <errno.h>
#include "v4l2src_calls.h"
#include <sys/time.h>
+#include <unistd.h>
+
+#define GST_CAT_DEFAULT v4l2src_debug
+
+/* lalala... */
+#define GST_V4L2_SET_ACTIVE(element) (element)->buffer = GINT_TO_POINTER (-1)
+#define GST_V4L2_SET_INACTIVE(element) (element)->buffer = NULL
#define DEBUG(format, args...) \
- GST_DEBUG_OBJECT (\
- GST_ELEMENT(v4l2src), \
+ GST_CAT_DEBUG_OBJECT (\
+ v4l2src_debug, v4l2src, \
"V4L2SRC: " format, ##args)
-#define MIN_BUFFERS_QUEUED 2
-
/* On some systems MAP_FAILED seems to be missing */
#ifndef MAP_FAILED
#define MAP_FAILED ( (caddr_t) -1 )
#endif
-enum {
- QUEUE_STATE_ERROR = -1,
- QUEUE_STATE_READY_FOR_QUEUE,
- QUEUE_STATE_QUEUED,
- QUEUE_STATE_SYNCED,
-};
-
/******************************************************
* gst_v4l2src_fill_format_list():
* create list of supported capture formats
@@ -60,54 +58,49 @@ enum {
gboolean
gst_v4l2src_fill_format_list (GstV4l2Src *v4l2src)
{
- gint n;
-
- DEBUG("getting src format enumerations");
-
- /* format enumeration */
- for (n=0;;n++) {
- struct v4l2_fmtdesc format, *fmtptr;
- format.index = n;
- format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_ENUM_FMT, &format) < 0) {
- if (errno == EINVAL)
- break; /* end of enumeration */
- else {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Failed to get no. %d in pixelformat enumeration for %s: %s",
- n, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
- return FALSE;
- }
- }
- fmtptr = g_malloc(sizeof(format));
- memcpy(fmtptr, &format, sizeof(format));
- v4l2src->formats = g_list_append(v4l2src->formats, fmtptr);
-
- v4l2src->format_list = g_list_append(v4l2src->format_list, fmtptr->description);
- }
-
- return TRUE;
+ gint n;
+ struct v4l2_fmtdesc *format;
+
+ GST_DEBUG_OBJECT (v4l2src, "getting src format enumerations");
+
+ /* format enumeration */
+ for (n=0;;n++) {
+ format = g_new (struct v4l2_fmtdesc, 1);
+ format->index = n;
+ format->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_ENUM_FMT, format) < 0) {
+ if (errno == EINVAL) {
+ break; /* end of enumeration */
+ } else {
+ gst_element_error(GST_ELEMENT(v4l2src),
+ "Failed to get no. %d in pixelformat enumeration for %s: %s",
+ n, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
+ g_free (format);
+ return FALSE;
+ }
+ }
+ GST_LOG_OBJECT (v4l2src, "got format"GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (format->pixelformat));
+ v4l2src->formats = g_slist_prepend (v4l2src->formats, format);
+ }
+
+ return TRUE;
}
/******************************************************
- * gst_v4l2src_empty_format_list():
+ * gst_v4l2src_clear_format_list():
* free list of supported capture formats
* return value: TRUE on success, FALSE on error
******************************************************/
gboolean
-gst_v4l2src_empty_format_list (GstV4l2Src *v4l2src)
+gst_v4l2src_clear_format_list (GstV4l2Src *v4l2src)
{
- while (g_list_length(v4l2src->formats) > 0) {
- gpointer data = g_list_nth_data(v4l2src->formats, 0);
- v4l2src->formats = g_list_remove(v4l2src->formats, data);
- g_free(data);
- }
- g_list_free(v4l2src->format_list);
- v4l2src->format_list = NULL;
+ g_slist_foreach (v4l2src->formats, (GFunc) g_free, NULL);
+ g_slist_free (v4l2src->formats);
- return TRUE;
+ return TRUE;
}
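
The format list is now built with a plain VIDIOC_ENUM_FMT loop. A self-contained sketch of that loop, printing the formats instead of collecting them in a GSList:

#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int
print_capture_formats (int video_fd)
{
  struct v4l2_fmtdesc fmt;
  int n;

  for (n = 0; ; n++) {
    memset (&fmt, 0, sizeof (fmt));
    fmt.index = n;
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if (ioctl (video_fd, VIDIOC_ENUM_FMT, &fmt) < 0) {
      if (errno == EINVAL)
        return 0;                       /* end of enumeration */
      perror ("VIDIOC_ENUM_FMT");
      return -1;
    }
    printf ("format %d: %.32s (fourcc 0x%08x)\n",
        n, (char *) fmt.description, fmt.pixelformat);
  }
}
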
@@ -117,65 +110,47 @@ gst_v4l2src_empty_format_list (GstV4l2Src *v4l2src)
* return value: TRUE on success, FALSE on error
******************************************************/
-static gboolean
+gboolean
gst_v4l2src_queue_frame (GstV4l2Src *v4l2src,
- gint num)
+ guint i)
{
- DEBUG("queueing frame %d", num);
+ GST_LOG_OBJECT (v4l2src, "queueing frame %u", i);
- if (v4l2src->frame_queue_state[num] != QUEUE_STATE_READY_FOR_QUEUE) {
- return FALSE;
- }
+ if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_QBUF, &v4l2src->pool->buffers[i].buffer) < 0) {
+ gst_element_error(GST_ELEMENT(v4l2src), "Error queueing buffer %u on device %s: %s",
+ i, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
+ return FALSE;
+ }
- v4l2src->bufsettings.index = num;
- if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd,
- VIDIOC_QBUF, &v4l2src->bufsettings) < 0) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Error queueing buffer %d on device %s: %s",
- num, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
- return FALSE;
- }
-
- v4l2src->frame_queue_state[num] = QUEUE_STATE_QUEUED;
- v4l2src->num_queued++;
-
- return TRUE;
+ return TRUE;
}
/******************************************************
- * gst_v4l2src_sync_next_frame():
- * sync on a frame for capturing
+ * gst_v4l2src_grab_frame ():
+ * grab a frame for capturing
* return value: TRUE on success, FALSE on error
******************************************************/
-static gboolean
-gst_v4l2src_sync_next_frame (GstV4l2Src *v4l2src,
- gint *num)
+gint
+gst_v4l2src_grab_frame (GstV4l2Src *v4l2src)
{
- if (v4l2src->num_queued <= 0) {
- return FALSE;
- }
-
- while (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd,
- VIDIOC_DQBUF, &v4l2src->bufsettings) < 0) {
- /* if the sync() got interrupted, we can retry */
- if (errno != EINTR) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Error syncing on a buffer on device %s: %s",
- GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
- return FALSE;
- }
- DEBUG("Sync got interrupted");
- }
-
- DEBUG("synced on frame %d", v4l2src->bufsettings.index);
- *num = v4l2src->bufsettings.index;
-
- v4l2src->frame_queue_state[*num] = QUEUE_STATE_SYNCED;
- v4l2src->num_queued--;
-
- return TRUE;
+ struct v4l2_buffer buffer;
+
+ buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ while (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_DQBUF, &buffer) < 0) {
+ /* if the sync() got interrupted, we can retry */
+ if (errno != EINTR) {
+ gst_element_error(GST_ELEMENT(v4l2src), "Error syncing on a buffer on device %s: %s",
+ GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
+ return -1;
+ }
+ GST_DEBUG_OBJECT (v4l2src, "grab got interrupted");
+ }
+
+ GST_LOG_OBJECT (v4l2src, "grabbed frame %d", buffer.index);
+
+ return buffer.index;
}
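
gst_v4l2src_grab_frame() is now a thin wrapper around VIDIOC_DQBUF with an EINTR retry. A standalone sketch of that dequeue/re-queue cycle; the buffer memory type is assumed to be the mmap streaming I/O set up elsewhere in this commit:

#include <errno.h>
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

/* returns the index of the dequeued buffer, or -1 on error */
static int
dequeue_one_frame (int video_fd, struct v4l2_buffer *out)
{
  memset (out, 0, sizeof (*out));
  out->type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  out->memory = V4L2_MEMORY_MMAP;
  while (ioctl (video_fd, VIDIOC_DQBUF, out) < 0) {
    if (errno != EINTR)         /* only a signal interruption is retried */
      return -1;
  }
  return (int) out->index;
}

/* hand the buffer back to the driver once its data has been consumed */
static int
requeue_frame (int video_fd, struct v4l2_buffer *buf)
{
  return ioctl (video_fd, VIDIOC_QBUF, buf);
}
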
@@ -226,6 +201,7 @@ gst_v4l2src_set_capture (GstV4l2Src *v4l2src,
v4l2src->format.fmt.pix.width = width;
v4l2src->format.fmt.pix.height = height;
v4l2src->format.fmt.pix.pixelformat = fmt->pixelformat;
+ v4l2src->format.fmt.pix.field = V4L2_FIELD_INTERLACED;
v4l2src->format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_S_FMT, &v4l2src->format) < 0) {
@@ -250,88 +226,81 @@ gst_v4l2src_set_capture (GstV4l2Src *v4l2src,
gboolean
gst_v4l2src_capture_init (GstV4l2Src *v4l2src)
{
- gint n;
- gchar *desc = NULL;
- struct v4l2_buffer buf;
-
- DEBUG("initting the capture system");
-
- GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
- GST_V4L2_CHECK_NOT_ACTIVE(GST_V4L2ELEMENT(v4l2src));
-
- /* request buffer info */
- if (v4l2src->breq.count < MIN_BUFFERS_QUEUED) {
- v4l2src->breq.count = MIN_BUFFERS_QUEUED;
- }
- v4l2src->breq.type = v4l2src->format.type;
- v4l2src->breq.memory = V4L2_MEMORY_MMAP;
- if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd,
- VIDIOC_REQBUFS, &v4l2src->breq) < 0) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Error requesting buffers (%d) for %s: %s",
- v4l2src->breq.count, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
- return FALSE;
- }
-
- if (v4l2src->breq.count < MIN_BUFFERS_QUEUED) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Too little buffers. We got %d, we want at least %d",
- v4l2src->breq.count, MIN_BUFFERS_QUEUED);
- return FALSE;
- }
- v4l2src->bufsettings.type = v4l2src->format.type;
-
- for (n=0;n<g_list_length(v4l2src->formats);n++) {
- struct v4l2_fmtdesc *fmt = (struct v4l2_fmtdesc *) g_list_nth_data(v4l2src->formats, n);
- if (v4l2src->format.fmt.pix.pixelformat == fmt->pixelformat) {
- desc = fmt->description;
- break;
- }
- }
- gst_info("Got %d buffers (%s) of size %d KB\n",
- v4l2src->breq.count, desc, v4l2src->format.fmt.pix.sizeimage/1024);
-
- /* keep track of queued buffers */
- v4l2src->frame_queue_state = (gint8 *)
- g_malloc(sizeof(gint8) * v4l2src->breq.count);
-
- /* track how often to use each frame */
- v4l2src->use_num_times = (gint *)
- g_malloc(sizeof(gint) * v4l2src->breq.count);
-
- /* lock for the frame_state */
- v4l2src->mutex_queue_state = g_mutex_new();
- v4l2src->cond_queue_state = g_cond_new();
-
- /* Map the buffers */
- GST_V4L2ELEMENT(v4l2src)->buffer = (guint8 **)
- g_malloc(sizeof(guint8 *) * v4l2src->breq.count);
- for (n=0;n<v4l2src->breq.count;n++) {
- buf.index = n;
- buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd,
- VIDIOC_QUERYBUF, &buf) < 0) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Failed to get buffer (%d) properties: %s",
- n, g_strerror(errno));
- gst_v4l2src_capture_deinit(v4l2src);
- return FALSE;
- }
- GST_V4L2ELEMENT(v4l2src)->buffer[n] = mmap(0,
- buf.length, PROT_READ|PROT_WRITE, MAP_SHARED,
- GST_V4L2ELEMENT(v4l2src)->video_fd, buf.m.offset);
- if (GST_V4L2ELEMENT(v4l2src)->buffer[n] == MAP_FAILED) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Error mapping video buffer (%d) on device %s: %s",
- n, GST_V4L2ELEMENT(v4l2src)->device,
- g_strerror(errno));
- GST_V4L2ELEMENT(v4l2src)->buffer[n] = NULL;
- gst_v4l2src_capture_deinit(v4l2src);
- return FALSE;
- }
- }
-
- return TRUE;
+ gint n;
+ guint buffers;
+
+ GST_DEBUG_OBJECT (v4l2src, "initting the capture system");
+
+ GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
+ GST_V4L2_CHECK_NOT_ACTIVE(GST_V4L2ELEMENT(v4l2src));
+
+ /* request buffer info */
+ buffers = v4l2src->breq.count;
+ if (v4l2src->breq.count > GST_V4L2_MAX_BUFFERS) {
+ v4l2src->breq.count = GST_V4L2_MAX_BUFFERS;
+ }
+ if (v4l2src->breq.count < GST_V4L2_MIN_BUFFERS) {
+ v4l2src->breq.count = GST_V4L2_MIN_BUFFERS;
+ }
+ v4l2src->breq.type = v4l2src->format.type;
+ v4l2src->breq.memory = V4L2_MEMORY_MMAP;
+ if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_REQBUFS, &v4l2src->breq) < 0) {
+ gst_element_error(GST_ELEMENT(v4l2src), "Error requesting buffers (%d) for %s: %s",
+ v4l2src->breq.count, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
+ return FALSE;
+ }
+
+ if (v4l2src->breq.count < GST_V4L2_MIN_BUFFERS) {
+ gst_element_error(GST_ELEMENT(v4l2src), "Too little buffers. We got %d, we want at least %d",
+ v4l2src->breq.count, GST_V4L2_MIN_BUFFERS);
+ v4l2src->breq.count = buffers;
+ return FALSE;
+ }
+ if (v4l2src->breq.count != buffers)
+ g_object_notify (G_OBJECT (v4l2src), "num_buffers");
+
+ GST_INFO_OBJECT (v4l2src, "Got %d buffers ("GST_FOURCC_FORMAT") of size %d KB\n",
+ v4l2src->breq.count, GST_FOURCC_ARGS (v4l2src->format.fmt.pix.pixelformat),
+ v4l2src->format.fmt.pix.sizeimage / 1024);
+
+ /* Map the buffers */
+ v4l2src->pool = g_new (GstV4l2BufferPool, 1);
+ gst_atomic_int_init (&v4l2src->pool->refcount, 1);
+ v4l2src->pool->video_fd = GST_V4L2ELEMENT (v4l2src)->video_fd;
+ v4l2src->pool->buffer_count = v4l2src->breq.count;
+ v4l2src->pool->buffers = g_new0 (GstV4l2Buffer, v4l2src->breq.count);
+
+ for (n = 0; n < v4l2src->breq.count; n++) {
+ GstV4l2Buffer *buffer = &v4l2src->pool->buffers[n];
+
+ gst_atomic_int_init (&buffer->refcount, 1);
+ buffer->pool = v4l2src->pool;
+ buffer->buffer.index = n;
+ buffer->buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_QUERYBUF, &buffer->buffer) < 0) {
+ gst_element_error(GST_ELEMENT(v4l2src), "Failed to get buffer (%d) properties: %s",
+ n, g_strerror(errno));
+ gst_v4l2src_capture_deinit(v4l2src);
+ return FALSE;
+ }
+ buffer->start = mmap (0, buffer->buffer.length, PROT_READ|PROT_WRITE, MAP_SHARED,
+ GST_V4L2ELEMENT(v4l2src)->video_fd, buffer->buffer.m.offset);
+ if (buffer->start == MAP_FAILED) {
+ gst_element_error(GST_ELEMENT(v4l2src), "Error mapping video buffer (%d) on device %s: %s",
+ n, GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
+ buffer->start = 0;
+ gst_v4l2src_capture_deinit (v4l2src);
+ return FALSE;
+ }
+ buffer->length = buffer->buffer.length;
+ if (!gst_v4l2src_queue_frame(v4l2src, n)) {
+ gst_v4l2src_capture_deinit (v4l2src);
+ return FALSE;
+ }
+ }
+
+ GST_V4L2_SET_ACTIVE(GST_V4L2ELEMENT (v4l2src));
+ return TRUE;
}
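
The capture_init() rewrite boils down to the standard V4L2 mmap setup: VIDIOC_REQBUFS, VIDIOC_QUERYBUF per buffer, mmap() and an initial VIDIOC_QBUF. A condensed, self-contained sketch with illustrative names and buffer handling:

#include <stddef.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>

struct mapped_buffer { void *start; size_t length; };

/* returns the number of buffers actually mapped, or -1 on error */
static int
map_capture_buffers (int video_fd, unsigned int want,
                     struct mapped_buffer *out, unsigned int max_out)
{
  struct v4l2_requestbuffers req;
  unsigned int i;

  memset (&req, 0, sizeof (req));
  req.count = want;
  req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  req.memory = V4L2_MEMORY_MMAP;
  if (ioctl (video_fd, VIDIOC_REQBUFS, &req) < 0)
    return -1;

  for (i = 0; i < req.count && i < max_out; i++) {
    struct v4l2_buffer buf;

    memset (&buf, 0, sizeof (buf));
    buf.index = i;
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    if (ioctl (video_fd, VIDIOC_QUERYBUF, &buf) < 0)
      return -1;
    out[i].length = buf.length;
    out[i].start = mmap (NULL, buf.length, PROT_READ | PROT_WRITE,
        MAP_SHARED, video_fd, buf.m.offset);
    if (out[i].start == MAP_FAILED)
      return -1;
    /* queue the buffer right away so streaming can start immediately */
    if (ioctl (video_fd, VIDIOC_QBUF, &buf) < 0)
      return -1;
  }
  return (int) i;
}
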
@@ -344,147 +313,25 @@ gst_v4l2src_capture_init (GstV4l2Src *v4l2src)
gboolean
gst_v4l2src_capture_start (GstV4l2Src *v4l2src)
{
- gint n;
-
- DEBUG("starting the capturing");
- GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
- GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
-
- g_mutex_lock(v4l2src->mutex_queue_state);
-
- v4l2src->quit = FALSE;
- v4l2src->num_queued = 0;
- v4l2src->queue_frame = 0;
-
- /* set all buffers ready to queue , this starts streaming capture */
- for (n=0;n<v4l2src->breq.count;n++) {
- v4l2src->frame_queue_state[n] = QUEUE_STATE_READY_FOR_QUEUE;
- if (!gst_v4l2src_queue_frame(v4l2src, n)) {
- g_mutex_unlock(v4l2src->mutex_queue_state);
- gst_v4l2src_capture_stop(v4l2src);
- return FALSE;
- }
- }
-
- n = 1;
- if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_STREAMON, &n) < 0) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Error starting streaming capture for %s: %s",
- GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
- return FALSE;
- }
-
- g_mutex_unlock(v4l2src->mutex_queue_state);
-
- return TRUE;
-}
-
-
-/******************************************************
- * gst_v4l2src_grab_frame():
- * capture one frame during streaming capture
- * return value: TRUE on success, FALSE on error
- ******************************************************/
-
-gboolean
-gst_v4l2src_grab_frame (GstV4l2Src *v4l2src,
- gint *num)
-{
- DEBUG("syncing on the next frame");
+ gint type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
- GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
- GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
-
- g_mutex_lock(v4l2src->mutex_queue_state);
-
- /* do we have enough frames? */
- while (v4l2src->num_queued < MIN_BUFFERS_QUEUED ||
- v4l2src->frame_queue_state[v4l2src->queue_frame] ==
- QUEUE_STATE_READY_FOR_QUEUE) {
- while (v4l2src->frame_queue_state[v4l2src->queue_frame] !=
- QUEUE_STATE_READY_FOR_QUEUE &&
- !v4l2src->quit) {
- GST_DEBUG (
- "Waiting for frames to become available (%d < %d)",
- v4l2src->num_queued, MIN_BUFFERS_QUEUED);
- g_cond_wait(v4l2src->cond_queue_state,
- v4l2src->mutex_queue_state);
- }
- if (v4l2src->quit) {
- g_mutex_unlock(v4l2src->mutex_queue_state);
- return TRUE; /* it won't get through anyway */
- }
- if (!gst_v4l2src_queue_frame(v4l2src, v4l2src->queue_frame)) {
- g_mutex_unlock(v4l2src->mutex_queue_state);
- return FALSE;
- }
- v4l2src->queue_frame = (v4l2src->queue_frame + 1) % v4l2src->breq.count;
- }
+ GST_DEBUG_OBJECT (v4l2src, "starting the capturing");
+
+ GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
+ if (!GST_V4L2_IS_ACTIVE (GST_V4L2ELEMENT(v4l2src))) {
+ gst_pad_renegotiate (v4l2src->srcpad);
+ }
+ GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
- /* syncing on the buffer grabs it */
- if (!gst_v4l2src_sync_next_frame(v4l2src, num)) {
- g_mutex_unlock(v4l2src->mutex_queue_state);
- return FALSE;
- }
+ v4l2src->quit = FALSE;
- g_mutex_unlock(v4l2src->mutex_queue_state);
+ if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_STREAMON, &type) < 0) {
+ gst_element_error(GST_ELEMENT(v4l2src), "Error starting streaming capture for %s: %s",
+ GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
+ return FALSE;
+ }
- return TRUE;
-}
-
-
-/******************************************************
- *
- ******************************************************/
-
-guint8 *
-gst_v4l2src_get_buffer (GstV4l2Src *v4l2src,
- gint num)
-{
- if (!GST_V4L2_IS_ACTIVE(GST_V4L2ELEMENT(v4l2src)) ||
- !GST_V4L2_IS_OPEN(GST_V4L2ELEMENT(v4l2src)))
- return NULL;
-
- if (num < 0 || num >= v4l2src->breq.count)
- return NULL;
-
- return GST_V4L2ELEMENT(v4l2src)->buffer[num];
-}
-
-
-/******************************************************
- * gst_v4l2src_requeue_frame():
- * re-queue a frame after we're done with the buffer
- * return value: TRUE on success, FALSE on error
- ******************************************************/
-
-gboolean
-gst_v4l2src_requeue_frame (GstV4l2Src *v4l2src,
- gint num)
-{
- DEBUG("requeueing frame %d", num);
- GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
- GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
-
- /* mark frame as 'ready to requeue' */
- g_mutex_lock(v4l2src->mutex_queue_state);
-
- if (v4l2src->frame_queue_state[num] != QUEUE_STATE_SYNCED) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Invalid state %d (expected %d), can't requeue",
- v4l2src->frame_queue_state[num],
- QUEUE_STATE_SYNCED);
- return FALSE;
- }
-
- v4l2src->frame_queue_state[num] = QUEUE_STATE_READY_FOR_QUEUE;
-
- /* let an optional wait know */
- g_cond_broadcast(v4l2src->cond_queue_state);
-
- g_mutex_unlock(v4l2src->mutex_queue_state);
-
- return TRUE;
+ return TRUE;
}
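
Streaming is now toggled by passing the buffer type to VIDIOC_STREAMON/VIDIOC_STREAMOFF, as the new capture_start() and capture_stop() do; a minimal sketch:

#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int
set_streaming (int video_fd, int on)
{
  int type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

  return ioctl (video_fd, on ? VIDIOC_STREAMON : VIDIOC_STREAMOFF, &type);
}
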
@@ -497,37 +344,60 @@ gst_v4l2src_requeue_frame (GstV4l2Src *v4l2src,
gboolean
gst_v4l2src_capture_stop (GstV4l2Src *v4l2src)
{
- gint n = 0;
-
- DEBUG("stopping capturing");
- GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
- GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
-
- g_mutex_lock(v4l2src->mutex_queue_state);
-
- /* we actually need to sync on all queued buffers but not
- * on the non-queued ones */
- if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_STREAMOFF, &n) < 0) {
- gst_element_error(GST_ELEMENT(v4l2src),
- "Error stopping streaming capture for %s: %s",
- GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
- return FALSE;
- }
-
- /* make an optional pending wait stop */
- v4l2src->quit = TRUE;
- g_cond_broadcast(v4l2src->cond_queue_state);
+ gint type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+
+ GST_DEBUG_OBJECT (v4l2src, "stopping capturing");
+ GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
+ GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
+
+ /* we actually need to sync on all queued buffers but not
+ * on the non-queued ones */
+ if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_STREAMOFF, &type) < 0) {
+ gst_element_error (GST_ELEMENT(v4l2src), "Error stopping streaming capture for %s: %s",
+ GST_V4L2ELEMENT(v4l2src)->device, g_strerror(errno));
+ return FALSE;
+ }
+
+ /* make an optional pending wait stop */
+ v4l2src->quit = TRUE;
- /* sync on remaining frames */
- while (v4l2src->num_queued > 0) {
- gst_v4l2src_sync_next_frame(v4l2src, &n);
- }
-
- g_mutex_unlock(v4l2src->mutex_queue_state);
+ return TRUE;
+}
- return TRUE;
+static void
+gst_v4l2src_buffer_pool_free (GstV4l2BufferPool *pool, gboolean do_close)
+{
+ guint i;
+
+ for (i = 0; i < pool->buffer_count; i++) {
+ gst_atomic_int_destroy (&pool->buffers[i].refcount);
+ munmap (pool->buffers[i].start, pool->buffers[i].length);
+ }
+ g_free (pool->buffers);
+ gst_atomic_int_destroy (&pool->refcount);
+ if (do_close)
+ close (pool->video_fd);
+ g_free (pool);
}
+void
+gst_v4l2src_free_buffer (GstBuffer *buffer)
+{
+ GstV4l2Buffer *buf = (GstV4l2Buffer *) GST_BUFFER_PRIVATE (buffer);
+
+ GST_LOG ("freeing buffer %p (nr. %d)", buffer, buf->buffer.index);
+
+ if (!gst_atomic_int_dec_and_test (&buf->refcount)) {
+ /* we're still in use, add to queue again
+ note: this might fail because the device is already stopped (race) */
+ if (ioctl(buf->pool->video_fd, VIDIOC_QBUF, &buf->buffer) < 0)
+ GST_INFO ("readding to queue failed, assuming video device is stopped");
+ }
+ if (gst_atomic_int_dec_and_test (&buf->pool->refcount)) {
+ /* we're last thing that used all this */
+ gst_v4l2src_buffer_pool_free (buf->pool, TRUE);
+ }
+}
/******************************************************
* gst_v4l2src_capture_deinit():
@@ -538,31 +408,32 @@ gst_v4l2src_capture_stop (GstV4l2Src *v4l2src)
gboolean
gst_v4l2src_capture_deinit (GstV4l2Src *v4l2src)
{
- int n;
-
- DEBUG("deinitting capture system");
- GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
- GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
-
- /* unmap the buffer */
- for (n=0;n<v4l2src->breq.count;n++) {
- if (!GST_V4L2ELEMENT(v4l2src)->buffer[n]) {
- break;
- }
- munmap(GST_V4L2ELEMENT(v4l2src)->buffer[n],
- v4l2src->format.fmt.pix.sizeimage);
- GST_V4L2ELEMENT(v4l2src)->buffer[n] = NULL;
- }
-
- /* free buffer tracker */
- g_free(GST_V4L2ELEMENT(v4l2src)->buffer);
- GST_V4L2ELEMENT(v4l2src)->buffer = NULL;
- g_mutex_free(v4l2src->mutex_queue_state);
- g_cond_free(v4l2src->cond_queue_state);
- g_free(v4l2src->frame_queue_state);
- g_free(v4l2src->use_num_times);
-
- return TRUE;
+ gint i, dequeue = 0;
+
+ GST_DEBUG_OBJECT (v4l2src, "deinitting capture system");
+
+ GST_V4L2_CHECK_OPEN(GST_V4L2ELEMENT(v4l2src));
+ GST_V4L2_CHECK_ACTIVE(GST_V4L2ELEMENT(v4l2src));
+
+ /* free the buffers */
+ for (i = 0; i < v4l2src->breq.count; i++) {
+ if (gst_atomic_int_dec_and_test (&v4l2src->pool->buffers[i].refcount))
+ dequeue++;
+ }
+ for (i = 0; i < dequeue; i++) {
+ struct v4l2_buffer buffer;
+ buffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+ if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd, VIDIOC_DQBUF, &buffer) < 0)
+ GST_WARNING_OBJECT (v4l2src, "Could not dequeue buffer on uninitialization");
+ }
+ if (gst_atomic_int_dec_and_test (&v4l2src->pool->refcount)) {
+ /* we're last thing that used all this */
+ gst_v4l2src_buffer_pool_free (v4l2src->pool, FALSE);
+ }
+ v4l2src->pool = NULL;
+
+ GST_V4L2_SET_INACTIVE (GST_V4L2ELEMENT (v4l2src));
+ return TRUE;
}
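
The pool can outlive capture_deinit() because buffers pushed downstream still hold references; whoever drops the last reference releases the mappings. A plain-C sketch of that life cycle, with illustrative names (the commit uses GstAtomicInt for the counter):

#include <stdlib.h>

struct example_pool {
  int refcount;   /* the commit keeps this in a GstAtomicInt */
};

static void
example_pool_unref (struct example_pool *pool)
{
  /* the real code decrements atomically with gst_atomic_int_dec_and_test();
   * a plain decrement is only safe single-threaded */
  if (--pool->refcount == 0) {
    /* last reference dropped: release the pool (the real code also
     * munmap()s every buffer at this point) */
    free (pool);
  }
}
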
@@ -578,13 +449,16 @@ gst_v4l2src_get_size_limits (GstV4l2Src *v4l2src,
{
struct v4l2_format fmt;
+ GST_LOG_OBJECT (v4l2src, "getting size limits with format " GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (format->pixelformat));
+
/* get size delimiters */
memset(&fmt, 0, sizeof(fmt));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = 0;
fmt.fmt.pix.height = 0;
fmt.fmt.pix.pixelformat = format->pixelformat;
- fmt.fmt.pix.field = V4L2_FIELD_ANY;
+ fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd,
VIDIOC_TRY_FMT, &fmt) < 0) {
return FALSE;
@@ -594,9 +468,10 @@ gst_v4l2src_get_size_limits (GstV4l2Src *v4l2src,
*min_w = fmt.fmt.pix.width;
if (min_h)
*min_h = fmt.fmt.pix.height;
+ GST_LOG_OBJECT (v4l2src, "got min size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
fmt.fmt.pix.width = G_MAXINT;
- fmt.fmt.pix.height = G_MAXINT;
+ fmt.fmt.pix.height = 576;
if (ioctl(GST_V4L2ELEMENT(v4l2src)->video_fd,
VIDIOC_TRY_FMT, &fmt) < 0) {
return FALSE;
@@ -606,6 +481,7 @@ gst_v4l2src_get_size_limits (GstV4l2Src *v4l2src,
*max_w = fmt.fmt.pix.width;
if (max_h)
*max_h = fmt.fmt.pix.height;
+ GST_LOG_OBJECT (v4l2src, "got max size %dx%d", fmt.fmt.pix.width, fmt.fmt.pix.height);
return TRUE;
}
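
get_size_limits() probes the driver with VIDIOC_TRY_FMT: ask for an impossible size and read back what the driver clamps it to (the commit additionally caps the probed height at 576 for interlaced capture). A standalone sketch of the max-size probe:

#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

static int
probe_max_size (int video_fd, unsigned int pixelformat,
                unsigned int *max_w, unsigned int *max_h)
{
  struct v4l2_format fmt;

  memset (&fmt, 0, sizeof (fmt));
  fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  fmt.fmt.pix.pixelformat = pixelformat;
  fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
  fmt.fmt.pix.width = 0xffffff;          /* deliberately too large */
  fmt.fmt.pix.height = 0xffffff;
  if (ioctl (video_fd, VIDIOC_TRY_FMT, &fmt) < 0)
    return -1;
  *max_w = fmt.fmt.pix.width;            /* the driver clamps these */
  *max_h = fmt.fmt.pix.height;
  return 0;
}
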
diff --git a/sys/v4l2/v4l2src_calls.h b/sys/v4l2/v4l2src_calls.h
index 4332312d..dc5fca0c 100644
--- a/sys/v4l2/v4l2src_calls.h
+++ b/sys/v4l2/v4l2src_calls.h
@@ -17,8 +17,8 @@
* Boston, MA 02111-1307, USA.
*/
-#ifndef __V4L2_SRC_CALLS_H__
-#define __V4L2_SRC_CALLS_H__
+#ifndef __V4L2SRC_CALLS_H__
+#define __V4L2SRC_CALLS_H__
#include "gstv4l2src.h"
#include "v4l2_calls.h"
@@ -31,17 +31,16 @@ gboolean gst_v4l2src_set_capture (GstV4l2Src *v4l2src,
gint height);
gboolean gst_v4l2src_capture_init (GstV4l2Src *v4l2src);
gboolean gst_v4l2src_capture_start (GstV4l2Src *v4l2src);
-gboolean gst_v4l2src_grab_frame (GstV4l2Src *v4l2src,
- gint *num);
+gint gst_v4l2src_grab_frame (GstV4l2Src *v4l2src);
guint8 * gst_v4l2src_get_buffer (GstV4l2Src *v4l2src,
gint num);
-gboolean gst_v4l2src_requeue_frame (GstV4l2Src *v4l2src,
- gint num);
+gboolean gst_v4l2src_queue_frame (GstV4l2Src *v4l2src,
+ guint i);
gboolean gst_v4l2src_capture_stop (GstV4l2Src *v4l2src);
gboolean gst_v4l2src_capture_deinit (GstV4l2Src *v4l2src);
gboolean gst_v4l2src_fill_format_list (GstV4l2Src *v4l2src);
-gboolean gst_v4l2src_empty_format_list (GstV4l2Src *v4l2src);
+gboolean gst_v4l2src_clear_format_list (GstV4l2Src *v4l2src);
/* hacky */
gboolean gst_v4l2src_get_size_limits (GstV4l2Src *v4l2src,
@@ -49,4 +48,6 @@ gboolean gst_v4l2src_get_size_limits (GstV4l2Src *v4l2src,
gint *min_w, gint *max_w,
gint *min_h, gint *max_h);
-#endif /* __V4L2_SRC_CALLS_H__ */
+void gst_v4l2src_free_buffer (GstBuffer *buffer);
+
+#endif /* __V4L2SRC_CALLS_H__ */
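
To tie the reworked API together, a hedged sketch of the calling order implied by the declarations above; error paths are reduced to early returns, and the element is assumed to be open and negotiated already:

#include "v4l2src_calls.h"

static void
example_capture_loop (GstV4l2Src *v4l2src, guint num_frames)
{
  guint i;

  if (!gst_v4l2src_capture_init (v4l2src))    /* REQBUFS + mmap + QBUF */
    return;
  if (!gst_v4l2src_capture_start (v4l2src))   /* STREAMON */
    return;

  for (i = 0; i < num_frames; i++) {
    gint index = gst_v4l2src_grab_frame (v4l2src);   /* DQBUF, returns index */

    if (index < 0)
      break;
    /* here the element wraps v4l2src->pool->buffers[index] in a GstBuffer
     * whose free function is gst_v4l2src_free_buffer() and pushes it */
  }

  gst_v4l2src_capture_stop (v4l2src);     /* STREAMOFF */
  gst_v4l2src_capture_deinit (v4l2src);   /* drop pool refs, munmap */
}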