summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
authorDave Robillard <dave@drobilla.net>2009-06-22 17:04:22 -0400
committerDave Robillard <dave@drobilla.net>2009-06-22 17:04:22 -0400
commit7a7083ba1f8e38b0b51820d4a37f279c6b2c73bd (patch)
tree5fb2f44346d722fa2c707854a187cb2ab8dff1af
parenta6389f9a9220e11110778ec46b6c12abeb058d94 (diff)
parent218878de5ea48b5acdf36070a73a50fd71f41741 (diff)
downloadgst-plugins-bad-7a7083ba1f8e38b0b51820d4a37f279c6b2c73bd.tar.gz
gst-plugins-bad-7a7083ba1f8e38b0b51820d4a37f279c6b2c73bd.tar.bz2
gst-plugins-bad-7a7083ba1f8e38b0b51820d4a37f279c6b2c73bd.zip
Merge branch 'fdo' into lv2
-rw-r--r--configure.ac65
-rw-r--r--ext/ladspa/Makefile.am10
-rw-r--r--ext/ladspa/gstladspa.h2
-rw-r--r--ext/lv2/Makefile.am10
-rw-r--r--ext/lv2/gstlv2.h2
-rw-r--r--ext/metadata/metadataexif.c27
-rw-r--r--ext/metadata/metadatatags.c12
-rw-r--r--ext/metadata/metadatatags.h4
-rw-r--r--ext/metadata/metadataxmp.c64
-rw-r--r--gst/camerabin/Makefile.am4
-rw-r--r--gst/camerabin/camerabingeneral.c74
-rw-r--r--gst/camerabin/camerabingeneral.h32
-rw-r--r--gst/camerabin/camerabinimage.c24
-rw-r--r--gst/camerabin/camerabinvideo.c31
-rw-r--r--gst/camerabin/gstcamerabin.c65
-rw-r--r--gst/h264parse/gsth264parse.c10
-rw-r--r--gst/mpegvideoparse/mpegvideoparse.c2
-rw-r--r--sys/Makefile.am10
-rw-r--r--sys/vdpau/Makefile.am27
-rw-r--r--sys/vdpau/gstvdp.c29
-rw-r--r--sys/vdpau/gstvdpdevice.c266
-rw-r--r--sys/vdpau/gstvdpdevice.h132
-rw-r--r--sys/vdpau/gstvdpmpegdec.c1156
-rw-r--r--sys/vdpau/gstvdpmpegdec.h105
-rw-r--r--sys/vdpau/gstvdpvideobuffer.c138
-rw-r--r--sys/vdpau/gstvdpvideobuffer.h59
-rw-r--r--sys/vdpau/gstvdpvideoyuv.c462
-rw-r--r--sys/vdpau/gstvdpvideoyuv.h60
-rw-r--r--sys/vdpau/gstvdpyuvvideo.c476
-rw-r--r--sys/vdpau/gstvdpyuvvideo.h62
-rw-r--r--sys/vdpau/mpegutil.c430
-rw-r--r--sys/vdpau/mpegutil.h150
-rw-r--r--tests/check/elements/camerabin.c23
-rw-r--r--tests/examples/Makefile.am6
-rw-r--r--tests/examples/camerabin/.gitignore3
-rw-r--r--tests/examples/camerabin/Makefile.am39
-rw-r--r--tests/examples/camerabin/gst-camera-perf.c726
-rw-r--r--tests/examples/camerabin/gst-camera-perf.glade120
-rw-r--r--tests/examples/camerabin/gst-camera.c1756
-rw-r--r--tests/examples/camerabin/gst-camera.glade397
40 files changed, 6875 insertions, 195 deletions
diff --git a/configure.ac b/configure.ac
index 7cc268ad..ac22aef5 100644
--- a/configure.ac
+++ b/configure.ac
@@ -1430,6 +1430,44 @@ AG_GST_CHECK_FEATURE(ACM, [Windows ACM library], acm, [
], [HAVE_ACM="no"])
])
+dnl *** vdpau ***
+translit(dnm, m, l) AM_CONDITIONAL(USE_VDPAU, true)
+AG_GST_CHECK_FEATURE(VDPAU, [VDPAU], vdpau, [
+ VDPAU_CFLAGS=
+ VDPAU_LIBS=-lvdpau
+ HAVE_VDPAU=no
+
+ saved_CPPFLAGS="$CPPFLAGS"
+ AC_CHECK_HEADER([vdpau/vdpau.h], [HAVE_VDPAU_H=yes])
+ if test -z "$HAVE_VDPAU_H"; then
+ dnl Didn't find VDPAU header straight away.
+ dnl Try /usr/include/nvidia. Need to clear caching vars first
+ AC_MSG_NOTICE([VDPAU header not in standard path. Checking /usr/include/nvidia])
+ unset ac_cv_header_vdpau_vdpau_h
+ unset ac_cv_header_vdpau_vdpau_x11_h
+ VDPAU_CFLAGS="-I/usr/include/nvidia"
+ VDPAU_LIBS="-L/usr/lib/nvidia -lvdpau"
+ CPPFLAGS="$VDPAU_CFLAGS $saved_CPPFLAGS"
+ AC_CHECK_HEADER([vdpau/vdpau.h], [HAVE_VDPAU_H=yes])
+ fi
+ AC_CHECK_HEADER([vdpau/vdpau_x11.h], [HAVE_VDPAU_X11_H=yes])
+ CPPFLAGS="$saved_CPPFLAGS"
+
+ if test "x$HAVE_VDPAU_H" = "xyes" -a "x$HAVE_VDPAU_X11_H" = "xyes"; then
+ dnl Found the headers - look for the lib
+ AC_MSG_NOTICE([VDPAU headers found. Checking libraries])
+ saved_LIBS="$LIBS"
+ LIBS="$VDPAU_LIBS $saved_LIBS"
+ AC_CHECK_LIB(vdpau,vdp_device_create_x11,[HAVE_VDPAU="yes"])
+ LIBS="$saved_LIBS"
+ fi
+ if test "$HAVE_VDPAU" = "yes"; then
+ AC_MSG_NOTICE([Found up to date VDPAU installation])
+ AC_SUBST(VDPAU_CFLAGS)
+ AC_SUBST(VDPAU_LIBS)
+ fi
+])
+
else
dnl not building plugins with external dependencies,
@@ -1484,6 +1522,7 @@ AM_CONDITIONAL(USE_XVID, false)
AM_CONDITIONAL(USE_WILDMIDI, false)
AM_CONDITIONAL(USE_WININET, false)
AM_CONDITIONAL(USE_ACM, false)
+AM_CONDITIONAL(USE_VDPAU, false)
fi dnl of EXT plugins
@@ -1546,8 +1585,10 @@ dnl po/Makefile.in
AC_CONFIG_FILES(
Makefile
+common/Makefile
common/shave
common/shave-libtool
+common/m4/Makefile
gst-plugins-bad.spec
gst/Makefile
gst/aacparse/Makefile
@@ -1555,8 +1596,6 @@ gst/adpcmdec/Makefile
gst/aiffparse/Makefile
gst/amrparse/Makefile
gst/autoconvert/Makefile
-gst/legacyresample/Makefile
-gst/liveadder/Makefile
gst/bayer/Makefile
gst/camerabin/Makefile
gst/cdxaparse/Makefile
@@ -1570,7 +1609,9 @@ gst/frei0r/Makefile
gst/h264parse/Makefile
gst/hdvparse/Makefile
gst/id3tag/Makefile
+gst/legacyresample/Makefile
gst/librfb/Makefile
+gst/liveadder/Makefile
gst/mpegdemux/Makefile
gst/mpegtsmux/Makefile
gst/mpegtsmux/tsmux/Makefile
@@ -1583,6 +1624,7 @@ gst/nuvdemux/Makefile
gst/pcapparse/Makefile
gst/qtmux/Makefile
gst/rawparse/Makefile
+gst/real/Makefile
gst/rtpmanager/Makefile
gst/rtpmux/Makefile
gst/scaletempo/Makefile
@@ -1597,7 +1639,6 @@ gst/tta/Makefile
gst/valve/Makefile
gst/videosignal/Makefile
gst/vmnc/Makefile
-gst/real/Makefile
gst/xdgmime/Makefile
gst-libs/Makefile
gst-libs/gst/Makefile
@@ -1617,18 +1658,23 @@ sys/oss4/Makefile
sys/osxvideo/Makefile
sys/qtwrapper/Makefile
sys/vcd/Makefile
+sys/vdpau/Makefile
sys/wasapi/Makefile
sys/wininet/Makefile
sys/winks/Makefile
sys/winscreencap/Makefile
+tests/Makefile
+tests/check/Makefile
tests/examples/Makefile
+tests/examples/camerabin/Makefile
tests/examples/directfb/Makefile
tests/examples/mxf/Makefile
-tests/examples/shapewipe/Makefile
tests/examples/scaletempo/Makefile
+tests/examples/shapewipe/Makefile
tests/examples/switch/Makefile
-ext/amrwb/Makefile
+tests/icles/Makefile
ext/alsaspdif/Makefile
+ext/amrwb/Makefile
ext/assrender/Makefile
ext/apexsink/Makefile
ext/bz2/Makefile
@@ -1639,7 +1685,6 @@ ext/dirac/Makefile
ext/directfb/Makefile
ext/divx/Makefile
ext/dts/Makefile
-ext/metadata/Makefile
ext/faac/Makefile
ext/faad/Makefile
ext/gsm/Makefile
@@ -1650,7 +1695,7 @@ ext/ladspa/Makefile
ext/lv2/Makefile
ext/libmms/Makefile
ext/Makefile
-ext/nas/Makefile
+ext/metadata/Makefile
ext/modplug/Makefile
ext/mpeg2enc/Makefile
ext/mimic/Makefile
@@ -1658,6 +1703,7 @@ ext/mplex/Makefile
ext/musepack/Makefile
ext/musicbrainz/Makefile
ext/mythtv/Makefile
+ext/nas/Makefile
ext/neon/Makefile
ext/ofa/Makefile
ext/resindvd/Makefile
@@ -1674,12 +1720,7 @@ docs/Makefile
docs/plugins/Makefile
docs/plugins/figures/Makefile
docs/version.entities
-common/Makefile
-common/m4/Makefile
m4/Makefile
-tests/Makefile
-tests/check/Makefile
-tests/icles/Makefile
win32/common/config.h
)
AC_OUTPUT
diff --git a/ext/ladspa/Makefile.am b/ext/ladspa/Makefile.am
index 00582ed7..23c84f57 100644
--- a/ext/ladspa/Makefile.am
+++ b/ext/ladspa/Makefile.am
@@ -1,8 +1,14 @@
plugin_LTLIBRARIES = libgstladspa.la
libgstladspa_la_SOURCES = gstladspa.c
-libgstladspa_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CONTROLLER_CFLAGS) $(GST_CFLAGS) $(LRDF_CFLAGS)
-libgstladspa_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LIBM) $(LRDF_LIBS) ../../gst-libs/gst/signalprocessor/libgstsignalprocessor.la
+libgstladspa_la_CFLAGS = \
+ -I$(top_builddir)/gst-libs \
+ $(GST_PLUGINS_BASE_CFLAGS) \
+ $(GST_CONTROLLER_CFLAGS) $(GST_CFLAGS) $(LRDF_CFLAGS)
+libgstladspa_la_LIBADD = \
+ $(top_builddir)/gst-libs/gst/signalprocessor/libgstsignalprocessor.la \
+ $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) \
+ $(LIBM) $(LRDF_LIBS)
libgstladspa_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstladspa_la_LIBTOOLFLAGS = --tag=disable-static
diff --git a/ext/ladspa/gstladspa.h b/ext/ladspa/gstladspa.h
index f51d6233..14532318 100644
--- a/ext/ladspa/gstladspa.h
+++ b/ext/ladspa/gstladspa.h
@@ -28,7 +28,7 @@
#include <gst/gst.h>
-#include "../../gst-libs/gst/signalprocessor/gstsignalprocessor.h"
+#include <gst/signalprocessor/gstsignalprocessor.h>
G_BEGIN_DECLS
diff --git a/ext/lv2/Makefile.am b/ext/lv2/Makefile.am
index f10c7acf..18fe6481 100644
--- a/ext/lv2/Makefile.am
+++ b/ext/lv2/Makefile.am
@@ -1,8 +1,14 @@
plugin_LTLIBRARIES = libgstlv2.la
libgstlv2_la_SOURCES = gstlv2.c
-libgstlv2_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CONTROLLER_CFLAGS) $(GST_CFLAGS) $(SLV2_CFLAGS)
-libgstlv2_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) $(LIBM) $(SLV2_LIBS) ../../gst-libs/gst/signalprocessor/libgstsignalprocessor.la
+libgstlv2_la_CFLAGS = \
+ -I$(top_builddir)/gst-libs \
+ $(GST_PLUGINS_BASE_CFLAGS) \
+ $(GST_CONTROLLER_CFLAGS) $(GST_CFLAGS) $(SLV2_CFLAGS)
+libgstlv2_la_LIBADD = \
+ $(top_builddir)/gst-libs/gst/signalprocessor/libgstsignalprocessor.la \
+ $(GST_PLUGINS_BASE_LIBS) -lgstaudio-$(GST_MAJORMINOR) \
+ $(LIBM) $(SLV2_LIBS)
libgstlv2_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstlv2_la_LIBTOOLFLAGS = --tag=disable-static
diff --git a/ext/lv2/gstlv2.h b/ext/lv2/gstlv2.h
index 9fac262a..836f1a0b 100644
--- a/ext/lv2/gstlv2.h
+++ b/ext/lv2/gstlv2.h
@@ -28,7 +28,7 @@
#include <gst/gst.h>
-#include "../../gst-libs/gst/signalprocessor/gstsignalprocessor.h"
+#include <gst/signalprocessor/gstsignalprocessor.h>
G_BEGIN_DECLS
diff --git a/ext/metadata/metadataexif.c b/ext/metadata/metadataexif.c
index 58d83e2d..0769dab4 100644
--- a/ext/metadata/metadataexif.c
+++ b/ext/metadata/metadataexif.c
@@ -694,16 +694,16 @@ metadataparse_exif_content_foreach_entry_func (ExifEntry * entry,
/* DDD - degrees */
value = (gdouble) rt->numerator / (gdouble) rt->denominator;
+ GST_DEBUG ("deg: %lu / %lu", rt->numerator, rt->denominator);
rt++;
/* MM - minutes and SS - seconds */
- if (rt->numerator % rt->denominator) {
- value += (gdouble) rt->numerator / (gdouble) rt->denominator;
- } else {
- value += rt->numerator / rt->denominator;
- rt++;
- value += rt->numerator / rt->denominator;
- }
+ GST_DEBUG ("min: %lu / %lu", rt->numerator, rt->denominator);
+ value += (gdouble) rt->numerator / ((gdouble) rt->denominator * 60.0);
+ rt++;
+ GST_DEBUG ("sec: %lu / %lu", rt->numerator, rt->denominator);
+ value +=
+ (gdouble) rt->numerator / ((gdouble) rt->denominator * 3600.0);
/* apply sign */
if (entry->tag == EXIF_TAG_GPS_LATITUDE) {
@@ -1049,17 +1049,24 @@ metadatamux_exif_for_each_tag_in_list (const GstTagList * list,
const ExifTag ref_tag = entry->tag == EXIF_TAG_GPS_LATITUDE ?
EXIF_TAG_GPS_LATITUDE_REF : EXIF_TAG_GPS_LONGITUDE_REF;
+ /* DDD - degrees */
rt->numerator = (gulong) v;
rt->denominator = 1;
+ GST_DEBUG ("deg: %lf : %lu / %lu", v, rt->numerator, rt->denominator);
v -= rt->numerator;
rt++;
- rt->numerator = (gulong) (0.5 + v * 100.0);
- rt->denominator = 100;
+ /* MM - minutes */
+ rt->numerator = (gulong) (v * 60.0);
+ rt->denominator = 1;
+ GST_DEBUG ("min: %lf : %lu / %lu", v, rt->numerator, rt->denominator);
+ v -= ((gdouble) rt->numerator / 60.0);
rt++;
- rt->numerator = 0;
+ /* SS - seconds */
+ rt->numerator = (gulong) (0.5 + v * 3600.0);
rt->denominator = 1;
+ GST_DEBUG ("sec: %lf : %lu / %lu", v, rt->numerator, rt->denominator);
if (entry->tag == EXIF_TAG_GPS_LONGITUDE) {
GST_DEBUG ("longitude : %lf", value);
diff --git a/ext/metadata/metadatatags.c b/ext/metadata/metadatatags.c
index 82e6c381..4beee172 100644
--- a/ext/metadata/metadatatags.c
+++ b/ext/metadata/metadatatags.c
@@ -446,5 +446,17 @@ metadata_tags_iptc_register (void)
static void
metadata_tags_xmp_register (void)
{
+ gst_tag_register (GST_TAG_XMP_GEO_LOCATION_COUNTRY, GST_TAG_FLAG_META,
+ G_TYPE_STRING, GST_TAG_XMP_GEO_LOCATION_COUNTRY,
+ "human readable english country name of where the media has been recorded or produced",
+ NULL);
+ gst_tag_register (GST_TAG_XMP_GEO_LOCATION_CITY, GST_TAG_FLAG_META,
+ G_TYPE_STRING, GST_TAG_XMP_GEO_LOCATION_CITY,
+ "human readable english city name of where the media has been recorded or produced",
+ NULL);
+ gst_tag_register (GST_TAG_XMP_GEO_LOCATION_SUBLOCATION, GST_TAG_FLAG_META,
+ G_TYPE_STRING, GST_TAG_XMP_GEO_LOCATION_SUBLOCATION,
+ "human readable location detail of where the media has been recorded or produced",
+ NULL);
}
diff --git a/ext/metadata/metadatatags.h b/ext/metadata/metadatatags.h
index 8500f15a..4d718738 100644
--- a/ext/metadata/metadatatags.h
+++ b/ext/metadata/metadatatags.h
@@ -126,6 +126,10 @@ typedef enum {
#define GST_TAG_GPS_SPEED ""
#define GST_TAG_GPS_TRACK ""
+#define GST_TAG_XMP_GEO_LOCATION_COUNTRY "geo-location-country"
+#define GST_TAG_XMP_GEO_LOCATION_CITY "geo-location-city"
+#define GST_TAG_XMP_GEO_LOCATION_SUBLOCATION "geo-location-sublocation"
+
/* *INDENT-ON* */
/*
diff --git a/ext/metadata/metadataxmp.c b/ext/metadata/metadataxmp.c
index 4da279df..5d5bdefa 100644
--- a/ext/metadata/metadataxmp.c
+++ b/ext/metadata/metadataxmp.c
@@ -155,13 +155,26 @@ typedef struct _tag_SchemaMap
#define XMP_SCHEMA_NODE 0x80000000UL
/* *INDENT-OFF* */
-/* When changing this table, update 'metadata_mapping.htm' file too. */
+/* When changing these tables, update 'metadata_mapping.htm' file too. */
static const SchemaTagMap schema_map_dublin_tags_map[] = {
+ {"creator", GST_TAG_ARTIST },
{"description", GST_TAG_DESCRIPTION },
- {"title", GST_TAG_TITLE },
+ {"format", GST_TAG_VIDEO_CODEC },
{"rights", GST_TAG_COPYRIGHT },
+ {"subject", GST_TAG_KEYWORDS },
+ {"title", GST_TAG_TITLE },
{"type", GST_TAG_CODEC },
- {"format", GST_TAG_VIDEO_CODEC },
+ {NULL, NULL}
+};
+
+static const SchemaTagMap schema_map_photoshop_tags_map[] = {
+ {"country", GST_TAG_XMP_GEO_LOCATION_COUNTRY },
+ {"city", GST_TAG_XMP_GEO_LOCATION_CITY },
+ {NULL, NULL}
+};
+
+static const SchemaTagMap schema_map_iptc4xmpcore_tags_map[] = {
+ {"location", GST_TAG_XMP_GEO_LOCATION_SUBLOCATION },
{NULL, NULL}
};
/* *INDENT-ON* */
@@ -173,9 +186,26 @@ static const SchemaMap schema_map_dublin = {
schema_map_dublin_tags_map
};
-/* When changing this table, update 'metadata_mapping.htm' file too. */
+/* http://www.adobe.com/devnet/xmp/pdfs/xmp_specification.pdf */
+static const SchemaMap schema_map_photoshop = {
+ "http://ns.adobe.com/photoshop/1.0/",
+ "photoshop:",
+ 10,
+ schema_map_photoshop_tags_map
+};
+
+/* http://www.iptc.org/std/Iptc4xmpCore/1.0/specification/Iptc4xmpCore_1.0-spec-XMPSchema_8.pdf */
+static const SchemaMap schema_map_iptc4xmpcore = {
+ "http://iptc.org/std/Iptc4xmpCore/1.0/xmlns/",
+ "Iptc4xmpCore:",
+ 13,
+ schema_map_iptc4xmpcore_tags_map
+};
+
static const SchemaMap *schemas_map[] = {
&schema_map_dublin,
+ &schema_map_photoshop,
+ &schema_map_iptc4xmpcore,
NULL
};
@@ -474,7 +504,6 @@ metadatamux_xmp_get_tagsmap_from_gsttag (const SchemaMap * schema_map,
if (NULL == schema_map)
goto done;
-
for (i = 0; schema_map->tags_map[i].gst_tag; i++) {
if (0 == strcmp (schema_map->tags_map[i].gst_tag, tag)) {
tags_map = (SchemaTagMap *) & schema_map->tags_map[i];
@@ -567,10 +596,14 @@ void
metadataparse_xmp_iter_node_schema (GstTagList * taglist, GstTagMergeMode mode,
XmpPtr xmp, const char *schema, const char *path)
{
- SchemaMap *schema_map = NULL;
+ const SchemaMap *schema_map = NULL;
+ gint i;
- if (0 == strcmp (schema, "http://purl.org/dc/elements/1.1/")) {
- schema_map = (SchemaMap *) & schema_map_dublin;
+ for (i = 0; schemas_map[i]; i++) {
+ if (0 == strcmp (schema, schemas_map[i]->schema)) {
+ schema_map = schemas_map[i];
+ break;
+ }
}
metadataparse_xmp_iter_array (taglist, mode, xmp, schema, path, schema_map);
@@ -805,6 +838,8 @@ metadatamux_xmp_for_each_tag_in_list (const GstTagList * list,
XmpPtr xmp = (XmpPtr) user_data;
int i;
+ GST_DEBUG ("trying to map tag '%s' to xmp", tag);
+
for (i = 0; schemas_map[i]; i++) {
/* FIXME: should try to get all of values (index) for the tag */
@@ -814,9 +849,7 @@ metadatamux_xmp_for_each_tag_in_list (const GstTagList * list,
metadatamux_xmp_get_tagsmap_from_gsttag (smap, tag);
if (stagmap) {
-
gchar *value = NULL;
-
GType type = gst_tag_get_type (tag);
switch (type) {
@@ -827,8 +860,10 @@ metadatamux_xmp_for_each_tag_in_list (const GstTagList * list,
break;
}
- if (value) {
+ GST_DEBUG ("found mapping for tag '%s' in schema %s", tag,
+ schemas_map[i]->prefix);
+ if (value) {
uint32_t options = 0;
#ifdef XMP_1_99_5
@@ -857,13 +892,12 @@ metadatamux_xmp_for_each_tag_in_list (const GstTagList * list,
}
g_free (value);
-
}
-
+ } else {
+ GST_DEBUG ("no xmp mapping for tag '%s' in schema %s found", tag,
+ schemas_map[i]->prefix);
}
-
}
-
}
#endif /* else (ifndef HAVE_XMP) */
diff --git a/gst/camerabin/Makefile.am b/gst/camerabin/Makefile.am
index d5085da9..e3fdfae0 100644
--- a/gst/camerabin/Makefile.am
+++ b/gst/camerabin/Makefile.am
@@ -23,7 +23,9 @@ libgstcamerabin_la_SOURCES = gstcamerabin.c \
nodist_libgstcamerabin_la_SOURCES = $(built_sources)
libgstcamerabin_la_CFLAGS = \
- $(GST_CFLAGS) $(GST_BASE_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) -DGST_USE_UNSTABLE_API
+ -I$(top_builddir)/gst-libs \
+ $(GST_CFLAGS) $(GST_BASE_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) \
+ -DGST_USE_UNSTABLE_API
libgstcamerabin_la_LIBADD = \
$(top_builddir)/gst-libs/gst/interfaces/libgstphotography-$(GST_MAJORMINOR).la \
$(GST_LIBS) $(GST_BASE_LIBS) $(GST_PLUGINS_BASE_LIBS) \
diff --git a/gst/camerabin/camerabingeneral.c b/gst/camerabin/camerabingeneral.c
index d9d9a202..ba6b82a9 100644
--- a/gst/camerabin/camerabingeneral.c
+++ b/gst/camerabin/camerabingeneral.c
@@ -32,74 +32,6 @@
GST_DEBUG_CATEGORY (gst_camerabin_debug);
-static gboolean
-camerabin_general_dbg_have_event (GstPad * pad, GstEvent * event,
- gpointer u_data)
-{
- switch (GST_EVENT_TYPE (event)) {
- case GST_EVENT_NEWSEGMENT:
- {
- GstElement *elem = (GstElement *) u_data;
- gchar *elem_name = gst_element_get_name (elem);
- gchar *pad_name = gst_pad_get_name (pad);
-
- gboolean update;
- gdouble rate;
- GstFormat format;
- gint64 start, stop, pos;
- gst_event_parse_new_segment (event, &update, &rate, &format, &start,
- &stop, &pos);
-
- GST_DEBUG ("element %s, pad %s, new_seg_start =%" GST_TIME_FORMAT
- ", new_seg_stop =%" GST_TIME_FORMAT
- ", new_seg_pos =%" GST_TIME_FORMAT "\n", elem_name, pad_name,
- GST_TIME_ARGS (start), GST_TIME_ARGS (stop), GST_TIME_ARGS (pos));
-
- g_free (pad_name);
- g_free (elem_name);
- }
- break;
- default:
- break;
- }
-
- return TRUE;
-}
-
-static gboolean
-camerabin_general_dbg_have_buffer (GstPad * pad, GstBuffer * buffer,
- gpointer u_data)
-{
- GstElement *elem = (GstElement *) u_data;
- gchar *elem_name = gst_element_get_name (elem);
- gchar *pad_name = gst_pad_get_name (pad);
-
- GST_DEBUG ("element %s, pad %s, buf_ts =%" GST_TIME_FORMAT "\n", elem_name,
- pad_name, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
-
- g_free (pad_name);
- g_free (elem_name);
-
- return TRUE;
-
-}
-
-void
-camerabin_general_dbg_set_probe (GstElement * elem, gchar * pad_name,
- gboolean buf, gboolean evt)
-{
- GstPad *pad = gst_element_get_static_pad (elem, pad_name);
-
- if (buf)
- gst_pad_add_buffer_probe (pad,
- G_CALLBACK (camerabin_general_dbg_have_buffer), elem);
- if (evt)
- gst_pad_add_event_probe (pad,
- G_CALLBACK (camerabin_general_dbg_have_event), elem);
-
- gst_object_unref (pad);
-}
-
/**
* gst_camerabin_add_element:
* @bin: add an element to this bin
@@ -151,12 +83,12 @@ gst_camerabin_try_add_element (GstBin * bin, GstElement * new_elem)
/* Get pads for linking */
bin_pad = gst_bin_find_unlinked_pad (bin, GST_PAD_SRC);
- GST_DEBUG ("adding %" GST_PTR_FORMAT " to %s:%s", new_elem,
- GST_DEBUG_PAD_NAME (bin_pad));
/* Add to bin */
gst_bin_add (GST_BIN (bin), new_elem);
/* Link, if unconnected pad was found, otherwise just add it to bin */
if (bin_pad) {
+ GST_DEBUG_OBJECT (bin, "linking %s to %s:%s", GST_OBJECT_NAME (new_elem),
+ GST_DEBUG_PAD_NAME (bin_pad));
bin_elem = gst_pad_get_parent_element (bin_pad);
gst_object_unref (bin_pad);
if (!gst_element_link (bin_elem, new_elem)) {
@@ -164,6 +96,8 @@ gst_camerabin_try_add_element (GstBin * bin, GstElement * new_elem)
ret = FALSE;
}
gst_object_unref (bin_elem);
+ } else {
+ GST_INFO_OBJECT (bin, "no unlinked source pad in bin");
}
return ret;
diff --git a/gst/camerabin/camerabingeneral.h b/gst/camerabin/camerabingeneral.h
index a84ab64b..13eea756 100644
--- a/gst/camerabin/camerabingeneral.h
+++ b/gst/camerabin/camerabingeneral.h
@@ -21,43 +21,17 @@
#ifndef __CAMERABIN_GENERAL_H_
#define __CAMERABIN_GENERAL_H_
-#ifdef HAVE_SYS_TIME_H
-#include <sys/time.h>
-#endif
-#include <time.h>
-
#include <gst/gst.h>
-
-typedef struct timeval TIME_TYPE;
-#define GET_TIME(t) do { gettimeofday(&(t), NULL); } while(0)
-#define DIFF_TIME(t2,t1,d) do { d = ((t2).tv_sec - (t1).tv_sec) * 1000000 + \
- (t2).tv_usec - (t1).tv_usec; } while(0)
-
-#define _INIT_TIMER_BLOCK TIME_TYPE t1, t2; guint32 d; do {;}while (0)
-
-#define _OPEN_TIMER_BLOCK { GET_TIME(t1); do {;}while (0)
-#define _CLOSE_TIMER_BLOCK GET_TIME(t2); DIFF_TIME(t2,t1,d); \
- GST_DEBUG("elapsed time = %u\n", d); \
- } do {;}while (0)
-
-
-extern void
-camerabin_general_dbg_set_probe (GstElement * elem, gchar * pad_name,
- gboolean buf, gboolean evt);
-
gboolean gst_camerabin_try_add_element (GstBin * bin, GstElement * new_elem);
-
gboolean gst_camerabin_add_element (GstBin * bin, GstElement * new_elem);
-
-GstElement *gst_camerabin_create_and_add_element (GstBin * bin,
- const gchar * elem_name);
+GstElement *gst_camerabin_create_and_add_element (GstBin * bin, const gchar * elem_name);
void gst_camerabin_remove_elements_from_bin (GstBin * bin);
-gboolean
-gst_camerabin_drop_eos_probe (GstPad * pad, GstEvent * event, gpointer u_data);
+gboolean gst_camerabin_drop_eos_probe (GstPad * pad, GstEvent * event, gpointer u_data);
+/* debug logging category */
GST_DEBUG_CATEGORY_EXTERN (gst_camerabin_debug);
#define GST_CAT_DEFAULT gst_camerabin_debug
diff --git a/gst/camerabin/camerabinimage.c b/gst/camerabin/camerabinimage.c
index da979f0f..8eea8d91 100644
--- a/gst/camerabin/camerabinimage.c
+++ b/gst/camerabin/camerabinimage.c
@@ -162,6 +162,8 @@ gst_camerabin_image_init (GstCameraBinImage * img,
static void
gst_camerabin_image_dispose (GstCameraBinImage * img)
{
+ GST_DEBUG_OBJECT (img, "disposing");
+
g_string_free (img->filename, TRUE);
img->filename = NULL;
@@ -184,7 +186,10 @@ gst_camerabin_image_change_state (GstElement * element,
{
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
GstCameraBinImage *img = GST_CAMERABIN_IMAGE (element);
- GstObject *camerabin = NULL;
+
+ GST_DEBUG_OBJECT (element, "changing state: %s -> %s",
+ gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
+ gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)));
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
@@ -221,12 +226,10 @@ gst_camerabin_image_change_state (GstElement * element,
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
- camerabin = gst_element_get_parent (img);
/* Write debug graph to file */
- GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (camerabin),
+ GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (GST_ELEMENT_PARENT (img)),
GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE |
GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS, "imagebin.playing");
- gst_object_unref (camerabin);
break;
case GST_STATE_CHANGE_READY_TO_NULL:
gst_camerabin_image_destroy_elements (img);
@@ -235,6 +238,11 @@ gst_camerabin_image_change_state (GstElement * element,
break;
}
+ GST_DEBUG_OBJECT (element, "changed state: %s -> %s = %s",
+ gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
+ gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)),
+ gst_element_state_change_return_get_name (ret));
+
return ret;
}
@@ -308,7 +316,7 @@ gst_camerabin_image_get_property (GObject * object, guint prop_id,
* static helper functions implementation
*/
-/**
+/*
* metadata_write_probe:
* @pad: sink pad of metadata muxer
* @buffer: received buffer
@@ -375,7 +383,7 @@ done:
}
-/**
+/*
* gst_camerabin_image_create_elements:
* @img: a pointer to #GstCameraBinImage object
*
@@ -480,7 +488,7 @@ done:
}
-/**
+/*
* gst_camerabin_image_destroy_elements:
* @img: a pointer to #GstCameraBinImage object
*
@@ -491,7 +499,7 @@ done:
static void
gst_camerabin_image_destroy_elements (GstCameraBinImage * img)
{
- GST_LOG ("destroying img elements");
+ GST_LOG ("destroying image elements");
gst_ghost_pad_set_target (GST_GHOST_PAD (img->sinkpad), NULL);
diff --git a/gst/camerabin/camerabinvideo.c b/gst/camerabin/camerabinvideo.c
index fef9ac26..d3de662e 100644
--- a/gst/camerabin/camerabinvideo.c
+++ b/gst/camerabin/camerabinvideo.c
@@ -242,11 +242,12 @@ gst_camerabin_video_set_property (GObject * object, guint prop_id,
switch (prop_id) {
case PROP_FILENAME:
g_string_assign (bin->filename, g_value_get_string (value));
+ GST_INFO_OBJECT (bin, "received filename: '%s'", bin->filename->str);
if (bin->sink) {
g_object_set (G_OBJECT (bin->sink), "location", bin->filename->str,
NULL);
} else {
- GST_INFO ("no sink, not setting name yet");
+ GST_INFO_OBJECT (bin, "no sink, not setting name yet");
}
break;
default:
@@ -290,7 +291,10 @@ gst_camerabin_video_change_state (GstElement * element,
{
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
GstCameraBinVideo *vid = GST_CAMERABIN_VIDEO (element);
- GstObject *camerabin = NULL;
+
+ GST_DEBUG_OBJECT (element, "changing state: %s -> %s",
+ gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
+ gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)));
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
@@ -314,7 +318,7 @@ gst_camerabin_video_change_state (GstElement * element,
case GST_STATE_CHANGE_PAUSED_TO_READY:
/* Set sink to NULL in order to write the file _now_ */
- GST_INFO ("write vid file: %s", vid->filename->str);
+ GST_INFO ("write video file: %s", vid->filename->str);
gst_element_set_locked_state (vid->sink, TRUE);
gst_element_set_state (vid->sink, GST_STATE_NULL);
break;
@@ -326,12 +330,10 @@ gst_camerabin_video_change_state (GstElement * element,
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
- camerabin = gst_element_get_parent (vid);
/* Write debug graph to file */
- GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (camerabin),
+ GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (GST_ELEMENT_PARENT (vid)),
GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE |
GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS, "videobin.playing");
- gst_object_unref (camerabin);
if (vid->pending_eos) {
/* Video bin is still paused, so push eos directly to video queue */
@@ -359,6 +361,11 @@ gst_camerabin_video_change_state (GstElement * element,
break;
}
+ GST_DEBUG_OBJECT (element, "changed state: %s -> %s = %s",
+ gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
+ gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)),
+ gst_element_state_change_return_get_name (ret));
+
return ret;
}
@@ -366,7 +373,7 @@ gst_camerabin_video_change_state (GstElement * element,
* static helper functions implementation
*/
-/**
+/*
* camerabin_video_pad_tee_src0_have_buffer:
* @pad: tee src pad leading to video encoding
* @event: received buffer
@@ -414,7 +421,7 @@ camerabin_video_pad_tee_src0_have_buffer (GstPad * pad, GstBuffer * buffer,
return TRUE;
}
-/**
+/*
* camerabin_video_pad_aud_src_have_buffer:
* @pad: audio source src pad
* @event: received buffer
@@ -455,7 +462,7 @@ camerabin_video_pad_aud_src_have_buffer (GstPad * pad, GstBuffer * buffer,
return TRUE;
}
-/**
+/*
* camerabin_video_sink_have_event:
* @pad: video bin sink pad
* @event: received event
@@ -498,7 +505,7 @@ camerabin_video_sink_have_event (GstPad * pad, GstEvent * event,
return ret;
}
-/**
+/*
* gst_camerabin_video_create_elements:
* @vid: a pointer to #GstCameraBinVideo
*
@@ -675,7 +682,7 @@ error:
}
-/**
+/*
* gst_camerabin_video_destroy_elements:
* @vid: a pointer to #GstCameraBinVideo
*
@@ -716,8 +723,6 @@ gst_camerabin_video_destroy_elements (GstCameraBinVideo * vid)
gst_event_unref (vid->pending_eos);
vid->pending_eos = NULL;
}
-
- return;
}
/*
diff --git a/gst/camerabin/gstcamerabin.c b/gst/camerabin/gstcamerabin.c
index f1cd897a..ca2e8e18 100644
--- a/gst/camerabin/gstcamerabin.c
+++ b/gst/camerabin/gstcamerabin.c
@@ -207,7 +207,11 @@ static guint camerabin_signals[LAST_SIGNAL];
#define DEFAULT_CAPTURE_HEIGHT 600
#define DEFAULT_FPS_N 0 /* makes it use the default */
#define DEFAULT_FPS_D 1
+
#define CAMERABIN_DEFAULT_VF_CAPS "video/x-raw-yuv,format=(fourcc)I420"
+#define CAMERABIN_MAX_VF_WIDTH 848
+#define CAMERABIN_MAX_VF_HEIGHT 848
+
/* Using "bilinear" as default zoom method */
#define CAMERABIN_DEFAULT_ZOOM_METHOD 1
@@ -215,19 +219,20 @@ static guint camerabin_signals[LAST_SIGNAL];
#define MAX_ZOOM 1000
#define ZOOM_1X MIN_ZOOM
+/* FIXME: this is v4l2camsrc specific */
#define DEFAULT_V4L2CAMSRC_DRIVER_NAME "omap3cam"
-/* internal element names */
+/* pipeline configuration */
+//#define USE_VIEWFINDER_COLOR_CONVERTER 1
+//#define USE_VIEWFINDER_SCALE 1
-#define USE_COLOR_CONVERTER 1
+/* internal element names */
/* FIXME: Make sure this can work with autovideosrc and use that. */
#define DEFAULT_SRC_VID_SRC "v4l2src"
-
#define DEFAULT_VIEW_SINK "autovideosink"
-#define CAMERABIN_MAX_VF_WIDTH 848
-#define CAMERABIN_MAX_VF_HEIGHT 848
+/* message names */
#define PREVIEW_MESSAGE_NAME "preview-image"
#define IMG_CAPTURED_MESSAGE_NAME "image-captured"
@@ -579,6 +584,7 @@ camerabin_create_src_elements (GstCameraBin * camera)
goto done;
/* Set default "driver-name" for v4l2camsrc if not set */
+ /* FIXME: v4l2camsrc specific */
if (g_object_class_find_property (G_OBJECT_GET_CLASS (camera->src_vid_src),
"driver-name")) {
g_object_get (G_OBJECT (camera->src_vid_src), "driver-name",
@@ -650,6 +656,7 @@ camerabin_create_view_elements (GstCameraBin * camera)
}
camera->pad_view_src = GST_PAD (pads->data);
+#ifdef USE_VIEWFINDER_CONVERTERS
/* Add videoscale in case we need to downscale frame for view finder */
if (!(camera->view_scale =
gst_camerabin_create_and_add_element (GST_BIN (camera),
@@ -663,7 +670,8 @@ camerabin_create_view_elements (GstCameraBin * camera)
"capsfilter"))) {
goto error;
}
-#ifdef USE_COLOR_CONVERTER
+#endif
+#ifdef USE_VIEWFINDER_COLOR_CONVERTER
if (!gst_camerabin_create_and_add_element (GST_BIN (camera),
"ffmpegcolorspace")) {
goto error;
@@ -822,6 +830,7 @@ camerabin_destroy_elements (GstCameraBin * camera)
}
camera->view_sink = NULL;
+ camera->aspect_filter = NULL;
camera->view_scale = NULL;
camera->view_in_sel = NULL;
@@ -943,18 +952,24 @@ static void
gst_camerabin_change_mode (GstCameraBin * camera, gint mode)
{
if (camera->mode != mode || !camera->active_bin) {
- GST_DEBUG_OBJECT (camera, "setting mode: %d", mode);
+ GST_DEBUG_OBJECT (camera, "setting mode: %d (old_mode=%d)",
+ mode, camera->mode);
/* Interrupt ongoing capture */
gst_camerabin_do_stop (camera);
camera->mode = mode;
if (camera->active_bin) {
+ GST_DEBUG_OBJECT (camera, "stopping active bin");
gst_element_set_state (camera->active_bin, GST_STATE_NULL);
}
if (camera->mode == MODE_IMAGE) {
GstStateChangeReturn state_ret;
camera->active_bin = camera->imgbin;
+ /* we can't go to playing as filesink would error out if it does not have
+ * a filename yet, we set the filename async with the buffer flow */
state_ret = gst_element_set_state (camera->active_bin, GST_STATE_READY);
+ GST_DEBUG_OBJECT (camera, "setting imagebin to ready: %s",
+ gst_element_state_change_return_get_name (state_ret));
if (state_ret == GST_STATE_CHANGE_FAILURE) {
GST_WARNING_OBJECT (camera, "state change failed");
@@ -979,7 +994,7 @@ static void
gst_camerabin_change_filename (GstCameraBin * camera, const gchar * name)
{
if (0 != strcmp (camera->filename->str, name)) {
- GST_DEBUG_OBJECT (camera, "changing filename from %s to %s",
+ GST_DEBUG_OBJECT (camera, "changing filename from '%s' to '%s'",
camera->filename->str, name);
g_string_assign (camera->filename, name);
}
@@ -1567,6 +1582,7 @@ gst_camerabin_start_image_capture (GstCameraBin * camera)
gst_element_state_change_return_get_name (state_ret));
if (state_ret != GST_STATE_CHANGE_FAILURE) {
+ GST_INFO_OBJECT (camera, "imagebin is PAUSED");
g_mutex_lock (camera->capture_mutex);
g_object_set (G_OBJECT (camera->src_out_sel), "resend-latest", TRUE,
"active-pad", camera->pad_src_img, NULL);
@@ -1766,6 +1782,8 @@ done:
after one captured still image */
gst_camerabin_finish_image_capture (camera);
+ GST_DEBUG_OBJECT (camera, "image captured, switching to viewfinder");
+
gst_camerabin_reset_to_view_finder (camera);
GST_DEBUG_OBJECT (camera, "switched back to viewfinder");
@@ -1823,7 +1841,7 @@ gst_camerabin_have_src_buffer (GstPad * pad, GstBuffer * buffer,
/* We can't send real EOS event, since it would switch the image queue
into "draining mode". Therefore we send our own custom eos and
catch & drop it later in queue's srcpad data probe */
- GST_DEBUG_OBJECT (camera, "sending eos to image queue");
+ GST_DEBUG_OBJECT (camera, "sending img-eos to image queue");
gst_camerabin_send_img_queue_custom_event (camera,
gst_structure_new ("img-eos", NULL));
@@ -1886,13 +1904,14 @@ gst_camerabin_have_queue_data (GstPad * pad, GstMiniObject * mini_obj,
if (GST_EVENT_TYPE (event) == GST_EVENT_TAG) {
GstTagList *tlist;
+ GST_DEBUG_OBJECT (camera, "queue sending taglist to image pipeline");
gst_event_parse_tag (event, &tlist);
gst_tag_list_insert (camera->event_tags, tlist, GST_TAG_MERGE_REPLACE);
ret = FALSE;
} else if (evs && gst_structure_has_name (evs, "img-filename")) {
const gchar *fname;
- GST_LOG_OBJECT (camera, "queue setting image filename to imagebin");
+ GST_DEBUG_OBJECT (camera, "queue setting image filename to imagebin");
fname = gst_structure_get_string (evs, "filename");
g_object_set (G_OBJECT (camera->imgbin), "filename", fname, NULL);
@@ -1902,7 +1921,7 @@ gst_camerabin_have_queue_data (GstPad * pad, GstMiniObject * mini_obj,
ret = FALSE;
} else if (evs && gst_structure_has_name (evs, "img-eos")) {
- GST_LOG_OBJECT (camera, "queue sending EOS to image pipeline");
+ GST_DEBUG_OBJECT (camera, "queue sending EOS to image pipeline");
gst_pad_set_blocked_async (camera->pad_src_queue, TRUE,
(GstPadBlockCallback) image_pad_blocked, camera);
gst_element_send_event (camera->imgbin, gst_event_new_eos ());
@@ -2140,10 +2159,10 @@ gst_camerabin_find_better_framerate (GstCameraBin * camera, GstStructure * st,
gint res, comparison;
if (camera->night_mode) {
- GST_LOG_OBJECT (camera, "finding min framerate");
+ GST_LOG_OBJECT (camera, "finding min framerate in %" GST_PTR_FORMAT, st);
comparison = GST_VALUE_LESS_THAN;
} else {
- GST_LOG_OBJECT (camera, "finding max framerate");
+ GST_LOG_OBJECT (camera, "finding max framerate in %" GST_PTR_FORMAT, st);
comparison = GST_VALUE_GREATER_THAN;
}
@@ -2198,6 +2217,7 @@ gst_camerabin_find_better_framerate (GstCameraBin * camera, GstStructure * st,
static void
gst_camerabin_update_aspect_filter (GstCameraBin * camera, GstCaps * new_caps)
{
+#ifdef USE_VIEWFINDER_SCALE
GstCaps *sink_caps, *ar_caps;
GstStructure *st;
gint in_w = 0, in_h = 0, sink_w = 0, sink_h = 0, target_w = 0, target_h = 0;
@@ -2266,6 +2286,7 @@ gst_camerabin_update_aspect_filter (GstCameraBin * camera, GstCaps * new_caps)
ar_caps);
g_object_set (G_OBJECT (camera->aspect_filter), "caps", ar_caps, NULL);
gst_caps_unref (ar_caps);
+#endif
}
/*
@@ -2725,6 +2746,7 @@ gst_camerabin_init (GstCameraBin * camera, GstCameraBinClass * gclass)
/* view finder elements */
camera->view_in_sel = NULL;
camera->view_scale = NULL;
+ camera->aspect_filter = NULL;
camera->view_sink = NULL;
memset (&camera->photo_settings, 0, sizeof (GstPhotoSettings));
@@ -2971,6 +2993,10 @@ gst_camerabin_change_state (GstElement * element, GstStateChange transition)
GstCameraBin *camera = GST_CAMERABIN (element);
GstStateChangeReturn ret;
+ GST_DEBUG_OBJECT (element, "changing state: %s -> %s",
+ gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
+ gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)));
+
switch (transition) {
case GST_STATE_CHANGE_NULL_TO_READY:
if (!camerabin_create_elements (camera)) {
@@ -3000,9 +3026,13 @@ gst_camerabin_change_state (GstElement * element, GstStateChange transition)
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ GST_DEBUG_OBJECT (element, "after chaining up: %s -> %s = %s",
+ gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
+ gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)),
+ gst_element_state_change_return_get_name (ret));
+
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
- GST_LOG_OBJECT (camera, "PAUSED to READY");
g_mutex_lock (camera->capture_mutex);
if (camera->capturing) {
GST_WARNING_OBJECT (camera, "was capturing when changing to READY");
@@ -3022,6 +3052,10 @@ gst_camerabin_change_state (GstElement * element, GstStateChange transition)
}
done:
+ GST_DEBUG_OBJECT (element, "changed state: %s -> %s = %s",
+ gst_element_state_get_name (GST_STATE_TRANSITION_CURRENT (transition)),
+ gst_element_state_get_name (GST_STATE_TRANSITION_NEXT (transition)),
+ gst_element_state_change_return_get_name (ret));
return ret;
}
@@ -3128,8 +3162,11 @@ gst_camerabin_user_start (GstCameraBin * camera)
if (camera->active_bin) {
if (camera->active_bin == camera->imgbin) {
+ GST_INFO_OBJECT (camera, "starting image capture");
gst_camerabin_start_image_capture (camera);
} else if (camera->active_bin == camera->vidbin) {
+ GST_INFO_OBJECT (camera,
+ "setting video filename and starting video capture");
g_object_set (G_OBJECT (camera->active_bin), "filename",
camera->filename->str, NULL);
gst_camerabin_start_video_recording (camera);
diff --git a/gst/h264parse/gsth264parse.c b/gst/h264parse/gsth264parse.c
index bea4a600..ebc4360d 100644
--- a/gst/h264parse/gsth264parse.c
+++ b/gst/h264parse/gsth264parse.c
@@ -464,6 +464,16 @@ gst_h264_parse_chain_forward (GstH264Parse * h264parse, gboolean discont,
for (i = 0; i < h264parse->nal_length_size; i++)
nalu_size = (nalu_size << 8) | data[i];
+ GST_LOG_OBJECT (h264parse, "got NALU size %u", nalu_size);
+
+ /* check for invalid NALU sizes, assume the size is the available bytes
+ * when something is fishy */
+ if (nalu_size <= 1 || nalu_size + h264parse->nal_length_size > avail) {
+ nalu_size = avail - h264parse->nal_length_size;
+ GST_DEBUG_OBJECT (h264parse, "fixing invalid NALU size to %u",
+ nalu_size);
+ }
+
/* Packetized format, see if we have to split it, usually splitting is not
* a good idea as decoders have no way of handling it. */
if (h264parse->split_packetized) {
diff --git a/gst/mpegvideoparse/mpegvideoparse.c b/gst/mpegvideoparse/mpegvideoparse.c
index d0badbfc..eefa6843 100644
--- a/gst/mpegvideoparse/mpegvideoparse.c
+++ b/gst/mpegvideoparse/mpegvideoparse.c
@@ -961,7 +961,7 @@ plugin_init (GstPlugin * plugin)
"MPEG Video Parser");
return gst_element_register (plugin, "mpegvideoparse",
- GST_RANK_SECONDARY - 1, GST_TYPE_MPEGVIDEOPARSE);
+ GST_RANK_PRIMARY, GST_TYPE_MPEGVIDEOPARSE);
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
diff --git a/sys/Makefile.am b/sys/Makefile.am
index 161f3339..15c89e17 100644
--- a/sys/Makefile.am
+++ b/sys/Makefile.am
@@ -70,8 +70,14 @@ else
ACM_DIR=
endif
-SUBDIRS = $(ACM_DIR) $(DIRECTDRAW_DIR) $(DVB_DIR) $(FBDEV_DIR) $(OSS4_DIR) $(OSX_VIDEO_DIR) $(QT_DIR) $(VCD_DIR) $(WININET_DIR)
+if USE_VDPAU
+VDPAU_DIR=vdpau
+else
+VDPAU_DIR=
+endif
+
+SUBDIRS = $(ACM_DIR) $(DIRECTDRAW_DIR) $(DVB_DIR) $(FBDEV_DIR) $(OSS4_DIR) $(OSX_VIDEO_DIR) $(QT_DIR) $(VCD_DIR) $(VDPAU_DIR) $(WININET_DIR)
DIST_SUBDIRS = acmenc acmmp3dec directdraw dvb fbdev dshowdecwrapper dshowsrcwrapper dshowvideosink \
- oss4 osxvideo qtwrapper vcd wasapi wininet winks winscreencap
+ oss4 osxvideo qtwrapper vcd vdpau wasapi wininet winks winscreencap
diff --git a/sys/vdpau/Makefile.am b/sys/vdpau/Makefile.am
new file mode 100644
index 00000000..93a7513a
--- /dev/null
+++ b/sys/vdpau/Makefile.am
@@ -0,0 +1,27 @@
+plugin_LTLIBRARIES = libgstvdpau.la
+
+libgstvdpau_la_SOURCES = \
+ gstvdpdevice.c \
+ gstvdpmpegdec.c \
+ mpegutil.c \
+ gstvdpvideoyuv.c \
+ gstvdpvideobuffer.c \
+ gstvdp.c \
+ gstvdpyuvvideo.c
+
+libgstvdpau_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) $(X11_CFLAGS) $(VDPAU_CFLAGS)
+libgstvdpau_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \
+ $(GST_PLUGINS_BASE_LIBS) $(X11_LIBS) -lgstvideo-$(GST_MAJORMINOR) \
+ $(VDPAU_LIBS)
+libgstvdpau_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
+libgstvdpau_la_LIBTOOLFLAGS = --tag=disable-static
+
+noinst_HEADERS = \
+ gstvdpdevice.h \
+ gstvdpmpegdec.h \
+ mpegutil.h \
+ gstvdpvideoyuv.h \
+ gstvdpvideobuffer.h \
+ gstvdpyuvvideo.h
+
+
diff --git a/sys/vdpau/gstvdp.c b/sys/vdpau/gstvdp.c
new file mode 100644
index 00000000..5c524968
--- /dev/null
+++ b/sys/vdpau/gstvdp.c
@@ -0,0 +1,29 @@
+#ifdef HAVE_CONFIG_H
+# include <config.h>
+#endif
+
+
+#include <gst/gst.h>
+
+#include "gstvdpmpegdec.h"
+#include "gstvdpvideoyuv.h"
+#include "gstvdpyuvvideo.h"
+
+static gboolean
+vdpau_init (GstPlugin * vdpau_plugin)
+{
+ gst_element_register (vdpau_plugin, "vdpaumpegdec",
+ GST_RANK_PRIMARY - 1, GST_TYPE_VDP_MPEG_DEC);
+ gst_element_register (vdpau_plugin, "vdpauvideoyuv",
+ GST_RANK_PRIMARY, GST_TYPE_VDP_VIDEO_YUV);
+ gst_element_register (vdpau_plugin, "vdpauyuvvideo",
+ GST_RANK_PRIMARY, GST_TYPE_VDP_YUV_VIDEO);
+
+ return TRUE;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ "vdpau",
+ "Various elements utilizing VDPAU",
+ vdpau_init, VERSION, "LGPL", "GStreamer", "http://gstreamer.net/")
diff --git a/sys/vdpau/gstvdpdevice.c b/sys/vdpau/gstvdpdevice.c
new file mode 100644
index 00000000..4ed1b177
--- /dev/null
+++ b/sys/vdpau/gstvdpdevice.c
@@ -0,0 +1,266 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include <vdpau/vdpau_x11.h>
+#include <gst/gst.h>
+
+#include "gstvdpdevice.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_vdp_device_debug);
+#define GST_CAT_DEFAULT gst_vdp_device_debug
+
+enum
+{
+ PROP_0,
+ PROP_DISPLAY
+};
+
+
+
+G_DEFINE_TYPE (GstVdpDevice, gst_vdp_device, G_TYPE_OBJECT);
+
+static void
+gst_vdp_device_init (GstVdpDevice * device)
+{
+ device->display_name = NULL;
+ device->display = NULL;
+ device->device = VDP_INVALID_HANDLE;
+}
+
+static void
+gst_vdp_device_finalize (GObject * object)
+{
+ GstVdpDevice *device = (GstVdpDevice *) object;
+
+ device->vdp_device_destroy (device->device);
+ XCloseDisplay (device->display);
+ g_free (device->display_name);
+
+ G_OBJECT_CLASS (gst_vdp_device_parent_class)->finalize (object);
+}
+
+static void
+gst_vdp_device_constructed (GObject * object)
+{
+ GstVdpDevice *device = (GstVdpDevice *) object;
+ gint screen;
+ VdpStatus status;
+ gint i;
+
+ typedef struct
+ {
+ gint id;
+ void *func;
+ } VdpFunction;
+
+ VdpFunction vdp_function[] = {
+ {VDP_FUNC_ID_DEVICE_DESTROY, &device->vdp_device_destroy},
+ {VDP_FUNC_ID_VIDEO_SURFACE_CREATE,
+ &device->vdp_video_surface_create},
+ {VDP_FUNC_ID_VIDEO_SURFACE_DESTROY,
+ &device->vdp_video_surface_destroy},
+ {VDP_FUNC_ID_VIDEO_SURFACE_QUERY_CAPABILITIES,
+ &device->vdp_video_surface_query_capabilities},
+ {VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
+ &device->vdp_video_surface_query_ycbcr_capabilities},
+ {VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR,
+ &device->vdp_video_surface_get_bits_ycbcr},
+ {VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR,
+ &device->vdp_video_surface_put_bits_ycbcr},
+ {VDP_FUNC_ID_VIDEO_SURFACE_GET_PARAMETERS,
+ &device->vdp_video_surface_get_parameters},
+ {VDP_FUNC_ID_DECODER_CREATE, &device->vdp_decoder_create},
+ {VDP_FUNC_ID_DECODER_RENDER, &device->vdp_decoder_render},
+ {VDP_FUNC_ID_DECODER_DESTROY, &device->vdp_decoder_destroy},
+ {VDP_FUNC_ID_DECODER_QUERY_CAPABILITIES,
+ &device->vdp_decoder_query_capabilities},
+ {VDP_FUNC_ID_DECODER_GET_PARAMETERS,
+ &device->vdp_decoder_get_parameters},
+ {0, NULL}
+ };
+
+ device->display = XOpenDisplay (device->display_name);
+ if (!device->display) {
+ GST_ERROR_OBJECT (device, "Could not open X display with name: %s",
+ device->display_name);
+ return;
+ }
+
+ screen = DefaultScreen (device->display);
+ status =
+ vdp_device_create_x11 (device->display, screen, &device->device,
+ &device->vdp_get_proc_address);
+ if (status != VDP_STATUS_OK) {
+ GST_ERROR_OBJECT (device, "Could not create VDPAU device");
+ XCloseDisplay (device->display);
+ device->display = NULL;
+
+ return;
+ }
+
+ status = device->vdp_get_proc_address (device->device,
+ VDP_FUNC_ID_GET_ERROR_STRING, (void **) &device->vdp_get_error_string);
+ if (status != VDP_STATUS_OK) {
+ GST_ERROR_OBJECT (device,
+ "Could not get vdp_get_error_string function pointer from VDPAU");
+ goto error;
+ }
+
+ for (i = 0; vdp_function[i].func != NULL; i++) {
+ status = device->vdp_get_proc_address (device->device,
+ vdp_function[i].id, vdp_function[i].func);
+
+ if (status != VDP_STATUS_OK) {
+ GST_ERROR_OBJECT (device, "Could not get function pointer from VDPAU,"
+ " error returned was: %s", device->vdp_get_error_string (status));
+ goto error;
+ }
+ }
+
+ return;
+
+error:
+ XCloseDisplay (device->display);
+ device->display = NULL;
+
+ if (device->device != VDP_INVALID_HANDLE) {
+ device->vdp_device_destroy (device->device);
+ device->device = VDP_INVALID_HANDLE;
+ }
+}
+
+static void
+gst_vdp_device_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstVdpDevice *device;
+
+ g_return_if_fail (GST_IS_VDP_DEVICE (object));
+
+ device = (GstVdpDevice *) object;
+
+ switch (prop_id) {
+ case PROP_DISPLAY:
+ device->display_name = g_value_dup_string (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_vdp_device_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstVdpDevice *device;
+
+ g_return_if_fail (GST_IS_VDP_DEVICE (object));
+
+ device = (GstVdpDevice *) object;
+
+ switch (prop_id) {
+ case PROP_DISPLAY:
+ g_value_set_string (value, device->display_name);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_vdp_device_class_init (GstVdpDeviceClass * klass)
+{
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+
+ object_class->constructed = gst_vdp_device_constructed;
+ object_class->finalize = gst_vdp_device_finalize;
+ object_class->get_property = gst_vdp_device_get_property;
+ object_class->set_property = gst_vdp_device_set_property;
+
+ g_object_class_install_property (object_class,
+ PROP_DISPLAY,
+ g_param_spec_string ("display",
+ "Display",
+ "X Display Name",
+ "", G_PARAM_READABLE | G_PARAM_WRITABLE | G_PARAM_CONSTRUCT_ONLY));
+}
+
+GstVdpDevice *
+gst_vdp_device_new (const gchar * display_name)
+{
+ GstVdpDevice *device;
+
+ device = g_object_new (GST_TYPE_VDP_DEVICE, "display", display_name, NULL);
+
+ return device;
+}
+
+static void
+device_destroyed_cb (gpointer data, GObject * object)
+{
+ GHashTable *devices_hash = data;
+ GHashTableIter iter;
+ gpointer device;
+
+ GST_DEBUG ("Removing object from hash table");
+
+ g_hash_table_iter_init (&iter, devices_hash);
+ while (g_hash_table_iter_next (&iter, NULL, &device)) {
+ if (device == object) {
+ g_hash_table_iter_remove (&iter);
+ break;
+ }
+ }
+}
+
+GstVdpDevice *
+gst_vdp_get_device (const gchar * display_name)
+{
+ static gsize once = 0;
+ static GHashTable *devices_hash;
+ GstVdpDevice *device;
+
+ if (g_once_init_enter (&once)) {
+ GST_DEBUG_CATEGORY_INIT (gst_vdp_device_debug, "vdpaudevice",
+ 0, "vdpaudevice");
+ devices_hash =
+ g_hash_table_new_full (g_str_hash, g_str_equal, g_free, NULL);
+ g_once_init_leave (&once, 1);
+ }
+
+ if (display_name)
+ device = g_hash_table_lookup (devices_hash, display_name);
+ else
+ device = g_hash_table_lookup (devices_hash, "");
+
+ if (!device) {
+ device = gst_vdp_device_new (display_name);
+ g_object_weak_ref (G_OBJECT (device), device_destroyed_cb, devices_hash);
+ if (display_name)
+ g_hash_table_insert (devices_hash, g_strdup (display_name), device);
+ else
+ g_hash_table_insert (devices_hash, g_strdup (""), device);
+ } else
+ g_object_ref (device);
+
+ return device;
+}
diff --git a/sys/vdpau/gstvdpdevice.h b/sys/vdpau/gstvdpdevice.h
new file mode 100644
index 00000000..ac036d73
--- /dev/null
+++ b/sys/vdpau/gstvdpdevice.h
@@ -0,0 +1,132 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef _GST_VDP_DEVICE_H_
+#define _GST_VDP_DEVICE_H_
+
+#include <X11/Xlib.h>
+#include <vdpau/vdpau.h>
+
+#include <glib-object.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_VDP_DEVICE (gst_vdp_device_get_type ())
+#define GST_VDP_DEVICE(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_DEVICE, GstVdpDevice))
+#define GST_VDP_DEVICE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_VDP_DEVICE, GstVdpDeviceClass))
+#define GST_IS_VDP_DEVICE(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_DEVICE))
+#define GST_IS_VDP_DEVICE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_VDP_DEVICE))
+#define GST_VDP_DEVICE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VDP_DEVICE, GstVdpDeviceClass))
+
+typedef struct _GstVdpDeviceClass GstVdpDeviceClass;
+typedef struct _GstVdpDevice GstVdpDevice;
+
+struct _GstVdpDeviceClass
+{
+ GObjectClass parent_class;
+};
+
+struct _GstVdpDevice
+{
+ GObject object;
+
+ gchar *display_name;
+ Display *display;
+ VdpDevice device;
+
+ VdpDeviceDestroy *vdp_device_destroy;
+ VdpGetProcAddress *vdp_get_proc_address;
+ VdpGetErrorString *vdp_get_error_string;
+
+ VdpVideoSurfaceCreate *vdp_video_surface_create;
+ VdpVideoSurfaceDestroy *vdp_video_surface_destroy;
+ VdpVideoSurfaceQueryCapabilities *vdp_video_surface_query_capabilities;
+ VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *vdp_video_surface_query_ycbcr_capabilities;
+ VdpVideoSurfaceGetParameters *vdp_video_surface_get_parameters;
+ VdpVideoSurfaceGetBitsYCbCr *vdp_video_surface_get_bits_ycbcr;
+ VdpVideoSurfacePutBitsYCbCr *vdp_video_surface_put_bits_ycbcr;
+
+ VdpDecoderCreate *vdp_decoder_create;
+ VdpDecoderDestroy *vdp_decoder_destroy;
+ VdpDecoderRender *vdp_decoder_render;
+ VdpDecoderQueryCapabilities *vdp_decoder_query_capabilities;
+ VdpDecoderGetParameters *vdp_decoder_get_parameters;
+};
+
+typedef struct
+{
+ VdpChromaType chroma_type;
+ VdpYCbCrFormat format;
+ guint32 fourcc;
+} VdpauFormats;
+
+#define N_CHROMA_TYPES 3
+#define N_FORMATS 7
+
+static const VdpChromaType chroma_types[N_CHROMA_TYPES] =
+ { VDP_CHROMA_TYPE_420, VDP_CHROMA_TYPE_422, VDP_CHROMA_TYPE_444 };
+
+static const VdpauFormats formats[N_FORMATS] = {
+ {
+ VDP_CHROMA_TYPE_420,
+ VDP_YCBCR_FORMAT_NV12,
+ GST_MAKE_FOURCC ('N', 'V', '1', '2')
+ },
+ {
+ VDP_CHROMA_TYPE_422,
+ VDP_YCBCR_FORMAT_UYVY,
+ GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y')
+ },
+ {
+ VDP_CHROMA_TYPE_444,
+ VDP_YCBCR_FORMAT_V8U8Y8A8,
+ GST_MAKE_FOURCC ('A', 'Y', 'U', 'V')
+ },
+ {
+ VDP_CHROMA_TYPE_444,
+ VDP_YCBCR_FORMAT_Y8U8V8A8,
+ GST_MAKE_FOURCC ('A', 'V', 'U', 'Y')
+ },
+ {
+ VDP_CHROMA_TYPE_422,
+ VDP_YCBCR_FORMAT_YUYV,
+ GST_MAKE_FOURCC ('Y', 'U', 'Y', 'V')
+ },
+ {
+ VDP_CHROMA_TYPE_420,
+ VDP_YCBCR_FORMAT_YV12,
+ GST_MAKE_FOURCC ('Y', 'V', '1', '2')
+ },
+ {
+ VDP_CHROMA_TYPE_420,
+ VDP_YCBCR_FORMAT_YV12,
+ GST_MAKE_FOURCC ('I', '4', '2', '0')
+ }
+};
+
+GType gst_vdp_device_get_type (void) G_GNUC_CONST;
+
+GstVdpDevice *gst_vdp_device_new (const gchar *display_name);
+
+GstVdpDevice *gst_vdp_get_device (const gchar *display_name);
+
+G_END_DECLS
+
+#endif /* _GST_VDP_DEVICE_H_ */
diff --git a/sys/vdpau/gstvdpmpegdec.c b/sys/vdpau/gstvdpmpegdec.c
new file mode 100644
index 00000000..674146c4
--- /dev/null
+++ b/sys/vdpau/gstvdpmpegdec.c
@@ -0,0 +1,1156 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:element-vdpaumpegdec
+ *
+ * FIXME: Describe vdpaumpegdec here.
+ *
+ * <refsect2>
+ * <title>Example launch line</title>
+ * |[
+ * gst-launch -v -m fakesrc ! vdpaumpegdec ! fakesink silent=TRUE
+ * ]|
+ * </refsect2>
+ */
+
+#ifdef HAVE_CONFIG_H
+# include <config.h>
+#endif
+
+#include <gst/gst.h>
+#include <gst/base/gstbytereader.h>
+#include <gst/base/gstbitreader.h>
+#include <string.h>
+
+#include "mpegutil.h"
+#include "gstvdpmpegdec.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_vdp_mpeg_dec_debug);
+#define GST_CAT_DEFAULT gst_vdp_mpeg_dec_debug
+
+/* Filter signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+ PROP_DISPLAY
+};
+
+/* the capabilities of the inputs and outputs.
+ *
+ * describe the real formats here.
+ */
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/mpeg, mpegversion = (int) [ 1, 2 ], "
+ "systemstream = (boolean) false, parsed = (boolean) true")
+ );
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VDP_VIDEO_CAPS));
+
+#define DEBUG_INIT(bla) \
+GST_DEBUG_CATEGORY_INIT (gst_vdp_mpeg_dec_debug, "vdpaumpegdec", 0, "VDPAU powered mpeg decoder");
+
+GST_BOILERPLATE_FULL (GstVdpMpegDec, gst_vdp_mpeg_dec,
+ GstElement, GST_TYPE_ELEMENT, DEBUG_INIT);
+
+static void gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info);
+static void gst_vdp_mpeg_dec_finalize (GObject * object);
+static void gst_vdp_mpeg_dec_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_vdp_mpeg_dec_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+guint8 *
+mpeg_util_find_start_code (guint32 * sync_word, guint8 * cur, guint8 * end)
+{
+ guint32 code;
+
+ if (G_UNLIKELY (cur == NULL))
+ return NULL;
+
+ code = *sync_word;
+
+ while (cur < end) {
+ code <<= 8;
+
+ if (code == 0x00000100) {
+ /* Reset the sync word accumulator */
+ *sync_word = 0xffffffff;
+ return cur;
+ }
+
+ /* Add the next available byte to the collected sync word */
+ code |= *cur++;
+ }
+
+ *sync_word = code;
+ return NULL;
+}
+
+typedef struct
+{
+ GstBuffer *buffer;
+ guint8 *cur;
+ guint8 *end;
+} GstVdpMpegPacketizer;
+
+static GstBuffer *
+gst_vdp_mpeg_packetizer_get_next_packet (GstVdpMpegPacketizer * packetizer)
+{
+ guint32 sync_word = 0xffffff;
+ guint8 *packet_start;
+ guint8 *packet_end;
+
+ if (!packetizer->cur)
+ return NULL;
+
+ packet_start = packetizer->cur - 3;
+ packetizer->cur = packet_end = mpeg_util_find_start_code (&sync_word,
+ packetizer->cur, packetizer->end);
+
+ if (packet_end)
+ packet_end -= 3;
+ else
+ packet_end = packetizer->end;
+
+ return gst_buffer_create_sub (packetizer->buffer,
+ packet_start - GST_BUFFER_DATA (packetizer->buffer),
+ packet_end - packet_start);
+}
+
+static void
+gst_vdp_mpeg_packetizer_init (GstVdpMpegPacketizer * packetizer,
+ GstBuffer * buffer)
+{
+ guint32 sync_word = 0xffffffff;
+
+ packetizer->buffer = buffer;
+ packetizer->end = GST_BUFFER_DATA (buffer) + GST_BUFFER_SIZE (buffer);
+ packetizer->cur = mpeg_util_find_start_code (&sync_word,
+ GST_BUFFER_DATA (buffer), packetizer->end);
+}
+
+static gboolean
+gst_vdp_mpeg_dec_set_caps (GstPad * pad, GstCaps * caps)
+{
+ GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad));
+ GstStructure *structure;
+
+ gint width, height;
+ gint fps_n, fps_d;
+ gint par_n, par_d;
+ gboolean interlaced = FALSE;
+
+ GstCaps *src_caps;
+ gboolean res;
+
+ const GValue *value;
+ VdpDecoderProfile profile;
+ GstVdpDevice *device;
+ VdpStatus status;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ /* create src_pad caps */
+ gst_structure_get_int (structure, "width", &width);
+ gst_structure_get_int (structure, "height", &height);
+ gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d);
+ gst_structure_get_fraction (structure, "pixel-aspect-ratio", &par_n, &par_d);
+ gst_structure_get_boolean (structure, "interlaced", &interlaced);
+
+ src_caps = gst_caps_new_simple ("video/x-vdpau-video",
+ "device", G_TYPE_OBJECT, mpeg_dec->device,
+ "chroma-type", G_TYPE_INT, VDP_CHROMA_TYPE_420,
+ "width", G_TYPE_INT, width,
+ "height", G_TYPE_INT, height,
+ "framerate", GST_TYPE_FRACTION, fps_n, fps_d,
+ "pixel-aspect-ratio", GST_TYPE_FRACTION, par_n, par_d,
+ "interlaced", G_TYPE_BOOLEAN, interlaced, NULL);
+
+ GST_DEBUG_OBJECT (mpeg_dec, "Setting source caps to %" GST_PTR_FORMAT,
+ src_caps);
+
+ res = gst_pad_set_caps (mpeg_dec->src, src_caps);
+ gst_caps_unref (src_caps);
+ if (!res)
+ goto done;
+
+ mpeg_dec->width = width;
+ mpeg_dec->height = height;
+ mpeg_dec->fps_n = fps_n;
+ mpeg_dec->fps_d = fps_d;
+ mpeg_dec->interlaced = interlaced;
+
+ /* parse caps to setup decoder */
+ gst_structure_get_int (structure, "mpegversion", &mpeg_dec->version);
+ if (mpeg_dec->version == 1)
+ profile = VDP_DECODER_PROFILE_MPEG1;
+
+ value = gst_structure_get_value (structure, "codec_data");
+ if (value) {
+ GstBuffer *codec_data, *buf;
+ GstVdpMpegPacketizer packetizer;
+
+ codec_data = gst_value_get_buffer (value);
+ gst_vdp_mpeg_packetizer_init (&packetizer, codec_data);
+ if ((buf = gst_vdp_mpeg_packetizer_get_next_packet (&packetizer))) {
+ MPEGSeqHdr hdr;
+ guint32 bitrate;
+
+ mpeg_util_parse_sequence_hdr (&hdr, buf);
+
+ memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix,
+ &hdr.intra_quantizer_matrix, 64);
+ memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix,
+ &hdr.non_intra_quantizer_matrix, 64);
+
+ bitrate = hdr.bitrate;
+ gst_buffer_unref (buf);
+
+ if ((buf = gst_vdp_mpeg_packetizer_get_next_packet (&packetizer))) {
+ MPEGSeqExtHdr ext;
+
+ mpeg_util_parse_sequence_extension (&ext, buf);
+ if (mpeg_dec->version != 1) {
+ switch (ext.profile) {
+ case 5:
+ profile = VDP_DECODER_PROFILE_MPEG2_SIMPLE;
+ break;
+ default:
+ profile = VDP_DECODER_PROFILE_MPEG2_MAIN;
+ break;
+ }
+ }
+
+ bitrate += (ext.bitrate_ext << 18);
+ gst_buffer_unref (buf);
+ }
+
+ mpeg_dec->duration =
+ gst_util_uint64_scale (1, GST_SECOND * mpeg_dec->fps_d,
+ mpeg_dec->fps_n);
+
+ mpeg_dec->byterate = bitrate * 50;
+ GST_DEBUG ("byterate: %" G_GINT64_FORMAT, mpeg_dec->byterate);
+ }
+ }
+
+ device = mpeg_dec->device;
+
+ if (mpeg_dec->decoder != VDP_INVALID_HANDLE) {
+ device->vdp_decoder_destroy (mpeg_dec->decoder);
+ mpeg_dec->decoder = VDP_INVALID_HANDLE;
+ }
+
+ status = device->vdp_decoder_create (device->device, profile, mpeg_dec->width,
+ mpeg_dec->height, 2, &mpeg_dec->decoder);
+ if (status != VDP_STATUS_OK) {
+ GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
+ ("Could not create vdpau decoder"),
+ ("Error returned from vdpau was: %s",
+ device->vdp_get_error_string (status)));
+ res = FALSE;
+ goto done;
+ }
+ res = TRUE;
+
+done:
+ gst_object_unref (mpeg_dec);
+
+ return res;
+}
+
+GstFlowReturn
+gst_vdp_mpeg_dec_push_video_buffer (GstVdpMpegDec * mpeg_dec,
+ GstVdpVideoBuffer * buffer)
+{
+ gint64 byterate;
+
+ if (GST_BUFFER_TIMESTAMP (buffer) == GST_CLOCK_TIME_NONE
+ && GST_CLOCK_TIME_IS_VALID (mpeg_dec->next_timestamp)) {
+ GST_BUFFER_TIMESTAMP (buffer) = mpeg_dec->next_timestamp;
+ } else if (GST_BUFFER_TIMESTAMP (buffer) == GST_CLOCK_TIME_NONE) {
+ GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (mpeg_dec->frame_nr,
+ GST_SECOND * mpeg_dec->fps_d, mpeg_dec->fps_n);
+ }
+
+ if (mpeg_dec->seeking) {
+ GstEvent *event;
+
+ event = gst_event_new_new_segment (FALSE,
+ mpeg_dec->segment.rate, GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buffer),
+ mpeg_dec->segment.stop, GST_BUFFER_TIMESTAMP (buffer));
+
+ gst_pad_push_event (mpeg_dec->src, event);
+
+ mpeg_dec->seeking = FALSE;
+ }
+
+ mpeg_dec->next_timestamp = GST_BUFFER_TIMESTAMP (buffer) +
+ GST_BUFFER_DURATION (buffer);
+
+ gst_segment_set_last_stop (&mpeg_dec->segment, GST_FORMAT_TIME,
+ GST_BUFFER_TIMESTAMP (buffer));
+
+ mpeg_dec->accumulated_duration += GST_BUFFER_DURATION (buffer);
+ mpeg_dec->accumulated_size += GST_BUFFER_SIZE (buffer);
+ byterate = gst_util_uint64_scale (mpeg_dec->accumulated_size, GST_SECOND,
+ mpeg_dec->accumulated_duration);
+ GST_DEBUG ("byterate: %" G_GINT64_FORMAT, mpeg_dec->byterate);
+
+ mpeg_dec->byterate = (mpeg_dec->byterate + byterate) / 2;
+
+ gst_buffer_set_caps (GST_BUFFER (buffer), GST_PAD_CAPS (mpeg_dec->src));
+
+ GST_DEBUG_OBJECT (mpeg_dec,
+ "Pushing buffer with timestamp: %" GST_TIME_FORMAT
+ " frame_nr: %" G_GINT64_FORMAT,
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
+ GST_BUFFER_OFFSET (buffer));
+
+ return gst_pad_push (mpeg_dec->src, GST_BUFFER (buffer));
+}
+
+/* Decode the currently accumulated picture.
+ *
+ * Hands every slice queued in mpeg_dec->adapter to the VDPAU decoder as
+ * one bitstream buffer and renders it into a freshly allocated
+ * GstVdpVideoBuffer.  B frames are pushed downstream immediately; I/P
+ * frames are kept as the new backward reference while the previous
+ * backward reference is pushed downstream and becomes the forward
+ * reference.
+ *
+ * timestamp: presentation timestamp stamped on the output buffer
+ * size:      byte count recorded on the output buffer (stream size,
+ *            used by the caller for byterate accounting)
+ *
+ * Returns GST_FLOW_ERROR when vdp_decoder_render() fails, GST_FLOW_OK
+ * otherwise (also when the frame is only kept as a reference).
+ */
+static GstFlowReturn
+gst_vdp_mpeg_dec_decode (GstVdpMpegDec * mpeg_dec,
+    GstClockTime timestamp, gint64 size)
+{
+  VdpPictureInfoMPEG1Or2 *info;
+  GstBuffer *buffer;
+  GstVdpVideoBuffer *outbuf;
+  VdpVideoSurface surface;
+  GstVdpDevice *device;
+  VdpBitstreamBuffer vbit[1];
+  VdpStatus status;
+
+  info = &mpeg_dec->vdp_info;
+
+  /* all slices queued for this picture form one contiguous bitstream chunk */
+  buffer = gst_adapter_take_buffer (mpeg_dec->adapter,
+      gst_adapter_available (mpeg_dec->adapter));
+
+  outbuf = gst_vdp_video_buffer_new (mpeg_dec->device, VDP_CHROMA_TYPE_420,
+      mpeg_dec->width, mpeg_dec->height);
+  GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+  GST_BUFFER_DURATION (outbuf) = mpeg_dec->duration;
+  GST_BUFFER_OFFSET (outbuf) = mpeg_dec->frame_nr;
+  /* NOTE(review): the buffer size is set to the parsed stream size, not
+   * the surface data size — presumably for byterate bookkeeping; confirm */
+  GST_BUFFER_SIZE (outbuf) = size;
+
+  /* only I frames are independently decodable */
+  if (info->picture_coding_type == I_FRAME)
+    GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+  else
+    GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+  if (info->top_field_first)
+    GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_TFF);
+  else
+    GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_TFF);
+
+
+  /* rotate reference frames: the previous backward reference is pushed
+   * downstream and becomes the forward reference for this picture */
+  if (info->picture_coding_type != B_FRAME) {
+    if (info->backward_reference != VDP_INVALID_HANDLE) {
+      /* extra ref: b_buffer is pushed downstream but also lives on as
+       * the future f_buffer */
+      gst_buffer_ref (mpeg_dec->b_buffer);
+      gst_vdp_mpeg_dec_push_video_buffer (mpeg_dec,
+          GST_VDP_VIDEO_BUFFER (mpeg_dec->b_buffer));
+    }
+
+    if (info->forward_reference != VDP_INVALID_HANDLE) {
+      gst_buffer_unref (mpeg_dec->f_buffer);
+      info->forward_reference = VDP_INVALID_HANDLE;
+    }
+
+    info->forward_reference = info->backward_reference;
+    mpeg_dec->f_buffer = mpeg_dec->b_buffer;
+
+    info->backward_reference = VDP_INVALID_HANDLE;
+  }
+
+  /* keep the surfaces we predict from alive as long as outbuf exists */
+  if (info->forward_reference != VDP_INVALID_HANDLE &&
+      info->picture_coding_type != I_FRAME)
+    gst_vdp_video_buffer_add_reference (outbuf,
+        GST_VDP_VIDEO_BUFFER (mpeg_dec->f_buffer));
+
+  if (info->backward_reference != VDP_INVALID_HANDLE
+      && info->picture_coding_type == B_FRAME)
+    gst_vdp_video_buffer_add_reference (outbuf,
+        GST_VDP_VIDEO_BUFFER (mpeg_dec->b_buffer));
+
+  surface = outbuf->surface;
+
+  device = mpeg_dec->device;
+
+  /* wrap the slice data for vdp_decoder_render() */
+  vbit[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
+  vbit[0].bitstream = GST_BUFFER_DATA (buffer);
+  vbit[0].bitstream_bytes = GST_BUFFER_SIZE (buffer);
+
+  status = device->vdp_decoder_render (mpeg_dec->decoder, surface,
+      (VdpPictureInfo *) info, 1, vbit);
+  gst_buffer_unref (buffer);
+  info->slice_count = 0;
+
+  if (status != VDP_STATUS_OK) {
+    GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
+        ("Could not decode"),
+        ("Error returned from vdpau was: %s",
+            device->vdp_get_error_string (status)));
+
+    gst_buffer_unref (GST_BUFFER (outbuf));
+
+    return GST_FLOW_ERROR;
+  }
+
+  /* B frames are never referenced, so they can be pushed right away;
+   * everything else becomes the new backward reference */
+  if (info->picture_coding_type == B_FRAME) {
+    gst_vdp_mpeg_dec_push_video_buffer (mpeg_dec,
+        GST_VDP_VIDEO_BUFFER (outbuf));
+  } else {
+    info->backward_reference = surface;
+    mpeg_dec->b_buffer = GST_BUFFER (outbuf);
+  }
+
+  return GST_FLOW_OK;
+}
+
+/* Parse an MPEG-2 picture coding extension and fill the corresponding
+ * fields of the VDPAU picture info.  Also derives the frame duration
+ * from the number of display fields (repeat_first_field / pulldown).
+ *
+ * Returns FALSE if the extension could not be parsed.
+ */
+static gboolean
+gst_vdp_mpeg_dec_parse_picture_coding (GstVdpMpegDec * mpeg_dec,
+    GstBuffer * buffer)
+{
+  MPEGPictureExt pic_ext;
+  VdpPictureInfoMPEG1Or2 *info;
+  gint fields;
+
+  info = &mpeg_dec->vdp_info;
+
+  if (!mpeg_util_parse_picture_coding_extension (&pic_ext, buffer))
+    return FALSE;
+
+  /* f_code is a 2x2 byte array; copy it wholesale */
+  memcpy (&mpeg_dec->vdp_info.f_code, &pic_ext.f_code, 4);
+
+  info->intra_dc_precision = pic_ext.intra_dc_precision;
+  info->picture_structure = pic_ext.picture_structure;
+  info->top_field_first = pic_ext.top_field_first;
+  info->frame_pred_frame_dct = pic_ext.frame_pred_frame_dct;
+  info->concealment_motion_vectors = pic_ext.concealment_motion_vectors;
+  info->q_scale_type = pic_ext.q_scale_type;
+  info->intra_vlc_format = pic_ext.intra_vlc_format;
+  info->alternate_scan = pic_ext.alternate_scan;
+
+  /* count the display fields of this picture; picture_structure == 3
+   * means frame picture, anything else is a single field */
+  fields = 2;
+  if (pic_ext.picture_structure == 3) {
+    if (mpeg_dec->interlaced) {
+      if (pic_ext.progressive_frame == 0)
+        fields = 2;
+      if (pic_ext.progressive_frame == 0 && pic_ext.repeat_first_field == 0)
+        fields = 2;
+      if (pic_ext.progressive_frame == 1 && pic_ext.repeat_first_field == 1)
+        fields = 3;
+    } else {
+      /* progressive sequence: repeat_first_field encodes 2x/3x repeats */
+      if (pic_ext.repeat_first_field == 0)
+        fields = 2;
+      if (pic_ext.repeat_first_field == 1 && pic_ext.top_field_first == 0)
+        fields = 4;
+      if (pic_ext.repeat_first_field == 1 && pic_ext.top_field_first == 1)
+        fields = 6;
+    }
+  } else
+    fields = 1;
+
+  GST_DEBUG ("fields: %d", fields);
+
+  /* duration = fields * field_duration; field rate is 2 * frame rate */
+  mpeg_dec->duration = gst_util_uint64_scale (fields,
+      GST_SECOND * mpeg_dec->fps_d, 2 * mpeg_dec->fps_n);
+
+  return TRUE;
+}
+
+/* Parse a sequence header: install the intra / non-intra quantizer
+ * matrices and leave the NEED_SEQUENCE state so decoding may start.
+ *
+ * Returns FALSE if the header could not be parsed.
+ */
+static gboolean
+gst_vdp_mpeg_dec_parse_sequence (GstVdpMpegDec * mpeg_dec, GstBuffer * buffer)
+{
+  MPEGSeqHdr hdr;
+
+  if (!mpeg_util_parse_sequence_hdr (&hdr, buffer))
+    return FALSE;
+
+  /* quantizer matrices are 8x8 byte tables */
+  memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix,
+      &hdr.intra_quantizer_matrix, 64);
+  memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix,
+      &hdr.non_intra_quantizer_matrix, 64);
+
+  if (mpeg_dec->state == GST_VDP_MPEG_DEC_NEED_SEQUENCE)
+    mpeg_dec->state = GST_VDP_MPEG_DEC_NEED_DATA;
+
+  return TRUE;
+}
+
+/* Parse a picture header.  Drops pictures that cannot be decoded yet:
+ * everything before the first I frame, and B frames before two
+ * reference frames are available.
+ *
+ * Returns FALSE when the header is unparsable or the picture must be
+ * dropped.
+ */
+static gboolean
+gst_vdp_mpeg_dec_parse_picture (GstVdpMpegDec * mpeg_dec, GstBuffer * buffer)
+{
+  MPEGPictureHdr pic_hdr;
+
+  if (!mpeg_util_parse_picture_hdr (&pic_hdr, buffer))
+    return FALSE;
+
+  if (pic_hdr.pic_type != I_FRAME
+      && mpeg_dec->vdp_info.backward_reference == VDP_INVALID_HANDLE) {
+    GST_DEBUG_OBJECT (mpeg_dec,
+        "Drop frame since we haven't got an I_FRAME yet");
+    return FALSE;
+  }
+  if (pic_hdr.pic_type == B_FRAME
+      && mpeg_dec->vdp_info.forward_reference == VDP_INVALID_HANDLE) {
+    GST_DEBUG_OBJECT (mpeg_dec,
+        "Drop frame since we haven't got two non B_FRAMES yet");
+    return FALSE;
+  }
+
+  mpeg_dec->vdp_info.picture_coding_type = pic_hdr.pic_type;
+
+  /* MPEG-1 carries motion vector info in the picture header itself;
+   * MPEG-2 gets it from the picture coding extension instead */
+  if (mpeg_dec->version == 1) {
+    mpeg_dec->vdp_info.full_pel_forward_vector =
+        pic_hdr.full_pel_forward_vector;
+    mpeg_dec->vdp_info.full_pel_backward_vector =
+        pic_hdr.full_pel_backward_vector;
+    memcpy (&mpeg_dec->vdp_info.f_code, &pic_hdr.f_code, 4);
+  }
+
+  /* absolute frame number = GOP base + temporal sequence number */
+  mpeg_dec->frame_nr = mpeg_dec->gop_frame + pic_hdr.tsn;
+
+  return TRUE;
+}
+
+/* Parse a GOP header: convert its hh:mm:ss timecode plus frame offset
+ * into an absolute frame number that picture headers are based on, and
+ * leave the NEED_GOP state (entered after a seek).
+ *
+ * Returns FALSE if the GOP header could not be parsed.
+ */
+static gboolean
+gst_vdp_mpeg_dec_parse_gop (GstVdpMpegDec * mpeg_dec, GstBuffer * buffer)
+{
+  MPEGGop gop;
+  GstClockTime time;
+
+  if (!mpeg_util_parse_gop (&gop, buffer))
+    return FALSE;
+
+  time = GST_SECOND * (gop.hour * 3600 + gop.minute * 60 + gop.second);
+
+  GST_DEBUG ("gop timestamp: %" GST_TIME_FORMAT, GST_TIME_ARGS (time));
+
+  /* timecode * framerate + in-GOP frame offset */
+  mpeg_dec->gop_frame =
+      gst_util_uint64_scale (time, mpeg_dec->fps_n,
+      mpeg_dec->fps_d * GST_SECOND) + gop.frame;
+
+  if (mpeg_dec->state == GST_VDP_MPEG_DEC_NEED_GOP)
+    mpeg_dec->state = GST_VDP_MPEG_DEC_NEED_DATA;
+
+  return TRUE;
+}
+
+/* Parse a quant matrix extension and overwrite the quantizer matrices
+ * previously taken from the sequence header.
+ *
+ * Returns FALSE if the extension could not be parsed.
+ */
+static gboolean
+gst_vdp_mpeg_dec_parse_quant_matrix (GstVdpMpegDec * mpeg_dec,
+    GstBuffer * buffer)
+{
+  MPEGQuantMatrix qm;
+
+  if (!mpeg_util_parse_quant_matrix (&qm, buffer))
+    return FALSE;
+
+  memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix,
+      &qm.intra_quantizer_matrix, 64);
+  memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix,
+      &qm.non_intra_quantizer_matrix, 64);
+  return TRUE;
+}
+
+/* Discard all decoding state that depends on the current stream
+ * position: reference frames, pending slice data and the running
+ * timestamp.  Called on DISCONT buffers and FLUSH_STOP.
+ */
+static void
+gst_vdp_mpeg_dec_flush (GstVdpMpegDec * mpeg_dec)
+{
+  /* the reference handles in vdp_info track whether f/b_buffer are owned */
+  if (mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE)
+    gst_buffer_unref (mpeg_dec->f_buffer);
+  if (mpeg_dec->vdp_info.backward_reference != VDP_INVALID_HANDLE)
+    gst_buffer_unref (mpeg_dec->b_buffer);
+
+  gst_vdp_mpeg_dec_init_info (&mpeg_dec->vdp_info);
+
+  gst_adapter_clear (mpeg_dec->adapter);
+
+  mpeg_dec->next_timestamp = GST_CLOCK_TIME_NONE;
+}
+
+/* Full reset back to the initial state: flush stream state, destroy
+ * the VDPAU decoder, drop the device and re-arm the NEED_SEQUENCE
+ * state machine.  Called from init and on PAUSED->READY.
+ */
+static void
+gst_vdp_mpeg_dec_reset (GstVdpMpegDec * mpeg_dec)
+{
+  gst_vdp_mpeg_dec_flush (mpeg_dec);
+
+  if (mpeg_dec->decoder != VDP_INVALID_HANDLE)
+    mpeg_dec->device->vdp_decoder_destroy (mpeg_dec->decoder);
+  mpeg_dec->decoder = VDP_INVALID_HANDLE;
+  if (mpeg_dec->device)
+    g_object_unref (mpeg_dec->device);
+  mpeg_dec->device = NULL;
+
+  mpeg_dec->state = GST_VDP_MPEG_DEC_NEED_SEQUENCE;
+
+  gst_segment_init (&mpeg_dec->segment, GST_FORMAT_TIME);
+  mpeg_dec->seeking = FALSE;
+
+  mpeg_dec->accumulated_size = 0;
+  mpeg_dec->accumulated_duration = 0;
+}
+
+/* Sink pad chain function.
+ *
+ * Splits the incoming buffer into start-code delimited MPEG packets,
+ * queues slice packets in the adapter and dispatches the various
+ * headers to their parsers.  Once at least one slice was collected
+ * (and the required sequence/GOP headers have been seen) the
+ * accumulated picture is decoded.
+ *
+ * Fix: when gst_vdp_mpeg_dec_parse_picture() rejects a picture we used
+ * to jump to done without unreffing the packet buffer, leaking it for
+ * every dropped picture; the buffer is now released before bailing out.
+ */
+static GstFlowReturn
+gst_vdp_mpeg_dec_chain (GstPad * pad, GstBuffer * buffer)
+{
+  GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad));
+  GstVdpMpegPacketizer packetizer;
+  GstBuffer *buf;
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {
+    GST_DEBUG_OBJECT (mpeg_dec, "Received discont buffer");
+    gst_vdp_mpeg_dec_flush (mpeg_dec);
+  }
+
+
+  gst_vdp_mpeg_packetizer_init (&packetizer, buffer);
+  while ((buf = gst_vdp_mpeg_packetizer_get_next_packet (&packetizer))) {
+    GstBitReader b_reader = GST_BIT_READER_INIT_FROM_BUFFER (buf);
+    guint32 sync_code;
+    guint8 start_code;
+
+    /* skip sync_code */
+    gst_bit_reader_get_bits_uint32 (&b_reader, &sync_code, 8 * 3);
+
+    /* start_code */
+    gst_bit_reader_get_bits_uint8 (&b_reader, &start_code, 8);
+
+    if (start_code >= MPEG_PACKET_SLICE_MIN
+        && start_code <= MPEG_PACKET_SLICE_MAX) {
+      GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SLICE");
+
+      /* the adapter takes one ref; we keep ours for the unref below */
+      gst_buffer_ref (buf);
+      gst_adapter_push (mpeg_dec->adapter, buf);
+      mpeg_dec->vdp_info.slice_count++;
+    }
+
+    switch (start_code) {
+      case MPEG_PACKET_PICTURE:
+        GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_PICTURE");
+
+        if (!gst_vdp_mpeg_dec_parse_picture (mpeg_dec, buf)) {
+          /* picture dropped: release the packet before bailing out */
+          gst_buffer_unref (buf);
+          goto done;
+        }
+
+        break;
+      case MPEG_PACKET_SEQUENCE:
+        GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SEQUENCE");
+        gst_vdp_mpeg_dec_parse_sequence (mpeg_dec, buf);
+        break;
+      case MPEG_PACKET_EXTENSION:
+      {
+        guint8 ext_code;
+
+        GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXTENSION");
+
+        /* ext_code */
+        gst_bit_reader_get_bits_uint8 (&b_reader, &ext_code, 4);
+        switch (ext_code) {
+          case MPEG_PACKET_EXT_PICTURE_CODING:
+            GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_PICTURE_CODING");
+            gst_vdp_mpeg_dec_parse_picture_coding (mpeg_dec, buf);
+            break;
+          case MPEG_PACKET_EXT_QUANT_MATRIX:
+            GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_QUANT_MATRIX");
+            gst_vdp_mpeg_dec_parse_quant_matrix (mpeg_dec, buf);
+            break;
+          default:
+            break;
+        }
+        break;
+      }
+      case MPEG_PACKET_GOP:
+        GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_GOP");
+        gst_vdp_mpeg_dec_parse_gop (mpeg_dec, buf);
+        break;
+      default:
+        break;
+    }
+
+    gst_buffer_unref (buf);
+  }
+
+  /* don't decode until we've seen a sequence header (and, after a seek,
+   * a GOP header) */
+  if (mpeg_dec->state == GST_VDP_MPEG_DEC_NEED_SEQUENCE ||
+      mpeg_dec->state == GST_VDP_MPEG_DEC_NEED_GOP) {
+    gst_adapter_clear (mpeg_dec->adapter);
+    goto done;
+  }
+
+  if (mpeg_dec->vdp_info.slice_count > 0)
+    ret = gst_vdp_mpeg_dec_decode (mpeg_dec, GST_BUFFER_TIMESTAMP (buffer),
+        GST_BUFFER_SIZE (buffer));
+
+done:
+  gst_object_unref (mpeg_dec);
+
+  return ret;
+}
+
+/* Convert a value between GST_FORMAT_BYTES and GST_FORMAT_TIME using
+ * the estimated stream byterate.
+ *
+ * Returns FALSE when the byterate is still unknown (-1) or the
+ * format pair is unsupported.
+ */
+static gboolean
+gst_vdp_mpeg_dec_convert (GstVdpMpegDec * mpeg_dec,
+    GstFormat src_format, gint64 src_value,
+    GstFormat dest_format, gint64 * dest_value)
+{
+
+  if (src_format == dest_format) {
+    *dest_value = src_value;
+    return TRUE;
+  }
+
+  if (mpeg_dec->byterate == -1)
+    return FALSE;
+
+  if (src_format == GST_FORMAT_BYTES && dest_format == GST_FORMAT_TIME) {
+    *dest_value = gst_util_uint64_scale (GST_SECOND, src_value,
+        mpeg_dec->byterate);
+    return TRUE;
+  }
+
+  if (src_format == GST_FORMAT_TIME && dest_format == GST_FORMAT_BYTES) {
+    *dest_value =
+        gst_util_uint64_scale_int (src_value, mpeg_dec->byterate, GST_SECOND);
+    return TRUE;
+  }
+
+  return FALSE;
+}
+
+/* Query types answered by the src pad (see gst_vdp_mpeg_dec_src_query). */
+static const GstQueryType *
+gst_mpeg_dec_get_querytypes (GstPad * pad)
+{
+  /* zero-terminated, as required by the query-type API */
+  static const GstQueryType query_types[] = {
+    GST_QUERY_POSITION,
+    GST_QUERY_DURATION,
+    0
+  };
+
+  return query_types;
+}
+
+/* Src pad query handler.  Lets the default (peer) handling answer
+ * first; only when that fails does it answer POSITION from the
+ * decoder's running timestamp and DURATION by converting the
+ * upstream byte duration through the estimated byterate.
+ */
+static gboolean
+gst_vdp_mpeg_dec_src_query (GstPad * pad, GstQuery * query)
+{
+  GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad));
+  gboolean res;
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_POSITION:
+    {
+      GstFormat format;
+
+      /* prefer the default/peer answer when available */
+      if ((res = gst_pad_query_default (pad, query)))
+        goto done;
+
+      gst_query_parse_position (query, &format, NULL);
+      if (format == GST_FORMAT_TIME &&
+          GST_CLOCK_TIME_IS_VALID (mpeg_dec->next_timestamp)) {
+        gst_query_set_position (query, GST_FORMAT_TIME,
+            mpeg_dec->next_timestamp);
+        res = TRUE;
+      }
+      break;
+    }
+
+    case GST_QUERY_DURATION:
+    {
+      GstFormat format;
+
+      if ((res = gst_pad_query_default (pad, query)))
+        goto done;
+
+      gst_query_parse_duration (query, &format, NULL);
+      if (format == GST_FORMAT_TIME) {
+        gint64 bytes;
+
+        /* ask upstream for the size in bytes, then convert via byterate */
+        format = GST_FORMAT_BYTES;
+        if (gst_pad_query_duration (pad, &format, &bytes)
+            && format == GST_FORMAT_BYTES) {
+          gint64 duration;
+
+          if (gst_vdp_mpeg_dec_convert (mpeg_dec, GST_FORMAT_BYTES,
+                  bytes, GST_FORMAT_TIME, &duration)) {
+            GST_DEBUG ("duration: %" GST_TIME_FORMAT, GST_TIME_ARGS (duration));
+            gst_query_set_duration (query, GST_FORMAT_TIME, duration);
+            res = TRUE;
+          }
+        }
+      }
+      break;
+    }
+
+    default:
+      res = gst_pad_query_default (pad, query);
+  }
+
+done:
+  gst_object_unref (mpeg_dec);
+
+  return res;
+}
+
+/* Handle a TIME seek by converting it to a BYTES seek (via the
+ * estimated byterate) and pushing it upstream.  On success the decoder
+ * re-enters NEED_GOP and sets the seeking flag so the next newsegment
+ * is generated from the first decoded frame instead of being forwarded.
+ * A seek without position change only re-sends the segment downstream
+ * with the new rate.
+ *
+ * Returns FALSE for non-TIME seeks or when byterate conversion fails.
+ */
+static gboolean
+normal_seek (GstVdpMpegDec * mpeg_dec, GstEvent * event)
+{
+  gdouble rate;
+  GstFormat format;
+  GstSeekFlags flags;
+  GstSeekType cur_type, stop_type;
+  gint64 time_cur, bytes_cur;
+  gint64 time_stop, bytes_stop;
+  gboolean res;
+  gboolean update;
+  GstEvent *peer_event;
+
+  GST_DEBUG ("normal seek");
+
+  gst_event_parse_seek (event, &rate, &format, &flags,
+      &cur_type, &time_cur, &stop_type, &time_stop);
+
+  if (format != GST_FORMAT_TIME)
+    return FALSE;
+
+  gst_segment_set_seek (&mpeg_dec->segment, rate, GST_FORMAT_TIME, flags,
+      cur_type, time_cur, stop_type, time_stop, &update);
+
+  /* update is TRUE when the seek changes the playback position */
+  if (update) {
+    /* seek on bytes */
+    if (!gst_vdp_mpeg_dec_convert (mpeg_dec, GST_FORMAT_TIME, time_cur,
+            GST_FORMAT_BYTES, &bytes_cur))
+      goto convert_failed;
+    if (!gst_vdp_mpeg_dec_convert (mpeg_dec, GST_FORMAT_TIME, time_stop,
+            GST_FORMAT_BYTES, &bytes_stop))
+      goto convert_failed;
+
+    /* conversion succeeded, create the seek */
+    peer_event =
+        gst_event_new_seek (rate, GST_FORMAT_BYTES, flags,
+        cur_type, bytes_cur, stop_type, bytes_stop);
+
+    /* mutex protects state/seeking against the streaming thread */
+    g_mutex_lock (mpeg_dec->mutex);
+
+    /* do the seek */
+    res = gst_pad_push_event (mpeg_dec->sink, peer_event);
+
+    if (res) {
+      mpeg_dec->state = GST_VDP_MPEG_DEC_NEED_GOP;
+      mpeg_dec->seeking = TRUE;
+    }
+
+    g_mutex_unlock (mpeg_dec->mutex);
+
+  } else {
+    GstEvent *event;
+
+    /* send segment with new rate */
+    event = gst_event_new_new_segment (TRUE,
+        mpeg_dec->segment.rate, GST_FORMAT_TIME, mpeg_dec->segment.start,
+        mpeg_dec->segment.stop, mpeg_dec->segment.time);
+
+    gst_pad_push_event (mpeg_dec->src, event);
+    res = TRUE;
+  }
+
+  return res;
+
+  /* ERRORS */
+convert_failed:
+  {
+    /* probably unsupported seek format */
+    GST_DEBUG_OBJECT (mpeg_dec,
+        "failed to convert format %u into GST_FORMAT_TIME", format);
+    return FALSE;
+  }
+}
+
+/* Src pad event handler: try the default (upstream) handling of a
+ * SEEK first and only fall back to our byterate-based normal_seek()
+ * when that fails; everything else goes through the default handler.
+ */
+static gboolean
+gst_vdp_mpeg_dec_src_event (GstPad * pad, GstEvent * event)
+{
+  GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad));
+  gboolean res;
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_SEEK:
+    {
+      if ((res = gst_pad_event_default (pad, event)))
+        goto done;
+
+      res = normal_seek (mpeg_dec, event);
+
+      break;
+    }
+    default:
+      res = gst_pad_event_default (pad, event);
+  }
+
+done:
+  gst_object_unref (mpeg_dec);
+
+  return res;
+}
+
+/* Sink pad event handler.
+ *
+ * FLUSH_STOP flushes decoder state and forwards the event.
+ * NEWSEGMENT is converted to GST_FORMAT_TIME when needed and forwarded,
+ * unless we triggered the seek ourselves, in which case it is swallowed
+ * (the segment will be generated from the first decoded frame).
+ */
+static gboolean
+gst_vdp_mpeg_dec_sink_event (GstPad * pad, GstEvent * event)
+{
+  GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad));
+  gboolean res;
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_FLUSH_STOP:
+    {
+      GST_DEBUG_OBJECT (mpeg_dec, "flush stop");
+
+      gst_vdp_mpeg_dec_flush (mpeg_dec);
+      res = gst_pad_push_event (mpeg_dec->src, event);
+
+      break;
+    }
+    case GST_EVENT_NEWSEGMENT:
+    {
+      gboolean update;
+      gdouble rate;
+      GstFormat format;
+      gint64 start;
+      gint64 stop;
+      gint64 position;
+
+      gst_event_parse_new_segment (event, &update, &rate, &format,
+          &start, &stop, &position);
+
+      if (format != GST_FORMAT_TIME) {
+        if (!gst_vdp_mpeg_dec_convert (mpeg_dec, format, start,
+                GST_FORMAT_TIME, &start))
+          goto convert_error;
+        if (!gst_vdp_mpeg_dec_convert (mpeg_dec, format, stop,
+                GST_FORMAT_TIME, &stop))
+          goto convert_error;
+        if (!gst_vdp_mpeg_dec_convert (mpeg_dec, format, position,
+                GST_FORMAT_TIME, &position))
+          goto convert_error;
+
+        gst_event_unref (event);
+        event = gst_event_new_new_segment (update, rate, GST_FORMAT_TIME, start,
+            stop, position);
+      }
+
+      g_mutex_lock (mpeg_dec->mutex);
+      /* if we seek ourselves we don't push out a newsegment now since we
+       * use the calculated timestamp of the first frame for this */
+      if (mpeg_dec->seeking) {
+        gst_event_unref (event);
+        res = TRUE;
+        g_mutex_unlock (mpeg_dec->mutex);
+        goto done;
+      }
+      g_mutex_unlock (mpeg_dec->mutex);
+
+      GST_DEBUG_OBJECT (mpeg_dec,
+          "Pushing new segment update %d format %d start %"
+          GST_TIME_FORMAT " stop %" GST_TIME_FORMAT " position %"
+          GST_TIME_FORMAT, update, format, GST_TIME_ARGS (start),
+          GST_TIME_ARGS (stop), GST_TIME_ARGS (position));
+      /* NOTE(review): on conversion failure we fall through here and
+       * forward the original, unconverted event downstream — confirm
+       * that this is intended rather than dropping the event */
+    convert_error:
+      res = gst_pad_push_event (mpeg_dec->src, event);
+
+      break;
+    }
+    default:
+      res = gst_pad_event_default (pad, event);
+  }
+
+done:
+  gst_object_unref (mpeg_dec);
+
+  return res;
+}
+
+/* Element state handling: acquire the VDPAU device going READY->PAUSED
+ * (before chaining up) and tear everything down again on
+ * PAUSED->READY (after chaining up).
+ */
+static GstStateChangeReturn
+gst_vdp_mpeg_dec_change_state (GstElement * element, GstStateChange transition)
+{
+  GstVdpMpegDec *mpeg_dec;
+  GstStateChangeReturn ret;
+
+  mpeg_dec = GST_VDP_MPEG_DEC (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      mpeg_dec->device = gst_vdp_get_device (mpeg_dec->display_name);
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      gst_vdp_mpeg_dec_reset (mpeg_dec);
+      break;
+    default:
+      break;
+  }
+
+  return ret;
+}
+
+/* GObject vmethod implementations */
+
+/* Register element details and the static pad templates. */
+static void
+gst_vdp_mpeg_dec_base_init (gpointer gclass)
+{
+  GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
+
+  gst_element_class_set_details_simple (element_class,
+      "VDPAU Mpeg Decoder",
+      "Decoder",
+      "decode mpeg stream with vdpau",
+      "Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");
+
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&sink_template));
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&src_template));
+}
+
+/* initialize the vdpaumpegdecoder's class */
+/* initialize the vdpaumpegdecoder's class: hook up GObject vmethods,
+ * the state-change handler and the "display" property */
+static void
+gst_vdp_mpeg_dec_class_init (GstVdpMpegDecClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+
+  gobject_class->finalize = gst_vdp_mpeg_dec_finalize;
+  gobject_class->set_property = gst_vdp_mpeg_dec_set_property;
+  gobject_class->get_property = gst_vdp_mpeg_dec_get_property;
+
+  gstelement_class->change_state =
+      GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_change_state);
+
+  g_object_class_install_property (gobject_class, PROP_DISPLAY,
+      g_param_spec_string ("display", "Display", "X Display name",
+          NULL, G_PARAM_READWRITE | G_PARAM_CONSTRUCT));
+}
+
+/* Reset the VDPAU picture info to safe defaults: no reference frames,
+ * no pending slices, frame picture structure (3 = frame per the
+ * MPEG-2 picture_structure encoding).
+ */
+static void
+gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info)
+{
+  vdp_info->forward_reference = VDP_INVALID_HANDLE;
+  vdp_info->backward_reference = VDP_INVALID_HANDLE;
+  vdp_info->slice_count = 0;
+  vdp_info->picture_structure = 3;
+  vdp_info->picture_coding_type = 0;
+  vdp_info->intra_dc_precision = 0;
+  vdp_info->frame_pred_frame_dct = 1;
+  vdp_info->concealment_motion_vectors = 0;
+  vdp_info->intra_vlc_format = 0;
+  vdp_info->alternate_scan = 0;
+  vdp_info->q_scale_type = 0;
+  vdp_info->top_field_first = 1;
+}
+
+/* Instance init: create and configure the src and sink pads, the slice
+ * adapter and the state mutex, then reset to the initial decoder state.
+ */
+static void
+gst_vdp_mpeg_dec_init (GstVdpMpegDec * mpeg_dec, GstVdpMpegDecClass * gclass)
+{
+  mpeg_dec->src = gst_pad_new_from_static_template (&src_template, "src");
+  gst_pad_use_fixed_caps (mpeg_dec->src);
+  gst_pad_set_event_function (mpeg_dec->src,
+      GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_src_event));
+  gst_pad_set_query_function (mpeg_dec->src,
+      GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_src_query));
+  gst_pad_set_query_type_function (mpeg_dec->src,
+      GST_DEBUG_FUNCPTR (gst_mpeg_dec_get_querytypes));
+  gst_element_add_pad (GST_ELEMENT (mpeg_dec), mpeg_dec->src);
+
+  mpeg_dec->sink = gst_pad_new_from_static_template (&sink_template, "sink");
+  gst_pad_set_setcaps_function (mpeg_dec->sink,
+      GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_set_caps));
+  gst_pad_set_chain_function (mpeg_dec->sink,
+      GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_chain));
+  gst_pad_set_event_function (mpeg_dec->sink,
+      GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_sink_event));
+  gst_element_add_pad (GST_ELEMENT (mpeg_dec), mpeg_dec->sink);
+
+  mpeg_dec->display_name = NULL;
+  mpeg_dec->adapter = gst_adapter_new ();
+
+  /* reference handles must be invalid before the first reset/flush so
+   * it doesn't try to unref buffers that were never set */
+  mpeg_dec->device = NULL;
+  mpeg_dec->decoder = VDP_INVALID_HANDLE;
+  mpeg_dec->vdp_info.forward_reference = VDP_INVALID_HANDLE;
+  mpeg_dec->vdp_info.backward_reference = VDP_INVALID_HANDLE;
+
+  gst_vdp_mpeg_dec_reset (mpeg_dec);
+
+  mpeg_dec->mutex = g_mutex_new ();
+}
+
+/* GObject finalize: release everything allocated in init/set_property.
+ *
+ * Fixes: free the "display" property string (it is dup'd in
+ * set_property and was leaked) and chain up to the parent class
+ * finalize, which GObject subclasses must always do.
+ */
+static void
+gst_vdp_mpeg_dec_finalize (GObject * object)
+{
+  GstVdpMpegDec *mpeg_dec = (GstVdpMpegDec *) object;
+
+  g_object_unref (mpeg_dec->adapter);
+  g_mutex_free (mpeg_dec->mutex);
+  g_free (mpeg_dec->display_name);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* GObject property setter: only PROP_DISPLAY (X display name) exists;
+ * the previous value is freed before the new one is duplicated. */
+static void
+gst_vdp_mpeg_dec_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (object);
+
+  switch (prop_id) {
+    case PROP_DISPLAY:
+      g_free (mpeg_dec->display_name);
+      mpeg_dec->display_name = g_value_dup_string (value);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
+
+/* GObject property getter for PROP_DISPLAY (X display name). */
+static void
+gst_vdp_mpeg_dec_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (object);
+
+  switch (prop_id) {
+    case PROP_DISPLAY:
+      g_value_set_string (value, mpeg_dec->display_name);
+      break;
+    default:
+      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+      break;
+  }
+}
diff --git a/sys/vdpau/gstvdpmpegdec.h b/sys/vdpau/gstvdpmpegdec.h
new file mode 100644
index 00000000..fc36df5b
--- /dev/null
+++ b/sys/vdpau/gstvdpmpegdec.h
@@ -0,0 +1,105 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_VDP_MPEG_DEC_H__
+#define __GST_VDP_MPEG_DEC_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+
+#include "gstvdpdevice.h"
+#include "gstvdpvideobuffer.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_VDP_MPEG_DEC (gst_vdp_mpeg_dec_get_type())
+#define GST_VDP_MPEG_DEC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VDP_MPEG_DEC,GstVdpMpegDec))
+#define GST_VDP_MPEG_DEC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VDP_MPEG_DEC,GstVdpMpegDecClass))
+#define GST_IS_VDP_MPEG_DEC(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VDP_MPEG_DEC))
+#define GST_IS_VDP_MPEG_DEC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VDP_MPEG_DEC))
+
+/* Decoder state machine:
+ * NEED_SEQUENCE: waiting for a sequence header before anything decodes;
+ * NEED_GOP:      entered after a seek, waiting for a GOP header to
+ *                re-establish the frame number base;
+ * NEED_DATA:     headers seen, pictures can be decoded. */
+typedef enum {
+  GST_VDP_MPEG_DEC_NEED_SEQUENCE,
+  GST_VDP_MPEG_DEC_NEED_GOP,
+  GST_VDP_MPEG_DEC_NEED_DATA
+} GstVdpMpegDecState;
+
+typedef struct _GstVdpMpegDec GstVdpMpegDec;
+typedef struct _GstVdpMpegDecClass GstVdpMpegDecClass;
+
+struct _GstVdpMpegDec
+{
+  GstElement element;
+
+  /* pads */
+  GstPad *src;
+  GstPad *sink;
+
+  /* X display name ("display" property), VDPAU device and decoder */
+  gchar *display_name;
+  GstVdpDevice *device;
+  VdpDecoder decoder;
+
+  /* stream info */
+  gint width, height;
+  gint fps_n, fps_d;
+  gboolean interlaced;
+  gint version;
+
+  /* decoder state */
+  GstVdpMpegDecState state;
+
+  /* currently decoded frame info */
+  GstAdapter *adapter;
+  VdpPictureInfoMPEG1Or2 vdp_info;
+  guint64 frame_nr;
+  GstClockTime duration;
+
+  /* frame_nr from GOP */
+  guint64 gop_frame;
+
+  /* forward and backward reference */
+  GstBuffer *f_buffer;
+  GstBuffer *b_buffer;
+
+  /* calculated timestamp, size and duration */
+  GstClockTime next_timestamp;
+  guint64 accumulated_size;
+  guint64 accumulated_duration;
+
+  /* seek data */
+  GstSegment segment;
+  gboolean seeking;
+  gint64 byterate;
+
+  /* mutex protecting state/seeking between streaming and event threads */
+  GMutex *mutex;
+
+};
+
+struct _GstVdpMpegDecClass
+{
+  GstElementClass element_class;
+};
+
+GType gst_vdp_mpeg_dec_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_VDP_MPEG_DEC_H__ */
diff --git a/sys/vdpau/gstvdpvideobuffer.c b/sys/vdpau/gstvdpvideobuffer.c
new file mode 100644
index 00000000..8ae14e98
--- /dev/null
+++ b/sys/vdpau/gstvdpvideobuffer.c
@@ -0,0 +1,138 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstvdpvideobuffer.h"
+
+
+/* Make @buffer hold a reference on @buf (another video buffer whose
+ * surface this one predicts from).  The ref is released when @buffer
+ * is finalized. */
+void
+gst_vdp_video_buffer_add_reference (GstVdpVideoBuffer * buffer,
+    GstVdpVideoBuffer * buf)
+{
+  g_assert (GST_IS_VDP_VIDEO_BUFFER (buffer));
+  g_assert (GST_IS_VDP_VIDEO_BUFFER (buf));
+
+  gst_buffer_ref (GST_BUFFER (buf));
+  buffer->refs = g_slist_prepend (buffer->refs, buf);
+}
+
+/* Allocate a new video buffer backed by a freshly created
+ * VdpVideoSurface on @device; the buffer takes a ref on @device.
+ *
+ * Returns NULL when surface creation fails — callers must check.
+ * NOTE(review): gst_vdp_mpeg_dec_decode() does not check for NULL;
+ * worth confirming upstream. */
+GstVdpVideoBuffer *
+gst_vdp_video_buffer_new (GstVdpDevice * device, VdpChromaType chroma_type,
+    gint width, gint height)
+{
+  GstVdpVideoBuffer *buffer;
+  VdpStatus status;
+  VdpVideoSurface surface;
+
+  status = device->vdp_video_surface_create (device->device, chroma_type, width,
+      height, &surface);
+  if (status != VDP_STATUS_OK) {
+    GST_ERROR ("Couldn't create a VdpVideoSurface, error returned was: %s",
+        device->vdp_get_error_string (status));
+    return NULL;
+  }
+
+  buffer =
+      (GstVdpVideoBuffer *) gst_mini_object_new (GST_TYPE_VDP_VIDEO_BUFFER);
+
+  buffer->device = g_object_ref (device);
+  buffer->surface = surface;
+
+  return buffer;
+}
+
+static GObjectClass *gst_vdp_video_buffer_parent_class;
+
+/* Mini-object finalize: destroy the wrapped surface, drop the device
+ * ref and release every reference buffer added via
+ * gst_vdp_video_buffer_add_reference(), then chain up. */
+static void
+gst_vdp_video_buffer_finalize (GstVdpVideoBuffer * buffer)
+{
+  GSList *iter;
+  GstVdpDevice *device;
+  VdpStatus status;
+
+  device = buffer->device;
+
+  status = device->vdp_video_surface_destroy (buffer->surface);
+  if (status != VDP_STATUS_OK)
+    GST_ERROR
+        ("Couldn't destroy the buffers VdpVideoSurface, error returned was: %s",
+        device->vdp_get_error_string (status));
+
+  g_object_unref (buffer->device);
+
+  /* release the prediction-reference buffers we were keeping alive */
+  for (iter = buffer->refs; iter; iter = g_slist_next (iter)) {
+    GstBuffer *buf;
+
+    buf = (GstBuffer *) (iter->data);
+    gst_buffer_unref (buf);
+  }
+  g_slist_free (buffer->refs);
+
+  GST_MINI_OBJECT_CLASS (gst_vdp_video_buffer_parent_class)->finalize
+      (GST_MINI_OBJECT (buffer));
+}
+
+/* Instance init: no device/surface yet, empty reference list. */
+static void
+gst_vdp_video_buffer_init (GstVdpVideoBuffer * buffer, gpointer g_class)
+{
+  buffer->device = NULL;
+  buffer->surface = VDP_INVALID_HANDLE;
+
+  buffer->refs = NULL;
+}
+
+/* Class init: remember the parent class and install our finalize so
+ * the surface and references are released with the buffer. */
+static void
+gst_vdp_video_buffer_class_init (gpointer g_class, gpointer class_data)
+{
+  GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
+
+  gst_vdp_video_buffer_parent_class = g_type_class_peek_parent (g_class);
+
+  mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
+      gst_vdp_video_buffer_finalize;
+}
+
+
+/* Register and return the GstVdpVideoBuffer GType (a GstBuffer
+ * subclass).
+ *
+ * NOTE(review): registration is not guarded by g_once; presumably the
+ * first call happens single-threaded during plugin init — confirm. */
+GType
+gst_vdp_video_buffer_get_type (void)
+{
+  static GType _gst_vdp_video_buffer_type;
+
+  if (G_UNLIKELY (_gst_vdp_video_buffer_type == 0)) {
+    static const GTypeInfo info = {
+      sizeof (GstBufferClass),
+      NULL,
+      NULL,
+      gst_vdp_video_buffer_class_init,
+      NULL,
+      NULL,
+      sizeof (GstVdpVideoBuffer),
+      0,
+      (GInstanceInitFunc) gst_vdp_video_buffer_init,
+      NULL
+    };
+    _gst_vdp_video_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
+        "GstVdpVideoBuffer", &info, 0);
+  }
+  return _gst_vdp_video_buffer_type;
+}
diff --git a/sys/vdpau/gstvdpvideobuffer.h b/sys/vdpau/gstvdpvideobuffer.h
new file mode 100644
index 00000000..36eddcbd
--- /dev/null
+++ b/sys/vdpau/gstvdpvideobuffer.h
@@ -0,0 +1,59 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef _GST_VDP_VIDEO_BUFFER_H_
+#define _GST_VDP_VIDEO_BUFFER_H_
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+#include "gstvdpdevice.h"
+
+/* fix: removed the redundant self-include of gstvdpvideobuffer.h and
+ * added G_BEGIN/END_DECLS for C++ consumers, matching the style of the
+ * sibling header gstvdpmpegdec.h */
+G_BEGIN_DECLS
+
+typedef struct _GstVdpVideoBuffer GstVdpVideoBuffer;
+
+#define GST_TYPE_VDP_VIDEO_BUFFER (gst_vdp_video_buffer_get_type())
+
+#define GST_IS_VDP_VIDEO_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_VIDEO_BUFFER))
+#define GST_VDP_VIDEO_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_VIDEO_BUFFER, GstVdpVideoBuffer))
+
+/* GstBuffer subclass wrapping a VdpVideoSurface */
+struct _GstVdpVideoBuffer {
+  GstBuffer buffer;
+
+  GstVdpDevice *device;
+  VdpVideoSurface surface;
+
+  /* video buffers this one predicts from, released on finalize */
+  GSList *refs;
+};
+
+GType gst_vdp_video_buffer_get_type (void);
+
+/* Returns NULL when the VdpVideoSurface could not be created */
+GstVdpVideoBuffer* gst_vdp_video_buffer_new (GstVdpDevice * device, VdpChromaType chroma_type, gint width, gint height);
+
+void gst_vdp_video_buffer_add_reference (GstVdpVideoBuffer *buffer, GstVdpVideoBuffer *buf);
+
+#define GST_VDP_VIDEO_CAPS \
+  "video/x-vdpau-video, " \
+  "chroma-type = (int)[0,2], " \
+  "width = (int)[1,4096], " \
+  "height = (int)[1,4096]"
+
+G_END_DECLS
+
+#endif
diff --git a/sys/vdpau/gstvdpvideoyuv.c b/sys/vdpau/gstvdpvideoyuv.c
new file mode 100644
index 00000000..2318cd40
--- /dev/null
+++ b/sys/vdpau/gstvdpvideoyuv.c
@@ -0,0 +1,462 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+#include "gstvdpvideobuffer.h"
+#include "gstvdpvideoyuv.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_vdp_video_yuv_debug);
+#define GST_CAT_DEFAULT gst_vdp_video_yuv_debug
+
+/* Filter signals and args */
+enum
+{
+  /* FILL ME */
+  LAST_SIGNAL
+};
+
+/* This element installs no properties; only the mandatory PROP_0. */
+enum
+{
+  PROP_0
+};
+
+/* Sink accepts VDPAU surface buffers (see GST_VDP_VIDEO_CAPS). */
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (GST_VDP_VIDEO_CAPS));
+
+/* Source produces raw YUV; the concrete fourcc/size is negotiated in
+ * gst_vdp_video_yuv_sink_set_caps below. */
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("video/x-raw-yuv, "
+        "framerate = (fraction) [ 0, MAX ], "
+        "width = (int) [ 1, MAX ], " "height = (int) [ 1, MAX ]"));
+
+/* Registers the debug category when the boilerplate type is set up. */
+#define DEBUG_INIT(bla) \
+    GST_DEBUG_CATEGORY_INIT (gst_vdp_video_yuv_debug, "vdpauvideoyuv", 0, "VDPAU VdpSurface to YUV");
+
+GST_BOILERPLATE_FULL (GstVdpVideoYUV, gst_vdp_video_yuv, GstElement,
+    GST_TYPE_ELEMENT, DEBUG_INIT);
+
+static void gst_vdp_video_yuv_finalize (GObject * object);
+static void gst_vdp_video_yuv_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec);
+static void gst_vdp_video_yuv_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec);
+
+/* Sink-pad chain: reads back the YCbCr planes of the incoming
+ * GstVdpVideoBuffer's surface into a newly allocated raw YUV buffer in
+ * the negotiated format, then pushes it downstream.
+ * Takes ownership of @buffer; it is released on every path. */
+GstFlowReturn
+gst_vdp_video_yuv_chain (GstPad * pad, GstBuffer * buffer)
+{
+  GstVdpVideoYUV *video_yuv;
+  GstVdpDevice *device;
+  VdpVideoSurface surface;
+  GstBuffer *outbuf = NULL;
+  GstFlowReturn result = GST_FLOW_ERROR;
+
+  video_yuv = GST_VDP_VIDEO_YUV (GST_OBJECT_PARENT (pad));
+  device = GST_VDP_VIDEO_BUFFER (buffer)->device;
+  surface = GST_VDP_VIDEO_BUFFER (buffer)->surface;
+
+  GST_LOG_OBJECT (video_yuv, "Received buffer format %" GST_FOURCC_FORMAT,
+      GST_FOURCC_ARGS (video_yuv->format));
+
+  switch (video_yuv->format) {
+    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
+    {
+      gint size;
+      VdpStatus status;
+      guint8 *data[3];
+      guint32 stride[3];
+
+      size =
+          gst_video_format_get_size (GST_VIDEO_FORMAT_YV12, video_yuv->width,
+          video_yuv->height);
+      result =
+          gst_pad_alloc_buffer_and_set_caps (video_yuv->src,
+          GST_BUFFER_OFFSET_NONE, size, GST_PAD_CAPS (video_yuv->src), &outbuf);
+      if (G_UNLIKELY (result != GST_FLOW_OK)) {
+        GST_DEBUG_OBJECT (video_yuv, "Pad alloc_buffer returned %d", result);
+        goto done;
+      }
+
+      /* VDPAU expects plane order Y, V, U for YV12 read-back, hence
+       * components 0, 2, 1. */
+      data[0] = GST_BUFFER_DATA (outbuf) +
+          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
+          0, video_yuv->width, video_yuv->height);
+      data[1] = GST_BUFFER_DATA (outbuf) +
+          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
+          2, video_yuv->width, video_yuv->height);
+      data[2] = GST_BUFFER_DATA (outbuf) +
+          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
+          1, video_yuv->width, video_yuv->height);
+
+      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
+          0, video_yuv->width);
+      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
+          2, video_yuv->width);
+      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
+          1, video_yuv->width);
+
+      status =
+          device->vdp_video_surface_get_bits_ycbcr (surface,
+          VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
+      GST_LOG_OBJECT (video_yuv,
+          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
+      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
+        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
+            ("Couldn't get data from vdpau"),
+            ("Error returned from vdpau was: %s",
+                device->vdp_get_error_string (status)));
+        result = GST_FLOW_ERROR;  /* fix: result was still GST_FLOW_OK here */
+        goto done;
+      }
+      break;
+    }
+    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
+    {
+      gint size;
+      VdpStatus status;
+      guint8 *data[3];
+      guint32 stride[3];
+
+      size =
+          gst_video_format_get_size (GST_VIDEO_FORMAT_YV12, video_yuv->width,
+          video_yuv->height);
+      result =
+          gst_pad_alloc_buffer_and_set_caps (video_yuv->src,
+          GST_BUFFER_OFFSET_NONE, size, GST_PAD_CAPS (video_yuv->src), &outbuf);
+      if (G_UNLIKELY (result != GST_FLOW_OK)) {
+        GST_DEBUG_OBJECT (video_yuv, "Pad alloc_buffer returned %d", result);
+        goto done;
+      }
+
+      /* The surface is read out as YV12 but the U/V destination pointers
+       * are swapped (components 2 and 1), producing I420 plane layout. */
+      data[0] = GST_BUFFER_DATA (outbuf) +
+          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
+          0, video_yuv->width, video_yuv->height);
+      data[1] = GST_BUFFER_DATA (outbuf) +
+          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
+          2, video_yuv->width, video_yuv->height);
+      data[2] = GST_BUFFER_DATA (outbuf) +
+          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
+          1, video_yuv->width, video_yuv->height);
+
+      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
+          0, video_yuv->width);
+      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
+          2, video_yuv->width);
+      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
+          1, video_yuv->width);
+
+      status =
+          device->vdp_video_surface_get_bits_ycbcr (surface,
+          VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
+      GST_LOG_OBJECT (video_yuv,
+          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
+      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
+        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
+            ("Couldn't get data from vdpau"),
+            ("Error returned from vdpau was: %s",
+                device->vdp_get_error_string (status)));
+        result = GST_FLOW_ERROR;  /* fix: result was still GST_FLOW_OK here */
+        goto done;
+      }
+      break;
+    }
+    case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
+    {
+      gint size;
+      VdpStatus status;
+      guint8 *data[2];
+      guint32 stride[2];
+
+      /* NV12: full-size Y plane plus half-size interleaved UV plane. */
+      size =
+          video_yuv->width * video_yuv->height +
+          video_yuv->width * video_yuv->height / 2;
+      GST_LOG_OBJECT (video_yuv, "Entering buffer_alloc");
+      result =
+          gst_pad_alloc_buffer_and_set_caps (video_yuv->src,
+          GST_BUFFER_OFFSET_NONE, size, GST_PAD_CAPS (video_yuv->src), &outbuf);
+      if (G_UNLIKELY (result != GST_FLOW_OK)) {
+        GST_DEBUG_OBJECT (video_yuv, "Pad alloc_buffer returned %d", result);
+        goto done;
+      }
+
+      data[0] = GST_BUFFER_DATA (outbuf);
+      data[1] = GST_BUFFER_DATA (outbuf) + video_yuv->width * video_yuv->height;
+
+      stride[0] = video_yuv->width;
+      stride[1] = video_yuv->width;
+
+      GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
+      status =
+          device->vdp_video_surface_get_bits_ycbcr (surface,
+          VDP_YCBCR_FORMAT_NV12, (void *) data, stride);
+      GST_LOG_OBJECT (video_yuv,
+          "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
+      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
+        GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
+            ("Couldn't get data from vdpau"),
+            ("Error returned from vdpau was: %s",
+                device->vdp_get_error_string (status)));
+        result = GST_FLOW_ERROR;  /* fix: result was still GST_FLOW_OK here */
+        goto done;
+      }
+      break;
+    }
+    default:
+      /* fix: an unknown/unnegotiated format previously fell through with
+       * outbuf == NULL and crashed below; fail cleanly instead. */
+      goto done;
+  }
+
+  /* fix: copy the metadata *before* releasing @buffer — the original code
+   * unreffed the buffer first and then read from the freed buffer. */
+  gst_buffer_copy_metadata (outbuf, buffer, GST_BUFFER_COPY_TIMESTAMPS);
+  gst_buffer_unref (buffer);
+
+  GST_LOG_OBJECT (video_yuv, "Pushing buffer with ts %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
+  return gst_pad_push (video_yuv->src, outbuf);
+
+done:
+  if (outbuf)
+    gst_buffer_unref (outbuf);
+  gst_buffer_unref (buffer);
+  return result;
+}
+
+/* Builds the set of raw YUV caps this element can output for the given
+ * surface @chroma_type by probing which YCbCr read-back formats the
+ * device supports.  "formats"/N_FORMATS is a fourcc<->VDPAU-format table
+ * declared elsewhere (presumably gstvdpdevice.h) — TODO confirm.
+ * Returns NULL on query error or when no format matched; the caller
+ * owns the returned caps. */
+static GstCaps *
+gst_vdp_video_yuv_get_caps (GstVdpVideoYUV * video_yuv,
+    GstVdpDevice * device, gint chroma_type, gint width, gint height,
+    gint framerate_numerator, gint framerate_denominator, gint par_numerator,
+    gint par_denominator)
+{
+  GstCaps *caps;
+  gint i;
+
+  caps = gst_caps_new_empty ();
+
+  for (i = 0; i < N_FORMATS; i++) {
+    VdpStatus status;
+    VdpBool is_supported;
+
+    /* Only consider read-back formats for the surface's chroma type. */
+    if (formats[i].chroma_type != chroma_type)
+      continue;
+
+    status =
+        device->vdp_video_surface_query_ycbcr_capabilities (device->device,
+        chroma_type, formats[i].format, &is_supported);
+    /* INVALID_Y_CB_CR_FORMAT just means "not supported", not an error. */
+    if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_Y_CB_CR_FORMAT) {
+      GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
+          ("Could not query VDPAU YCbCr capabilites"),
+          ("Error returned from vdpau was: %s",
+              device->vdp_get_error_string (status)));
+
+      /* NOTE(review): caps built so far are leaked on this path — the
+       * caller only sees NULL; consider unreffing here. */
+      return NULL;
+    }
+    if (is_supported) {
+      GstCaps *format_caps;
+
+      format_caps = gst_caps_new_simple ("video/x-raw-yuv",
+          "format", GST_TYPE_FOURCC, formats[i].fourcc,
+          "width", G_TYPE_INT, width,
+          "height", G_TYPE_INT, height,
+          "framerate", GST_TYPE_FRACTION, framerate_numerator,
+          framerate_denominator, "pixel-aspect-ratio", GST_TYPE_FRACTION,
+          par_numerator, par_denominator, NULL);
+      gst_caps_append (caps, format_caps);
+    }
+  }
+
+  /* Nothing supported: return NULL instead of empty caps. */
+  if (gst_caps_is_empty (caps)) {
+    gst_caps_unref (caps);
+    return NULL;
+  }
+
+  return caps;
+}
+
+/* Sink setcaps: extracts the device and surface geometry from the VDPAU
+ * caps, computes the matching raw-YUV src caps, and fixates/sets the
+ * first allowed format on the src pad.  Returns FALSE if negotiation
+ * fails at any step. */
+static gboolean
+gst_vdp_video_yuv_sink_set_caps (GstPad * pad, GstCaps * caps)
+{
+  GstVdpVideoYUV *video_yuv = GST_VDP_VIDEO_YUV (GST_OBJECT_PARENT (pad));
+
+  GstCaps *src_caps, *new_caps;
+  GstStructure *structure;
+  const GValue *value;
+  GstVdpDevice *device;
+  gint chroma_type;
+  gint width, height;
+  gint framerate_numerator, framerate_denominator;
+  gint par_numerator, par_denominator;
+  guint32 fourcc_format;
+  gboolean res;
+
+  /* The upstream vdpau element stores its GstVdpDevice in the caps. */
+  structure = gst_caps_get_structure (caps, 0);
+  value = gst_structure_get_value (structure, "device");
+  device = g_value_get_object (value);
+
+  gst_structure_get_int (structure, "chroma-type", &chroma_type);
+  gst_structure_get_int (structure, "width", &width);
+  gst_structure_get_int (structure, "height", &height);
+  gst_structure_get_fraction (structure, "framerate",
+      &framerate_numerator, &framerate_denominator);
+  gst_structure_get_fraction (structure, "pixel-aspect-ratio",
+      &par_numerator, &par_denominator);
+
+  src_caps =
+      gst_vdp_video_yuv_get_caps (video_yuv, device, chroma_type, width,
+      height, framerate_numerator, framerate_denominator, par_numerator,
+      par_denominator);
+  if (G_UNLIKELY (!src_caps))
+    return FALSE;
+
+  /* fix: don't leak previously computed src caps on renegotiation */
+  if (video_yuv->src_caps)
+    gst_caps_unref (video_yuv->src_caps);
+  video_yuv->src_caps = src_caps;
+
+  src_caps = gst_pad_get_allowed_caps (video_yuv->src);
+  if (G_UNLIKELY (!src_caps || !gst_caps_get_size (src_caps))) {
+    /* fix: non-NULL empty caps were leaked on this path */
+    if (src_caps)
+      gst_caps_unref (src_caps);
+    return FALSE;
+  }
+
+  /* Pick the first allowed format and fixate it on the src pad. */
+  new_caps = gst_caps_copy_nth (src_caps, 0);
+  gst_caps_unref (src_caps);
+  if (G_UNLIKELY (!new_caps))
+    return FALSE;
+
+  structure = gst_caps_get_structure (new_caps, 0);
+  gst_structure_get_fourcc (structure, "format", &fourcc_format);
+
+  gst_pad_fixate_caps (video_yuv->src, new_caps);
+  res = gst_pad_set_caps (video_yuv->src, new_caps);
+
+  gst_caps_unref (new_caps);
+
+  if (G_UNLIKELY (!res))
+    return FALSE;
+
+  /* Remember the negotiated geometry/format for the chain function.
+   * NOTE(review): the parsed pixel-aspect-ratio is not stored — confirm
+   * whether video_yuv->par_* should be updated here. */
+  video_yuv->width = width;
+  video_yuv->height = height;
+  video_yuv->framerate_numerator = framerate_numerator;
+  video_yuv->framerate_denominator = framerate_denominator;
+  video_yuv->format = fourcc_format;
+
+  return TRUE;
+}
+
+/* Src getcaps: report, in order of preference, the caps computed from
+ * the negotiated sink caps, the caps currently set on the src pad, or
+ * the static pad template.  Always returns a fresh copy. */
+static GstCaps *
+gst_vdp_video_yuv_src_getcaps (GstPad * pad)
+{
+  GstVdpVideoYUV *video_yuv = GST_VDP_VIDEO_YUV (GST_OBJECT_PARENT (pad));
+  GstCaps *result;
+
+  if (video_yuv->src_caps != NULL)
+    result = gst_caps_copy (video_yuv->src_caps);
+  else if (GST_PAD_CAPS (video_yuv->src) != NULL)
+    result = gst_caps_copy (GST_PAD_CAPS (video_yuv->src));
+  else
+    result = gst_caps_copy (gst_pad_get_pad_template_caps (video_yuv->src));
+
+  return result;
+}
+
+/* GObject vmethod implementations */
+
+/* Registers the element metadata and pad templates with the class. */
+static void
+gst_vdp_video_yuv_base_init (gpointer klass)
+{
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+  gst_element_class_set_details_simple (element_class,
+      "VdpauVideoYUV",
+      /* fix: the classification read "Covideo_yuv/Decoder/Video", a
+       * search-and-replace artifact; use a valid classification. */
+      "Filter/Converter/Video",
+      "VDPAU video surface to YUV",
+      "Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");
+
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&sink_template));
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&src_template));
+}
+
+/* Class init: hook up the GObject vfuncs.  This element installs no
+ * properties; the set/get handlers only warn on invalid ids. */
+static void
+gst_vdp_video_yuv_class_init (GstVdpVideoYUVClass * klass)
+{
+  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+
+  gobject_class->finalize = gst_vdp_video_yuv_finalize;
+  gobject_class->set_property = gst_vdp_video_yuv_set_property;
+  gobject_class->get_property = gst_vdp_video_yuv_get_property;
+}
+
+/* Instance init: reset the negotiated state and create the src/sink
+ * pads with their caps/chain hooks. */
+static void
+gst_vdp_video_yuv_init (GstVdpVideoYUV * video_yuv, GstVdpVideoYUVClass * klass)
+{
+  video_yuv->src_caps = NULL;
+
+  /* Geometry/format are filled in by sink_set_caps during negotiation. */
+  video_yuv->height = 0;
+  video_yuv->width = 0;
+  video_yuv->framerate_numerator = 0;
+  video_yuv->framerate_denominator = 0;
+  video_yuv->par_numerator = 1;
+  video_yuv->par_denominator = 1;
+
+  video_yuv->src = gst_pad_new_from_static_template (&src_template, "src");
+  gst_pad_set_getcaps_function (video_yuv->src, gst_vdp_video_yuv_src_getcaps);
+  gst_element_add_pad (GST_ELEMENT (video_yuv), video_yuv->src);
+
+  video_yuv->sink = gst_pad_new_from_static_template (&sink_template, "sink");
+  gst_pad_set_setcaps_function (video_yuv->sink,
+      gst_vdp_video_yuv_sink_set_caps);
+  gst_pad_set_chain_function (video_yuv->sink, gst_vdp_video_yuv_chain);
+  gst_element_add_pad (GST_ELEMENT (video_yuv), video_yuv->sink);
+  gst_pad_set_active (video_yuv->sink, TRUE);
+}
+
+/* Releases the cached src caps and chains up. */
+static void
+gst_vdp_video_yuv_finalize (GObject * object)
+{
+  GstVdpVideoYUV *video_yuv = (GstVdpVideoYUV *) object;
+
+  if (video_yuv->src_caps)
+    gst_caps_unref (video_yuv->src_caps);
+
+  /* fix: chain up to the parent finalize (was missing; parent_class is
+   * provided by GST_BOILERPLATE_FULL). */
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* No properties are installed on this element: every id is invalid. */
+static void
+gst_vdp_video_yuv_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
+
+/* See set_property above: no readable properties exist either. */
+static void
+gst_vdp_video_yuv_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
diff --git a/sys/vdpau/gstvdpvideoyuv.h b/sys/vdpau/gstvdpvideoyuv.h
new file mode 100644
index 00000000..935fe700
--- /dev/null
+++ b/sys/vdpau/gstvdpvideoyuv.h
@@ -0,0 +1,60 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_VDP_VIDEO_YUV_H__
+#define __GST_VDP_VIDEO_YUV_H__
+
+#include <gst/gst.h>
+
+#include "gstvdpdevice.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_VDP_VIDEO_YUV            (gst_vdp_video_yuv_get_type())
+#define GST_VDP_VIDEO_YUV(obj)            (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VDP_VIDEO_YUV,GstVdpVideoYUV))
+#define GST_VDP_VIDEO_YUV_CLASS(klass)    (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VDP_VIDEO_YUV,GstVdpVideoYUVClass))
+#define GST_VDP_VIDEO_YUV_GET_CLASS(obj)  (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VDP_VIDEO_YUV, GstVdpVideoYUVClass))
+#define GST_IS_VDP_VIDEO_YUV(obj)         (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VDP_VIDEO_YUV))
+#define GST_IS_VDP_VIDEO_YUV_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VDP_VIDEO_YUV))
+
+typedef struct _GstVdpVideoYUV GstVdpVideoYUV;
+typedef struct _GstVdpVideoYUVClass GstVdpVideoYUVClass;
+
+/* Element converting VDPAU video surface buffers to raw YUV buffers. */
+struct _GstVdpVideoYUV {
+  GstElement element;
+
+  GstPad *src, *sink;
+  /* Raw YUV caps computed from the negotiated sink caps (may be NULL). */
+  GstCaps *src_caps;
+
+  /* Negotiated output geometry/format, set in sink_set_caps. */
+  gint width, height;
+  gint framerate_numerator, framerate_denominator;
+  gint par_numerator, par_denominator;
+  /* Output fourcc (e.g. YV12/I420/NV12). */
+  guint format;
+};
+
+struct _GstVdpVideoYUVClass {
+  GstElementClass parent_class;
+};
+
+GType gst_vdp_video_yuv_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_VDP_VIDEO_YUV_H__ */
diff --git a/sys/vdpau/gstvdpyuvvideo.c b/sys/vdpau/gstvdpyuvvideo.c
new file mode 100644
index 00000000..72c053e6
--- /dev/null
+++ b/sys/vdpau/gstvdpyuvvideo.c
@@ -0,0 +1,476 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+#include "gstvdpvideobuffer.h"
+#include "gstvdpyuvvideo.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_vdp_yuv_video_debug);
+#define GST_CAT_DEFAULT gst_vdp_yuv_video_debug
+
+/* Filter signals and args */
+enum
+{
+  /* FILL ME */
+  LAST_SIGNAL
+};
+
+enum
+{
+  PROP_0,
+  PROP_DISPLAY
+};
+
+/* Sink accepts raw YUV; the supported fourccs are probed from the
+ * device in gst_vdp_yuv_video_get_caps. */
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+    GST_PAD_SINK,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS ("video/x-raw-yuv, "
+        "framerate = (fraction) [ 0, MAX ], "
+        "width = (int) [ 1, MAX ], " "height = (int) [ 1, MAX ]"));
+
+/* Source produces VDPAU surface buffers (see GST_VDP_VIDEO_CAPS). */
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+    GST_PAD_SRC,
+    GST_PAD_ALWAYS,
+    GST_STATIC_CAPS (GST_VDP_VIDEO_CAPS));
+
+/* fix: the debug category was registered as "vdpauvideoyuv", colliding
+ * with the category name used by the vdpauvideoyuv element; give this
+ * element its own name. */
+#define DEBUG_INIT(bla) \
+    GST_DEBUG_CATEGORY_INIT (gst_vdp_yuv_video_debug, "vdpauyuvvideo", 0, "YUV to VDPAU video surface");
+
+GST_BOILERPLATE_FULL (GstVdpYUVVideo, gst_vdp_yuv_video, GstElement,
+    GST_TYPE_ELEMENT, DEBUG_INIT);
+
+static void gst_vdp_yuv_video_finalize (GObject * object);
+static void gst_vdp_yuv_video_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec);
+static void gst_vdp_yuv_video_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec);
+
+/* Sink-pad chain: uploads the raw YUV planes of @buffer into a freshly
+ * allocated GstVdpVideoBuffer's surface and pushes the surface buffer.
+ * Takes ownership of @buffer; it is released on every path. */
+GstFlowReturn
+gst_vdp_yuv_video_chain (GstPad * pad, GstBuffer * buffer)
+{
+  GstVdpYUVVideo *yuv_video;
+  GstVdpDevice *device;
+  VdpVideoSurface surface;
+  GstBuffer *outbuf = NULL;
+
+  yuv_video = GST_VDP_YUV_VIDEO (GST_OBJECT_PARENT (pad));
+  device = yuv_video->device;
+
+  outbuf =
+      GST_BUFFER (gst_vdp_video_buffer_new (device, yuv_video->chroma_type,
+          yuv_video->width, yuv_video->height));
+  surface = GST_VDP_VIDEO_BUFFER (outbuf)->surface;
+
+  switch (yuv_video->format) {
+    case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
+    {
+      VdpStatus status;
+      guint8 *data[3];
+      guint32 stride[3];
+
+      /* VDPAU expects plane order Y, V, U for YV12 upload, hence
+       * components 0, 2, 1. */
+      data[0] = GST_BUFFER_DATA (buffer) +
+          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
+          0, yuv_video->width, yuv_video->height);
+      data[1] = GST_BUFFER_DATA (buffer) +
+          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
+          2, yuv_video->width, yuv_video->height);
+      data[2] = GST_BUFFER_DATA (buffer) +
+          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
+          1, yuv_video->width, yuv_video->height);
+
+      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
+          0, yuv_video->width);
+      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
+          2, yuv_video->width);
+      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
+          1, yuv_video->width);
+
+      status =
+          device->vdp_video_surface_put_bits_ycbcr (surface,
+          VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
+      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
+        GST_ELEMENT_ERROR (yuv_video, RESOURCE, READ,
+            ("Couldn't push YV12 data to VDPAU"),
+            ("Error returned from vdpau was: %s",
+                device->vdp_get_error_string (status)));
+        goto error;
+      }
+      break;
+    }
+    case GST_MAKE_FOURCC ('I', '4', '2', '0'):
+    {
+      VdpStatus status;
+      guint8 *data[3];
+      guint32 stride[3];
+
+      /* I420 source: U/V source pointers are swapped (components 2, 1)
+       * so the YV12-format upload consumes the planes correctly. */
+      data[0] = GST_BUFFER_DATA (buffer) +
+          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
+          0, yuv_video->width, yuv_video->height);
+      data[1] = GST_BUFFER_DATA (buffer) +
+          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
+          2, yuv_video->width, yuv_video->height);
+      data[2] = GST_BUFFER_DATA (buffer) +
+          gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
+          1, yuv_video->width, yuv_video->height);
+
+      stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
+          0, yuv_video->width);
+      stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
+          2, yuv_video->width);
+      stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
+          1, yuv_video->width);
+
+      status =
+          device->vdp_video_surface_put_bits_ycbcr (surface,
+          VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
+      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
+        GST_ELEMENT_ERROR (yuv_video, RESOURCE, READ,
+            ("Couldn't push YV12 data to VDPAU"),
+            ("Error returned from vdpau was: %s",
+                device->vdp_get_error_string (status)));
+        goto error;
+      }
+      break;
+    }
+    case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
+    {
+      VdpStatus status;
+      guint8 *data[2];
+      guint32 stride[2];
+
+      /* NV12: full-size Y plane plus half-size interleaved UV plane. */
+      data[0] = GST_BUFFER_DATA (buffer);
+      data[1] = GST_BUFFER_DATA (buffer) + yuv_video->width * yuv_video->height;
+
+      stride[0] = yuv_video->width;
+      stride[1] = yuv_video->width;
+
+      status =
+          device->vdp_video_surface_put_bits_ycbcr (surface,
+          VDP_YCBCR_FORMAT_NV12, (void *) data, stride);
+      if (G_UNLIKELY (status != VDP_STATUS_OK)) {
+        GST_ELEMENT_ERROR (yuv_video, RESOURCE, READ,
+            ("Couldn't get data from vdpau"),
+            ("Error returned from vdpau was: %s",
+                device->vdp_get_error_string (status)));
+        goto error;
+      }
+      break;
+    }
+    default:
+      /* fix: an unknown format previously pushed an un-filled surface
+       * downstream; fail cleanly instead. */
+      goto error;
+  }
+
+  /* fix: copy the timestamps *before* releasing @buffer — the original
+   * unreffed the buffer first and then read from the freed buffer. */
+  gst_buffer_copy_metadata (outbuf, buffer, GST_BUFFER_COPY_TIMESTAMPS);
+  gst_buffer_set_caps (outbuf, GST_PAD_CAPS (yuv_video->src));
+  gst_buffer_unref (buffer);
+
+  return gst_pad_push (yuv_video->src, outbuf);
+
+error:
+  /* fix: the input buffer was leaked on the error path. */
+  gst_buffer_unref (buffer);
+  gst_buffer_unref (outbuf);
+  return GST_FLOW_ERROR;
+}
+
+/* Probes the device for every supported (chroma type, YCbCr format)
+ * combination and builds the corresponding raw YUV sink caps.
+ * "chroma_types"/N_CHROMA_TYPES and "formats"/N_FORMATS are tables
+ * declared elsewhere (presumably gstvdpdevice.h) — TODO confirm.
+ * Returns NULL on error or when nothing is supported. */
+static GstCaps *
+gst_vdp_yuv_video_get_caps (GstVdpYUVVideo * yuv_video)
+{
+  GstVdpDevice *device;
+  GstCaps *caps;
+  gint i;
+
+  device = yuv_video->device;
+
+  caps = gst_caps_new_empty ();
+
+  for (i = 0; i < N_CHROMA_TYPES; i++) {
+    VdpStatus status;
+    VdpBool is_supported;
+    guint32 max_w, max_h;
+
+    status =
+        device->vdp_video_surface_query_capabilities (device->device,
+        chroma_types[i], &is_supported, &max_w, &max_h);
+
+    /* INVALID_CHROMA_TYPE just means "not supported", not an error. */
+    if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_CHROMA_TYPE) {
+      GST_ELEMENT_ERROR (yuv_video, RESOURCE, READ,
+          ("Could not get query VDPAU video surface capabilites"),
+          ("Error returned from vdpau was: %s",
+              device->vdp_get_error_string (status)));
+
+      goto error;
+    }
+    if (is_supported) {
+      gint j;
+
+      for (j = 0; j < N_FORMATS; j++) {
+        if (formats[j].chroma_type != chroma_types[i])
+          continue;
+
+        status =
+            device->vdp_video_surface_query_ycbcr_capabilities (device->device,
+            formats[j].chroma_type, formats[j].format, &is_supported);
+        if (status != VDP_STATUS_OK
+            && status != VDP_STATUS_INVALID_Y_CB_CR_FORMAT) {
+          GST_ELEMENT_ERROR (yuv_video, RESOURCE, READ,
+              ("Could not query VDPAU YCbCr capabilites"),
+              ("Error returned from vdpau was: %s",
+                  device->vdp_get_error_string (status)));
+
+          goto error;
+        }
+        if (is_supported) {
+          GstCaps *format_caps;
+
+          format_caps = gst_caps_new_simple ("video/x-raw-yuv",
+              "format", GST_TYPE_FOURCC, formats[j].fourcc,
+              "width", GST_TYPE_INT_RANGE, 1, max_w,
+              "height", GST_TYPE_INT_RANGE, 1, max_h,
+              "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
+          gst_caps_append (caps, format_caps);
+        }
+      }
+    }
+  }
+/* NOTE(review): on a query error this falls into the empty-check below,
+ * so caps collected before the failure are still returned — confirm
+ * whether that partial result is intended. */
+error:
+  if (gst_caps_is_empty (caps)) {
+    gst_caps_unref (caps);
+    return NULL;
+  }
+
+  return caps;
+}
+
+/* Sink setcaps: maps the incoming fourcc to a VDPAU chroma type, then
+ * builds and fixates matching x-vdpau-video src caps (embedding the
+ * GstVdpDevice in the "device" field for the downstream element). */
+static gboolean
+gst_vdp_yuv_video_sink_setcaps (GstPad * pad, GstCaps * caps)
+{
+  GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (GST_OBJECT_PARENT (pad));
+
+  GstStructure *structure;
+  guint32 fourcc;
+  gint chroma_type = 0;
+  gint width, height;
+  gint framerate_numerator, framerate_denominator;
+  gint par_numerator, par_denominator;
+  gint i;
+  GstCaps *src_caps, *new_caps;
+  gboolean res;
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  gst_structure_get_fourcc (structure, "format", &fourcc);
+  gst_structure_get_int (structure, "width", &width);
+  gst_structure_get_int (structure, "height", &height);
+  gst_structure_get_fraction (structure, "framerate",
+      &framerate_numerator, &framerate_denominator);
+  gst_structure_get_fraction (structure, "pixel-aspect-ratio",
+      &par_numerator, &par_denominator);
+
+  /* Look up the chroma type matching the negotiated fourcc. */
+  for (i = 0; i < N_FORMATS; i++) {
+    if (formats[i].fourcc == fourcc) {
+      chroma_type = formats[i].chroma_type;
+      break;
+    }
+  }
+
+  src_caps = gst_pad_get_allowed_caps (yuv_video->src);
+  if (G_UNLIKELY (!src_caps || !gst_caps_get_size (src_caps))) {
+    /* fix: non-NULL empty caps were leaked on this path */
+    if (src_caps)
+      gst_caps_unref (src_caps);
+    return FALSE;
+  }
+
+  new_caps = gst_caps_copy_nth (src_caps, 0);
+  gst_caps_unref (src_caps);
+  if (G_UNLIKELY (!new_caps))
+    return FALSE;
+
+  structure = gst_caps_get_structure (new_caps, 0);
+
+  gst_structure_set (structure,
+      "device", G_TYPE_OBJECT, yuv_video->device,
+      "chroma-type", G_TYPE_INT, chroma_type,
+      "width", G_TYPE_INT, width,
+      "height", G_TYPE_INT, height,
+      "framerate", GST_TYPE_FRACTION, framerate_numerator,
+      framerate_denominator, "pixel-aspect-ratio", GST_TYPE_FRACTION,
+      par_numerator, par_denominator, NULL);
+
+  gst_pad_fixate_caps (yuv_video->src, new_caps);
+  res = gst_pad_set_caps (yuv_video->src, new_caps);
+
+  gst_caps_unref (new_caps);
+
+  if (G_UNLIKELY (!res))
+    return FALSE;
+
+  /* Remember the negotiated input parameters for the chain function. */
+  yuv_video->width = width;
+  yuv_video->height = height;
+  yuv_video->format = fourcc;
+  yuv_video->chroma_type = chroma_type;
+
+  return TRUE;
+}
+
+/* Sink getcaps: caps probed from the device (if available) win over the
+ * static pad template.  Always returns a fresh copy. */
+static GstCaps *
+gst_vdp_yuv_video_sink_getcaps (GstPad * pad)
+{
+  GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (GST_OBJECT_PARENT (pad));
+  GstCaps *caps;
+
+  if (yuv_video->sink_caps != NULL)
+    caps = gst_caps_copy (yuv_video->sink_caps);
+  else
+    caps = gst_caps_copy (gst_pad_get_pad_template_caps (yuv_video->sink));
+
+  return caps;
+}
+
+/* Acquires/releases the VDPAU device around NULL<->READY and probes the
+ * supported sink caps once a device is available. */
+static GstStateChangeReturn
+gst_vdp_yuv_video_change_state (GstElement * element, GstStateChange transition)
+{
+  GstVdpYUVVideo *yuv_video;
+
+  yuv_video = GST_VDP_YUV_VIDEO (element);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      yuv_video->device = gst_vdp_get_device (yuv_video->display);
+      /* NOTE(review): assumes gst_vdp_get_device can return NULL on
+       * failure — confirm; failing early avoids a NULL device later. */
+      if (G_UNLIKELY (!yuv_video->device))
+        return GST_STATE_CHANGE_FAILURE;
+      if (!yuv_video->sink_caps)
+        yuv_video->sink_caps = gst_vdp_yuv_video_get_caps (yuv_video);
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      g_object_unref (yuv_video->device);
+      yuv_video->device = NULL;
+      break;
+    default:
+      break;
+  }
+
+  /* fix: chain up so GstElement performs its own state bookkeeping; the
+   * original always returned GST_STATE_CHANGE_SUCCESS without chaining. */
+  return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+}
+
+/* GObject vmethod implementations */
+
+/* Registers the element metadata and pad templates with the class. */
+static void
+gst_vdp_yuv_video_base_init (gpointer klass)
+{
+  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+  gst_element_class_set_details_simple (element_class,
+      "VdpauYUVVideo",
+      /* fix: the classification read "Coyuv_video/Decoder/Video", a
+       * search-and-replace artifact; use a valid classification. */
+      "Filter/Converter/Video",
+      /* fix: description was copy-pasted from the opposite-direction
+       * element; this one converts YUV *to* a surface (matches the
+       * debug category description above). */
+      "YUV to VDPAU video surface",
+      "Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");
+
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&sink_template));
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&src_template));
+}
+
+/* Class init: hooks up GObject vfuncs, installs the "display" property
+ * and overrides the element state-change handler. */
+static void
+gst_vdp_yuv_video_class_init (GstVdpYUVVideoClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+
+  gobject_class = (GObjectClass *) klass;
+  gstelement_class = (GstElementClass *) klass;
+
+  gobject_class->finalize = gst_vdp_yuv_video_finalize;
+  gobject_class->set_property = gst_vdp_yuv_video_set_property;
+  gobject_class->get_property = gst_vdp_yuv_video_get_property;
+
+  /* G_PARAM_CONSTRUCT makes set_property run at construction with the
+   * default (NULL), i.e. the default X display. */
+  g_object_class_install_property (gobject_class, PROP_DISPLAY,
+      g_param_spec_string ("display", "Display", "X Display name",
+          NULL, G_PARAM_READWRITE | G_PARAM_CONSTRUCT));
+
+  gstelement_class->change_state = gst_vdp_yuv_video_change_state;
+}
+
+/* Instance init: reset state and create the src/sink pads with their
+ * caps/chain hooks.  The device is acquired later in change_state. */
+static void
+gst_vdp_yuv_video_init (GstVdpYUVVideo * yuv_video, GstVdpYUVVideoClass * klass)
+{
+  yuv_video->sink_caps = NULL;
+
+  yuv_video->display = NULL;
+  yuv_video->device = NULL;
+
+  /* Filled in by sink_setcaps during negotiation. */
+  yuv_video->height = 0;
+  yuv_video->width = 0;
+  yuv_video->format = 0;
+  yuv_video->chroma_type = 0;
+
+  yuv_video->src = gst_pad_new_from_static_template (&src_template, "src");
+  gst_element_add_pad (GST_ELEMENT (yuv_video), yuv_video->src);
+
+  yuv_video->sink = gst_pad_new_from_static_template (&sink_template, "sink");
+  gst_pad_set_getcaps_function (yuv_video->sink,
+      gst_vdp_yuv_video_sink_getcaps);
+  gst_pad_set_setcaps_function (yuv_video->sink,
+      gst_vdp_yuv_video_sink_setcaps);
+  gst_pad_set_chain_function (yuv_video->sink, gst_vdp_yuv_video_chain);
+  gst_element_add_pad (GST_ELEMENT (yuv_video), yuv_video->sink);
+  gst_pad_set_active (yuv_video->sink, TRUE);
+}
+
+/* Frees the configured display name and chains up.
+ * NOTE(review): sink_caps is released nowhere — confirm whether it
+ * should be unreffed here as well. */
+static void
+gst_vdp_yuv_video_finalize (GObject * object)
+{
+  GstVdpYUVVideo *yuv_video = (GstVdpYUVVideo *) object;
+
+  g_free (yuv_video->display);
+
+  /* fix: chain up to the parent finalize (was missing; parent_class is
+   * provided by GST_BOILERPLATE_FULL). */
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* "display" is the only property: store a private copy of the string. */
+static void
+gst_vdp_yuv_video_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstVdpYUVVideo *self = GST_VDP_YUV_VIDEO (object);
+
+  if (prop_id == PROP_DISPLAY) {
+    /* Release any previously configured display name first. */
+    g_free (self->display);
+    self->display = g_value_dup_string (value);
+  } else {
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+  }
+}
+
+/* Mirror of set_property: report the configured display name. */
+static void
+gst_vdp_yuv_video_get_property (GObject * object, guint prop_id,
+    GValue * value, GParamSpec * pspec)
+{
+  GstVdpYUVVideo *self = GST_VDP_YUV_VIDEO (object);
+
+  if (prop_id == PROP_DISPLAY)
+    g_value_set_string (value, self->display);
+  else
+    G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+}
diff --git a/sys/vdpau/gstvdpyuvvideo.h b/sys/vdpau/gstvdpyuvvideo.h
new file mode 100644
index 00000000..2349e1ba
--- /dev/null
+++ b/sys/vdpau/gstvdpyuvvideo.h
@@ -0,0 +1,62 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_VDP_YUV_VIDEO_H__
+#define __GST_VDP_YUV_VIDEO_H__
+
+#include <gst/gst.h>
+
+#include "gstvdpdevice.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_VDP_YUV_VIDEO            (gst_vdp_yuv_video_get_type())
+#define GST_VDP_YUV_VIDEO(obj)            (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VDP_YUV_VIDEO,GstVdpYUVVideo))
+#define GST_VDP_YUV_VIDEO_CLASS(klass)    (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VDP_YUV_VIDEO,GstVdpYUVVideoClass))
+#define GST_VDP_YUV_VIDEO_GET_CLASS(obj)  (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VDP_YUV_VIDEO, GstVdpYUVVideoClass))
+#define GST_IS_VDP_YUV_VIDEO(obj)         (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VDP_YUV_VIDEO))
+#define GST_IS_VDP_YUV_VIDEO_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VDP_YUV_VIDEO))
+
+typedef struct _GstVdpYUVVideo GstVdpYUVVideo;
+typedef struct _GstVdpYUVVideoClass GstVdpYUVVideoClass;
+
+/* Element converting raw YUV buffers to VDPAU video surface buffers. */
+struct _GstVdpYUVVideo {
+  GstElement element;
+
+  GstPad *src, *sink;
+  /* Sink caps probed from the device (may be NULL before READY). */
+  GstCaps *sink_caps;
+
+  /* X display name ("display" property) and the device acquired from it. */
+  gchar *display;
+  GstVdpDevice *device;
+
+  /* Negotiated input fourcc and the matching VDPAU chroma type. */
+  guint32 format;
+  gint chroma_type;
+  gint width, height;
+};
+
+struct _GstVdpYUVVideoClass {
+  GstElementClass parent_class;
+};
+
+GType gst_vdp_yuv_video_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_VDP_YUV_VIDEO_H__ */
diff --git a/sys/vdpau/mpegutil.c b/sys/vdpau/mpegutil.c
new file mode 100644
index 00000000..b52ab6f8
--- /dev/null
+++ b/sys/vdpau/mpegutil.c
@@ -0,0 +1,430 @@
+/* GStreamer
+ * Copyright (C) 2007 Jan Schmidt <thaytan@mad.scientist.com>
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include <gst/base/gstbitreader.h>
+#include <string.h>
+
+#include "mpegutil.h"
+
+/* default intra quant matrix, in zig-zag order */
+static const guint8 default_intra_quantizer_matrix[64] = {
+ 8,
+ 16, 16,
+ 19, 16, 19,
+ 22, 22, 22, 22,
+ 22, 22, 26, 24, 26,
+ 27, 27, 27, 26, 26, 26,
+ 26, 27, 27, 27, 29, 29, 29,
+ 34, 34, 34, 29, 29, 29, 27, 27,
+ 29, 29, 32, 32, 34, 34, 37,
+ 38, 37, 35, 35, 34, 35,
+ 38, 38, 40, 40, 40,
+ 48, 48, 46, 46,
+ 56, 56, 58,
+ 69, 69,
+ 83
+};
+
+guint8 mpeg2_scan[64] = {
+ /* Zig-Zag scan pattern */
+ 0, 1, 8, 16, 9, 2, 3, 10,
+ 17, 24, 32, 25, 18, 11, 4, 5,
+ 12, 19, 26, 33, 40, 48, 41, 34,
+ 27, 20, 13, 6, 7, 14, 21, 28,
+ 35, 42, 49, 56, 57, 50, 43, 36,
+ 29, 22, 15, 23, 30, 37, 44, 51,
+ 58, 59, 52, 45, 38, 31, 39, 46,
+ 53, 60, 61, 54, 47, 55, 62, 63
+};
+
+static void
+set_fps_from_code (MPEGSeqHdr * hdr, guint8 fps_code)
+{
+ const gint framerates[][2] = {
+ {30, 1}, {24000, 1001}, {24, 1}, {25, 1},
+ {30000, 1001}, {30, 1}, {50, 1}, {60000, 1001},
+ {60, 1}, {30, 1}
+ };
+
+ if (fps_code < 10) {
+ hdr->fps_n = framerates[fps_code][0];
+ hdr->fps_d = framerates[fps_code][1];
+ } else {
+ /* Force a valid framerate */
+ hdr->fps_n = 30000;
+ hdr->fps_d = 1001;
+ }
+}
+
+/* Set the Pixel Aspect Ratio in our hdr from a DAR code in the data */
+static void
+set_par_from_dar (MPEGSeqHdr * hdr, guint8 asr_code)
+{
+ /* Pixel_width = DAR_width * display_vertical_size */
+ /* Pixel_height = DAR_height * display_horizontal_size */
+ switch (asr_code) {
+ case 0x02: /* 3:4 DAR = 4:3 pixels */
+ hdr->par_w = 4 * hdr->height;
+ hdr->par_h = 3 * hdr->width;
+ break;
+ case 0x03: /* 9:16 DAR */
+ hdr->par_w = 16 * hdr->height;
+ hdr->par_h = 9 * hdr->width;
+ break;
+ case 0x04: /* 1:2.21 DAR */
+ hdr->par_w = 221 * hdr->height;
+ hdr->par_h = 100 * hdr->width;
+ break;
+ case 0x01: /* Square pixels */
+ default:
+ hdr->par_w = hdr->par_h = 1;
+ break;
+ }
+}
+
+gboolean
+mpeg_util_parse_sequence_extension (MPEGSeqExtHdr * hdr, GstBuffer * buffer)
+{
+  GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);
+
+ /* skip sync word */
+ if (!gst_bit_reader_skip (&reader, 8 * 4))
+ return FALSE;
+
+ /* skip extension code */
+ if (!gst_bit_reader_skip (&reader, 4))
+ return FALSE;
+
+ /* skip profile and level escape bit */
+ if (!gst_bit_reader_skip (&reader, 1))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->profile, 3))
+ return FALSE;
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->level, 4))
+ return FALSE;
+
+ /* progressive */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->progressive, 1))
+ return FALSE;
+
+ /* chroma format */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->chroma_format, 2))
+ return FALSE;
+
+ /* resolution extension */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->horiz_size_ext, 2))
+ return FALSE;
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->vert_size_ext, 2))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint16 (&reader, &hdr->bitrate_ext, 12))
+ return FALSE;
+
+ /* skip to framerate extension */
+ if (!gst_bit_reader_skip (&reader, 9))
+ return FALSE;
+
+ /* framerate extension */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->fps_n_ext, 2))
+ return FALSE;
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->fps_d_ext, 2))
+ return FALSE;
+
+ return TRUE;
+}
+
+gboolean
+mpeg_util_parse_sequence_hdr (MPEGSeqHdr * hdr, GstBuffer * buffer)
+{
+ GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);
+ guint8 dar_idx, par_idx;
+ guint8 load_intra_flag, load_non_intra_flag;
+
+ /* skip sync word */
+ if (!gst_bit_reader_skip (&reader, 8 * 4))
+ return FALSE;
+
+ /* resolution */
+ if (!gst_bit_reader_get_bits_uint16 (&reader, &hdr->width, 12))
+ return FALSE;
+ if (!gst_bit_reader_get_bits_uint16 (&reader, &hdr->height, 12))
+ return FALSE;
+
+ /* aspect ratio */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &dar_idx, 4))
+ return FALSE;
+ set_par_from_dar (hdr, dar_idx);
+
+ /* framerate */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &par_idx, 4))
+ return FALSE;
+ set_fps_from_code (hdr, par_idx);
+
+ /* bitrate */
+ if (!gst_bit_reader_get_bits_uint32 (&reader, &hdr->bitrate, 18))
+ return FALSE;
+
+ if (!gst_bit_reader_skip (&reader, 1))
+ return FALSE;
+
+ /* VBV buffer size */
+ if (!gst_bit_reader_get_bits_uint16 (&reader, &hdr->vbv_buffer, 10))
+ return FALSE;
+
+ /* constrained parameters flag */
+ if (!gst_bit_reader_get_bits_uint8 (&reader,
+ &hdr->constrained_parameters_flag, 1))
+ return FALSE;
+
+ /* intra quantizer matrix */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &load_intra_flag, 1))
+ return FALSE;
+ if (load_intra_flag) {
+ gint i;
+ for (i = 0; i < 64; i++) {
+ if (!gst_bit_reader_get_bits_uint8 (&reader,
+ &hdr->intra_quantizer_matrix[mpeg2_scan[i]], 8))
+ return FALSE;
+ }
+ } else
+ memcpy (hdr->intra_quantizer_matrix, default_intra_quantizer_matrix, 64);
+
+ /* non intra quantizer matrix */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &load_non_intra_flag, 1))
+ return FALSE;
+ if (load_non_intra_flag) {
+ gint i;
+ for (i = 0; i < 64; i++) {
+ if (!gst_bit_reader_get_bits_uint8 (&reader,
+ &hdr->non_intra_quantizer_matrix[mpeg2_scan[i]], 8))
+ return FALSE;
+ }
+ } else
+ memset (hdr->non_intra_quantizer_matrix, 16, 64);
+
+ return TRUE;
+}
+
+gboolean
+mpeg_util_parse_picture_hdr (MPEGPictureHdr * hdr, GstBuffer * buffer)
+{
+ GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);
+
+ /* skip sync word */
+ if (!gst_bit_reader_skip (&reader, 8 * 4))
+ return FALSE;
+
+  /* temporal sequence number */
+ if (!gst_bit_reader_get_bits_uint16 (&reader, &hdr->tsn, 10))
+ return FALSE;
+
+ /* frame type */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->pic_type, 3))
+ return FALSE;
+
+ if (hdr->pic_type == 0 || hdr->pic_type > 4)
+ return FALSE; /* Corrupted picture packet */
+
+ /* VBV delay */
+ if (!gst_bit_reader_get_bits_uint16 (&reader, &hdr->vbv_delay, 16))
+ return FALSE;
+
+ if (hdr->pic_type == P_FRAME || hdr->pic_type == B_FRAME) {
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->full_pel_forward_vector,
+ 1))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->f_code[0][0], 3))
+ return FALSE;
+ hdr->f_code[0][1] = hdr->f_code[0][0];
+ } else {
+ hdr->full_pel_forward_vector = 0;
+ hdr->f_code[0][0] = hdr->f_code[0][1] = 0;
+ }
+
+ if (hdr->pic_type == B_FRAME) {
+ if (!gst_bit_reader_get_bits_uint8 (&reader,
+ &hdr->full_pel_backward_vector, 1))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->f_code[1][0], 3))
+ return FALSE;
+ hdr->f_code[1][1] = hdr->f_code[1][0];
+ } else {
+ hdr->full_pel_backward_vector = 0;
+ hdr->f_code[1][0] = hdr->f_code[1][1] = 0;
+ }
+
+ return TRUE;
+}
+
+gboolean
+mpeg_util_parse_picture_coding_extension (MPEGPictureExt * ext,
+ GstBuffer * buffer)
+{
+ GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);
+
+ /* skip sync word */
+ if (!gst_bit_reader_skip (&reader, 8 * 4))
+ return FALSE;
+
+ /* skip extension code */
+ if (!gst_bit_reader_skip (&reader, 4))
+ return FALSE;
+
+ /* f_code */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->f_code[0][0], 4))
+ return FALSE;
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->f_code[0][1], 4))
+ return FALSE;
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->f_code[1][0], 4))
+ return FALSE;
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->f_code[1][1], 4))
+ return FALSE;
+
+ /* intra DC precision */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->intra_dc_precision, 2))
+ return FALSE;
+
+ /* picture structure */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->picture_structure, 2))
+ return FALSE;
+
+ /* top field first */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->top_field_first, 1))
+ return FALSE;
+
+ /* frame pred frame dct */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->frame_pred_frame_dct, 1))
+ return FALSE;
+
+ /* concealment motion vectors */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->concealment_motion_vectors,
+ 1))
+ return FALSE;
+
+ /* q scale type */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->q_scale_type, 1))
+ return FALSE;
+
+ /* intra vlc format */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->intra_vlc_format, 1))
+ return FALSE;
+
+ /* alternate scan */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->alternate_scan, 1))
+ return FALSE;
+
+ /* repeat first field */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->repeat_first_field, 1))
+ return FALSE;
+
+ /* chroma_420_type */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->chroma_420_type, 1))
+ return FALSE;
+
+ /* progressive_frame */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->progressive_frame, 1))
+ return FALSE;
+
+ return TRUE;
+}
+
+gboolean
+mpeg_util_parse_gop (MPEGGop * gop, GstBuffer * buffer)
+{
+ GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);
+
+ /* skip sync word */
+ if (!gst_bit_reader_skip (&reader, 8 * 4))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &gop->drop_frame_flag, 1))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &gop->hour, 5))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &gop->minute, 6))
+ return FALSE;
+
+ /* skip unused bit */
+ if (!gst_bit_reader_skip (&reader, 1))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &gop->second, 6))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &gop->frame, 6))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &gop->closed_gop, 1))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &gop->broken_gop, 1))
+ return FALSE;
+
+ return TRUE;
+}
+
+gboolean
+mpeg_util_parse_quant_matrix (MPEGQuantMatrix * qm, GstBuffer * buffer)
+{
+ GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);
+ guint8 load_intra_flag, load_non_intra_flag;
+
+ /* skip sync word */
+ if (!gst_bit_reader_skip (&reader, 8 * 4))
+ return FALSE;
+
+ /* skip extension code */
+ if (!gst_bit_reader_skip (&reader, 4))
+ return FALSE;
+
+ /* intra quantizer matrix */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &load_intra_flag, 1))
+ return FALSE;
+ if (load_intra_flag) {
+ gint i;
+ for (i = 0; i < 64; i++) {
+ if (!gst_bit_reader_get_bits_uint8 (&reader,
+ &qm->intra_quantizer_matrix[mpeg2_scan[i]], 8))
+ return FALSE;
+ }
+ } else
+ memcpy (qm->intra_quantizer_matrix, default_intra_quantizer_matrix, 64);
+
+ /* non intra quantizer matrix */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &load_non_intra_flag, 1))
+ return FALSE;
+ if (load_non_intra_flag) {
+ gint i;
+ for (i = 0; i < 64; i++) {
+ if (!gst_bit_reader_get_bits_uint8 (&reader,
+ &qm->non_intra_quantizer_matrix[mpeg2_scan[i]], 8))
+ return FALSE;
+ }
+ } else
+ memset (qm->non_intra_quantizer_matrix, 16, 64);
+
+ return TRUE;
+}
diff --git a/sys/vdpau/mpegutil.h b/sys/vdpau/mpegutil.h
new file mode 100644
index 00000000..aaaa15f8
--- /dev/null
+++ b/sys/vdpau/mpegutil.h
@@ -0,0 +1,150 @@
+/* GStreamer
+ * Copyright (C) 2007 Jan Schmidt <thaytan@mad.scientist.com>
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __MPEGUTIL_H__
+#define __MPEGUTIL_H__
+
+#include <gst/gst.h>
+
+typedef struct MPEGSeqHdr MPEGSeqHdr;
+typedef struct MPEGSeqExtHdr MPEGSeqExtHdr;
+typedef struct MPEGPictureHdr MPEGPictureHdr;
+typedef struct MPEGPictureExt MPEGPictureExt;
+typedef struct MPEGGop MPEGGop;
+typedef struct MPEGQuantMatrix MPEGQuantMatrix;
+
+/* Packet ID codes for different packet types we
+ * care about */
+#define MPEG_PACKET_PICTURE 0x00
+#define MPEG_PACKET_SLICE_MIN 0x01
+#define MPEG_PACKET_SLICE_MAX 0xaf
+#define MPEG_PACKET_SEQUENCE 0xb3
+#define MPEG_PACKET_EXTENSION 0xb5
+#define MPEG_PACKET_SEQUENCE_END 0xb7
+#define MPEG_PACKET_GOP 0xb8
+#define MPEG_PACKET_NONE 0xff
+
+/* Extension codes we care about */
+#define MPEG_PACKET_EXT_SEQUENCE 0x01
+#define MPEG_PACKET_EXT_SEQUENCE_DISPLAY 0x02
+#define MPEG_PACKET_EXT_QUANT_MATRIX 0x03
+#define MPEG_PACKET_EXT_PICTURE_CODING 0x08
+
+/* frame types */
+#define I_FRAME 1
+#define P_FRAME 2
+#define B_FRAME 3
+
+struct MPEGSeqHdr
+{
+ /* Pixel-Aspect Ratio from DAR code via set_par_from_dar */
+ guint par_w, par_h;
+ /* Width and Height of the video */
+ guint16 width, height;
+ /* Framerate */
+ guint fps_n, fps_d;
+
+ guint32 bitrate;
+ guint16 vbv_buffer;
+
+ guint8 constrained_parameters_flag;
+
+ guint8 intra_quantizer_matrix[64];
+ guint8 non_intra_quantizer_matrix[64];
+};
+
+struct MPEGSeqExtHdr
+{
+
+ /* mpeg2 decoder profile */
+ guint8 profile;
+ /* mpeg2 decoder level */
+ guint8 level;
+
+ guint8 progressive;
+ guint8 chroma_format;
+
+ guint8 horiz_size_ext, vert_size_ext;
+
+ guint16 bitrate_ext;
+ guint8 fps_n_ext, fps_d_ext;
+
+};
+
+struct MPEGPictureHdr
+{
+ guint16 tsn;
+ guint8 pic_type;
+ guint16 vbv_delay;
+
+ guint8 full_pel_forward_vector, full_pel_backward_vector;
+
+ guint8 f_code[2][2];
+};
+
+struct MPEGPictureExt
+{
+ guint8 f_code[2][2];
+
+ guint8 intra_dc_precision;
+ guint8 picture_structure;
+ guint8 top_field_first;
+ guint8 frame_pred_frame_dct;
+ guint8 concealment_motion_vectors;
+ guint8 q_scale_type;
+ guint8 intra_vlc_format;
+ guint8 alternate_scan;
+ guint8 repeat_first_field;
+ guint8 chroma_420_type;
+ guint8 progressive_frame;
+};
+
+struct MPEGGop
+{
+ guint8 drop_frame_flag;
+
+ guint8 hour, minute, second, frame;
+
+ guint8 closed_gop;
+ guint8 broken_gop;
+};
+
+struct MPEGQuantMatrix
+{
+ guint8 intra_quantizer_matrix[64];
+ guint8 non_intra_quantizer_matrix[64];
+};
+
+gboolean mpeg_util_parse_sequence_hdr (MPEGSeqHdr *hdr, GstBuffer *buffer);
+
+gboolean mpeg_util_parse_sequence_extension (MPEGSeqExtHdr *hdr,
+ GstBuffer *buffer);
+
+gboolean mpeg_util_parse_picture_hdr (MPEGPictureHdr * hdr, GstBuffer *buffer);
+
+gboolean mpeg_util_parse_picture_coding_extension (MPEGPictureExt *ext,
+ GstBuffer *buffer);
+
+gboolean mpeg_util_parse_gop (MPEGGop * gop, GstBuffer *buffer);
+
+gboolean mpeg_util_parse_quant_matrix (MPEGQuantMatrix * qm, GstBuffer *buffer);
+
+#endif
+
diff --git a/tests/check/elements/camerabin.c b/tests/check/elements/camerabin.c
index 795fb4f0..cc08f50d 100644
--- a/tests/check/elements/camerabin.c
+++ b/tests/check/elements/camerabin.c
@@ -38,8 +38,6 @@
#define PHOTO_SETTING_DELAY_US 0
static GstElement *camera;
-static GCond *cam_cond;
-static GMutex *cam_mutex;
static GMainLoop *main_loop;
static guint cycle_count = 0;
@@ -52,7 +50,7 @@ make_test_file_name (const gchar * base_name)
g_snprintf (file_name, 999, "%s" G_DIR_SEPARATOR_S "%s",
g_get_tmp_dir (), base_name);
- GST_INFO ("capturing to: %s (cycle_count=%d)", file_name, cycle_count);
+ GST_INFO ("capturing to: %s (cycle: %d)", file_name, cycle_count);
return file_name;
}
@@ -79,6 +77,7 @@ handle_image_captured_cb (gpointer data)
GST_DEBUG ("handle_image_captured_cb, cycle: %d", cycle_count);
if (cycle_count == 0) {
+ GST_DEBUG ("all cycles done");
g_main_loop_quit (loop);
} else {
/* Set video recording mode */
@@ -97,7 +96,7 @@ handle_image_captured_cb (gpointer data)
g_signal_emit_by_name (camera, "user-start", NULL);
cycle_count--;
- GST_DEBUG ("next cycle");
+ GST_DEBUG ("next cycle: %d", cycle_count);
}
GST_DEBUG ("handle_image_captured_cb done");
return FALSE;
@@ -110,7 +109,7 @@ capture_done (GstElement * elem, const gchar * filename, gpointer user_data)
g_idle_add ((GSourceFunc) handle_image_captured_cb, loop);
- GST_DEBUG ("image saved");
+ GST_INFO ("image saved");
return FALSE;
}
@@ -160,6 +159,10 @@ capture_bus_cb (GstBus * bus, GstMessage * message, gpointer data)
GST_WARNING ("ERROR: %s [%s]", err->message, debug);
g_error_free (err);
g_free (debug);
+ /* Write debug graph to file */
+ GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (camera),
+ GST_DEBUG_GRAPH_SHOW_ALL, "camerabin.error");
+
fail_if (TRUE, "error while capturing");
g_main_loop_quit (loop);
break;
@@ -172,6 +175,9 @@ capture_bus_cb (GstBus * bus, GstMessage * message, gpointer data)
GST_WARNING ("WARNING: %s [%s]", err->message, debug);
g_error_free (err);
g_free (debug);
+ /* Write debug graph to file */
+ GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (camera),
+ GST_DEBUG_GRAPH_SHOW_ALL, "camerabin.warning");
break;
}
case GST_MESSAGE_EOS:
@@ -181,7 +187,7 @@ capture_bus_cb (GstBus * bus, GstMessage * message, gpointer data)
default:
st = gst_message_get_structure (message);
if (st && gst_structure_has_name (st, "image-captured")) {
- GST_INFO ("image-captured");
+ GST_INFO ("image captured");
}
break;
}
@@ -200,9 +206,6 @@ setup (void)
main_loop = g_main_loop_new (NULL, TRUE);
- cam_cond = g_cond_new ();
- cam_mutex = g_mutex_new ();
-
camera = gst_check_setup_element ("camerabin");
setup_camerabin_elements (camera);
@@ -242,8 +245,6 @@ setup (void)
static void
teardown (void)
{
- g_mutex_free (cam_mutex);
- g_cond_free (cam_cond);
if (camera)
gst_check_teardown_element (camera);
diff --git a/tests/examples/Makefile.am b/tests/examples/Makefile.am
index e68204e3..45d94378 100644
--- a/tests/examples/Makefile.am
+++ b/tests/examples/Makefile.am
@@ -1,5 +1,5 @@
if HAVE_GTK
-GTK_EXAMPLES=scaletempo mxf
+GTK_EXAMPLES=camerabin mxf scaletempo
else
GTK_EXAMPLES=
endif
@@ -10,5 +10,5 @@ else
DIRECTFB_DIR=
endif
-SUBDIRS= $(DIRECTFB_DIR) $(GTK_EXAMPLES) switch shapewipe
-DIST_SUBDIRS= directfb switch scaletempo shapewipe mxf
+SUBDIRS= $(DIRECTFB_DIR) $(GTK_EXAMPLES) shapewipe switch
+DIST_SUBDIRS= directfb mxf scaletempo shapewipe switch
diff --git a/tests/examples/camerabin/.gitignore b/tests/examples/camerabin/.gitignore
new file mode 100644
index 00000000..eaedc096
--- /dev/null
+++ b/tests/examples/camerabin/.gitignore
@@ -0,0 +1,3 @@
+gst-camera
+gst-camera-perf
+
diff --git a/tests/examples/camerabin/Makefile.am b/tests/examples/camerabin/Makefile.am
new file mode 100644
index 00000000..df88cf14
--- /dev/null
+++ b/tests/examples/camerabin/Makefile.am
@@ -0,0 +1,39 @@
+GST_CAMERABIN_GLADE_FILES = gst-camera.glade
+
+if HAVE_GLADE
+if HAVE_GTK
+
+GST_CAMERABIN_GTK_EXAMPLES = gst-camera
+
+gst_camera_SOURCES = gst-camera.c
+gst_camera_CFLAGS = \
+ -I$(top_builddir)/gst-libs \
+ $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) $(GLADE_CFLAGS) \
+ -DGST_USE_UNSTABLE_API
+gst_camera_LDADD = \
+ $(top_builddir)/gst-libs/gst/interfaces/libgstphotography-@GST_MAJORMINOR@.la \
+ $(GST_PLUGINS_BASE_LIBS) \
+ -lgstinterfaces-@GST_MAJORMINOR@ \
+ $(GST_LIBS) \
+ $(GLADE_LIBS)
+
+gladedir = $(datadir)/gstreamer-@GST_MAJORMINOR@/camera-apps
+glade_DATA = $(GST_CAMERABIN_GLADE_FILES)
+
+INCLUDES = -DCAMERA_APPS_GLADEDIR=\""$(gladedir)"\"
+
+else
+GST_CAMERABIN_GTK_EXAMPLES =
+endif
+else
+GST_CAMERABIN_GTK_EXAMPLES =
+endif
+
+gst_camera_perf_SOURCES = gst-camera-perf.c
+gst_camera_perf_CFLAGS = $(GST_CFLAGS)
+gst_camera_perf_LDADD = $(GST_LIBS)
+
+bin_PROGRAMS = gst-camera-perf $(GST_CAMERABIN_GTK_EXAMPLES)
+
+EXTRA_DIST = $(GST_CAMERABIN_GLADE_FILES)
+
diff --git a/tests/examples/camerabin/gst-camera-perf.c b/tests/examples/camerabin/gst-camera-perf.c
new file mode 100644
index 00000000..ece3d935
--- /dev/null
+++ b/tests/examples/camerabin/gst-camera-perf.c
@@ -0,0 +1,726 @@
+/*
+ * GStreamer
+ * Copyright (C) 2008 Nokia Corporation <multimedia@maemo.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+/*
+ * This application runs various tests and measures how long they take.
+ * FIXME: It needs to figure out sane defaults for different hardware; to
+ * support that we could use GOption for specifying the parameters
+ * The config should have:
+ * - target times
+ * - filter-caps
+ * - preview-caps
+ * - user-res-fps
+ * - element-names: videoenc, audioenc, videomux, imageenc, videosrc, audiosrc
+ * Most of it is interpreted in setup_pipeline()
+ *
+ * gcc `pkg-config --cflags --libs gstreamer-0.10` gst-camera-perf.c -ogst-camera-perf
+ *
+ * plain linux:
+ * ./gst-camera-perf --src-colorspace=YUY2 --image-width=320 --image-height=240 --view-framerate-num=15 --view-framerate-den=1
+ *
+ * maemo:
+ * ./gst-camera-perf --src-colorspace=UYVY --image-width=640 --image-height=480 --view-framerate-num=1491 --view-framerate-den=100 --video-src=v4l2camsrc --audio-enc=nokiaaacenc --video-enc=omx_mpeg4enc --video-mux=hantromp4mux
+ * ./gst-camera-perf --src-colorspace=UYVY --image-width=640 --image-height=480 --view-framerate-num=2999 --view-framerate-den=100 --video-src=v4l2camsrc --audio-enc=nokiaaacenc --video-enc=omx_mpeg4enc --video-mux=hantromp4mux
+ * ./gst-camera-perf --src-colorspace=UYVY --image-width=2592 --image-height=1968 --view-framerate-num=399 --view-framerate-den=100 --video-src=v4l2camsrc --audio-enc=nokiaaacenc --video-enc=omx_mpeg4enc --video-mux=hantromp4mux
+ * ./gst-camera-perf --src-colorspace=UYVY --image-width=2592 --image-height=1968 --view-framerate-num=325 --view-framerate-den=25 --video-src=v4l2camsrc --audio-enc=nokiaaacenc --video-enc=omx_mpeg4enc --video-mux=hantromp4mux --image-enc=dspjpegenc
+ */
+
+/*
+ * Includes
+ */
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <string.h>
+#include <sys/time.h>
+#include <time.h>
+
+/*
+ * enums, typedefs and defines
+ */
+
+#define GET_TIME(t) \
+do { \
+ t = gst_util_get_timestamp (); \
+ GST_INFO("%2d ----------------------------------------", test_ix); \
+} while(0)
+
+#define DIFF_TIME(e,s,d) d=GST_CLOCK_DIFF(s,e)
+
+#define CONT_SHOTS 10
+#define TEST_CASES 9
+
+typedef struct _ResultType
+{
+ GstClockTime avg;
+ GstClockTime min;
+ GstClockTime max;
+ guint32 times;
+} ResultType;
+
+/*
+ * Global vars
+ */
+static GstElement *camera_bin = NULL;
+static GMainLoop *loop = NULL;
+
+/* commandline options */
+static gchar *audiosrc_name = NULL;
+static gchar *videosrc_name = NULL;
+static gchar *audioenc_name = NULL;
+static gchar *videoenc_name = NULL;
+static gchar *imageenc_name = NULL;
+static gchar *videomux_name = NULL;
+static gchar *src_csp = NULL;
+static gint image_width = 0;
+static gint image_height = 0;
+static gint view_framerate_num = 0;
+static gint view_framerate_den = 0;
+
+/* test configuration for common callbacks */
+static GString *filename = NULL;
+static guint32 num_pics = 0;
+static guint32 num_pics_cont = 0;
+//static guint32 num_vids = 0;
+static guint test_ix = 0;
+static gboolean signal_sink = FALSE;
+static gboolean signal_shot = FALSE;
+static gboolean signal_cont = FALSE;
+//static gboolean signal_save = FALSE;
+
+/* time samples and test results */
+static GstClockTime t_initial = G_GUINT64_CONSTANT (0);
+static GstClockTime t_final[CONT_SHOTS] = { G_GUINT64_CONSTANT (0), };
+
+static GstClockTimeDiff diff;
+static ResultType result;
+
+static const GstClockTime target[TEST_CASES] = {
+ 1000 * GST_MSECOND,
+ 0, /* 1500 * GST_MSECOND, not tested */
+ 1500 * GST_MSECOND,
+ 2000 * GST_MSECOND, /* this should be shorter, as we can take next picture before preview is ready */
+ 500 * GST_MSECOND,
+ 0, /* 2000 * GST_MSECOND, not tested */
+ 3500 * GST_MSECOND,
+ 1000 * GST_MSECOND,
+ 0 /* 1000 * GST_MSECOND, not tested */
+};
+
+static const gchar *test_names[TEST_CASES] = {
+ "Camera OFF to VF on",
+ "(3A latency)",
+ "Shot to snapshot",
+ "Shot to shot",
+ "Serial shooting",
+ "(Shutter lag)",
+ "Image saved",
+ "Mode change",
+ "(Video recording)"
+};
+
+/*
+ * Prototypes
+ */
+
+static void print_result (void);
+static gboolean run_test (gpointer user_data);
+
+/*
+ * Callbacks
+ */
+
+static gboolean
+img_sink_has_buffer (GstPad * pad, GstBuffer * buf, gpointer user_data)
+{
+ if (signal_sink) {
+ signal_sink = FALSE;
+ GET_TIME (t_final[0]);
+ }
+ return TRUE;
+}
+
+static gboolean
+img_capture_done (GstElement * camera, GString * fname, gpointer user_data)
+{
+ gboolean ret = FALSE;
+ gboolean print_and_restart = FALSE;
+
+ GST_INFO ("shot %d, cont %d, num %d", signal_shot, signal_cont,
+ num_pics_cont);
+
+ if (signal_shot) {
+ GET_TIME (t_final[num_pics_cont]);
+ signal_shot = FALSE;
+ switch (test_ix) {
+ case 6:
+ DIFF_TIME (t_final[num_pics_cont], t_initial, diff);
+ result.avg = result.min = result.max = diff;
+ print_and_restart = TRUE;
+ break;
+ }
+ GST_INFO ("%2d shot done", test_ix);
+ }
+
+ if (signal_cont) {
+ gint i;
+
+ if (num_pics_cont < CONT_SHOTS) {
+ gchar tmp[6];
+
+ GET_TIME (t_final[num_pics_cont]);
+ num_pics_cont++;
+ for (i = filename->len - 1; i > 0; --i) {
+ if (filename->str[i] == '_')
+ break;
+ }
+ snprintf (tmp, 6, "_%04d", num_pics_cont);
+ memcpy (filename->str + i, tmp, 5);
+ GST_INFO ("%2d cont new filename '%s'", test_ix, filename->str);
+ g_object_set (camera_bin, "filename", filename->str, NULL);
+ // FIXME: is burst capture broken? new filename and return TRUE should be enough
+ g_signal_emit_by_name (camera_bin, "user-start", NULL);
+ ret = TRUE;
+ } else {
+ GstClockTime max = 0;
+ GstClockTime min = -1;
+ GstClockTime total = 0;
+ GstClockTime first_shot = 0;
+ GstClockTime snd_shot = 0;
+
+ num_pics_cont = 0;
+ signal_cont = FALSE;
+
+ DIFF_TIME (t_final[0], t_initial, diff);
+ max < diff ? max = diff : max;
+ min > diff ? min = diff : min;
+ first_shot = diff;
+ total += diff;
+
+ DIFF_TIME (t_final[1], t_final[0], diff);
+ max < diff ? max = diff : max;
+ min > diff ? min = diff : min;
+ snd_shot = diff;
+ total += diff;
+
+ for (i = 2; i < CONT_SHOTS; ++i) {
+ DIFF_TIME (t_final[i], t_final[i - 1], diff);
+
+ max < diff ? max = diff : max;
+ min > diff ? min = diff : min;
+ total += diff;
+ }
+
+ result.avg = total / CONT_SHOTS;
+ result.min = min;
+ result.max = max;
+ print_and_restart = TRUE;
+ GST_INFO ("%2d cont done", test_ix);
+ }
+ }
+
+ switch (test_ix) {
+ case 2:
+ case 3:
+ print_and_restart = TRUE;
+ break;
+ }
+
+ if (print_and_restart) {
+ print_result ();
+ g_idle_add ((GSourceFunc) run_test, NULL);
+ return FALSE;
+ }
+ return ret;
+}
+
+static gboolean
+bus_callback (GstBus * bus, GstMessage * message, gpointer data)
+{
+ const GstStructure *st;
+
+ switch (GST_MESSAGE_TYPE (message)) {
+ case GST_MESSAGE_ERROR:{
+ GError *err;
+ gchar *debug;
+
+ gst_message_parse_error (message, &err, &debug);
+ g_print ("Error: %s\n", err->message);
+ g_error_free (err);
+ g_free (debug);
+
+ /* Write debug graph to file */
+ GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (camera_bin),
+ GST_DEBUG_GRAPH_SHOW_ALL, "camerabin.error");
+
+ g_main_loop_quit (loop);
+ break;
+ }
+ case GST_MESSAGE_EOS:
+ /* end-of-stream */
+ g_main_loop_quit (loop);
+ break;
+ default:
+ st = gst_message_get_structure (message);
+ if (st) {
+ if (gst_structure_has_name (st, "image-captured")) {
+ GST_INFO ("%2d image-captured", test_ix);
+ switch (test_ix) {
+ case 3:
+ GET_TIME (t_final[num_pics_cont]);
+ DIFF_TIME (t_final[num_pics_cont], t_initial, diff);
+ result.avg = result.min = result.max = diff;
+ break;
+ }
+ } else if (gst_structure_has_name (st, "preview-image")) {
+ GST_INFO ("%2d preview-image", test_ix);
+ switch (test_ix) {
+ case 2:
+ GET_TIME (t_final[num_pics_cont]);
+ DIFF_TIME (t_final[num_pics_cont], t_initial, diff);
+ result.avg = result.min = result.max = diff;
+ break;
+ }
+ }
+ }
+ /* unhandled message */
+ break;
+ }
+ return TRUE;
+}
+
+
+/*
+ * Helpers
+ */
+
+static void
+cleanup_pipeline (void)
+{
+ if (camera_bin) {
+ gst_element_set_state (camera_bin, GST_STATE_NULL);
+ gst_element_get_state (camera_bin, NULL, NULL, GST_CLOCK_TIME_NONE);
+ gst_object_unref (camera_bin);
+ camera_bin = NULL;
+ }
+}
+
+static gboolean
+setup_pipeline_video_sink (void)
+{
+ GstElement *sink = NULL;
+ GstPad *pad = NULL;
+
+ sink = gst_element_factory_make ("fakesink", NULL);
+ if (NULL == sink) {
+ g_warning ("failed to create sink\n");
+ goto error;
+ }
+
+ pad = gst_element_get_static_pad (sink, "sink");
+ if (NULL == pad) {
+ g_warning ("sink has no pad named 'sink'\n");
+ goto error;
+ }
+
+ g_object_set (sink, "sync", TRUE, NULL);
+ gst_pad_add_buffer_probe (pad, (GCallback) img_sink_has_buffer, NULL);
+ gst_object_unref (pad);
+
+ g_object_set (camera_bin, "vfsink", sink, NULL);
+
+ return TRUE;
+error:
+ if (sink)
+ gst_object_unref (sink);
+ return FALSE;
+}
+
+static gboolean
+setup_pipeline_element (const gchar * property_name, const gchar * element_name)
+{
+ gboolean res = TRUE;
+
+ GstElement *elem;
+ if (element_name) {
+ elem = gst_element_factory_make (element_name, NULL);
+ if (elem) {
+ g_object_set (camera_bin, property_name, elem, NULL);
+ } else {
+ g_warning ("can't create element '%s' for property '%s'", element_name,
+ property_name);
+ res = FALSE;
+ }
+ }
+ return res;
+}
+
+/* Build the global camerabin pipeline: create the element, attach the bus
+ * watch, plug the configured sink/sources/encoders, optionally force the
+ * capture colourspace, and drive the bin to PLAYING.
+ * Returns TRUE on success; any failure tears down the partly-built
+ * pipeline via cleanup_pipeline() and returns FALSE. */
+static gboolean
+setup_pipeline (void)
+{
+  GstBus *bus;
+  gboolean res = TRUE;
+
+  g_string_printf (filename, "test_%04u.jpg", num_pics);
+
+  camera_bin = gst_element_factory_make ("camerabin", NULL);
+  if (NULL == camera_bin) {
+    g_warning ("can't create camerabin element\n");
+    goto error;
+  }
+
+  /* fires once per still capture; results collected in img_capture_done */
+  g_signal_connect (camera_bin, "img-done", (GCallback) img_capture_done, NULL);
+
+  bus = gst_pipeline_get_bus (GST_PIPELINE (camera_bin));
+  gst_bus_add_watch (bus, bus_callback, NULL);
+  gst_object_unref (bus);
+
+  if (!setup_pipeline_video_sink ()) {
+    goto error;
+  }
+
+  /* set properties */
+
+  /* NOTE(review): "filename" is only set when a 4-char colourspace was
+   * given on the command line — confirm whether it should also be set in
+   * the no-src-csp case. */
+  if (src_csp && strlen (src_csp) == 4) {
+    GstCaps *filter_caps;
+
+    /* FIXME: why do we need to set this? */
+    filter_caps = gst_caps_new_simple ("video/x-raw-yuv",
+        "format", GST_TYPE_FOURCC,
+        GST_MAKE_FOURCC (src_csp[0], src_csp[1], src_csp[2], src_csp[3]), NULL);
+    if (filter_caps) {
+      g_object_set (camera_bin, "filename", filename->str,
+          "filter-caps", filter_caps, NULL);
+      gst_caps_unref (filter_caps);
+    } else {
+      g_warning ("can't make filter-caps with format=%s\n", src_csp);
+      goto error;
+    }
+  }
+
+  /* configure used elements */
+  res &= setup_pipeline_element ("audiosrc", audiosrc_name);
+  res &= setup_pipeline_element ("videosrc", videosrc_name);
+  res &= setup_pipeline_element ("audioenc", audioenc_name);
+  res &= setup_pipeline_element ("videoenc", videoenc_name);
+  res &= setup_pipeline_element ("imageenc", imageenc_name);
+  res &= setup_pipeline_element ("videomux", videomux_name);
+  if (!res) {
+    goto error;
+  }
+
+  /* configure a resolution and framerate */
+  if (image_width && image_height && view_framerate_num && view_framerate_den) {
+    g_signal_emit_by_name (camera_bin, "user-res-fps", image_width,
+        image_height, view_framerate_num, view_framerate_den, NULL);
+  }
+
+  /* go READY first so element-level failures surface before PLAYING */
+  if (GST_STATE_CHANGE_FAILURE ==
+      gst_element_set_state (camera_bin, GST_STATE_READY)) {
+    g_warning ("can't set camerabin to ready\n");
+    goto error;
+  }
+
+  if (GST_STATE_CHANGE_FAILURE ==
+      gst_element_set_state (camera_bin, GST_STATE_PLAYING)) {
+    g_warning ("can't set camerabin to playing\n");
+    goto error;
+  }
+  return TRUE;
+error:
+  cleanup_pipeline ();
+  return FALSE;
+}
+
+/*
+ * Tests
+ */
+
+/* 01) Camera OFF to VF On
+ *
+ * This only tests the time it takes to create the pipeline and CameraBin
+ * element and have the first video frame available in ViewFinder.
+ * It is not testing the real init time. To do it, the timer must start before
+ * the app.
+ */
+static gboolean
+test_01 (void)
+{
+  /* synchronous test: measures pipeline construction + preroll time */
+  GET_TIME (t_initial);
+  if (setup_pipeline ()) {
+    /* MAKE SURE THE PIPELINE IS IN PLAYING STATE BEFORE START TAKING PICTURES
+       AND SO ON (otherwise it will deadlock) */
+    gst_element_get_state (camera_bin, NULL, NULL, GST_CLOCK_TIME_NONE);
+  }
+
+  GET_TIME (t_final[0]);
+  DIFF_TIME (t_final[0], t_initial, diff);
+
+  /* single sample, so avg == min == max */
+  result.avg = result.min = result.max = diff;
+  result.times = 1;
+  return TRUE;
+}
+
+
+/* 03) Shot to snapshot
+ *
+ * It tests the time between pressing the Shot button and having the photo shown
+ * in ViewFinder
+ */
+static gboolean
+test_03 (void)
+{
+  GstCaps *snap_caps;
+
+  /* FIXME: add options */
+  /* request an RGB preview frame so a "preview-image" bus message is sent */
+  snap_caps = gst_caps_from_string ("video/x-raw-rgb,width=320,height=240");
+  g_object_set (camera_bin, "preview-caps", snap_caps, NULL);
+  gst_caps_unref (snap_caps);
+
+  GET_TIME (t_initial);
+  g_signal_emit_by_name (camera_bin, "user-start", 0);
+
+  /* the actual results are fetched in bus_callback::preview-image */
+  result.times = 1;
+  /* FALSE = asynchronous test: the bus callback finishes the measurement */
+  return FALSE;
+}
+
+
+/* 04) Shot to shot
+ * It tests the time for being able to take a second shot after the first one.
+ */
+static gboolean
+test_04 (void)
+{
+  /* asynchronous test: trigger a capture, measurement completes on the bus */
+  GET_TIME (t_initial);
+  g_signal_emit_by_name (camera_bin, "user-start", 0);
+
+  /* the actual results are fetched in bus_callback::image-captured */
+  result.times = 1;
+  return FALSE;
+}
+
+/* 05) Serial shooting
+ *
+ * It tests the time between shots in continuous mode.
+ */
+static gboolean
+test_05 (void)
+{
+  /* signal_cont makes img_capture_done keep requesting further shots */
+  signal_cont = TRUE;
+  GET_TIME (t_initial);
+  g_signal_emit_by_name (camera_bin, "user-start", 0);
+
+  /* the actual results are fetched in img_capture_done */
+  result.times = CONT_SHOTS;
+  return FALSE;
+}
+
+
+/* 07) Image saved
+ *
+ * It tests the time between pressing the Shot and the final image is saved to
+ * file system.
+ */
+static gboolean
+test_07 (void)
+{
+  // signal_save = TRUE;
+  /* signal_shot: img_capture_done stops the timer when the file is saved */
+  signal_shot = TRUE;
+
+  GET_TIME (t_initial);
+  g_signal_emit_by_name (camera_bin, "user-start", 0);
+  /* call "user-stop" just to go back to initial state (view-finder) again */
+  g_signal_emit_by_name (camera_bin, "user-stop", 0);
+  /* the actual results are fetched in img_capture_done */
+  result.times = 1;
+  return FALSE;
+}
+
+
+/* 08) Mode change
+ *
+ * It tests the time it takes to change between still image and video recording
+ * mode (In this test we change the mode few times).
+ */
+static gboolean
+test_08 (void)
+{
+  GstClockTime total = 0;
+  GstClockTime max = 0;
+  GstClockTime min = -1;     /* GstClockTime is unsigned: -1 wraps to maximum */
+  const gint count = 6;
+  gint i;
+
+  /* NOTE(review): t_final[i + 1] written here is overwritten by the next
+   * iteration's "before" timestamp, so each measured interval also includes
+   * the loop overhead between mode changes — confirm this is intended.
+   * Also assumes t_final holds at least count + 1 entries. */
+  for (i = 0; i < count; ++i) {
+    GET_TIME (t_final[i]);
+    g_object_set (camera_bin, "mode", (i + 1) & 1, NULL);   /* alternate 1,0,1,... */
+    GET_TIME (t_final[i + 1]);
+  }
+
+  /* fold the count intervals into avg/min/max */
+  for (i = 0; i < count; ++i) {
+    DIFF_TIME (t_final[i + 1], t_final[i], diff);
+    total += diff;
+    if (diff > max)
+      max = diff;
+    if (diff < min)
+      min = diff;
+  }
+
+  result.avg = total / count;
+  result.min = min;
+  result.max = max;
+  result.times = count;
+
+  /* just make sure we are back to still image mode again */
+  g_object_set (camera_bin, "mode", 0, NULL);
+  return TRUE;
+}
+
+/* Dispatch table indexed by test_ix; NULL slots (tests 02 and 06) are
+ * reported as "not implemented" by run_test(). */
+typedef gboolean (*test_case) (void);
+static test_case test_cases[TEST_CASES] = {
+  test_01,
+  NULL,
+  test_03,
+  test_04,
+  test_05,
+  NULL,
+  test_07,
+  test_08
+};
+
+/* Print one row of the result table for the current test (rate vs. target,
+ * then target/avg/min/max in milliseconds, trial count and test name) and
+ * advance the global test index. */
+static void
+print_result (void)
+{
+  printf ("| %6.02f%% ", 100.0f * (float) result.max / (float) target[test_ix]);
+  printf ("|%5u ms ", (guint) GST_TIME_AS_MSECONDS (target[test_ix]));
+  printf ("|%5u ms ", (guint) GST_TIME_AS_MSECONDS (result.avg));
+  printf ("|%5u ms ", (guint) GST_TIME_AS_MSECONDS (result.min));
+  printf ("|%5u ms ", (guint) GST_TIME_AS_MSECONDS (result.max));
+  printf ("| %3d ", result.times);
+  printf ("| %-19s |\n", test_names[test_ix]);
+  test_ix++;
+}
+
+/* Idle handler that drives the test table, one test per invocation.
+ * A test returning TRUE is synchronous: its result is complete and printed
+ * here. FALSE means the result is collected asynchronously elsewhere
+ * (bus callback / img_capture_done); returning FALSE also removes this
+ * idle source, so the asynchronous path is expected to re-arm it. */
+static gboolean
+run_test (gpointer user_data)
+{
+  gboolean ret = TRUE;
+
+  printf ("| %02d ", test_ix + 1);
+  if (test_cases[test_ix]) {
+    memset (&result, 0, sizeof (ResultType));
+    ret = test_cases[test_ix] ();
+
+    //while (g_main_context_pending (NULL)) g_main_context_iteration (NULL,FALSE);
+    if (ret) {
+      print_result ();
+    }
+
+  } else {
+    printf ("| test not implemented ");
+    printf ("| %-19s |\n", test_names[test_ix]);
+    test_ix++;
+  }
+
+  /* quit when the pipeline died or the last test has run */
+  if (!camera_bin || test_ix == TEST_CASES) {
+    GST_INFO ("done");
+    g_main_loop_quit (loop);
+    return FALSE;
+  } else {
+    GST_INFO ("%2d result: %d", test_ix, ret);
+    return ret;
+  }
+}
+
+/* Entry point: parse command-line overrides for the camerabin elements and
+ * capture settings, run the test table from an idle source inside a GLib
+ * main loop, then free everything. */
+int
+main (int argc, char *argv[])
+{
+  GOptionEntry options[] = {
+    {"audio-src", '\0', 0, G_OPTION_ARG_STRING, &audiosrc_name,
+        "audio source used in video recording", NULL},
+    {"video-src", '\0', 0, G_OPTION_ARG_STRING, &videosrc_name,
+        "video source used in still capture and video recording", NULL},
+    {"audio-enc", '\0', 0, G_OPTION_ARG_STRING, &audioenc_name,
+        "audio encoder used in video recording", NULL},
+    {"video-enc", '\0', 0, G_OPTION_ARG_STRING, &videoenc_name,
+        "video encoder used in video recording", NULL},
+    {"image-enc", '\0', 0, G_OPTION_ARG_STRING, &imageenc_name,
+        "image encoder used in still capture", NULL},
+    {"video-mux", '\0', 0, G_OPTION_ARG_STRING, &videomux_name,
+        "muxer used in video recording", NULL},
+    {"image-width", '\0', 0, G_OPTION_ARG_INT, &image_width,
+        "width for image capture", NULL},
+    {"image-height", '\0', 0, G_OPTION_ARG_INT, &image_height,
+        "height for image capture", NULL},
+    {"view-framerate-num", '\0', 0, G_OPTION_ARG_INT, &view_framerate_num,
+        "framerate numerator for viewfinder", NULL},
+    {"view-framerate-den", '\0', 0, G_OPTION_ARG_INT, &view_framerate_den,
+        "framerate denominator for viewfinder", NULL},
+    {"src-colorspace", '\0', 0, G_OPTION_ARG_STRING, &src_csp,
+        "colorspace format for videosource (e.g. YUY2, UYVY)", NULL},
+    {NULL}
+  };
+  GOptionContext *ctx;
+  GError *err = NULL;
+
+  if (!g_thread_supported ())
+    g_thread_init (NULL);
+
+  /* GStreamer's option group handles gst_init() during parsing */
+  ctx = g_option_context_new (NULL);
+  g_option_context_add_main_entries (ctx, options, NULL);
+  g_option_context_add_group (ctx, gst_init_get_option_group ());
+  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
+    g_print ("Error initializing: %s\n", err->message);
+    exit (1);
+  }
+  g_option_context_free (ctx);
+
+  /* init */
+  filename = g_string_new_len ("", 16);
+  loop = g_main_loop_new (NULL, FALSE);
+
+  /* run */
+  puts ("");
+  puts ("+---------------------------------------------------------------------------------------+");
+  puts ("| test | rate | target | avg | min | max | trials | description |");
+  puts ("+---------------------------------------------------------------------------------------+");
+  g_idle_add ((GSourceFunc) run_test, NULL);
+  g_main_loop_run (loop);
+  puts ("+---------------------------------------------------------------------------------------+");
+  puts ("");
+
+  fflush (stdout);
+
+  /* free */
+  cleanup_pipeline ();
+  g_main_loop_unref (loop);
+  g_string_free (filename, TRUE);
+  g_free (audiosrc_name);
+  g_free (videosrc_name);
+  g_free (audioenc_name);
+  g_free (videoenc_name);
+  g_free (imageenc_name);
+  g_free (videomux_name);
+  g_free (src_csp);
+
+  return 0;
+}
diff --git a/tests/examples/camerabin/gst-camera-perf.glade b/tests/examples/camerabin/gst-camera-perf.glade
new file mode 100644
index 00000000..fe6098ec
--- /dev/null
+++ b/tests/examples/camerabin/gst-camera-perf.glade
@@ -0,0 +1,120 @@
+<?xml version="1.0" standalone="no"?> <!--*- mode: xml -*-->
+<!DOCTYPE glade-interface SYSTEM "http://glade.gnome.org/glade-2.0.dtd">
+
+<glade-interface>
+
+<widget class="GtkWindow" id="wndMain">
+ <property name="events">GDK_POINTER_MOTION_MASK | GDK_POINTER_MOTION_HINT_MASK | GDK_BUTTON_PRESS_MASK | GDK_BUTTON_RELEASE_MASK</property>
+ <property name="title" translatable="yes"></property>
+ <property name="type">GTK_WINDOW_TOPLEVEL</property>
+ <property name="window_position">GTK_WIN_POS_NONE</property>
+ <property name="modal">False</property>
+ <property name="default_width">400</property>
+ <property name="default_height">600</property>
+ <property name="resizable">True</property>
+ <property name="destroy_with_parent">False</property>
+ <property name="decorated">True</property>
+ <property name="skip_taskbar_hint">False</property>
+ <property name="skip_pager_hint">False</property>
+ <property name="type_hint">GDK_WINDOW_TYPE_HINT_NORMAL</property>
+ <property name="gravity">GDK_GRAVITY_NORTH_WEST</property>
+ <property name="focus_on_map">True</property>
+ <property name="urgency_hint">False</property>
+ <signal name="delete_event" handler="on_wndMain_delete_event"/>
+
+ <child>
+ <widget class="GtkVPaned" id="vpnMain">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="position">200</property>
+
+ <child>
+ <widget class="GtkDrawingArea" id="daMain">
+ <property name="height_request">100</property>
+ <property name="visible">True</property>
+ <property name="events">GDK_POINTER_MOTION_MASK | GDK_POINTER_MOTION_HINT_MASK | GDK_BUTTON_PRESS_MASK | GDK_BUTTON_RELEASE_MASK</property>
+ </widget>
+ <packing>
+ <property name="shrink">True</property>
+ <property name="resize">False</property>
+ </packing>
+ </child>
+
+ <child>
+ <widget class="GtkVBox" id="vboxMain">
+ <property name="visible">True</property>
+ <property name="homogeneous">False</property>
+ <property name="spacing">0</property>
+
+ <child>
+ <widget class="GtkButton" id="btnStart">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="label" translatable="yes">start</property>
+ <property name="use_underline">True</property>
+ <property name="relief">GTK_RELIEF_NORMAL</property>
+ <property name="focus_on_click">True</property>
+ <signal name="clicked" handler="on_btnStart_clicked"/>
+ </widget>
+ <packing>
+ <property name="padding">0</property>
+ <property name="expand">False</property>
+ <property name="fill">True</property>
+ </packing>
+ </child>
+
+ <child>
+ <widget class="GtkScrolledWindow" id="scrwndMain">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="hscrollbar_policy">GTK_POLICY_NEVER</property>
+ <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
+ <property name="shadow_type">GTK_SHADOW_NONE</property>
+ <property name="window_placement">GTK_CORNER_TOP_LEFT</property>
+
+ <child>
+ <widget class="GtkViewport" id="vpMain">
+ <property name="visible">True</property>
+ <property name="events">GDK_POINTER_MOTION_MASK | GDK_POINTER_MOTION_HINT_MASK | GDK_BUTTON_PRESS_MASK | GDK_BUTTON_RELEASE_MASK</property>
+ <property name="shadow_type">GTK_SHADOW_IN</property>
+
+ <child>
+ <widget class="GtkLabel" id="lbMain">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="label" translatable="yes">== Please wait few seconds after press start ==</property>
+ <property name="use_underline">False</property>
+ <property name="use_markup">True</property>
+ <property name="justify">GTK_JUSTIFY_LEFT</property>
+ <property name="wrap">True</property>
+ <property name="selectable">True</property>
+ <property name="xalign">0.5</property>
+ <property name="yalign">0</property>
+ <property name="xpad">0</property>
+ <property name="ypad">0</property>
+ <property name="ellipsize">PANGO_ELLIPSIZE_NONE</property>
+ <property name="width_chars">-1</property>
+ <property name="single_line_mode">False</property>
+ <property name="angle">0</property>
+ </widget>
+ </child>
+ </widget>
+ </child>
+ </widget>
+ <packing>
+ <property name="padding">0</property>
+ <property name="expand">True</property>
+ <property name="fill">True</property>
+ </packing>
+ </child>
+ </widget>
+ <packing>
+ <property name="shrink">True</property>
+ <property name="resize">True</property>
+ </packing>
+ </child>
+ </widget>
+ </child>
+</widget>
+
+</glade-interface>
diff --git a/tests/examples/camerabin/gst-camera.c b/tests/examples/camerabin/gst-camera.c
new file mode 100644
index 00000000..f894418c
--- /dev/null
+++ b/tests/examples/camerabin/gst-camera.c
@@ -0,0 +1,1756 @@
+/*
+ * GStreamer
+ * Copyright (C) 2008 Nokia Corporation <multimedia@maemo.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+/*
+ * This is a demo application to test the camerabin element.
+ * If you have questions, don't hesitate to contact me: edgard.lima@indt.org.br
+ */
+
+/*
+ * Includes
+ */
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/interfaces/xoverlay.h>
+#include <gst/interfaces/colorbalance.h>
+#include <gst/interfaces/photography.h>
+#include <glade/glade-xml.h>
+#include <gtk/gtk.h>
+#include <gdk/gdkx.h>
+#include <gdk/gdkkeysyms.h>
+
+#include <string.h>
+
+#include <sys/time.h>
+#include <time.h>
+#include <glib/gstdio.h> // g_fopen()
+
+/*
+ * enums, typedefs and defines
+ */
+
+#ifdef USE_MP4
+#define VID_FILE_EXT "mp4"
+#else
+#define VID_FILE_EXT "ogg"
+#endif
+
+#define PREVIEW_TIME_MS (2 * 1000)
+#define N_BURST_IMAGES 10
+#define DEFAULT_GLADE_FILE "gst-camera.glade"
+#define SHARED_GLADE_FILE CAMERA_APPS_GLADEDIR"/"DEFAULT_GLADE_FILE
+
+/* Names of default elements */
+#define CAMERA_APP_VIDEOSRC "v4l2src"
+#define CAMERA_APP_IMAGE_POSTPROC "dummy"
+
+#ifdef HAVE_GST_PHOTO_IFACE_H
+#define EV_COMP_MAX 3.0
+#define EV_COMP_MIN -3.0
+#define EV_COMP_STEP 0.5
+#endif
+
+#define DEFAULT_VF_CAPS \
+ "video/x-raw-yuv, width = (int) 320, height = (int) 240, framerate = (fraction) 1496/100;" \
+ "video/x-raw-yuv, width = (int) 640, height = (int) 480, framerate = (fraction) 1494/100;" \
+ "video/x-raw-yuv, width = (int) 800, height = (int) 480, framerate = (fraction) 2503/100;" \
+ "video/x-raw-yuv, width = (int) 800, height = (int) 480, framerate = (fraction) 2988/100;" \
+ "video/x-raw-yuv, width = (int) 800, height = (int) 480, framerate = (fraction) 1494/100;" \
+ "video/x-raw-yuv, width = (int) 720, height = (int) 480, framerate = (fraction) 1494/100"
+
+#define PREVIEW_CAPS \
+ "video/x-raw-rgb, width = (int) 640, height = (int) 480"
+
+/* states:
+ (image) <---> (video_stopped) <---> (video_recording)
+*/
+typedef enum _tag_CaptureState
+{
+ CAP_STATE_IMAGE,
+ CAP_STATE_VIDEO_STOPED,
+ CAP_STATE_VIDEO_PAUSED,
+ CAP_STATE_VIDEO_RECORDING,
+} CaptureState;
+
+/*
+ * Global Vars
+ */
+
+static GladeXML *ui_glade_xml = NULL;
+static GtkWidget *ui_main_window = NULL;
+static GtkWidget *ui_drawing = NULL;
+static GtkWidget *ui_drawing_frame = NULL;
+static GtkWidget *ui_chk_continous = NULL;
+static GtkButton *ui_bnt_shot = NULL;
+static GtkButton *ui_bnt_pause = NULL;
+static GtkWidget *ui_chk_mute = NULL;
+static GtkWidget *ui_vbox_color_controls = NULL;
+static GtkWidget *ui_chk_rawmsg = NULL;
+
+static GtkWidget *ui_rdbntImageCapture = NULL;
+static GtkWidget *ui_rdbntVideoCapture = NULL;
+static GtkWidget *ui_menuitem_photography = NULL;
+static GtkWidget *ui_menuitem_capture = NULL;
+
+static GtkComboBox *ui_cbbox_resolution = NULL;
+static guint ui_cbbox_resolution_count = 0;
+
+/* current capture mode/state of the UI state machine */
+static CaptureState capture_state = CAP_STATE_IMAGE;
+
+/* pipeline handles: the camerabin itself and its video source element */
+static GstElement *gst_camera_bin = NULL;
+static GstElement *gst_videosrc = NULL;
+
+/* current output file name and running capture counters */
+static GString *filename = NULL;
+static guint32 num_pics = 0;
+static guint32 num_pics_cont = 0;
+static guint32 num_vids = 0;
+
+/* maximum framerate fraction and optional post-processing element names */
+static gint max_fr_n = 0;
+static gint max_fr_d = 0;
+static gchar *video_post;
+static gchar *image_post;
+
+/* caps alternatives offered in the resolution combobox */
+static GList *video_caps_list = NULL;
+
+#ifdef HAVE_GST_PHOTO_IFACE_H
+static gchar *iso_speed_labels[] = { "auto", "100", "200", "400" };
+
+static struct
+{
+ gchar *label;
+ gint width;
+ gint height;
+} image_resolution_label_map[] = {
+ {
+ "View finder resolution", 0, 0}, {
+ "VGA", 640, 480}, {
+ "1,3Mpix (1280x960)", 1280, 960}, {
+ "3Mpix (2048x1536)", 2048, 1536}, {
+ "3,7Mpix 16:9 (2592x1456)", 2592, 1456}, {
+ "5Mpix (2592x1968)", 2592, 1968}
+};
+#endif
+
+/*
+ * functions prototypes
+ */
+static gboolean me_gst_setup_pipeline (const gchar * imagepost,
+ const gchar * videopost);
+static void me_gst_cleanup_element (void);
+
+static gboolean capture_mode_set_state (CaptureState state);
+static void capture_mode_config_gui (void);
+static gboolean capture_mode_stop (void);
+
+static void on_windowMain_delete_event (GtkWidget * widget, GdkEvent * event,
+ gpointer user_data);
+static void on_buttonShot_clicked (GtkButton * button, gpointer user_data);
+static void on_buttonPause_clicked (GtkButton * button, gpointer user_data);
+static void on_comboboxResolution_changed (GtkComboBox * widget,
+ gpointer user_data);
+static void on_radiobuttonImageCapture_toggled (GtkToggleButton * togglebutton,
+ gpointer user_data);
+static void on_radiobuttonVideoCapture_toggled (GtkToggleButton * togglebutton,
+ gpointer user_data);
+static void on_rbBntVidEffNone_toggled (GtkToggleButton * togglebutton,
+ gpointer user_data);
+static void on_rbBntVidEffEdge_toggled (GtkToggleButton * togglebutton,
+ gpointer user_data);
+static void on_rbBntVidEffAging_toggled (GtkToggleButton * togglebutton,
+ gpointer user_data);
+static void on_rbBntVidEffDice_toggled (GtkToggleButton * togglebutton,
+ gpointer user_data);
+static void on_rbBntVidEffWarp_toggled (GtkToggleButton * togglebutton,
+ gpointer user_data);
+static void on_rbBntVidEffShagadelic_toggled (GtkToggleButton * togglebutton,
+ gpointer user_data);
+static void on_rbBntVidEffVertigo_toggled (GtkToggleButton * togglebutton,
+ gpointer user_data);
+static void on_rbBntVidEffRev_toggled (GtkToggleButton * togglebutton,
+ gpointer user_data);
+static void on_rbBntVidEffQuark_toggled (GtkToggleButton * togglebutton,
+ gpointer user_data);
+static void on_chkbntMute_toggled (GtkToggleButton * togglebutton,
+ gpointer user_data);
+static void on_chkbtnRawMsg_toggled (GtkToggleButton * togglebutton,
+ gpointer data);
+static void on_hscaleZoom_value_changed (GtkRange * range, gpointer user_data);
+
+static void ui_connect_signals (void);
+static gboolean ui_create (void);
+static void destroy_color_controls (void);
+static void create_color_controls (void);
+static void init_view_finder_resolution_combobox (void);
+
+#ifdef HAVE_GST_PHOTO_IFACE_H
+static void menuitem_toggle_active (GtkWidget * widget, gpointer data);
+static void sub_menu_initialize (GtkWidget * widget, gpointer data);
+static void fill_photography_menu (GtkMenuItem * parent_item);
+#endif
+
+/*
+ * functions implementation
+ */
+
+/* Compose the full output path for the next capture into @name:
+ * numbered .jpg under the XDG Pictures directory in image mode, numbered
+ * video file under the XDG Videos directory otherwise, falling back to the
+ * current working directory when the XDG dir is not available. */
+static void
+set_filename (GString * name)
+{
+  const gchar *datadir;
+
+  if (capture_state == CAP_STATE_IMAGE) {
+    g_string_printf (name, G_DIR_SEPARATOR_S "test_%04u.jpg", num_pics);
+    datadir = g_get_user_special_dir (G_USER_DIRECTORY_PICTURES);
+  } else {
+    g_string_printf (name, G_DIR_SEPARATOR_S "test_%04u.%s", num_vids,
+        VID_FILE_EXT);
+    datadir = g_get_user_special_dir (G_USER_DIRECTORY_VIDEOS);
+  }
+
+  if (datadir == NULL) {
+    // FIXME: maemo
+    //#define DEFAULT_IMAGEDIR "$HOME/MyDocs/.images/"
+    //#define DEFAULT_VIDEODIR "$HOME/MyDocs/.videos/"
+    gchar *curdir = g_get_current_dir ();
+    g_string_prepend (name, curdir);
+    g_free (curdir);
+  } else {
+    g_string_prepend (name, datadir);
+  }
+}
+
+/* Handle a "prepare-xwindow-id" element message: hand the drawing area's
+ * X window to the overlay-capable sink, consume the message (set it to
+ * NULL and return GST_BUS_DROP).  Any other message is passed through. */
+static GstBusSyncReply
+set_xwindow (GstMessage ** message, gpointer data)
+{
+  GstBusSyncReply ret = GST_BUS_PASS;
+  const GstStructure *s = gst_message_get_structure (*message);
+
+  if (!s || !gst_structure_has_name (s, "prepare-xwindow-id")) {
+    goto done;
+  }
+
+  gst_x_overlay_set_xwindow_id (GST_X_OVERLAY (GST_MESSAGE_SRC (*message)),
+      GDK_WINDOW_XWINDOW (ui_drawing->window));
+
+  gst_message_unref (*message);
+  *message = NULL;
+  ret = GST_BUS_DROP;
+done:
+  return ret;
+}
+
+/* Write raw image buffer to file if found from message */
+/* Handle camerabin element messages:
+ *  - "autofocus-done": restore the Focus button label
+ *  - "raw-image" / "preview-image": dump the attached GstBuffer to a
+ *    numbered file in the current directory
+ *  - "photo-capture-start": log the shutter moment
+ */
+static void
+handle_element_message (GstMessage * msg)
+{
+  const GstStructure *st;
+  const GValue *image;
+  GstBuffer *buf = NULL;
+  guint8 *data = NULL;
+  gchar *caps_string;
+  guint size = 0;
+  gchar *filename = NULL;
+  FILE *f = NULL;
+  size_t written;
+
+  st = gst_message_get_structure (msg);
+  if (g_str_equal (gst_structure_get_name (st), "autofocus-done")) {
+    gtk_button_set_label (ui_bnt_pause, "Focus");
+  } else if (gst_structure_has_field_typed (st, "buffer", GST_TYPE_BUFFER)) {
+    image = gst_structure_get_value (st, "buffer");
+    if (image) {
+      buf = gst_value_get_buffer (image);
+      data = GST_BUFFER_DATA (buf);
+      size = GST_BUFFER_SIZE (buf);
+      if (g_str_equal (gst_structure_get_name (st), "raw-image")) {
+        filename = g_strdup_printf ("test_%04u.raw", num_pics);
+      } else if (g_str_equal (gst_structure_get_name (st), "preview-image")) {
+        filename = g_strdup_printf ("test_%04u_vga.rgb", num_pics);
+      } else {
+        /* for future purposes */
+        g_print ("unknown buffer received\n");
+        return;
+      }
+      caps_string = gst_caps_to_string (GST_BUFFER_CAPS (buf));
+      g_print ("writing buffer to %s, buffer caps: %s\n",
+          filename, caps_string);
+      g_free (caps_string);
+      /* BUGFIX: open in binary mode — the buffer is raw image data and
+       * text mode would corrupt it on non-POSIX platforms */
+      f = g_fopen (filename, "wb");
+      if (f) {
+        written = fwrite (data, size, 1, f);
+        if (!written) {
+          g_print ("error writing file\n");     /* BUGFIX: was "errro" */
+        }
+        fclose (f);
+      } else {
+        g_print ("error opening file for raw image writing\n");
+      }
+      g_free (filename);
+    }
+  } else if (g_str_equal (gst_structure_get_name (st), "photo-capture-start")) {
+    g_print ("=== CLICK ===\n");
+  }
+}
+
+/* Synchronous bus handler: forward GST_MESSAGE_ELEMENT messages to
+ * set_xwindow() (which may consume them to bind the overlay window);
+ * everything else is passed on to the asynchronous watch untouched. */
+static GstBusSyncReply
+my_bus_sync_callback (GstBus * bus, GstMessage * message, gpointer data)
+{
+  if (GST_MESSAGE_TYPE (message) != GST_MESSAGE_ELEMENT)
+    return GST_BUS_PASS;        /* unhandled message */
+
+  return set_xwindow (&message, data);
+}
+
+/* Asynchronous bus watch: log warnings, quit the UI on error or EOS,
+ * rebuild the colour controls when the video source changes state, dump
+ * pipeline dot graphs on pipeline state changes, and dispatch element
+ * messages to handle_element_message().  Always returns TRUE to keep the
+ * watch installed. */
+static gboolean
+my_bus_callback (GstBus * bus, GstMessage * message, gpointer data)
+{
+  switch (GST_MESSAGE_TYPE (message)) {
+    case GST_MESSAGE_WARNING:{
+      GError *err;
+      gchar *debug;
+
+      gst_message_parse_warning (message, &err, &debug);
+      g_print ("Warning: %s\n", err->message);
+      g_error_free (err);
+      g_free (debug);
+      break;
+    }
+    case GST_MESSAGE_ERROR:{
+      GError *err;
+      gchar *debug;
+
+      gst_message_parse_error (message, &err, &debug);
+      g_print ("Error: %s\n", err->message);
+      g_error_free (err);
+      g_free (debug);
+
+      /* error is fatal: tear down pipeline and leave the GTK main loop */
+      me_gst_cleanup_element ();
+      gtk_main_quit ();
+      break;
+    }
+    case GST_MESSAGE_EOS:
+      /* end-of-stream */
+      gtk_main_quit ();
+      break;
+    case GST_MESSAGE_STATE_CHANGED:{
+      GstState old, new, pending;
+
+      gst_message_parse_state_changed (message, &old, &new, &pending);
+
+      /* Create/destroy color controls according videosrc state */
+      if (GST_MESSAGE_SRC (message) == GST_OBJECT (gst_videosrc)) {
+        if (old == GST_STATE_PAUSED && new == GST_STATE_READY) {
+          destroy_color_controls ();
+        } else if (old == GST_STATE_READY && new == GST_STATE_PAUSED) {
+          create_color_controls ();
+        }
+      }
+
+      /* we only care about pipeline state change messages */
+      if (GST_IS_PIPELINE (GST_MESSAGE_SRC (message))) {
+        /* dump graph for pipeline state changes */
+        gchar *dump_name = g_strdup_printf ("camerabin.%s_%s",
+            gst_element_state_get_name (old),
+            gst_element_state_get_name (new));
+        GST_DEBUG_BIN_TO_DOT_FILE_WITH_TS (GST_BIN (GST_MESSAGE_SRC (message)),
+            GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE |
+            GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS, dump_name);
+        g_free (dump_name);
+      }
+    }
+      break;
+    case GST_MESSAGE_ELEMENT:
+    {
+      handle_element_message (message);
+      break;
+    }
+    default:
+      /* unhandled message */
+      break;
+  }
+  return TRUE;
+}
+
+/* Rewrite @filename in place for burst ("continuous") capture: on the first
+ * burst shot insert "_0001" before the extension; afterwards overwrite the
+ * existing "_NNNN" counter with the current num_pics_cont value.
+ * NOTE(review): assumes num_pics_cont stays below 10000 — snprintf would
+ * truncate a wider counter to 5 chars. */
+static void
+me_set_next_cont_file_name (GString * filename)
+{
+  /* FIXME: better file naming (possible with signal) */
+  if (G_UNLIKELY (num_pics_cont == 1)) {
+    gint i;
+    /* locate the extension dot, scanning from the end */
+    for (i = filename->len - 1; i > 0; --i) {
+      if (filename->str[i] == '.')
+        break;
+    }
+    g_string_insert (filename, i, "_0001");
+  } else {
+    gchar tmp[6];
+    gint i;
+    /* locate the previously inserted counter separator */
+    for (i = filename->len - 1; i > 0; --i) {
+      if (filename->str[i] == '_')
+        break;
+    }
+    snprintf (tmp, 6, "_%04d", num_pics_cont);
+    memcpy (filename->str + i, tmp, 5);         /* overwrite, drop NUL */
+  }
+}
+
+/* g_timeout callback: emit "user-stop" on the camerabin passed in @data to
+ * leave the post-capture preview.  Returns FALSE so it fires only once. */
+static gboolean
+stop_image_preview (gpointer data)
+{
+  g_return_val_if_fail (data != NULL, FALSE);
+  g_signal_emit_by_name (data, "user-stop", 0);
+  return FALSE;
+}
+
+/* "img-done" handler.  While the continuous checkbox is active and fewer
+ * than N_BURST_IMAGES shots were taken, bump the burst counter, point the
+ * camerabin at the next numbered file name and return TRUE to request
+ * another capture.  Otherwise re-enable the Shot button, report the count,
+ * schedule the preview stop and return FALSE to end the burst. */
+static gboolean
+me_image_capture_done (GstElement * camera, const gchar * fname,
+    gpointer user_data)
+{
+  gboolean cont =
+      gtk_toggle_button_get_active (GTK_TOGGLE_BUTTON (ui_chk_continous));
+  GString *filename = g_string_new (fname);
+
+  if (num_pics_cont < N_BURST_IMAGES && cont) {
+    num_pics_cont++;
+    me_set_next_cont_file_name (filename);
+    g_object_set (G_OBJECT (camera), "filename", filename->str, NULL);
+  } else {
+    gtk_widget_set_sensitive (GTK_WIDGET (ui_bnt_shot), TRUE);
+    printf ("%u image(s) saved\n", num_pics_cont + 1);
+    fflush (stdout);
+    num_pics_cont = 0;
+
+    g_timeout_add (PREVIEW_TIME_MS, (GSourceFunc) stop_image_preview, camera);
+
+    cont = FALSE;
+  }
+  /* BUGFIX: free in both paths — the GString leaked whenever the burst
+   * ended (it was only freed on the continue branch) */
+  g_string_free (filename, TRUE);
+  return cont;
+}
+
+/* Wrap the user-supplied post-processing element @post in a bin of
+ * ffmpegcolorspace -> @post -> ffmpegcolorspace -> I420 capsfilter, ghost
+ * its sink/src pads, and install it as the camerabin "videopp" (when
+ * @video) or "imagepp" property.  Returns FALSE on any creation failure.
+ * NOTE(review): on the error paths the already-created, not-yet-binned
+ * elements are leaked — consider unreffing them before returning. */
+static gboolean
+me_gst_setup_pipeline_create_post_bin (const gchar * post, gboolean video)
+{
+  GstElement *vpp = NULL;
+  GstElement *bin, *c1, *c2, *filter;
+  GstPad *pad;
+  GstCaps *caps;
+
+  /* this function uses a bin just because it needs ffmpegcolorspace. For
+   * performance reason one should provide an element without need for color
+   * conversion */
+
+  vpp = gst_element_factory_make (post, NULL);
+  if (NULL == vpp) {
+    fprintf (stderr, "cannot create \'%s\' element\n", post);
+    fflush (stderr);
+    goto done;
+  }
+  c1 = gst_element_factory_make ("ffmpegcolorspace", NULL);
+  c2 = gst_element_factory_make ("ffmpegcolorspace", NULL);
+  if (NULL == c1 || NULL == c2) {
+    fprintf (stderr, "cannot create \'ffmpegcolorspace\' element\n");
+    fflush (stderr);
+    goto done;
+  }
+  filter = gst_element_factory_make ("capsfilter", NULL);
+  if (NULL == filter) {
+    fprintf (stderr, "cannot create \'capsfilter\' element\n");
+    fflush (stderr);
+    goto done;
+  }
+  bin = gst_bin_new (video ? "vid_postproc_bin" : "img_postproc_bin");
+  if (NULL == bin) {
+    goto done;
+  }
+
+  /* force I420 between the converters so @post always sees planar YUV */
+  caps = gst_caps_new_simple ("video/x-raw-yuv",
+      "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('I', '4', '2', '0'), NULL);
+  g_object_set (G_OBJECT (filter), "caps", caps, NULL);
+  gst_caps_unref (caps);
+
+  gst_bin_add_many (GST_BIN (bin), c1, vpp, c2, filter, NULL);
+  if (!gst_element_link_many (c1, vpp, c2, filter, NULL)) {
+    fprintf (stderr, "cannot link video post proc elements\n");
+    fflush (stderr);
+    goto done;
+  }
+
+  /* expose the chain ends as ghost pads of the bin */
+  pad = gst_element_get_static_pad (c1, "sink");
+  gst_element_add_pad (bin, gst_ghost_pad_new ("sink", pad));
+  gst_object_unref (GST_OBJECT (pad));
+
+  pad = gst_element_get_static_pad (filter, "src");
+  gst_element_add_pad (bin, gst_ghost_pad_new ("src", pad));
+  gst_object_unref (GST_OBJECT (pad));
+
+  g_object_set (gst_camera_bin, (video ? "videopp" : "imagepp"), bin, NULL);
+  return TRUE;
+done:
+  return FALSE;
+}
+
+/* Install the recording codecs on the camerabin: OMX MPEG-4/AAC plus the
+ * Hantro MP4 muxer when built with USE_MP4, otherwise keep the camerabin
+ * defaults (theora/vorbis/ogg). */
+static void
+me_gst_setup_pipeline_create_codecs (void)
+{
+#ifdef USE_MP4
+  g_object_set (gst_camera_bin, "videoenc",
+      gst_element_factory_make ("omx_mpeg4enc", NULL), NULL);
+
+  g_object_set (gst_camera_bin, "audioenc",
+      gst_element_factory_make ("omx_aacenc", NULL), NULL);
+
+  g_object_set (gst_camera_bin, "videomux",
+      gst_element_factory_make ("hantromp4mux", NULL), NULL);
+#else
+  /* using defaults theora, vorbis, ogg */
+#endif
+}
+
+/* Convenience wrapper: build the image post-processing bin. */
+static gboolean
+me_gst_setup_pipeline_create_img_post_bin (const gchar * imagepost)
+{
+  return me_gst_setup_pipeline_create_post_bin (imagepost, FALSE);
+}
+
+/* Convenience wrapper: build the video post-processing bin. */
+static gboolean
+me_gst_setup_pipeline_create_vid_post_bin (const gchar * videopost)
+{
+  return me_gst_setup_pipeline_create_post_bin (videopost, TRUE);
+}
+
+/* (Re)build the whole camerabin pipeline: tear down any previous instance,
+ * create camerabin, wire bus watches, set filename/preview caps, plug the
+ * video source and optional image/video post-processing bins and codecs,
+ * step through READY -> PAUSED -> PLAYING (waiting for each change), and
+ * initialise the UI menus.  Returns FALSE after cleaning up on failure. */
+static gboolean
+me_gst_setup_pipeline (const gchar * imagepost, const gchar * videopost)
+{
+  GstBus *bus;
+  GstCaps *preview_caps;
+
+  set_filename (filename);
+
+  me_gst_cleanup_element ();
+
+  gst_camera_bin = gst_element_factory_make ("camerabin", NULL);
+  if (NULL == gst_camera_bin) {
+    goto done;
+  }
+
+  g_signal_connect (gst_camera_bin, "img-done",
+      (GCallback) me_image_capture_done, NULL);
+
+  preview_caps = gst_caps_from_string (PREVIEW_CAPS);
+
+  /* async watch for normal messages, sync handler for prepare-xwindow-id */
+  bus = gst_pipeline_get_bus (GST_PIPELINE (gst_camera_bin));
+  gst_bus_add_watch (bus, my_bus_callback, NULL);
+  gst_bus_set_sync_handler (bus, my_bus_sync_callback, NULL);
+  gst_object_unref (bus);
+
+  /* set properties */
+  g_object_set (gst_camera_bin, "filename", filename->str, NULL);
+  g_object_set (gst_camera_bin, "preview-caps", preview_caps, NULL);
+  gst_caps_unref (preview_caps);
+
+  gst_videosrc = gst_element_factory_make (CAMERA_APP_VIDEOSRC, NULL);
+  if (gst_videosrc) {
+    g_object_set (G_OBJECT (gst_camera_bin), "videosrc", gst_videosrc, NULL);
+  }
+
+  if (imagepost) {
+    if (!me_gst_setup_pipeline_create_img_post_bin (imagepost))
+      goto done;
+  } else {
+    /* Use default image postprocessing element */
+    GstElement *ipp =
+        gst_element_factory_make (CAMERA_APP_IMAGE_POSTPROC, NULL);
+    if (ipp) {
+      g_object_set (G_OBJECT (gst_camera_bin), "imagepp", ipp, NULL);
+    }
+  }
+
+  if (videopost) {
+    if (!me_gst_setup_pipeline_create_vid_post_bin (videopost))
+      goto done;
+  }
+
+  me_gst_setup_pipeline_create_codecs ();
+
+  if (GST_STATE_CHANGE_FAILURE ==
+      gst_element_set_state (gst_camera_bin, GST_STATE_READY)) {
+    goto done;
+  }
+
+  /* camerabin may have created its own default source in READY */
+  if (!gst_videosrc) {
+    g_object_get (G_OBJECT (gst_camera_bin), "videosrc", &gst_videosrc, NULL);
+  }
+
+  init_view_finder_resolution_combobox ();
+
+  if (GST_STATE_CHANGE_FAILURE ==
+      gst_element_set_state (gst_camera_bin, GST_STATE_PAUSED)) {
+    goto done;
+  } else {
+    gst_element_get_state (gst_camera_bin, NULL, NULL, GST_CLOCK_TIME_NONE);
+  }
+
+  if (GST_STATE_CHANGE_FAILURE ==
+      gst_element_set_state (gst_camera_bin, GST_STATE_PLAYING)) {
+    goto done;
+  } else {
+    gst_element_get_state (gst_camera_bin, NULL, NULL, GST_CLOCK_TIME_NONE);
+  }
+
+#ifdef HAVE_GST_PHOTO_IFACE_H
+  /* Initialize menus to default settings */
+  GtkWidget *sub_menu =
+      gtk_menu_item_get_submenu (GTK_MENU_ITEM (ui_menuitem_capture));
+  gtk_container_foreach (GTK_CONTAINER (sub_menu), sub_menu_initialize, NULL);
+  sub_menu =
+      gtk_menu_item_get_submenu (GTK_MENU_ITEM (ui_menuitem_photography));
+  gtk_container_foreach (GTK_CONTAINER (sub_menu), sub_menu_initialize, NULL);
+#endif
+
+  capture_state = CAP_STATE_IMAGE;
+  return TRUE;
+done:
+  fprintf (stderr, "error to create pipeline\n");
+  fflush (stderr);
+  me_gst_cleanup_element ();
+  return FALSE;
+}
+
+/* Tear down the camerabin pipeline: drive it to GST_STATE_NULL (blocking
+ * until the state change completes), drop our reference, and free the
+ * caps list cached for the resolution combo box. Safe to call when the
+ * pipeline was never created (gst_camera_bin == NULL). */
+static void
+me_gst_cleanup_element ()
+{
+ if (gst_camera_bin) {
+ gst_element_set_state (gst_camera_bin, GST_STATE_NULL);
+ gst_element_get_state (gst_camera_bin, NULL, NULL, GST_CLOCK_TIME_NONE);
+ gst_object_unref (gst_camera_bin);
+ gst_camera_bin = NULL;
+
+ /* Each list entry owns one GstCaps ref (added when the resolution
+ * combo was filled); release them all before dropping the list. */
+ g_list_foreach (video_caps_list, (GFunc) gst_caps_unref, NULL);
+ g_list_free (video_caps_list);
+ video_caps_list = NULL;
+ }
+}
+
+/* Stop any ongoing video capture (recording or paused) by moving back to
+ * the stopped-video state. A no-op in image mode. Returns FALSE only if
+ * the state transition fails. */
+static gboolean
+capture_mode_stop ()
+{
+ if (capture_state == CAP_STATE_VIDEO_PAUSED
+ || capture_state == CAP_STATE_VIDEO_RECORDING) {
+ return capture_mode_set_state (CAP_STATE_VIDEO_STOPED);
+ } else {
+ return TRUE;
+ }
+}
+
+/* Synchronise button labels, widget visibility and radio-button state
+ * with the current value of the global capture_state. Call after every
+ * state change made via capture_mode_set_state(). */
+static void
+capture_mode_config_gui ()
+{
+ switch (capture_state) {
+ case CAP_STATE_IMAGE:
+ /* Still-image mode: "Shot" takes a picture, "Focus" triggers AF. */
+ gtk_button_set_label (ui_bnt_shot, "Shot");
+ gtk_button_set_label (ui_bnt_pause, "Focus");
+ gtk_widget_set_sensitive (GTK_WIDGET (ui_bnt_pause), TRUE);
+ gtk_widget_show (ui_chk_continous);
+ gtk_widget_show (ui_chk_rawmsg);
+ gtk_widget_hide (ui_chk_mute);
+ gtk_toggle_button_set_active (GTK_TOGGLE_BUTTON (ui_rdbntImageCapture),
+ TRUE);
+ break;
+ case CAP_STATE_VIDEO_STOPED:
+ /* Video mode, idle: "Rec" starts recording; pause disabled until
+ * a recording is running. */
+ gtk_button_set_label (ui_bnt_shot, "Rec");
+ gtk_button_set_label (ui_bnt_pause, "Pause");
+ gtk_widget_set_sensitive (GTK_WIDGET (ui_bnt_pause), FALSE);
+ gtk_widget_show (GTK_WIDGET (ui_bnt_pause));
+ gtk_widget_show (ui_chk_mute);
+ gtk_widget_hide (ui_chk_continous);
+ gtk_widget_hide (ui_chk_rawmsg);
+ gtk_toggle_button_set_active (GTK_TOGGLE_BUTTON (ui_rdbntVideoCapture),
+ TRUE);
+ break;
+ case CAP_STATE_VIDEO_PAUSED:
+ /* Only the pause button changes: it now resumes ("Cont"). */
+ gtk_button_set_label (ui_bnt_pause, "Cont");
+ break;
+ case CAP_STATE_VIDEO_RECORDING:
+ gtk_button_set_label (ui_bnt_shot, "Stop");
+ gtk_button_set_label (ui_bnt_pause, "Pause");
+ gtk_widget_set_sensitive (GTK_WIDGET (ui_bnt_pause), TRUE);
+ break;
+ default:
+ break;
+ }
+}
+
+/* Capture-mode state machine. Moves capture_state towards the requested
+ * state, driving camerabin via its "mode" property (0 = image, 1 = video,
+ * as used below) and its "user-start"/"user-stop"/"user-pause" action
+ * signals. Transitions that need an intermediate state recurse once
+ * through CAP_STATE_VIDEO_STOPED. Returns FALSE for the one disallowed
+ * transition (into CAP_STATE_VIDEO_PAUSED from a non-recording state).
+ * Note: this only updates pipeline state; callers refresh the UI with
+ * capture_mode_config_gui(). */
+static gboolean
+capture_mode_set_state (CaptureState state)
+{
+ if (capture_state == state)
+ return TRUE;
+
+ switch (capture_state) {
+ case CAP_STATE_IMAGE:
+ if (state == CAP_STATE_VIDEO_PAUSED) {
+ goto done; /* cannot pause without recording first */
+ }
+ g_object_set (gst_camera_bin, "mode", 1, NULL);
+ capture_state = CAP_STATE_VIDEO_STOPED;
+ if (state == CAP_STATE_VIDEO_RECORDING)
+ capture_mode_set_state (state); /* second hop: stopped -> recording */
+ break;
+ case CAP_STATE_VIDEO_STOPED:
+ if (state == CAP_STATE_VIDEO_PAUSED) {
+ goto done; /* cannot pause without recording first */
+ }
+ capture_state = state;
+ if (state == CAP_STATE_IMAGE)
+ g_object_set (gst_camera_bin, "mode", 0, NULL);
+ else { /* state == CAP_STATE_VIDEO_RECORDING */
+ g_object_set (gst_camera_bin, "mode", 1, NULL);
+ g_signal_emit_by_name (gst_camera_bin, "user-start", 0);
+ }
+ break;
+ case CAP_STATE_VIDEO_PAUSED:
+ if (state == CAP_STATE_VIDEO_RECORDING) {
+ g_signal_emit_by_name (gst_camera_bin, "user-start", 0);
+ capture_state = CAP_STATE_VIDEO_RECORDING;
+ } else {
+ g_signal_emit_by_name (gst_camera_bin, "user-stop", 0);
+ capture_state = CAP_STATE_VIDEO_STOPED;
+ if (state == CAP_STATE_IMAGE)
+ capture_mode_set_state (state); /* second hop: stopped -> image */
+ }
+ break;
+ case CAP_STATE_VIDEO_RECORDING:
+ if (state == CAP_STATE_VIDEO_PAUSED) {
+ g_signal_emit_by_name (gst_camera_bin, "user-pause", 0);
+ capture_state = CAP_STATE_VIDEO_PAUSED;
+ } else {
+ g_signal_emit_by_name (gst_camera_bin, "user-stop", 0);
+ capture_state = CAP_STATE_VIDEO_STOPED;
+ if (state == CAP_STATE_IMAGE)
+ capture_mode_set_state (state); /* second hop: stopped -> image */
+ }
+ break;
+ }
+ return TRUE;
+done:
+ return FALSE;
+}
+
+/* Main-window close handler: stop any capture, reset the UI, release the
+ * pipeline and leave the GTK main loop. */
+static void
+on_windowMain_delete_event (GtkWidget * widget, GdkEvent * event, gpointer data)
+{
+ capture_mode_set_state (CAP_STATE_IMAGE);
+ capture_mode_config_gui ();
+ me_gst_cleanup_element ();
+ gtk_main_quit ();
+}
+
+/* Attach image metadata tags (capture date, creator tool, description,
+ * title, copyright) to camerabin through its GstTagSetter interface,
+ * replacing any previously set values. Called before each image shot. */
+static void
+set_metadata (void)
+{
+ /* for more information about image metadata tags, see:
+ * http://webcvs.freedesktop.org/gstreamer/gst-plugins-bad/tests/icles/metadata_editor.c
+ * and for the mapping:
+ * http://webcvs.freedesktop.org/gstreamer/gst-plugins-bad/ext/metadata/metadata_mapping.htm?view=co
+ */
+
+ GstTagSetter *setter = GST_TAG_SETTER (gst_camera_bin);
+ GTimeVal time = { 0, 0 };
+ gchar *date_str, *desc_str;
+
+ g_get_current_time (&time);
+ date_str = g_time_val_to_iso8601 (&time); /* this is UTC */
+ desc_str = g_strdup_printf ("picture taken by %s", g_get_real_name ());
+
+ gst_tag_setter_add_tags (setter, GST_TAG_MERGE_REPLACE,
+ "date-time-original", date_str,
+ "date-time-modified", date_str,
+ "creator-tool", "camerabin-demo",
+ GST_TAG_DESCRIPTION, desc_str,
+ GST_TAG_TITLE, "My picture", GST_TAG_COPYRIGHT, "LGPL", NULL);
+
+ g_free (date_str);
+ g_free (desc_str);
+}
+
+/* Shot/Rec/Stop button handler. In image mode: pick a fresh filename,
+ * set metadata and trigger a capture ("user-start"). In stopped video
+ * mode: pick a filename and start recording. While recording or paused:
+ * stop the recording. */
+static void
+on_buttonShot_clicked (GtkButton * button, gpointer user_data)
+{
+ switch (capture_state) {
+ case CAP_STATE_IMAGE:
+ {
+ /* Disable the button until the capture cycle finishes
+ * (presumably re-enabled from a message handler elsewhere). */
+ gtk_widget_set_sensitive (GTK_WIDGET (ui_bnt_shot), FALSE);
+ set_filename (filename);
+ num_pics++;
+ g_object_set (gst_camera_bin, "filename", filename->str, NULL);
+
+ set_metadata ();
+ g_signal_emit_by_name (gst_camera_bin, "user-start", 0);
+ }
+ break;
+ case CAP_STATE_VIDEO_STOPED:
+ set_filename (filename);
+ num_vids++;
+ g_object_set (gst_camera_bin, "filename", filename->str, NULL);
+ capture_mode_set_state (CAP_STATE_VIDEO_RECORDING);
+ capture_mode_config_gui ();
+ break;
+ case CAP_STATE_VIDEO_PAUSED:
+ /* fall trough */
+ case CAP_STATE_VIDEO_RECORDING:
+ capture_mode_set_state (CAP_STATE_VIDEO_STOPED);
+ capture_mode_config_gui ();
+ break;
+ default:
+ break;
+ }
+}
+
+/* Focus/Pause button handler. In image mode the button toggles autofocus
+ * via the GstPhotography interface (label tracks the pending action).
+ * In video mode it pauses or resumes recording. */
+static void
+on_buttonPause_clicked (GtkButton * button, gpointer user_data)
+{
+ switch (capture_state) {
+ case CAP_STATE_IMAGE:
+ if (g_str_equal (gtk_button_get_label (ui_bnt_pause), "Focus")) {
+ /* Start autofocus */
+ gst_photography_set_autofocus (GST_PHOTOGRAPHY (gst_camera_bin), TRUE);
+ gtk_button_set_label (ui_bnt_pause, "Cancel Focus");
+ } else {
+ /* Cancel autofocus */
+ gst_photography_set_autofocus (GST_PHOTOGRAPHY (gst_camera_bin), FALSE);
+ gtk_button_set_label (ui_bnt_pause, "Focus");
+ }
+ break;
+ case CAP_STATE_VIDEO_STOPED:
+ break; /* pause is insensitive in this state; nothing to do */
+ case CAP_STATE_VIDEO_PAUSED:
+ capture_mode_set_state (CAP_STATE_VIDEO_RECORDING);
+ capture_mode_config_gui ();
+ break;
+ case CAP_STATE_VIDEO_RECORDING:
+ capture_mode_set_state (CAP_STATE_VIDEO_PAUSED);
+ capture_mode_config_gui ();
+ break;
+ default:
+ break;
+ }
+}
+
+/* Keep the drawing area's X window in sync with its GTK allocation after
+ * a resize, flushing with XSync so the video overlay area matches
+ * immediately. Always returns TRUE (event fully handled). */
+static gboolean
+on_drawingareaView_configure_event (GtkWidget * widget,
+ GdkEventConfigure * event, gpointer data)
+{
+ Display *display = GDK_WINDOW_XDISPLAY (GDK_WINDOW (widget->window));
+
+ XMoveResizeWindow (display, GDK_WINDOW_XID (GDK_WINDOW (widget->window)),
+ widget->allocation.x, widget->allocation.y,
+ widget->allocation.width, widget->allocation.height);
+ XSync (display, False);
+
+ return TRUE;
+}
+
+/* Resolution combo handler: look up the GstCaps cached for the selected
+ * row, adjust the view-finder aspect-ratio frame, apply the caps as
+ * camerabin's "filter-caps", and restart the pipeline (READY -> PLAYING)
+ * so the new format takes effect. */
+static void
+on_comboboxResolution_changed (GtkComboBox * widget, gpointer user_data)
+{
+ GstStructure *st;
+ gint w = 0, h = 0;
+ GstCaps *video_caps =
+ g_list_nth_data (video_caps_list, gtk_combo_box_get_active (widget));
+
+ if (video_caps) {
+
+ gst_element_set_state (gst_camera_bin, GST_STATE_READY);
+
+ st = gst_caps_get_structure (video_caps, 0);
+
+ gst_structure_get_int (st, "width", &w);
+ gst_structure_get_int (st, "height", &h);
+
+ if (w && h) {
+ /* match the frame's aspect ratio to the selected resolution */
+ g_object_set (ui_drawing_frame, "ratio", (gfloat) w / (gfloat) h, NULL);
+ }
+
+ g_object_set (G_OBJECT (gst_camera_bin), "filter-caps", video_caps, NULL);
+
+ gst_element_set_state (gst_camera_bin, GST_STATE_PLAYING);
+ }
+}
+
+/* Image-capture radio button: switch to image mode when activated
+ * (ignores the deactivation half of the toggle pair). */
+static void
+on_radiobuttonImageCapture_toggled (GtkToggleButton * togglebutton,
+ gpointer user_data)
+{
+ if (gtk_toggle_button_get_active (togglebutton)) {
+ if (capture_state != CAP_STATE_IMAGE) {
+ capture_mode_set_state (CAP_STATE_IMAGE);
+ capture_mode_config_gui ();
+ }
+ }
+}
+
+/* Video-capture radio button: switch from image mode to (stopped) video
+ * mode when activated. */
+static void
+on_radiobuttonVideoCapture_toggled (GtkToggleButton * togglebutton,
+ gpointer user_data)
+{
+ if (gtk_toggle_button_get_active (togglebutton)) {
+ if (capture_state == CAP_STATE_IMAGE) {
+ capture_mode_set_state (CAP_STATE_VIDEO_STOPED);
+ capture_mode_config_gui ();
+ }
+ }
+}
+
+/* Common handler for the video-effect radio buttons. `effect` is an
+ * element factory name (e.g. "edgetv") or NULL for no effect; it is
+ * applied to both video and image post-processing. Changing the effect
+ * requires rebuilding the whole pipeline, so capture is stopped and the
+ * pipeline recreated; the app quits if that fails. */
+static void
+on_rbBntVidEff_toggled (GtkToggleButton * togglebutton, gchar * effect)
+{
+ if (gtk_toggle_button_get_active (togglebutton)) {
+ /* lets also use those effects to image */
+ video_post = effect;
+ image_post = effect;
+ capture_mode_stop ();
+
+ me_gst_cleanup_element ();
+ if (!me_gst_setup_pipeline (image_post, video_post))
+ gtk_main_quit ();
+ capture_mode_config_gui ();
+ }
+}
+
+/* Per-effect radio button callbacks: thin wrappers that forward to
+ * on_rbBntVidEff_toggled() with the matching effecTV element name. */
+static void
+on_rbBntVidEffNone_toggled (GtkToggleButton * togglebutton, gpointer data)
+{
+ on_rbBntVidEff_toggled (togglebutton, NULL);
+}
+
+static void
+on_rbBntVidEffEdge_toggled (GtkToggleButton * togglebutton, gpointer data)
+{
+ on_rbBntVidEff_toggled (togglebutton, "edgetv");
+}
+
+static void
+on_rbBntVidEffAging_toggled (GtkToggleButton * togglebutton, gpointer user_data)
+{
+ on_rbBntVidEff_toggled (togglebutton, "agingtv");
+}
+
+static void
+on_rbBntVidEffDice_toggled (GtkToggleButton * togglebutton, gpointer user_data)
+{
+ on_rbBntVidEff_toggled (togglebutton, "dicetv");
+}
+
+static void
+on_rbBntVidEffWarp_toggled (GtkToggleButton * togglebutton, gpointer data)
+{
+ on_rbBntVidEff_toggled (togglebutton, "warptv");
+}
+
+static void
+on_rbBntVidEffShagadelic_toggled (GtkToggleButton * togglebutton, gpointer data)
+{
+ on_rbBntVidEff_toggled (togglebutton, "shagadelictv");
+}
+
+static void
+on_rbBntVidEffVertigo_toggled (GtkToggleButton * togglebutton, gpointer data)
+{
+ on_rbBntVidEff_toggled (togglebutton, "vertigotv");
+}
+
+static void
+on_rbBntVidEffRev_toggled (GtkToggleButton * togglebutton, gpointer data)
+{
+ on_rbBntVidEff_toggled (togglebutton, "revtv");
+}
+
+static void
+on_rbBntVidEffQuark_toggled (GtkToggleButton * togglebutton, gpointer data)
+{
+ on_rbBntVidEff_toggled (togglebutton, "quarktv");
+}
+
+/* Mute checkbox: forwarded straight to camerabin's "mute" property. */
+static void
+on_chkbntMute_toggled (GtkToggleButton * togglebutton, gpointer data)
+{
+ g_object_set (gst_camera_bin, "mute",
+ gtk_toggle_button_get_active (GTK_TOGGLE_BUTTON (togglebutton)), NULL);
+}
+
+/* Raw-message checkbox: toggles the CAMSRC_PUBLISH_RAW environment
+ * variable (presumably read by the camera source element — confirm). */
+static void
+on_chkbtnRawMsg_toggled (GtkToggleButton * togglebutton, gpointer data)
+{
+ const gchar *env_var = "CAMSRC_PUBLISH_RAW";
+ if (gtk_toggle_button_get_active (GTK_TOGGLE_BUTTON (togglebutton))) {
+ g_setenv (env_var, "1", TRUE);
+ } else {
+ g_unsetenv (env_var);
+ }
+}
+
+/* Zoom slider: forwarded to camerabin's "zoom" property. */
+static void
+on_hscaleZoom_value_changed (GtkRange * range, gpointer user_data)
+{
+ gint zoom = gtk_range_get_value (range);
+ g_object_set (gst_camera_bin, "zoom", zoom, NULL);
+}
+
+/* Color-balance slider: user_data carries the GstColorBalanceChannel
+ * this slider controls (attached in create_color_controls). */
+static void
+on_color_control_value_changed (GtkRange * range, gpointer user_data)
+{
+ GstColorBalance *balance = GST_COLOR_BALANCE (gst_camera_bin);
+ gint val = gtk_range_get_value (range);
+ GstColorBalanceChannel *channel = (GstColorBalanceChannel *) user_data;
+ gst_color_balance_set_value (balance, channel, val);
+}
+
+
+/* Key-release handler: F11 cancels autofocus (when the photography
+ * interface is compiled in). Returns FALSE so the event propagates. */
+gboolean
+on_key_released (GtkWidget * widget, GdkEventKey * event, gpointer user_data)
+{
+ g_return_val_if_fail (event != NULL, FALSE);
+
+ switch (event->keyval) {
+ case GDK_F11:
+#ifdef HAVE_GST_PHOTO_IFACE_H
+ gst_photography_set_autofocus (GST_PHOTOGRAPHY (gst_camera_bin), FALSE);
+#endif
+ break;
+ default:
+ break;
+ }
+
+ return FALSE;
+}
+
+/* Key-press handler: F11 starts autofocus; keyval 0x0 (presumably a
+ * hardware capture key delivering no keysym — confirm) acts as the shot
+ * button. Returns FALSE so the event propagates. */
+gboolean
+on_key_pressed (GtkWidget * widget, GdkEventKey * event, gpointer user_data)
+{
+ g_return_val_if_fail (event != NULL, FALSE);
+
+ switch (event->keyval) {
+ case GDK_F11:
+#ifdef HAVE_GST_PHOTO_IFACE_H
+ gst_photography_set_autofocus (GST_PHOTOGRAPHY (gst_camera_bin), TRUE);
+#endif
+ break;
+ case 0x0:
+ on_buttonShot_clicked (NULL, NULL);
+ break;
+ default:
+ break;
+ }
+
+ return FALSE;
+}
+
+/* Wire every handler defined above to its widget: glade-declared signals
+ * by handler name via glade_xml_signal_connect(), plus direct key-event
+ * connections on the main window. Must run after ui_create() has loaded
+ * the glade file. */
+static void
+ui_connect_signals (void)
+{
+ glade_xml_signal_connect (ui_glade_xml, "on_windowMain_delete_event",
+ (GCallback) on_windowMain_delete_event);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_buttonShot_clicked",
+ (GCallback) on_buttonShot_clicked);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_buttonPause_clicked",
+ (GCallback) on_buttonPause_clicked);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_drawingareaView_configure_event",
+ (GCallback) on_drawingareaView_configure_event);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_comboboxResolution_changed",
+ (GCallback) on_comboboxResolution_changed);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_radiobuttonImageCapture_toggled",
+ (GCallback) on_radiobuttonImageCapture_toggled);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_radiobuttonVideoCapture_toggled",
+ (GCallback) on_radiobuttonVideoCapture_toggled);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_rbBntVidEffNone_toggled",
+ (GCallback) on_rbBntVidEffNone_toggled);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_rbBntVidEffEdge_toggled",
+ (GCallback) on_rbBntVidEffEdge_toggled);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_rbBntVidEffAging_toggled",
+ (GCallback) on_rbBntVidEffAging_toggled);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_rbBntVidEffDice_toggled",
+ (GCallback) on_rbBntVidEffDice_toggled);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_rbBntVidEffWarp_toggled",
+ (GCallback) on_rbBntVidEffWarp_toggled);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_rbBntVidEffShagadelic_toggled",
+ (GCallback) on_rbBntVidEffShagadelic_toggled);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_rbBntVidEffVertigo_toggled",
+ (GCallback) on_rbBntVidEffVertigo_toggled);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_rbBntVidEffRev_toggled",
+ (GCallback) on_rbBntVidEffRev_toggled);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_rbBntVidEffQuark_toggled",
+ (GCallback) on_rbBntVidEffQuark_toggled);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_chkbntMute_toggled",
+ (GCallback) on_chkbntMute_toggled);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_chkbtnRawMsg_toggled",
+ (GCallback) on_chkbtnRawMsg_toggled);
+
+ glade_xml_signal_connect (ui_glade_xml, "on_hscaleZoom_value_changed",
+ (GCallback) on_hscaleZoom_value_changed);
+
+ g_signal_connect (ui_main_window, "key-press-event",
+ (GCallback) on_key_pressed, NULL);
+
+ g_signal_connect (ui_main_window, "key-release-event",
+ (GCallback) on_key_released, NULL);
+}
+
+/* "format-value" callback for the color-balance scales: renders the
+ * slider value as "<channel label>: <value>". Returned string is freed
+ * by GTK. */
+static gchar *
+format_value_callback (GtkScale * scale, gdouble value, gpointer user_data)
+{
+ GstColorBalanceChannel *channel = (GstColorBalanceChannel *) user_data;
+
+ return g_strdup_printf ("%s: %d", channel->label, (gint) value);
+}
+
+/* Turn one caps structure into resolution-combo entries. Only
+ * video/x-raw-yuv structures are considered; width/height ranges
+ * collapse to their maxima. A fixed or listed framerate yields one combo
+ * entry (and one cached GstCaps in video_caps_list) per framerate. A
+ * framerate *range* instead stores its maximum in the globals
+ * max_fr_n/max_fr_d and adds no entries (the range_found early exit).
+ * Returns the number of combo entries created. */
+static gint
+create_menu_items_from_structure (GstStructure * structure)
+{
+ const GValue *framerate_list = NULL;
+ const gchar *structure_name;
+ GString *item_str = NULL;
+ guint j, num_items_created = 0, num_framerates = 1;
+ gint w = 0, h = 0, n = 0, d = 1;
+ guint32 fourcc = 0;
+
+ g_return_val_if_fail (structure != NULL, 0);
+
+ structure_name = gst_structure_get_name (structure);
+
+ /* lets filter yuv only */
+ if (0 == strcmp (structure_name, "video/x-raw-yuv")) {
+ item_str = g_string_new_len ("", 128);
+
+ if (gst_structure_has_field_typed (structure, "format", GST_TYPE_FOURCC)) {
+ gst_structure_get_fourcc (structure, "format", &fourcc);
+ }
+
+ if (gst_structure_has_field_typed (structure, "width", GST_TYPE_INT_RANGE)) {
+ const GValue *wrange = gst_structure_get_value (structure, "width");
+ /* If range found, use the maximum */
+ w = gst_value_get_int_range_max (wrange);
+ } else if (gst_structure_has_field_typed (structure, "width", G_TYPE_INT)) {
+ gst_structure_get_int (structure, "width", &w);
+ }
+
+ if (gst_structure_has_field_typed (structure, "height", GST_TYPE_INT_RANGE)) {
+ const GValue *hrange = gst_structure_get_value (structure, "height");
+ /* If range found, use the maximum */
+ h = gst_value_get_int_range_max (hrange);
+ } else if (gst_structure_has_field_typed (structure, "height", G_TYPE_INT)) {
+ gst_structure_get_int (structure, "height", &h);
+ }
+
+ if (gst_structure_has_field_typed (structure, "framerate",
+ GST_TYPE_FRACTION)) {
+ gst_structure_get_fraction (structure, "framerate", &n, &d);
+ } else if (gst_structure_has_field_typed (structure, "framerate",
+ GST_TYPE_LIST)) {
+ framerate_list = gst_structure_get_value (structure, "framerate");
+ num_framerates = gst_value_list_get_size (framerate_list);
+ } else if (gst_structure_has_field_typed (structure, "framerate",
+ GST_TYPE_FRACTION_RANGE)) {
+ const GValue *fr = gst_structure_get_value (structure, "framerate");
+ const GValue *frmax = gst_value_get_fraction_range_max (fr);
+ max_fr_n = gst_value_get_fraction_numerator (frmax);
+ max_fr_d = gst_value_get_fraction_denominator (frmax);
+ }
+
+ /* A framerate range was seen (possibly by an earlier structure in
+ * this fill pass): skip per-framerate item creation entirely. */
+ if (max_fr_n || max_fr_d) {
+ goto range_found;
+ }
+
+ for (j = 0; j < num_framerates; j++) {
+ GstCaps *video_caps;
+
+ if (framerate_list) {
+ const GValue *item = gst_value_list_get_value (framerate_list, j);
+ n = gst_value_get_fraction_numerator (item);
+ d = gst_value_get_fraction_denominator (item);
+ }
+ g_string_assign (item_str, structure_name);
+ g_string_append_printf (item_str, " (%" GST_FOURCC_FORMAT ")",
+ GST_FOURCC_ARGS (fourcc));
+ g_string_append_printf (item_str, ", %dx%d at %d/%d", w, h, n, d);
+ gtk_combo_box_append_text (ui_cbbox_resolution, item_str->str);
+
+ /* Cache fully-fixed caps for this entry; the ref is released in
+ * me_gst_cleanup_element(). */
+ video_caps =
+ gst_caps_new_simple (structure_name, "format", GST_TYPE_FOURCC,
+ fourcc,
+ "width", G_TYPE_INT, w, "height", G_TYPE_INT, h,
+ "framerate", GST_TYPE_FRACTION, n, d, NULL);
+ video_caps_list = g_list_append (video_caps_list, video_caps);
+ num_items_created++;
+ }
+ }
+
+range_found:
+
+ if (item_str) {
+ g_string_free (item_str, TRUE);
+ }
+
+ return num_items_created;
+}
+
+/* Populate the resolution combo from every structure in `caps`,
+ * resetting the max-framerate-range globals first, and keep the running
+ * count of combo entries in ui_cbbox_resolution_count. */
+static void
+fill_resolution_combo (GstCaps * caps)
+{
+ guint size, num_items, i;
+ GstStructure *st;
+
+ max_fr_n = max_fr_d = 0;
+
+ /* Create new items */
+ size = gst_caps_get_size (caps);
+
+ for (i = 0; i < size; i++) {
+ st = gst_caps_get_structure (caps, i);
+ num_items = create_menu_items_from_structure (st);
+ ui_cbbox_resolution_count += num_items;
+ }
+}
+
+/* Build the fallback view-finder caps from the DEFAULT_VF_CAPS string.
+ * Caller owns the returned caps. */
+static GstCaps *
+create_default_caps ()
+{
+ GstCaps *default_caps;
+
+ default_caps = gst_caps_from_string (DEFAULT_VF_CAPS);
+
+ return default_caps;
+}
+
+/* Fill the resolution combo box from the camera's reported "inputcaps",
+ * append any default caps the device also supports, and select the last
+ * entry. NOTE(review): if "inputcaps" comes back NULL, input_caps is
+ * still passed to gst_caps_intersect() — verify that is safe. */
+static void
+init_view_finder_resolution_combobox ()
+{
+ GstCaps *input_caps = NULL, *default_caps = NULL, *intersect = NULL;
+
+ g_object_get (gst_camera_bin, "inputcaps", &input_caps, NULL);
+ if (input_caps) {
+ fill_resolution_combo (input_caps);
+ }
+
+ /* Fill in default items if supported */
+ default_caps = create_default_caps ();
+ intersect = gst_caps_intersect (default_caps, input_caps);
+ if (intersect) {
+ fill_resolution_combo (intersect);
+ gst_caps_unref (intersect);
+ }
+ gst_caps_unref (default_caps);
+
+ if (input_caps) {
+ gst_caps_unref (input_caps);
+ }
+
+ /* Set some item active */
+ gtk_combo_box_set_active (ui_cbbox_resolution, ui_cbbox_resolution_count - 1);
+}
+
+/* Remove all color-balance sliders from their container, first
+ * disconnecting the handlers attached in create_color_controls() (keyed
+ * by the channel stored as object data on each slider). */
+static void
+destroy_color_controls ()
+{
+ GList *widgets, *item;
+ GtkWidget *widget = NULL;
+
+ widgets = gtk_container_get_children (GTK_CONTAINER (ui_vbox_color_controls));
+ for (item = widgets; item; item = g_list_next (item)) {
+ widget = GTK_WIDGET (item->data);
+ g_signal_handlers_disconnect_by_func (widget, (GFunc) format_value_callback,
+ g_object_get_data (G_OBJECT (widget), "channel"));
+ g_signal_handlers_disconnect_by_func (widget,
+ (GFunc) on_color_control_value_changed,
+ g_object_get_data (G_OBJECT (widget), "channel"));
+ gtk_container_remove (GTK_CONTAINER (ui_vbox_color_controls), widget);
+ }
+ g_list_free (widgets);
+}
+
+/* Build one horizontal slider per color-balance channel exposed by
+ * camerabin (brightness, contrast, ...). Each slider is initialised to
+ * the channel's current value, wired to on_color_control_value_changed()
+ * and format_value_callback(), and carries its channel as "channel"
+ * object data so destroy_color_controls() can disconnect it. No-op if
+ * camerabin does not implement GstColorBalance. */
+static void
+create_color_controls ()
+{
+ GstColorBalance *balance = NULL;
+ const GList *controls, *item;
+ GstColorBalanceChannel *channel;
+ GtkWidget *hscale;
+
+ if (GST_IS_COLOR_BALANCE (gst_camera_bin)) {
+ balance = GST_COLOR_BALANCE (gst_camera_bin);
+ }
+
+ if (NULL == balance) {
+ goto done;
+ }
+
+ controls = gst_color_balance_list_channels (balance);
+ for (item = controls; item; item = g_list_next (item)) {
+ channel = item->data;
+
+ hscale = gtk_hscale_new ((GtkAdjustment *)
+ gtk_adjustment_new (gst_color_balance_get_value (balance, channel),
+ channel->min_value, channel->max_value, 1, 10, 10));
+
+ g_signal_connect (GTK_RANGE (hscale), "value-changed",
+ (GCallback) on_color_control_value_changed, (gpointer) channel);
+ g_signal_connect (GTK_SCALE (hscale), "format-value",
+ (GCallback) format_value_callback, (gpointer) channel);
+ g_object_set_data (G_OBJECT (hscale), "channel", (gpointer) channel);
+
+ gtk_box_pack_start (GTK_BOX (ui_vbox_color_controls), GTK_WIDGET (hscale),
+ FALSE, TRUE, 0);
+ }
+
+ gtk_widget_show_all (ui_vbox_color_controls);
+done:
+ return;
+}
+
+#ifdef HAVE_GST_PHOTO_IFACE_H
+/* gtk_container_foreach helper: re-emit "toggled" on every active check
+ * menu item so its handler pushes the current selection to the device. */
+static void
+menuitem_toggle_active (GtkWidget * widget, gpointer data)
+{
+ gboolean active;
+ g_object_get (G_OBJECT (widget), "active", &active, NULL);
+ if (active) {
+ gtk_check_menu_item_toggled (GTK_CHECK_MENU_ITEM (widget));
+ }
+}
+
+/* gtk_container_foreach helper: descend into a menu item's submenu and
+ * apply menuitem_toggle_active to each entry (used to push menu defaults
+ * to the device after pipeline creation). */
+static void
+sub_menu_initialize (GtkWidget * widget, gpointer data)
+{
+ GtkWidget *submenu;
+ submenu = gtk_menu_item_get_submenu (GTK_MENU_ITEM (widget));
+ gtk_container_foreach (GTK_CONTAINER (submenu), menuitem_toggle_active, NULL);
+}
+
+/* Shared "toggled" handler for the enum-backed photography submenus
+ * (white balance, scene, colour tone, flash). user_data is the GEnumClass
+ * the submenu was built from; the menu item's widget name is the enum
+ * value nick. Applies the value through the matching GstPhotography
+ * setter, reads it back, and prints a diagnostic if the set failed or
+ * the device reports a different value. */
+void
+photo_menuitem_toggled_cb (GtkRadioMenuItem * menuitem, gpointer user_data)
+{
+ gboolean active = FALSE, ret = FALSE;
+ GEnumClass *eclass = (GEnumClass *) user_data;
+ GType etype = G_ENUM_CLASS_TYPE (eclass);
+ GEnumValue *val;
+ gint set_value = -1;
+
+ /* Get value using menu item name */
+ val =
+ g_enum_get_value_by_nick (eclass,
+ gtk_widget_get_name (GTK_WIDGET (menuitem)));
+
+ g_object_get (G_OBJECT (menuitem), "active", &active, NULL);
+ if (active) {
+ if (etype == GST_TYPE_WHITE_BALANCE_MODE) {
+ GstWhiteBalanceMode mode;
+ ret =
+ gst_photography_set_white_balance_mode (GST_PHOTOGRAPHY
+ (gst_camera_bin), val->value);
+ gst_photography_get_white_balance_mode (GST_PHOTOGRAPHY (gst_camera_bin),
+ &mode);
+ set_value = (gint) mode;
+ } else if (etype == GST_TYPE_SCENE_MODE) {
+ GstSceneMode mode;
+ ret =
+ gst_photography_set_scene_mode (GST_PHOTOGRAPHY (gst_camera_bin),
+ val->value);
+ gst_photography_get_scene_mode (GST_PHOTOGRAPHY (gst_camera_bin), &mode);
+ set_value = (gint) mode;
+ } else if (etype == GST_TYPE_COLOUR_TONE_MODE) {
+ GstColourToneMode mode;
+ ret =
+ gst_photography_set_colour_tone_mode (GST_PHOTOGRAPHY
+ (gst_camera_bin), val->value);
+ gst_photography_get_colour_tone_mode (GST_PHOTOGRAPHY (gst_camera_bin),
+ &mode);
+ set_value = (gint) mode;
+ } else if (etype == GST_TYPE_FLASH_MODE) {
+ GstFlashMode mode;
+ ret =
+ gst_photography_set_flash_mode (GST_PHOTOGRAPHY (gst_camera_bin),
+ val->value);
+ gst_photography_get_flash_mode (GST_PHOTOGRAPHY (gst_camera_bin), &mode);
+ set_value = (gint) mode;
+ }
+
+ if (!ret) {
+ g_print ("%s setting failed\n", val->value_name);
+ } else if (val->value != set_value) {
+ g_print ("%s setting failed, got %d\n", val->value_nick, set_value);
+ }
+ }
+}
+
+/* ISO submenu handler: parses the ISO value from the menu item's widget
+ * name ("auto" maps to 0), applies it via GstPhotography, reads it back,
+ * and prints a diagnostic on mismatch or failure. */
+void
+photo_iso_speed_toggled_cb (GtkRadioMenuItem * menuitem, gpointer user_data)
+{
+ gboolean active;
+ const gchar *name;
+ guint val = 0, set_val = G_MAXUINT;
+
+ g_object_get (G_OBJECT (menuitem), "active", &active, NULL);
+ if (active) {
+ name = gtk_widget_get_name (GTK_WIDGET (menuitem));
+ /* iso auto setting = 0 */
+ /* FIXME: check what values other than 0 can be set */
+ if (!g_str_equal (name, "auto")) {
+ sscanf (name, "%d", &val);
+ }
+ if (!gst_photography_set_iso_speed (GST_PHOTOGRAPHY (gst_camera_bin), val)) {
+ g_print ("ISO speed (%d) setting failed\n", val);
+ } else {
+ gst_photography_get_iso_speed (GST_PHOTOGRAPHY (gst_camera_bin),
+ &set_val);
+ if (val != set_val) {
+ g_print ("ISO speed (%d) setting failed, got %d\n", val, set_val);
+ }
+ }
+ }
+}
+
+/* EV-compensation submenu handler: parses the float value from the menu
+ * item's widget name, applies it via GstPhotography, reads it back, and
+ * prints a diagnostic on mismatch or failure. */
+void
+photo_ev_comp_toggled_cb (GtkRadioMenuItem * menuitem, gpointer user_data)
+{
+ gboolean active;
+ const gchar *name;
+ gfloat val = 0.0, set_val = G_MAXFLOAT;
+
+ g_object_get (G_OBJECT (menuitem), "active", &active, NULL);
+ if (active) {
+ name = gtk_widget_get_name (GTK_WIDGET (menuitem));
+ sscanf (name, "%f", &val);
+ if (!gst_photography_set_ev_compensation (GST_PHOTOGRAPHY (gst_camera_bin),
+ val)) {
+ g_print ("EV compensation (%.1f) setting failed\n", val);
+ } else {
+ gst_photography_get_ev_compensation (GST_PHOTOGRAPHY (gst_camera_bin),
+ &set_val);
+ if (val != set_val) {
+ g_print ("EV compensation (%.1f) setting failed, got %.1f\n", val,
+ set_val);
+ }
+ }
+ }
+}
+
+/* Build a radio-item submenu from a GEnum type and attach it to
+ * parent_item. Each item is labelled and *named* with the enum value's
+ * nick — the name is how photo_menuitem_toggled_cb() maps the item back
+ * to an enum value; the GEnumClass itself is passed as callback data. */
+static void
+photo_add_submenu_from_enum (GtkMenuItem * parent_item, GType enum_type)
+{
+ GTypeClass *tclass;
+ GEnumClass *eclass;
+ GtkWidget *new_item = NULL, *new_submenu = NULL;
+ guint i;
+ GEnumValue *val;
+ GSList *group = NULL;
+
+ g_return_if_fail (parent_item && enum_type && G_TYPE_IS_CLASSED (enum_type));
+
+ tclass = g_type_class_ref (enum_type);
+ eclass = G_ENUM_CLASS (tclass);
+ new_submenu = gtk_menu_new ();
+
+ for (i = 0; i < eclass->n_values; i++) {
+ val = g_enum_get_value (eclass, i);
+ new_item = gtk_radio_menu_item_new_with_label (group, val->value_nick);
+ /* Store enum nick as the menu item name */
+ gtk_widget_set_name (new_item, val->value_nick);
+ group = gtk_radio_menu_item_get_group (GTK_RADIO_MENU_ITEM (new_item));
+ g_signal_connect (new_item, "toggled",
+ (GCallback) photo_menuitem_toggled_cb, eclass);
+ gtk_menu_shell_append (GTK_MENU_SHELL (new_submenu), new_item);
+ gtk_widget_show (new_item);
+ }
+
+ gtk_menu_item_set_submenu (parent_item, new_submenu);
+ g_type_class_unref (tclass);
+ /* NOTE(review): eclass is handed to live signal handlers above but the
+ * class ref is dropped here — relies on the enum class staying alive
+ * (GEnum classes are never really freed in practice). */
+}
+
+/* Build a radio-item submenu from a list of label strings and attach it
+ * to parent_item. Each item's widget name is set to its label so the
+ * toggled callback can parse the value back; the item labelled "0" is
+ * pre-selected as the default. */
+static void
+add_submenu_from_list (GtkMenuItem * parent_item, GList * labels,
+ GCallback toggled_cb)
+{
+ GtkWidget *new_item = NULL, *new_submenu = NULL;
+ GSList *group = NULL;
+ GList *l;
+
+ new_submenu = gtk_menu_new ();
+
+ for (l = labels; l != NULL; l = g_list_next (l)) {
+ const gchar *label = l->data;
+ new_item = gtk_radio_menu_item_new_with_label (group, label);
+ if (g_str_equal (label, "0")) {
+ /* Let's set zero as default */
+ gtk_check_menu_item_set_active (GTK_CHECK_MENU_ITEM (new_item), TRUE);
+ }
+ gtk_widget_set_name (new_item, label);
+ group = gtk_radio_menu_item_get_group (GTK_RADIO_MENU_ITEM (new_item));
+ g_signal_connect (new_item, "toggled", toggled_cb, NULL);
+ gtk_menu_shell_append (GTK_MENU_SHELL (new_submenu), new_item);
+ gtk_widget_show (new_item);
+ }
+
+ gtk_menu_item_set_submenu (parent_item, new_submenu);
+}
+
+/* Append a plain labelled menu item to parent_menu and return it (for
+ * attaching a submenu). */
+static GtkMenuItem *
+add_menuitem (GtkMenu * parent_menu, const gchar * item_name)
+{
+ GtkWidget *new_item;
+
+ new_item = gtk_menu_item_new_with_label (item_name);
+ gtk_menu_shell_append (GTK_MENU_SHELL (parent_menu), new_item);
+ gtk_widget_show (new_item);
+
+ return GTK_MENU_ITEM (new_item);
+}
+
+/* Build a GList of ISO-speed label strings from the static
+ * iso_speed_labels table (list entries point at the static strings, so
+ * g_list_free alone is the correct cleanup). */
+GList *
+create_iso_speed_labels ()
+{
+ GList *labels = NULL;
+ gint i;
+ for (i = 0; i < G_N_ELEMENTS (iso_speed_labels); i++) {
+ labels = g_list_append (labels, iso_speed_labels[i]);
+ }
+ return labels;
+}
+
+/* Build a GList of EV-compensation label strings covering
+ * [EV_COMP_MIN, EV_COMP_MAX] in EV_COMP_STEP increments, formatted
+ * locale-independently. NOTE(review): entries are g_strdup'd here but
+ * the caller only g_list_free()s the list — the strings leak (one-shot
+ * at startup, so benign). */
+GList *
+create_ev_comp_labels ()
+{
+ GList *labels = NULL;
+ gdouble comp;
+ char buf[G_ASCII_DTOSTR_BUF_SIZE];
+
+ for (comp = EV_COMP_MIN; comp <= EV_COMP_MAX; comp += EV_COMP_STEP) {
+ g_ascii_dtostr (buf, sizeof (buf), comp);
+ labels = g_list_append (labels, g_strdup (buf));
+ }
+ return labels;
+}
+
+/* Assemble the "Photography" menu: one submenu per GstPhotography
+ * setting (AWB, colour tone, scene, flash from their enum types; ISO and
+ * EV compensation from generated label lists). */
+static void
+fill_photography_menu (GtkMenuItem * parent_item)
+{
+ GtkWidget *photo_menu = gtk_menu_new ();
+ GtkMenuItem *item = NULL;
+ GList *labels = NULL;
+
+ /* Add menu items and create and associate submenus to each item */
+ item = add_menuitem (GTK_MENU (photo_menu), "AWB");
+ photo_add_submenu_from_enum (item, GST_TYPE_WHITE_BALANCE_MODE);
+
+ item = add_menuitem (GTK_MENU (photo_menu), "Colour Tone");
+ photo_add_submenu_from_enum (item, GST_TYPE_COLOUR_TONE_MODE);
+
+ item = add_menuitem (GTK_MENU (photo_menu), "Scene");
+ photo_add_submenu_from_enum (item, GST_TYPE_SCENE_MODE);
+
+ item = add_menuitem (GTK_MENU (photo_menu), "Flash");
+ photo_add_submenu_from_enum (item, GST_TYPE_FLASH_MODE);
+
+ item = add_menuitem (GTK_MENU (photo_menu), "ISO");
+ labels = create_iso_speed_labels ();
+ add_submenu_from_list (item, labels, (GCallback) photo_iso_speed_toggled_cb);
+ g_list_free (labels);
+
+ item = add_menuitem (GTK_MENU (photo_menu), "EV comp");
+ labels = create_ev_comp_labels ();
+ add_submenu_from_list (item, labels, (GCallback) photo_ev_comp_toggled_cb);
+ g_list_free (labels);
+
+ gtk_menu_item_set_submenu (parent_item, photo_menu);
+}
+
+/* Image-resolution submenu handler: map the selected item's widget name
+ * through image_resolution_label_map and emit camerabin's
+ * "user-image-res" action signal with the matching width/height. */
+void
+capture_image_res_toggled_cb (GtkRadioMenuItem * menuitem, gpointer user_data)
+{
+ gboolean active;
+ const gchar *label;
+ gint i;
+
+ g_object_get (G_OBJECT (menuitem), "active", &active, NULL);
+ if (active) {
+ label = gtk_widget_get_name (GTK_WIDGET (menuitem));
+ /* Look for width and height corresponding to the label */
+ for (i = 0; i < G_N_ELEMENTS (image_resolution_label_map); i++) {
+ if (g_str_equal (label, image_resolution_label_map[i].label)) {
+ /* set found values */
+ g_signal_emit_by_name (gst_camera_bin, "user-image-res",
+ image_resolution_label_map[i].width,
+ image_resolution_label_map[i].height, 0);
+ break;
+ }
+ }
+ }
+}
+
+/* Build a GList of resolution label strings from the static
+ * image_resolution_label_map table (entries are the static strings;
+ * g_list_free alone is the correct cleanup). */
+GList *
+create_image_resolution_labels ()
+{
+ GList *labels = NULL;
+ int i;
+ for (i = 0; i < G_N_ELEMENTS (image_resolution_label_map); i++) {
+ labels = g_list_append (labels, image_resolution_label_map[i].label);
+ }
+ return labels;
+}
+
+/* Assemble the "Capture" menu: currently a single "Image resolution"
+ * submenu driven by capture_image_res_toggled_cb(). */
+static void
+fill_capture_menu (GtkMenuItem * parent_item)
+{
+ GtkWidget *capture_menu = gtk_menu_new ();
+ GtkMenuItem *item = NULL;
+ GList *labels = NULL;
+
+ /* Add menu items and create and associate submenus to each item */
+ item = add_menuitem (GTK_MENU (capture_menu), "Image resolution");
+
+ labels = create_image_resolution_labels ();
+ add_submenu_from_list (item, labels,
+ (GCallback) capture_image_res_toggled_cb);
+ g_list_free (labels);
+
+ gtk_menu_item_set_submenu (parent_item, capture_menu);
+}
+#endif /* HAVE_GST_PHOTO_IFACE_H */
+
+/* Load the glade UI (local file first, installed copy as fallback), look
+ * up every widget the app needs into the ui_* globals, build the
+ * photography/capture menus when compiled in, connect signals and show
+ * the main window. Returns FALSE if the glade file or any required
+ * widget is missing. */
+static gboolean
+ui_create (void)
+{
+ gchar *gladefile = DEFAULT_GLADE_FILE;
+
+ if (!g_file_test (gladefile, G_FILE_TEST_EXISTS)) {
+ gladefile = SHARED_GLADE_FILE;
+ }
+
+ ui_glade_xml = glade_xml_new (gladefile, NULL, NULL);
+ if (!ui_glade_xml) {
+ fprintf (stderr, "glade_xml_new failed for %s\n", gladefile);
+ fflush (stderr);
+ goto done;
+ }
+
+ ui_main_window = glade_xml_get_widget (ui_glade_xml, "windowMain");
+ ui_drawing = glade_xml_get_widget (ui_glade_xml, "drawingareaView");
+ ui_drawing_frame = glade_xml_get_widget (ui_glade_xml, "drawingareaFrame");
+ ui_chk_continous = glade_xml_get_widget (ui_glade_xml, "chkbntContinous");
+ ui_chk_rawmsg = glade_xml_get_widget (ui_glade_xml, "chkbtnRawMsg");
+ ui_bnt_shot = GTK_BUTTON (glade_xml_get_widget (ui_glade_xml, "buttonShot"));
+ ui_bnt_pause =
+ GTK_BUTTON (glade_xml_get_widget (ui_glade_xml, "buttonPause"));
+ ui_cbbox_resolution =
+ GTK_COMBO_BOX (glade_xml_get_widget (ui_glade_xml, "comboboxResolution"));
+ ui_chk_mute = glade_xml_get_widget (ui_glade_xml, "chkbntMute");
+ ui_vbox_color_controls = glade_xml_get_widget (ui_glade_xml,
+ "vboxColorControls");
+ ui_rdbntImageCapture = glade_xml_get_widget (ui_glade_xml,
+ "radiobuttonImageCapture");
+ ui_rdbntVideoCapture = glade_xml_get_widget (ui_glade_xml,
+ "radiobuttonVideoCapture");
+
+ ui_menuitem_photography = glade_xml_get_widget (ui_glade_xml,
+ "menuitemPhotography");
+ ui_menuitem_capture = glade_xml_get_widget (ui_glade_xml, "menuitemCapture");
+#ifdef HAVE_GST_PHOTO_IFACE_H
+ if (ui_menuitem_photography) {
+ fill_photography_menu (GTK_MENU_ITEM (ui_menuitem_photography));
+ }
+
+ if (ui_menuitem_capture) {
+ fill_capture_menu (GTK_MENU_ITEM (ui_menuitem_capture));
+ }
+#endif
+ if (!(ui_main_window && ui_drawing && ui_chk_continous && ui_bnt_shot &&
+ ui_bnt_pause && ui_cbbox_resolution && ui_chk_mute &&
+ ui_vbox_color_controls && ui_rdbntImageCapture &&
+ ui_rdbntVideoCapture && ui_chk_rawmsg && ui_menuitem_photography &&
+ ui_menuitem_capture)) {
+ fprintf (stderr, "Some widgets couldn't be created\n");
+ fflush (stderr);
+ goto done;
+ }
+
+ /* Avoid GTK double buffering so the X overlay draws directly. */
+ gtk_widget_set_double_buffered (ui_drawing, FALSE);
+ ui_connect_signals ();
+ gtk_widget_show_all (ui_main_window);
+ capture_mode_config_gui ();
+ return TRUE;
+done:
+ return FALSE;
+}
+
+/*
+ * main
+ */
+
+/* Entry point: initialise GStreamer and GTK, build the UI, create the
+ * default pipeline (no post-processing) and run the main loop; always
+ * clean up the pipeline and filename buffer on exit. Returns -1 if the
+ * UI could not be created, 0 otherwise. */
+int
+main (int argc, char *argv[])
+{
+ int ret = 0;
+
+ gst_init (&argc, &argv);
+ gtk_init (&argc, &argv);
+
+ filename = g_string_new_len ("", 16);
+
+ /* create UI */
+ if (!ui_create ()) {
+ ret = -1;
+ goto done;
+ }
+ /* create pipeline and run */
+ if (me_gst_setup_pipeline (NULL, NULL)) {
+ gtk_main ();
+ }
+
+done:
+ me_gst_cleanup_element ();
+ g_string_free (filename, TRUE);
+ return ret;
+}
diff --git a/tests/examples/camerabin/gst-camera.glade b/tests/examples/camerabin/gst-camera.glade
new file mode 100644
index 00000000..5230c832
--- /dev/null
+++ b/tests/examples/camerabin/gst-camera.glade
@@ -0,0 +1,397 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<!DOCTYPE glade-interface SYSTEM "glade-2.0.dtd">
+<!--*- mode: xml -*-->
+<glade-interface>
+ <widget class="GtkWindow" id="windowMain">
+ <property name="events">GDK_POINTER_MOTION_MASK | GDK_POINTER_MOTION_HINT_MASK | GDK_BUTTON_PRESS_MASK | GDK_BUTTON_RELEASE_MASK</property>
+ <property name="title" translatable="yes">gst-camera</property>
+ <signal name="delete_event" handler="on_windowMain_delete_event"/>
+ <child>
+ <widget class="GtkVBox" id="vboxMain">
+ <property name="visible">True</property>
+ <child>
+ <widget class="GtkHBox" id="hbox2">
+ <property name="visible">True</property>
+ <child>
+ <widget class="GtkMenuBar" id="menubar1">
+ <property name="visible">True</property>
+ <child>
+ <widget class="GtkMenuItem" id="menuitemPhotography">
+ <property name="visible">True</property>
+ <property name="label" translatable="yes">Photography</property>
+ <property name="use_underline">True</property>
+ <child>
+ <widget class="GtkMenu" id="menu1">
+ <property name="visible">True</property>
+ </widget>
+ </child>
+ </widget>
+ </child>
+ <child>
+ <widget class="GtkMenuItem" id="menuitemCapture">
+ <property name="visible">True</property>
+ <property name="label" translatable="yes">Capture</property>
+ <property name="use_underline">True</property>
+ </widget>
+ </child>
+ </widget>
+ <packing>
+ <property name="expand">False</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkComboBox" id="comboboxResolution">
+ <property name="visible">True</property>
+ <property name="items" translatable="yes"></property>
+ <signal name="changed" handler="on_comboboxResolution_changed"/>
+ </widget>
+ <packing>
+ <property name="position">1</property>
+ </packing>
+ </child>
+ </widget>
+ <packing>
+ <property name="expand">False</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkHBox" id="hboxMode">
+ <property name="visible">True</property>
+ <child>
+ <widget class="GtkRadioButton" id="radiobuttonImageCapture">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="label" translatable="yes">Image capture</property>
+ <property name="use_underline">True</property>
+ <property name="response_id">0</property>
+ <property name="active">True</property>
+ <property name="draw_indicator">True</property>
+ <signal name="toggled" handler="on_radiobuttonImageCapture_toggled"/>
+ </widget>
+ </child>
+ <child>
+ <widget class="GtkRadioButton" id="radiobuttonVideoCapture">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="label" translatable="yes">Video rec</property>
+ <property name="use_underline">True</property>
+ <property name="response_id">0</property>
+ <property name="active">True</property>
+ <property name="draw_indicator">True</property>
+ <property name="group">radiobuttonImageCapture</property>
+ <signal name="toggled" handler="on_radiobuttonVideoCapture_toggled"/>
+ </widget>
+ <packing>
+ <property name="position">1</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkButton" id="buttonShot">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="label" translatable="yes">Shot</property>
+ <property name="use_underline">True</property>
+ <property name="response_id">0</property>
+ <signal name="clicked" handler="on_buttonShot_clicked"/>
+ </widget>
+ <packing>
+ <property name="position">2</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkButton" id="buttonPause">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="label" translatable="yes">Pause</property>
+ <property name="use_underline">True</property>
+ <property name="response_id">0</property>
+ <signal name="clicked" handler="on_buttonPause_clicked"/>
+ </widget>
+ <packing>
+ <property name="position">3</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkCheckButton" id="chkbntMute">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="label" translatable="yes">mute</property>
+ <property name="use_underline">True</property>
+ <property name="response_id">0</property>
+ <property name="draw_indicator">True</property>
+ <signal name="toggled" handler="on_chkbntMute_toggled"/>
+ </widget>
+ <packing>
+ <property name="position">4</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkCheckButton" id="chkbntContinous">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+          <property name="label" translatable="yes">continuous</property>
+ <property name="use_underline">True</property>
+ <property name="response_id">0</property>
+ <property name="draw_indicator">True</property>
+ </widget>
+ <packing>
+ <property name="position">5</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkCheckButton" id="chkbtnRawMsg">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="tooltip" translatable="yes">Send raw image after still image capture as gstreamer message</property>
+ <property name="label" translatable="yes">raw msg</property>
+ <property name="use_underline">True</property>
+ <property name="response_id">0</property>
+ <property name="draw_indicator">True</property>
+ <signal name="toggled" handler="on_chkbtnRawMsg_toggled"/>
+ </widget>
+ <packing>
+ <property name="position">6</property>
+ </packing>
+ </child>
+ </widget>
+ <packing>
+ <property name="expand">False</property>
+ <property name="position">1</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkTable" id="tableOptions">
+ <property name="visible">True</property>
+ <property name="n_rows">1</property>
+ <property name="n_columns">3</property>
+ <child>
+ <widget class="GtkVBox" id="vboxVidEffect">
+ <property name="visible">True</property>
+ <child>
+ <widget class="GtkLabel" id="labelVidEff">
+ <property name="visible">True</property>
+ <property name="label" translatable="yes">video effects:</property>
+ </widget>
+ </child>
+ <child>
+ <widget class="GtkRadioButton" id="rbBntVidEffNone">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="label" translatable="yes">none</property>
+ <property name="use_underline">True</property>
+ <property name="response_id">0</property>
+ <property name="active">True</property>
+ <property name="draw_indicator">True</property>
+ <signal name="toggled" handler="on_rbBntVidEffNone_toggled"/>
+ </widget>
+ <packing>
+ <property name="position">1</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkRadioButton" id="rbBntVidEffEdge">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="label" translatable="yes">edged</property>
+ <property name="use_underline">True</property>
+ <property name="response_id">0</property>
+ <property name="active">True</property>
+ <property name="draw_indicator">True</property>
+ <property name="group">rbBntVidEffNone</property>
+ <signal name="toggled" handler="on_rbBntVidEffEdge_toggled"/>
+ </widget>
+ <packing>
+ <property name="position">2</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkRadioButton" id="rbBntVidEffAging">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="label" translatable="yes">aging</property>
+ <property name="use_underline">True</property>
+ <property name="response_id">0</property>
+ <property name="active">True</property>
+ <property name="draw_indicator">True</property>
+ <property name="group">rbBntVidEffNone</property>
+ <signal name="toggled" handler="on_rbBntVidEffAging_toggled"/>
+ </widget>
+ <packing>
+ <property name="position">3</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkRadioButton" id="rbBntVidEffDice">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="label" translatable="yes">dice</property>
+ <property name="use_underline">True</property>
+ <property name="response_id">0</property>
+ <property name="active">True</property>
+ <property name="draw_indicator">True</property>
+ <property name="group">rbBntVidEffNone</property>
+ <signal name="toggled" handler="on_rbBntVidEffDice_toggled"/>
+ </widget>
+ <packing>
+ <property name="position">4</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkRadioButton" id="rbBntVidEffWarp">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="label" translatable="yes">warp</property>
+ <property name="use_underline">True</property>
+ <property name="response_id">0</property>
+ <property name="active">True</property>
+ <property name="draw_indicator">True</property>
+ <property name="group">rbBntVidEffNone</property>
+ <signal name="toggled" handler="on_rbBntVidEffWarp_toggled"/>
+ </widget>
+ <packing>
+ <property name="position">5</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkRadioButton" id="rbBntVidEffShaga">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="label" translatable="yes">shagadelic</property>
+ <property name="use_underline">True</property>
+ <property name="response_id">0</property>
+ <property name="active">True</property>
+ <property name="draw_indicator">True</property>
+ <property name="group">rbBntVidEffNone</property>
+ <signal name="toggled" handler="on_rbBntVidEffShagadelic_toggled"/>
+ </widget>
+ <packing>
+ <property name="position">6</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkRadioButton" id="rbBntVidEffVertigo">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="label" translatable="yes">vertigo</property>
+ <property name="use_underline">True</property>
+ <property name="response_id">0</property>
+ <property name="active">True</property>
+ <property name="draw_indicator">True</property>
+ <property name="group">rbBntVidEffNone</property>
+ <signal name="toggled" handler="on_rbBntVidEffVertigo_toggled"/>
+ </widget>
+ <packing>
+ <property name="position">7</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkRadioButton" id="rbBntVidEffRev">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="label" translatable="yes">rev</property>
+ <property name="use_underline">True</property>
+ <property name="response_id">0</property>
+ <property name="active">True</property>
+ <property name="draw_indicator">True</property>
+ <property name="group">rbBntVidEffNone</property>
+ <signal name="toggled" handler="on_rbBntVidEffRev_toggled"/>
+ </widget>
+ <packing>
+ <property name="position">8</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkRadioButton" id="rbBntVidEffQuark">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="label" translatable="yes">quark</property>
+ <property name="use_underline">True</property>
+ <property name="response_id">0</property>
+ <property name="active">True</property>
+ <property name="draw_indicator">True</property>
+ <property name="group">rbBntVidEffNone</property>
+ <signal name="toggled" handler="on_rbBntVidEffQuark_toggled"/>
+ </widget>
+ <packing>
+ <property name="position">9</property>
+ </packing>
+ </child>
+ </widget>
+ <packing>
+ <property name="x_options"></property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkScrolledWindow" id="scrlWndColorControls">
+ <property name="width_request">200</property>
+ <property name="height_request">200</property>
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
+ <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
+ <child>
+ <widget class="GtkViewport" id="viewportColorControls">
+ <property name="visible">True</property>
+ <property name="events">GDK_POINTER_MOTION_MASK | GDK_POINTER_MOTION_HINT_MASK | GDK_BUTTON_PRESS_MASK | GDK_BUTTON_RELEASE_MASK</property>
+ <child>
+ <widget class="GtkVBox" id="vboxColorControls">
+ <property name="visible">True</property>
+ </widget>
+ </child>
+ </widget>
+ </child>
+ </widget>
+ <packing>
+ <property name="left_attach">2</property>
+ <property name="right_attach">3</property>
+ <property name="x_options"></property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkAspectFrame" id="drawingareaFrame">
+ <property name="visible">True</property>
+ <property name="label_xalign">0</property>
+ <property name="shadow_type">GTK_SHADOW_NONE</property>
+ <property name="ratio">1</property>
+ <property name="obey_child">False</property>
+ <child>
+ <widget class="GtkAlignment" id="alignment1">
+ <property name="visible">True</property>
+ <child>
+ <widget class="GtkDrawingArea" id="drawingareaView">
+ <property name="visible">True</property>
+ <property name="events">GDK_POINTER_MOTION_MASK | GDK_POINTER_MOTION_HINT_MASK | GDK_BUTTON_PRESS_MASK | GDK_BUTTON_RELEASE_MASK</property>
+ <signal name="configure_event" handler="on_drawingareaView_configure_event"/>
+ </widget>
+ </child>
+ </widget>
+ </child>
+ </widget>
+ <packing>
+ <property name="left_attach">1</property>
+ <property name="right_attach">2</property>
+ </packing>
+ </child>
+ </widget>
+ <packing>
+ <property name="position">2</property>
+ </packing>
+ </child>
+ <child>
+ <widget class="GtkHScale" id="hscaleZoom">
+ <property name="visible">True</property>
+ <property name="can_focus">True</property>
+ <property name="adjustment">100 100 1100 10 100 100</property>
+ <property name="digits">0</property>
+ <property name="value_pos">GTK_POS_LEFT</property>
+ <signal name="value_changed" handler="on_hscaleZoom_value_changed"/>
+ </widget>
+ <packing>
+ <property name="expand">False</property>
+ <property name="position">3</property>
+ </packing>
+ </child>
+ </widget>
+ </child>
+ </widget>
+</glade-interface>