summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rwxr-xr-xautogen.sh4
m---------common0
-rw-r--r--configure.ac8
-rw-r--r--docs/plugins/Makefile.am2
-rw-r--r--docs/plugins/gst-plugins-bad-plugins-docs.sgml2
-rw-r--r--docs/plugins/gst-plugins-bad-plugins-sections.txt29
-rw-r--r--docs/plugins/gst-plugins-bad-plugins.args29
-rw-r--r--docs/plugins/gst-plugins-bad-plugins.hierarchy1
-rw-r--r--docs/plugins/inspect/plugin-debugutilsbad.xml21
-rw-r--r--ext/Makefile.am9
-rw-r--r--ext/schroedinger/Makefile.am28
-rw-r--r--ext/schroedinger/gstschro.c54
-rw-r--r--ext/schroedinger/gstschrodec.c722
-rw-r--r--ext/schroedinger/gstschroenc.c736
-rw-r--r--ext/schroedinger/gstschroparse.c610
-rw-r--r--ext/schroedinger/gstschroutils.c149
-rw-r--r--ext/schroedinger/gstschroutils.h34
-rw-r--r--ext/sdl/sdlvideosink.c2
-rw-r--r--gst-libs/gst/Makefile.am4
-rw-r--r--gst-libs/gst/video/Makefile.am24
-rw-r--r--gst-libs/gst/video/gstbasevideocodec.c565
-rw-r--r--gst-libs/gst/video/gstbasevideocodec.h150
-rw-r--r--gst-libs/gst/video/gstbasevideodecoder.c1173
-rw-r--r--gst-libs/gst/video/gstbasevideodecoder.h166
-rw-r--r--gst-libs/gst/video/gstbasevideoencoder.c502
-rw-r--r--gst-libs/gst/video/gstbasevideoencoder.h123
-rw-r--r--gst-libs/gst/video/gstbasevideoparse.c870
-rw-r--r--gst-libs/gst/video/gstbasevideoparse.h141
-rw-r--r--gst-libs/gst/video/gstbasevideoutils.c162
-rw-r--r--gst-libs/gst/video/gstbasevideoutils.h95
-rw-r--r--gst/camerabin/camerabinpreview.c15
-rw-r--r--gst/camerabin/camerabinvideo.c8
-rw-r--r--gst/camerabin/gstcamerabin.c87
-rw-r--r--gst/debugutils/Makefile.am4
-rw-r--r--gst/debugutils/debugutilsbad.c5
-rw-r--r--gst/debugutils/gstcapssetter.c350
-rw-r--r--gst/debugutils/gstcapssetter.h62
-rw-r--r--gst/freeze/Makefile.am1
-rw-r--r--gst/frei0r/gstfrei0r.c47
-rw-r--r--gst/frei0r/gstfrei0rfilter.c27
-rw-r--r--gst/frei0r/gstfrei0rfilter.h2
-rw-r--r--gst/frei0r/gstfrei0rmixer.c29
-rw-r--r--gst/frei0r/gstfrei0rsrc.c26
-rw-r--r--gst/mpegdemux/mpegtspacketizer.c37
-rw-r--r--gst/rtpmanager/Makefile.am2
-rw-r--r--gst/rtpmanager/gstrtpbin.c240
-rw-r--r--gst/rtpmanager/gstrtpbin.h2
-rw-r--r--gst/rtpmanager/gstrtpclient.c484
-rw-r--r--gst/rtpmanager/gstrtpclient.h56
-rw-r--r--gst/rtpmanager/gstrtpmanager.c5
-rw-r--r--gst/rtpmanager/gstrtpsession.c112
-rw-r--r--gst/rtpmanager/rtpsession.c153
-rw-r--r--gst/rtpmanager/rtpsession.h3
-rw-r--r--gst/rtpmanager/rtpsource.c99
-rw-r--r--gst/rtpmanager/rtpsource.h3
-rw-r--r--gst/shapewipe/gstshapewipe.c279
-rw-r--r--gst/shapewipe/gstshapewipe.h1
-rw-r--r--tests/check/elements/rtpbin.c84
-rw-r--r--tests/examples/Makefile.am2
-rw-r--r--tests/examples/camerabin/gst-camera-perf.c68
60 files changed, 7475 insertions, 1233 deletions
diff --git a/autogen.sh b/autogen.sh
index 61fb4d7c..8b88b9bf 100755
--- a/autogen.sh
+++ b/autogen.sh
@@ -33,11 +33,11 @@ CONFIGURE_DEF_OPT='--enable-maintainer-mode --enable-gtk-doc'
autogen_options $@
-echo -n "+ check for build tools"
+printf "+ check for build tools"
if test ! -z "$NOCHECK"; then echo " skipped"; else echo; fi
version_check "autoconf" "$AUTOCONF autoconf autoconf-2.54 autoconf-2.53 autoconf-2.52" \
"ftp://ftp.gnu.org/pub/gnu/autoconf/" 2 52 || DIE=1
-version_check "automake" "$AUTOMAKE automake automake-1.9 automake-1.7 automake-1.6 automake-1.5" \
+version_check "automake" "$AUTOMAKE automake automake-1.11 automake-1.10 automake-1.9 automake-1.7 automake-1.6 automake-1.5" \
"ftp://ftp.gnu.org/pub/gnu/automake/" 1 7 || DIE=1
version_check "autopoint" "autopoint" \
"ftp://ftp.gnu.org/pub/gnu/gettext/" 0 17 || DIE=1
diff --git a/common b/common
-Subproject f810030e4692aa43ae84f6649730fe25558a973
+Subproject 5845b632c99d8f0ab863bd955a9568d7937108f
diff --git a/configure.ac b/configure.ac
index ac22aef5..fa52dde5 100644
--- a/configure.ac
+++ b/configure.ac
@@ -1468,6 +1468,12 @@ AG_GST_CHECK_FEATURE(VDPAU, [VDPAU], vdpau, [
fi
])
+dnl *** schroedinger ***
+translit(dnm, m, l) AM_CONDITIONAL(USE_SCHRO, true)
+AG_GST_CHECK_FEATURE(SCHRO, [Schroedinger video codec], schro, [
+ AG_GST_PKG_CHECK_MODULES(SCHRO, schroedinger-1.0 >= 1.0.7)
+])
+
else
dnl not building plugins with external dependencies,
@@ -1645,6 +1651,7 @@ gst-libs/gst/Makefile
gst-libs/gst/dshow/Makefile
gst-libs/gst/interfaces/Makefile
gst-libs/gst/signalprocessor/Makefile
+gst-libs/gst/video/Makefile
sys/Makefile
sys/dshowdecwrapper/Makefile
sys/acmenc/Makefile
@@ -1707,6 +1714,7 @@ ext/nas/Makefile
ext/neon/Makefile
ext/ofa/Makefile
ext/resindvd/Makefile
+ext/schroedinger/Makefile
ext/sdl/Makefile
ext/sndfile/Makefile
ext/soundtouch/Makefile
diff --git a/docs/plugins/Makefile.am b/docs/plugins/Makefile.am
index 03b2e71c..2bda68f2 100644
--- a/docs/plugins/Makefile.am
+++ b/docs/plugins/Makefile.am
@@ -124,6 +124,7 @@ EXTRA_HFILES = \
$(top_srcdir)/gst/dccp/gstdccpserversink.h \
$(top_srcdir)/gst/dccp/gstdccpserversrc.h \
$(top_srcdir)/gst/debugutils/fpsdisplaysink.h \
+ $(top_srcdir)/gst/debugutils/gstcapssetter.h \
$(top_srcdir)/gst/dtmf/gstdtmfsrc.h \
$(top_srcdir)/gst/dtmf/gstrtpdtmfsrc.h \
$(top_srcdir)/gst/dtmf/gstrtpdtmfdepay.h \
@@ -137,7 +138,6 @@ EXTRA_HFILES = \
$(top_srcdir)/gst/rawparse/gstaudioparse.h \
$(top_srcdir)/gst/rawparse/gstvideoparse.h \
$(top_srcdir)/gst/rtpmanager/gstrtpbin.h \
- $(top_srcdir)/gst/rtpmanager/gstrtpclient.h \
$(top_srcdir)/gst/rtpmanager/gstrtpjitterbuffer.h \
$(top_srcdir)/gst/rtpmanager/gstrtpptdemux.h \
$(top_srcdir)/gst/rtpmanager/gstrtpsession.h \
diff --git a/docs/plugins/gst-plugins-bad-plugins-docs.sgml b/docs/plugins/gst-plugins-bad-plugins-docs.sgml
index 959c9cb1..21ac6101 100644
--- a/docs/plugins/gst-plugins-bad-plugins-docs.sgml
+++ b/docs/plugins/gst-plugins-bad-plugins-docs.sgml
@@ -25,6 +25,7 @@
<xi:include href="xml/element-amrwbparse.xml" />
<xi:include href="xml/element-autoconvert.xml" />
<xi:include href="xml/element-camerabin.xml" />
+ <xi:include href="xml/element-capssetter.xml" />
<xi:include href="xml/element-celtdec.xml" />
<xi:include href="xml/element-celtenc.xml" />
<!--xi:include href="xml/element-dc1394.xml" /-->
@@ -41,7 +42,6 @@
<xi:include href="xml/element-festival.xml" />
<xi:include href="xml/element-fpsdisplaysink.xml" />
<xi:include href="xml/element-gstrtpbin.xml" />
- <xi:include href="xml/element-gstrtpclient.xml" />
<xi:include href="xml/element-gstrtpjitterbuffer.xml" />
<xi:include href="xml/element-gstrtpptdemux.xml" />
<xi:include href="xml/element-gstrtpsession.xml" />
diff --git a/docs/plugins/gst-plugins-bad-plugins-sections.txt b/docs/plugins/gst-plugins-bad-plugins-sections.txt
index 70afb729..e5742a85 100644
--- a/docs/plugins/gst-plugins-bad-plugins-sections.txt
+++ b/docs/plugins/gst-plugins-bad-plugins-sections.txt
@@ -112,6 +112,20 @@ gst_camerabin_get_type
</SECTION>
<SECTION>
+<FILE>element-capssetter</FILE>
+<TITLE>capssetter</TITLE>
+GstCapsSetter
+<SUBSECTION Standard>
+GstCapsSetterClass
+GST_TYPE_CAPS_SETTER
+GST_CAPS_SETTER
+GST_IS_CAPS_SETTER
+GST_CAPS_SETTER_CLASS
+GST_IS_CAPS_SETTER_CLASS
+gst_caps_setter_get_type
+</SECTION>
+
+<SECTION>
<FILE>element-celtdec</FILE>
<TITLE>celtdec</TITLE>
GstCeltDec
@@ -717,21 +731,6 @@ GST_IS_RTP_BIN_CLASS
</SECTION>
<SECTION>
-<FILE>element-gstrtpclient</FILE>
-<TITLE>gstrtpclient</TITLE>
-GstRtpClient
-<SUBSECTION Standard>
-GstRtpClientClass
-GstRtpClientPrivate
-GST_RTP_CLIENT
-GST_IS_RTP_CLIENT
-GST_TYPE_RTP_CLIENT
-gst_rtp_client_get_type
-GST_RTP_CLIENT_CLASS
-GST_IS_RTP_CLIENT_CLASS
-</SECTION>
-
-<SECTION>
<FILE>element-gstrtpjitterbuffer</FILE>
<TITLE>gstrtpjitterbuffer</TITLE>
GstRtpJitterBuffer
diff --git a/docs/plugins/gst-plugins-bad-plugins.args b/docs/plugins/gst-plugins-bad-plugins.args
index b13244f2..82bcfd30 100644
--- a/docs/plugins/gst-plugins-bad-plugins.args
+++ b/docs/plugins/gst-plugins-bad-plugins.args
@@ -23503,3 +23503,32 @@
<DEFAULT>TRUE</DEFAULT>
</ARG>
+<ARG>
+<NAME>GstCapsSetter::caps</NAME>
+<TYPE>GstCaps*</TYPE>
+<RANGE></RANGE>
+<FLAGS>rw</FLAGS>
+<NICK>Merge caps</NICK>
+<BLURB>Merge these caps (thereby overwriting) in the stream.</BLURB>
+<DEFAULT></DEFAULT>
+</ARG>
+
+<ARG>
+<NAME>GstCapsSetter::join</NAME>
+<TYPE>gboolean</TYPE>
+<RANGE></RANGE>
+<FLAGS>rw</FLAGS>
+<NICK>Join</NICK>
+<BLURB>Match incoming caps' mime-type to mime-type of provided caps.</BLURB>
+<DEFAULT>TRUE</DEFAULT>
+</ARG>
+
+<ARG>
+<NAME>GstCapsSetter::replace</NAME>
+<TYPE>gboolean</TYPE>
+<RANGE></RANGE>
+<FLAGS>rw</FLAGS>
+<NICK>Replace</NICK>
+<BLURB>Drop fields of incoming caps.</BLURB>
+<DEFAULT>FALSE</DEFAULT>
+</ARG>
diff --git a/docs/plugins/gst-plugins-bad-plugins.hierarchy b/docs/plugins/gst-plugins-bad-plugins.hierarchy
index a1204ca6..daf967df 100644
--- a/docs/plugins/gst-plugins-bad-plugins.hierarchy
+++ b/docs/plugins/gst-plugins-bad-plugins.hierarchy
@@ -76,6 +76,7 @@ GObject
GstVideoMark
GstIIR
GstLegacyresample
+ GstCapsSetter
GstSignalProcessor
ladspa-noise-white
ladspa-delay-5s
diff --git a/docs/plugins/inspect/plugin-debugutilsbad.xml b/docs/plugins/inspect/plugin-debugutilsbad.xml
index 0aabef1a..ff6d4b92 100644
--- a/docs/plugins/inspect/plugin-debugutilsbad.xml
+++ b/docs/plugins/inspect/plugin-debugutilsbad.xml
@@ -10,6 +10,27 @@
<origin>Unknown package origin</origin>
<elements>
<element>
+ <name>capssetter</name>
+ <longname>CapsSetter</longname>
+ <class>Generic</class>
+ <description>Set/merge caps on stream</description>
+ <author>Mark Nauwelaerts &lt;mnauw@users.sourceforge.net&gt;</author>
+ <pads>
+ <caps>
+ <name>src</name>
+ <direction>source</direction>
+ <presence>always</presence>
+ <details>ANY</details>
+ </caps>
+ <caps>
+ <name>sink</name>
+ <direction>sink</direction>
+ <presence>always</presence>
+ <details>ANY</details>
+ </caps>
+ </pads>
+ </element>
+ <element>
<name>fpsdisplaysink</name>
<longname>Measure and show framerate on videosink</longname>
<class>Sink/Video</class>
diff --git a/ext/Makefile.am b/ext/Makefile.am
index 8d395057..a2ecd18d 100644
--- a/ext/Makefile.am
+++ b/ext/Makefile.am
@@ -264,6 +264,12 @@ endif
POLYP_DIR=
# endif
+if USE_SCHRO
+SCHRO_DIR=schroedinger
+else
+SCHRO_DIR=
+endif
+
if USE_SDL
SDL_DIR=sdl
else
@@ -373,6 +379,7 @@ SUBDIRS=\
$(NEON_DIR) \
$(OFA_DIR) \
$(POLYP_DIR) \
+ $(SCHRO_DIR) \
$(SDL_DIR) \
$(SHOUT_DIR) \
$(SMOOTHWAVE_DIR) \
@@ -405,6 +412,7 @@ DIST_SUBDIRS = \
jack \
jp2k \
libmms \
+ lv2 \
dts \
divx \
modplug \
@@ -419,6 +427,7 @@ DIST_SUBDIRS = \
neon \
ofa \
resindvd \
+ schroedinger \
sdl \
sndfile \
soundtouch \
diff --git a/ext/schroedinger/Makefile.am b/ext/schroedinger/Makefile.am
new file mode 100644
index 00000000..6a895c51
--- /dev/null
+++ b/ext/schroedinger/Makefile.am
@@ -0,0 +1,28 @@
+
+plugin_LTLIBRARIES = libgstschro.la
+
+noinst_HEADERS = \
+ gstschroutils.h
+
+libgstschro_la_SOURCES = \
+ gstschro.c \
+ gstschrodec.c \
+ gstschroenc.c \
+ gstschroparse.c \
+ gstschroutils.c
+libgstschro_la_CFLAGS = \
+ $(GST_CFLAGS) \
+ $(GST_BASE_CFLAGS) \
+ $(GST_PLUGINS_BASE_CFLAGS) \
+ -DGST_USE_UNSTABLE_API \
+ $(SCHRO_CFLAGS)
+libgstschro_la_LIBADD = \
+ $(GST_LIBS) \
+ $(GST_BASE_LIBS) -lgstvideo-@GST_MAJORMINOR@ \
+ $(top_builddir)/gst-libs/gst/video/libgstbasevideo-@GST_MAJORMINOR@.la \
+ $(GST_PLUGINS_BASE_LIBS) \
+ $(SCHRO_LIBS)
+libgstschro_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
+libgstschro_la_LIBTOOLFLAGS = --tag=disable-static
+
+
diff --git a/ext/schroedinger/gstschro.c b/ext/schroedinger/gstschro.c
new file mode 100644
index 00000000..44d9226e
--- /dev/null
+++ b/ext/schroedinger/gstschro.c
@@ -0,0 +1,54 @@
+/* GStreamer
+ * Copyright (C) 2005 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <schroedinger/schro.h>
+
+GType gst_schro_enc_get_type (void);
+GType gst_schro_dec_get_type (void);
+GType gst_schro_parse_get_type (void);
+
+GST_DEBUG_CATEGORY (schro_debug);
+#define GST_CAT_DEFAULT schro_debug
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ schro_init ();
+
+ GST_DEBUG_CATEGORY_INIT (schro_debug, "schro", 0, "Schroedinger");
+ gst_element_register (plugin, "schrodec", GST_RANK_PRIMARY,
+ gst_schro_dec_get_type ());
+ gst_element_register (plugin, "schroparse", GST_RANK_NONE,
+ gst_schro_parse_get_type ());
+ gst_element_register (plugin, "schroenc", GST_RANK_PRIMARY,
+ gst_schro_enc_get_type ());
+
+ return TRUE;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ "schro",
+ "Schroedinger plugin",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/ext/schroedinger/gstschrodec.c b/ext/schroedinger/gstschrodec.c
new file mode 100644
index 00000000..1dcca7ac
--- /dev/null
+++ b/ext/schroedinger/gstschrodec.c
@@ -0,0 +1,722 @@
+/* Schrodinger
+ * Copyright (C) 2006 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/video/video.h>
+#include <gst/video/gstbasevideodecoder.h>
+#include <string.h>
+#include <schroedinger/schro.h>
+#include <math.h>
+#include "gstschroutils.h"
+
+#include <schroedinger/schroparse.h>
+
+GST_DEBUG_CATEGORY_EXTERN (schro_debug);
+#define GST_CAT_DEFAULT schro_debug
+
+#define GST_TYPE_SCHRO_DEC \
+ (gst_schro_dec_get_type())
+#define GST_SCHRO_DEC(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SCHRO_DEC,GstSchroDec))
+#define GST_SCHRO_DEC_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SCHRO_DEC,GstSchroDecClass))
+#define GST_IS_SCHRO_DEC(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SCHRO_DEC))
+#define GST_IS_SCHRO_DEC_CLASS(obj) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SCHRO_DEC))
+
+typedef struct _GstSchroDec GstSchroDec;
+typedef struct _GstSchroDecClass GstSchroDecClass;
+
+struct _GstSchroDec
+{
+ GstBaseVideoDecoder base_video_decoder;
+
+ SchroDecoder *decoder;
+
+ GstBuffer *seq_header_buffer;
+};
+
+struct _GstSchroDecClass
+{
+ GstBaseVideoDecoder base_video_decoder_class;
+};
+
+
+/* GstSchroDec signals and args */
+enum
+{
+ LAST_SIGNAL
+};
+
+enum
+{
+ ARG_0
+};
+
+static void gst_schro_dec_finalize (GObject * object);
+static void gst_schro_dec_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_schro_dec_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static gboolean gst_schro_dec_sink_query (GstPad * pad, GstQuery * query);
+
+static gboolean gst_schro_dec_start (GstBaseVideoDecoder * dec);
+static gboolean gst_schro_dec_stop (GstBaseVideoDecoder * dec);
+static gboolean gst_schro_dec_reset (GstBaseVideoDecoder * dec);
+static GstFlowReturn gst_schro_dec_parse_data (GstBaseVideoDecoder *
+ base_video_decoder, gboolean at_eos);
+static int gst_schro_dec_scan_for_sync (GstBaseVideoDecoder *
+ base_video_decoder, gboolean at_eos, int offset, int n);
+static GstFlowReturn gst_schro_dec_handle_frame (GstBaseVideoDecoder * decoder,
+ GstVideoFrame * frame);
+static GstFlowReturn gst_schro_dec_finish (GstBaseVideoDecoder *
+ base_video_decoder, GstVideoFrame * frame);
+static void gst_schrodec_send_tags (GstSchroDec * schro_dec);
+
+static GstStaticPadTemplate gst_schro_dec_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-dirac")
+ );
+
+static GstStaticPadTemplate gst_schro_dec_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ I420, YUY2, AYUV }"))
+ );
+
+GST_BOILERPLATE (GstSchroDec, gst_schro_dec, GstBaseVideoDecoder,
+ GST_TYPE_BASE_VIDEO_DECODER);
+
+static void
+gst_schro_dec_base_init (gpointer g_class)
+{
+ static GstElementDetails compress_details =
+ GST_ELEMENT_DETAILS ("Dirac Decoder",
+ "Codec/Decoder/Video",
+ "Decode Dirac streams",
+ "David Schleef <ds@schleef.org>");
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_schro_dec_src_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_schro_dec_sink_template));
+
+ gst_element_class_set_details (element_class, &compress_details);
+}
+
+static void
+gst_schro_dec_class_init (GstSchroDecClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstBaseVideoDecoderClass *base_video_decoder_class;
+ GstElementClass *element_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ element_class = GST_ELEMENT_CLASS (klass);
+ base_video_decoder_class = GST_BASE_VIDEO_DECODER_CLASS (klass);
+
+ gobject_class->set_property = gst_schro_dec_set_property;
+ gobject_class->get_property = gst_schro_dec_get_property;
+ gobject_class->finalize = gst_schro_dec_finalize;
+
+ base_video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_schro_dec_start);
+ base_video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_schro_dec_stop);
+ base_video_decoder_class->reset = GST_DEBUG_FUNCPTR (gst_schro_dec_reset);
+ base_video_decoder_class->parse_data =
+ GST_DEBUG_FUNCPTR (gst_schro_dec_parse_data);
+ base_video_decoder_class->scan_for_sync =
+ GST_DEBUG_FUNCPTR (gst_schro_dec_scan_for_sync);
+ base_video_decoder_class->handle_frame =
+ GST_DEBUG_FUNCPTR (gst_schro_dec_handle_frame);
+ base_video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_schro_dec_finish);
+}
+
+static void
+gst_schro_dec_init (GstSchroDec * schro_dec, GstSchroDecClass * klass)
+{
+ GST_DEBUG ("gst_schro_dec_init");
+
+ gst_pad_set_query_function (GST_BASE_VIDEO_CODEC_SINK_PAD (schro_dec),
+ gst_schro_dec_sink_query);
+
+ schro_dec->decoder = schro_decoder_new ();
+}
+
+#define OGG_DIRAC_GRANULE_SHIFT 22
+#define OGG_DIRAC_GRANULE_LOW_MASK ((1ULL<<OGG_DIRAC_GRANULE_SHIFT)-1)
+
+static gint64
+granulepos_to_frame (gint64 granulepos)
+{
+ guint64 pt;
+ int dist_h;
+ int dist_l;
+ int dist;
+ int delay;
+ guint64 dt;
+
+ if (granulepos == -1)
+ return -1;
+
+ pt = ((granulepos >> 22) + (granulepos & OGG_DIRAC_GRANULE_LOW_MASK)) >> 9;
+ dist_h = (granulepos >> 22) & 0xff;
+ dist_l = granulepos & 0xff;
+ dist = (dist_h << 8) | dist_l;
+ delay = (granulepos >> 9) & 0x1fff;
+ dt = pt - delay;
+
+ return pt >> 1;
+}
+
+static gboolean
+gst_schro_dec_sink_convert (GstPad * pad,
+ GstFormat src_format, gint64 src_value,
+ GstFormat * dest_format, gint64 * dest_value)
+{
+ gboolean res = TRUE;
+ GstSchroDec *dec;
+ GstVideoState *state;
+
+ if (src_format == *dest_format) {
+ *dest_value = src_value;
+ return TRUE;
+ }
+
+ dec = GST_SCHRO_DEC (gst_pad_get_parent (pad));
+
+ /* FIXME: check if we are in a decoding state */
+
+ state = gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER (dec));
+
+ res = FALSE;
+ if (src_format == GST_FORMAT_DEFAULT && *dest_format == GST_FORMAT_TIME) {
+ if (state->fps_d != 0) {
+ *dest_value = gst_util_uint64_scale (granulepos_to_frame (src_value),
+ state->fps_d * GST_SECOND, state->fps_n);
+ res = TRUE;
+ } else {
+ res = FALSE;
+ }
+ }
+
+ gst_object_unref (dec);
+
+ return res;
+}
+
+static gboolean
+gst_schro_dec_sink_query (GstPad * pad, GstQuery * query)
+{
+ GstSchroDec *dec;
+ gboolean res = FALSE;
+
+ dec = GST_SCHRO_DEC (gst_pad_get_parent (pad));
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_CONVERT:
+ {
+ GstFormat src_fmt, dest_fmt;
+ gint64 src_val, dest_val;
+
+ gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
+ res = gst_schro_dec_sink_convert (pad, src_fmt, src_val, &dest_fmt,
+ &dest_val);
+ if (!res)
+ goto error;
+ gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, query);
+ break;
+ }
+done:
+ gst_object_unref (dec);
+
+ return res;
+error:
+ GST_DEBUG_OBJECT (dec, "query failed");
+ goto done;
+}
+
+static gboolean
+gst_schro_dec_start (GstBaseVideoDecoder * dec)
+{
+ if (dec->codec_data) {
+ GST_DEBUG_OBJECT (dec, "codec data!");
+ }
+
+ return TRUE;
+}
+
+static gboolean
+gst_schro_dec_stop (GstBaseVideoDecoder * dec)
+{
+
+ return TRUE;
+}
+
+static gboolean
+gst_schro_dec_reset (GstBaseVideoDecoder * dec)
+{
+ GstSchroDec *schro_dec;
+
+ schro_dec = GST_SCHRO_DEC (dec);
+
+ GST_DEBUG ("reset");
+
+ if (schro_dec->decoder) {
+ schro_decoder_reset (schro_dec->decoder);
+ }
+
+ return TRUE;
+}
+
+static void
+gst_schro_dec_finalize (GObject * object)
+{
+ GstSchroDec *schro_dec;
+
+ g_return_if_fail (GST_IS_SCHRO_DEC (object));
+ schro_dec = GST_SCHRO_DEC (object);
+
+ if (schro_dec->decoder) {
+ schro_decoder_free (schro_dec->decoder);
+ schro_dec->decoder = NULL;
+ }
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_schro_dec_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstSchroDec *src;
+
+ g_return_if_fail (GST_IS_SCHRO_DEC (object));
+ src = GST_SCHRO_DEC (object);
+
+ GST_DEBUG ("gst_schro_dec_set_property");
+ switch (prop_id) {
+ default:
+ break;
+ }
+}
+
+static void
+gst_schro_dec_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstSchroDec *src;
+
+ g_return_if_fail (GST_IS_SCHRO_DEC (object));
+ src = GST_SCHRO_DEC (object);
+
+ switch (prop_id) {
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+parse_sequence_header (GstSchroDec * schro_dec, guint8 * data, int size)
+{
+ SchroVideoFormat video_format;
+ int ret;
+ GstVideoState *state;
+
+ GST_DEBUG_OBJECT (schro_dec, "parse_sequence_header size=%d", size);
+
+ state = gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER (schro_dec));
+
+ schro_dec->seq_header_buffer = gst_buffer_new_and_alloc (size);
+ memcpy (GST_BUFFER_DATA (schro_dec->seq_header_buffer), data, size);
+
+ ret = schro_parse_decode_sequence_header (data + 13, size - 13,
+ &video_format);
+ if (ret) {
+ if (video_format.chroma_format == SCHRO_CHROMA_444) {
+ state->format = GST_VIDEO_FORMAT_AYUV;
+ } else if (video_format.chroma_format == SCHRO_CHROMA_422) {
+ state->format = GST_VIDEO_FORMAT_YUY2;
+ } else if (video_format.chroma_format == SCHRO_CHROMA_420) {
+ state->format = GST_VIDEO_FORMAT_I420;
+ }
+ state->fps_n = video_format.frame_rate_numerator;
+ state->fps_d = video_format.frame_rate_denominator;
+ GST_DEBUG_OBJECT (schro_dec, "Frame rate is %d/%d", state->fps_n,
+ state->fps_d);
+
+ state->width = video_format.width;
+ state->height = video_format.height;
+ GST_DEBUG ("Frame dimensions are %d x %d\n", state->width, state->height);
+
+ state->clean_width = video_format.clean_width;
+ state->clean_height = video_format.clean_height;
+ state->clean_offset_left = video_format.left_offset;
+ state->clean_offset_top = video_format.top_offset;
+
+ state->par_n = video_format.aspect_ratio_numerator;
+ state->par_d = video_format.aspect_ratio_denominator;
+ GST_DEBUG ("Pixel aspect ratio is %d/%d", state->par_n, state->par_d);
+
+ /* FIXME state points to what is actually in the decoder */
+ //gst_base_video_decoder_set_state (GST_BASE_VIDEO_DECODER (schro_dec),
+ // state);
+ } else {
+ GST_WARNING ("Failed to get frame rate from sequence header");
+ }
+
+ gst_schrodec_send_tags (schro_dec);
+}
+
+
+static GstFlowReturn
+gst_schro_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
+ gboolean at_eos)
+{
+ GstSchroDec *schro_decoder;
+ unsigned char header[SCHRO_PARSE_HEADER_SIZE];
+ int next;
+ int prev;
+ int parse_code;
+
+ GST_DEBUG_OBJECT (base_video_decoder, "parse_data");
+
+ schro_decoder = GST_SCHRO_DEC (base_video_decoder);
+
+ if (gst_adapter_available (base_video_decoder->input_adapter) <
+ SCHRO_PARSE_HEADER_SIZE) {
+ return GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA;
+ }
+
+ GST_DEBUG ("available %d",
+ gst_adapter_available (base_video_decoder->input_adapter));
+
+ gst_adapter_copy (base_video_decoder->input_adapter, header, 0,
+ SCHRO_PARSE_HEADER_SIZE);
+
+ parse_code = header[4];
+ next = GST_READ_UINT32_BE (header + 5);
+ prev = GST_READ_UINT32_BE (header + 9);
+
+ GST_DEBUG ("%08x %02x %08x %08x",
+ GST_READ_UINT32_BE (header), parse_code, next, prev);
+
+ if (memcmp (header, "BBCD", 4) != 0 ||
+ (next & 0xf0000000) || (prev & 0xf0000000)) {
+ gst_base_video_decoder_lost_sync (base_video_decoder);
+ return GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA;
+ }
+
+ if (SCHRO_PARSE_CODE_IS_END_OF_SEQUENCE (parse_code)) {
+ GstVideoFrame *frame;
+
+ if (next != 0 && next != SCHRO_PARSE_HEADER_SIZE) {
+ GST_WARNING ("next is not 0 or 13 in EOS packet (%d)", next);
+ }
+
+ gst_base_video_decoder_add_to_frame (base_video_decoder,
+ SCHRO_PARSE_HEADER_SIZE);
+
+ frame = base_video_decoder->current_frame;
+ frame->is_eos = TRUE;
+
+ SCHRO_DEBUG ("eos");
+
+ return gst_base_video_decoder_have_frame (base_video_decoder);
+ }
+
+ if (gst_adapter_available (base_video_decoder->input_adapter) < next) {
+ return GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA;
+ }
+
+ if (SCHRO_PARSE_CODE_IS_SEQ_HEADER (parse_code)) {
+ guint8 *data;
+
+ data = g_malloc (next);
+
+ gst_adapter_copy (base_video_decoder->input_adapter, data, 0, next);
+ parse_sequence_header (schro_decoder, data, next);
+
+ gst_base_video_decoder_set_sync_point (base_video_decoder);
+
+ if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_sink_timestamp)) {
+ base_video_decoder->current_frame->presentation_timestamp =
+ base_video_decoder->last_sink_timestamp;
+ GST_DEBUG ("got timestamp %lld", base_video_decoder->last_sink_timestamp);
+ } else if (base_video_decoder->last_sink_offset_end != -1) {
+ GstVideoState *state;
+
+#if 0
+ /* FIXME perhaps should use this to determine if the granulepos
+ * is valid */
+ {
+ guint64 pt;
+ int dist_h;
+ int dist_l;
+ int dist;
+ int delay;
+ guint64 dt;
+ gint64 granulepos = base_video_decoder->last_sink_offset_end;
+
+ pt = ((granulepos >> 22) +
+ (granulepos & OGG_DIRAC_GRANULE_LOW_MASK)) >> 9;
+ dist_h = (granulepos >> 22) & 0xff;
+ dist_l = granulepos & 0xff;
+ dist = (dist_h << 8) | dist_l;
+ delay = (granulepos >> 9) & 0x1fff;
+ dt = pt - delay;
+ GST_DEBUG ("gp pt %lld dist %d delay %d dt %lld", pt, dist, delay, dt);
+ }
+#endif
+ state =
+ gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER
+ (schro_decoder));
+ base_video_decoder->current_frame->presentation_timestamp =
+ gst_util_uint64_scale (granulepos_to_frame
+ (base_video_decoder->last_sink_offset_end), state->fps_d * GST_SECOND,
+ state->fps_n);
+ } else {
+ base_video_decoder->current_frame->presentation_timestamp = -1;
+ }
+
+ g_free (data);
+ }
+
+ if (schro_decoder->seq_header_buffer == NULL) {
+ gst_adapter_flush (base_video_decoder->input_adapter, next);
+ return GST_FLOW_OK;
+ }
+
+ if (SCHRO_PARSE_CODE_IS_PICTURE (parse_code)) {
+ GstVideoFrame *frame;
+ guint8 tmp[4];
+
+ frame = base_video_decoder->current_frame;
+
+ gst_adapter_copy (base_video_decoder->input_adapter, tmp,
+ SCHRO_PARSE_HEADER_SIZE, 4);
+
+ frame->presentation_frame_number = GST_READ_UINT32_BE (tmp);
+
+ gst_base_video_decoder_add_to_frame (base_video_decoder, next);
+
+ return gst_base_video_decoder_have_frame (base_video_decoder);
+ } else {
+ gst_base_video_decoder_add_to_frame (base_video_decoder, next);
+ }
+
+ return GST_FLOW_OK;
+}
+
+static int
+gst_schro_dec_scan_for_sync (GstBaseVideoDecoder * base_video_decoder,
+ gboolean at_eos, int offset, int n)
+{
+ GstAdapter *adapter = base_video_decoder->input_adapter;
+ int n_available;
+
+ n_available = gst_adapter_available (adapter) - offset;
+
+ if (n_available < 4) {
+ if (at_eos) {
+ return n_available;
+ } else {
+ return 0;
+ }
+ }
+
+ n_available -= 3;
+
+ return gst_adapter_masked_scan_uint32 (adapter, 0xffffffff, 0x42424344,
+ offset, MIN (n, n_available - 3));
+}
+
+
+static void
+gst_schrodec_send_tags (GstSchroDec * schro_dec)
+{
+ GstTagList *list;
+
+ list = gst_tag_list_new ();
+ gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
+ GST_TAG_VIDEO_CODEC, "Dirac", NULL);
+
+ gst_element_found_tags_for_pad (GST_ELEMENT_CAST (schro_dec),
+ GST_BASE_VIDEO_CODEC_SRC_PAD (schro_dec), list);
+}
+
+static GstFlowReturn
+gst_schro_dec_process (GstSchroDec * schro_dec, gboolean eos)
+{
+ gboolean go;
+ GstFlowReturn ret;
+
+ ret = GST_FLOW_OK;
+ go = TRUE;
+ while (go) {
+ int it;
+
+ it = schro_decoder_autoparse_wait (schro_dec->decoder);
+
+ switch (it) {
+ case SCHRO_DECODER_FIRST_ACCESS_UNIT:
+ break;
+ case SCHRO_DECODER_NEED_BITS:
+ GST_DEBUG ("need bits");
+ go = 0;
+ break;
+ case SCHRO_DECODER_NEED_FRAME:
+ {
+ GstBuffer *outbuf;
+ GstVideoState *state;
+ SchroFrame *schro_frame;
+ GstFlowReturn flow_ret;
+ int size;
+
+ GST_DEBUG ("need frame");
+
+ state =
+ gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER
+ (schro_dec));
+ size =
+ gst_video_format_get_size (state->format, state->width,
+ state->height);
+ flow_ret =
+ gst_pad_alloc_buffer_and_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD
+ (schro_dec), GST_BUFFER_OFFSET_NONE, size,
+ GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (schro_dec)), &outbuf);
+ if (flow_ret != GST_FLOW_OK) {
+ go = FALSE;
+ ret = flow_ret;
+ break;
+ }
+ schro_frame = gst_schro_buffer_wrap (outbuf,
+ state->format, state->width, state->height);
+ schro_decoder_add_output_picture (schro_dec->decoder, schro_frame);
+ break;
+ }
+ case SCHRO_DECODER_OK:
+ {
+ SchroFrame *schro_frame;
+ SchroTag *tag;
+ GstVideoFrame *frame;
+
+ GST_DEBUG ("got frame");
+
+ tag = schro_decoder_get_picture_tag (schro_dec->decoder);
+ schro_frame = schro_decoder_pull (schro_dec->decoder);
+ frame = tag->value;
+
+ if (schro_frame) {
+ if (schro_frame->priv) {
+ GstFlowReturn flow_ret;
+
+ frame->src_buffer = gst_buffer_ref (GST_BUFFER (schro_frame->priv));
+
+ flow_ret =
+ gst_base_video_decoder_finish_frame (GST_BASE_VIDEO_DECODER
+ (schro_dec), frame);
+ if (flow_ret != GST_FLOW_OK) {
+ GST_DEBUG ("finish frame returned %d", flow_ret);
+ return flow_ret;
+ }
+ } else {
+ GST_DEBUG ("skipped frame");
+ }
+
+ schro_frame_unref (schro_frame);
+ }
+ if (!eos) {
+ go = FALSE;
+ }
+ }
+
+ break;
+ case SCHRO_DECODER_EOS:
+ GST_DEBUG ("eos");
+ go = FALSE;
+ break;
+ case SCHRO_DECODER_ERROR:
+ go = FALSE;
+ GST_DEBUG ("codec error");
+ ret = GST_FLOW_ERROR;
+ break;
+ }
+ }
+ return ret;
+}
+
+GstFlowReturn
+gst_schro_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
+ GstVideoFrame * frame)
+{
+ GstSchroDec *schro_dec;
+ int schro_ret;
+ SchroBuffer *input_buffer;
+ GstVideoState *state;
+
+ schro_dec = GST_SCHRO_DEC (base_video_decoder);
+
+ GST_DEBUG ("handle frame");
+
+ state = gst_base_video_decoder_get_state (base_video_decoder);
+
+ gst_base_video_decoder_set_src_caps (base_video_decoder);
+
+ input_buffer = gst_schro_wrap_gst_buffer (frame->sink_buffer);
+ frame->sink_buffer = NULL;
+
+ input_buffer->tag = schro_tag_new (frame, NULL);
+
+ schro_ret = schro_decoder_autoparse_push (schro_dec->decoder, input_buffer);
+
+ return gst_schro_dec_process (schro_dec, FALSE);
+}
+
+GstFlowReturn
+gst_schro_dec_finish (GstBaseVideoDecoder * base_video_decoder,
+ GstVideoFrame * frame)
+{
+ GstSchroDec *schro_dec;
+
+ schro_dec = GST_SCHRO_DEC (base_video_decoder);
+
+ GST_DEBUG ("finish");
+
+ gst_base_video_decoder_set_src_caps (base_video_decoder);
+
+ schro_decoder_autoparse_push_end_of_sequence (schro_dec->decoder);
+
+ return gst_schro_dec_process (schro_dec, TRUE);
+}
diff --git a/ext/schroedinger/gstschroenc.c b/ext/schroedinger/gstschroenc.c
new file mode 100644
index 00000000..2d0fb39b
--- /dev/null
+++ b/ext/schroedinger/gstschroenc.c
@@ -0,0 +1,736 @@
+/* Schrodinger
+ * Copyright (C) 2006 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <gst/video/gstbasevideoencoder.h>
+#include <string.h>
+
+#include <schroedinger/schro.h>
+#include <schroedinger/schrobitstream.h>
+#include <schroedinger/schrovirtframe.h>
+#include <math.h>
+#include "gstschroutils.h"
+
+GST_DEBUG_CATEGORY_EXTERN (schro_debug);
+#define GST_CAT_DEFAULT schro_debug
+
+/* Standard GObject type-check/cast macros for GstSchroEnc. */
+#define GST_TYPE_SCHRO_ENC \
+  (gst_schro_enc_get_type())
+#define GST_SCHRO_ENC(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SCHRO_ENC,GstSchroEnc))
+#define GST_SCHRO_ENC_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SCHRO_ENC,GstSchroEncClass))
+#define GST_IS_SCHRO_ENC(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SCHRO_ENC))
+/* fix: parameter was named 'obj' but the expansion used 'klass', so any
+ * use of this macro would fail to compile */
+#define GST_IS_SCHRO_ENC_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SCHRO_ENC))
+
+typedef struct _GstSchroEnc GstSchroEnc;
+typedef struct _GstSchroEncClass GstSchroEncClass;
+
+/* Container formats the encoder can shape its output for; selected
+ * during negotiation in start(). */
+typedef enum
+{
+  GST_SCHRO_ENC_OUTPUT_OGG,
+  GST_SCHRO_ENC_OUTPUT_QUICKTIME,
+  GST_SCHRO_ENC_OUTPUT_AVI,
+  GST_SCHRO_ENC_OUTPUT_MPEG_TS,
+  GST_SCHRO_ENC_OUTPUT_MP4
+} GstSchroEncOutputType;
+
+struct _GstSchroEnc
+{
+  GstBaseVideoEncoder base_encoder;
+
+  GstPad *sinkpad;
+  GstPad *srcpad;
+
+  /* video properties */
+  GstSchroEncOutputType output_format;
+
+  /* state */
+  SchroEncoder *encoder;        /* created in init(), freed in stop() */
+  SchroVideoFormat *video_format;
+  GstVideoFrame *eos_frame;     /* frame handed to finish(), consumed at EOS */
+  GstBuffer *seq_header_buffer; /* encoded Dirac sequence header */
+
+  /* last granulepos emitted on the OGG path */
+  guint64 last_granulepos;
+};
+
+struct _GstSchroEncClass
+{
+  GstBaseVideoEncoderClass parent_class;
+};
+
+
+
+/* no signals defined */
+enum
+{
+  LAST_SIGNAL
+};
+
+/* property ids start at ARG_0 + 1; see class_init */
+enum
+{
+  ARG_0
+};
+
+static void gst_schro_enc_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_schro_enc_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+static GstFlowReturn gst_schro_enc_process (GstSchroEnc * schro_enc);
+
+static gboolean gst_schro_enc_set_format (GstBaseVideoEncoder *
+ base_video_encoder, GstVideoState * state);
+static gboolean gst_schro_enc_start (GstBaseVideoEncoder * base_video_encoder);
+static gboolean gst_schro_enc_stop (GstBaseVideoEncoder * base_video_encoder);
+static gboolean gst_schro_enc_finish (GstBaseVideoEncoder * base_video_encoder,
+ GstVideoFrame * frame);
+static gboolean gst_schro_enc_handle_frame (GstBaseVideoEncoder *
+ base_video_encoder, GstVideoFrame * frame);
+static GstFlowReturn gst_schro_enc_shape_output (GstBaseVideoEncoder *
+ base_video_encoder, GstVideoFrame * frame);
+static GstCaps *gst_schro_enc_get_caps (GstBaseVideoEncoder *
+ base_video_encoder);
+
+static GstStaticPadTemplate gst_schro_enc_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ I420, YV12, YUY2, UYVY, AYUV }"))
+ );
+
+static GstStaticPadTemplate gst_schro_enc_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-dirac;video/x-qt-part;video/x-mp4-part")
+ );
+
+GST_BOILERPLATE (GstSchroEnc, gst_schro_enc, GstBaseVideoEncoder,
+ GST_TYPE_BASE_VIDEO_ENCODER);
+
+/* Register pad templates and element metadata (GStreamer 0.10 base_init). */
+static void
+gst_schro_enc_base_init (gpointer g_class)
+{
+  static GstElementDetails schro_enc_details =
+      GST_ELEMENT_DETAILS ("Dirac Encoder",
+      "Codec/Encoder/Video",
+      "Encode raw video into Dirac stream",
+      "David Schleef <ds@schleef.org>");
+  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&gst_schro_enc_src_template));
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&gst_schro_enc_sink_template));
+
+  gst_element_class_set_details (element_class, &schro_enc_details);
+}
+
+/* Class init: mirrors every libschroedinger encoder setting as a GObject
+ * property (property id = setting index + 1) and wires up the
+ * GstBaseVideoEncoder virtual methods. */
+static void
+gst_schro_enc_class_init (GstSchroEncClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *gstelement_class;
+  GstBaseVideoEncoderClass *basevideocoder_class;
+  int i;
+
+  gobject_class = G_OBJECT_CLASS (klass);
+  gstelement_class = GST_ELEMENT_CLASS (klass);
+  basevideocoder_class = GST_BASE_VIDEO_ENCODER_CLASS (klass);
+
+  gobject_class->set_property = gst_schro_enc_set_property;
+  gobject_class->get_property = gst_schro_enc_get_property;
+
+  for (i = 0; i < schro_encoder_get_n_settings (); i++) {
+    const SchroEncoderSetting *setting;
+
+    setting = schro_encoder_get_setting_info (i);
+
+    switch (setting->type) {
+      case SCHRO_ENCODER_SETTING_TYPE_BOOLEAN:
+        g_object_class_install_property (gobject_class, i + 1,
+            g_param_spec_boolean (setting->name, setting->name, setting->name,
+                setting->default_value, G_PARAM_READWRITE));
+        break;
+      case SCHRO_ENCODER_SETTING_TYPE_INT:
+        g_object_class_install_property (gobject_class, i + 1,
+            g_param_spec_int (setting->name, setting->name, setting->name,
+                setting->min, setting->max, setting->default_value,
+                G_PARAM_READWRITE));
+        break;
+      /* enum settings are exposed as bounded ints, not GEnum types */
+      case SCHRO_ENCODER_SETTING_TYPE_ENUM:
+        g_object_class_install_property (gobject_class, i + 1,
+            g_param_spec_int (setting->name, setting->name, setting->name,
+                setting->min, setting->max, setting->default_value,
+                G_PARAM_READWRITE));
+        break;
+      case SCHRO_ENCODER_SETTING_TYPE_DOUBLE:
+        g_object_class_install_property (gobject_class, i + 1,
+            g_param_spec_double (setting->name, setting->name, setting->name,
+                setting->min, setting->max, setting->default_value,
+                G_PARAM_READWRITE));
+        break;
+      default:
+        break;
+    }
+  }
+
+  basevideocoder_class->set_format =
+      GST_DEBUG_FUNCPTR (gst_schro_enc_set_format);
+  basevideocoder_class->start = GST_DEBUG_FUNCPTR (gst_schro_enc_start);
+  basevideocoder_class->stop = GST_DEBUG_FUNCPTR (gst_schro_enc_stop);
+  basevideocoder_class->finish = GST_DEBUG_FUNCPTR (gst_schro_enc_finish);
+  basevideocoder_class->handle_frame =
+      GST_DEBUG_FUNCPTR (gst_schro_enc_handle_frame);
+  basevideocoder_class->shape_output =
+      GST_DEBUG_FUNCPTR (gst_schro_enc_shape_output);
+  basevideocoder_class->get_caps = GST_DEBUG_FUNCPTR (gst_schro_enc_get_caps);
+}
+
+static void
+gst_schro_enc_init (GstSchroEnc * schro_enc, GstSchroEncClass * klass)
+{
+  GST_DEBUG ("gst_schro_enc_init");
+
+  /* Normally, we'd create the encoder in ->start(), but we use the
+   * encoder to store object properties. So it needs to be created
+   * here. */
+  schro_enc->encoder = schro_encoder_new ();
+  schro_encoder_set_packet_assembly (schro_enc->encoder, TRUE);
+  schro_enc->video_format = schro_encoder_get_video_format (schro_enc->encoder);
+}
+
+
+
+/* Configure the Schro video format from the negotiated GstVideoState,
+ * start the encoder and cache the encoded sequence header. */
+static gboolean
+gst_schro_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
+    GstVideoState * state)
+{
+  GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder);
+
+  schro_video_format_set_std_video_format (schro_enc->video_format,
+      SCHRO_VIDEO_FORMAT_CUSTOM);
+
+  switch (state->format) {
+    case GST_VIDEO_FORMAT_I420:
+    case GST_VIDEO_FORMAT_YV12:
+      schro_enc->video_format->chroma_format = SCHRO_CHROMA_420;
+      break;
+    case GST_VIDEO_FORMAT_YUY2:
+    case GST_VIDEO_FORMAT_UYVY:
+      schro_enc->video_format->chroma_format = SCHRO_CHROMA_422;
+      break;
+    case GST_VIDEO_FORMAT_AYUV:
+      schro_enc->video_format->chroma_format = SCHRO_CHROMA_444;
+      break;
+    /* NOTE(review): ARGB is handled here but the sink template only lists
+     * YUV formats — confirm whether this case is reachable */
+    case GST_VIDEO_FORMAT_ARGB:
+      schro_enc->video_format->chroma_format = SCHRO_CHROMA_420;
+      break;
+    default:
+      g_assert_not_reached ();
+  }
+
+  schro_enc->video_format->frame_rate_numerator = state->fps_n;
+  schro_enc->video_format->frame_rate_denominator = state->fps_d;
+
+  schro_enc->video_format->width = state->width;
+  schro_enc->video_format->height = state->height;
+  schro_enc->video_format->clean_width = state->clean_width;
+  schro_enc->video_format->clean_height = state->clean_height;
+  schro_enc->video_format->left_offset = state->clean_offset_left;
+  schro_enc->video_format->top_offset = state->clean_offset_top;
+
+  schro_enc->video_format->aspect_ratio_numerator = state->par_n;
+  schro_enc->video_format->aspect_ratio_denominator = state->par_d;
+
+  schro_video_format_set_std_signal_range (schro_enc->video_format,
+      SCHRO_SIGNAL_RANGE_8BIT_VIDEO);
+  schro_video_format_set_std_colour_spec (schro_enc->video_format,
+      SCHRO_COLOUR_SPEC_HDTV);
+
+  schro_encoder_set_video_format (schro_enc->encoder, schro_enc->video_format);
+  schro_encoder_start (schro_enc->encoder);
+
+  /* cache the sequence header; used for streamheader caps on the OGG path */
+  schro_enc->seq_header_buffer =
+      gst_schro_wrap_schro_buffer (schro_encoder_encode_sequence_header
+      (schro_enc->encoder));
+
+  return TRUE;
+}
+
+/* Forward a GObject property write to the matching libschroedinger
+ * setting (id - 1). All value types are funneled through
+ * schro_encoder_setting_set_double, which takes a double. */
+static void
+gst_schro_enc_set_property (GObject * object, guint prop_id,
+    const GValue * value, GParamSpec * pspec)
+{
+  GstSchroEnc *src;
+
+  g_return_if_fail (GST_IS_SCHRO_ENC (object));
+  src = GST_SCHRO_ENC (object);
+
+  GST_DEBUG ("gst_schro_enc_set_property");
+
+  if (prop_id >= 1) {
+    const SchroEncoderSetting *setting;
+    setting = schro_encoder_get_setting_info (prop_id - 1);
+    switch (G_VALUE_TYPE (value)) {
+      case G_TYPE_DOUBLE:
+        schro_encoder_setting_set_double (src->encoder, setting->name,
+            g_value_get_double (value));
+        break;
+      case G_TYPE_INT:
+        schro_encoder_setting_set_double (src->encoder, setting->name,
+            g_value_get_int (value));
+        break;
+      case G_TYPE_BOOLEAN:
+        schro_encoder_setting_set_double (src->encoder, setting->name,
+            g_value_get_boolean (value));
+        break;
+    }
+  }
+}
+
+/* Read a GObject property back from the matching libschroedinger setting
+ * (id - 1), converting the stored double to the GValue's type. */
+static void
+gst_schro_enc_get_property (GObject * object, guint prop_id, GValue * value,
+    GParamSpec * pspec)
+{
+  GstSchroEnc *src;
+
+  g_return_if_fail (GST_IS_SCHRO_ENC (object));
+  src = GST_SCHRO_ENC (object);
+
+  if (prop_id >= 1) {
+    const SchroEncoderSetting *setting;
+    setting = schro_encoder_get_setting_info (prop_id - 1);
+    switch (G_VALUE_TYPE (value)) {
+      case G_TYPE_DOUBLE:
+        g_value_set_double (value,
+            schro_encoder_setting_get_double (src->encoder, setting->name));
+        break;
+      case G_TYPE_INT:
+        g_value_set_int (value,
+            schro_encoder_setting_get_double (src->encoder, setting->name));
+        break;
+      case G_TYPE_BOOLEAN:
+        g_value_set_boolean (value,
+            schro_encoder_setting_get_double (src->encoder, setting->name));
+        break;
+    }
+  }
+}
+
+/*
+ * start is called once the input format is known. This function
+ * must decide on an output format and negotiate it.
+ */
+static gboolean
+gst_schro_enc_start (GstBaseVideoEncoder * base_video_encoder)
+{
+  GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder);
+  GstCaps *caps;
+  GstStructure *structure;
+
+  GST_DEBUG ("set_output_caps");
+  caps =
+      gst_pad_get_allowed_caps (GST_BASE_VIDEO_CODEC_SRC_PAD
+      (base_video_encoder));
+
+  /* fix: gst_pad_get_allowed_caps can return NULL (unlinked pad); the old
+   * code passed NULL straight to gst_caps_is_empty */
+  if (caps == NULL || gst_caps_is_empty (caps)) {
+    if (caps != NULL)
+      gst_caps_unref (caps);
+    return FALSE;
+  }
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  if (gst_structure_has_name (structure, "video/x-dirac")) {
+    schro_enc->output_format = GST_SCHRO_ENC_OUTPUT_OGG;
+  } else if (gst_structure_has_name (structure, "video/x-qt-part")) {
+    schro_enc->output_format = GST_SCHRO_ENC_OUTPUT_QUICKTIME;
+  } else if (gst_structure_has_name (structure, "video/x-avi-part")) {
+    schro_enc->output_format = GST_SCHRO_ENC_OUTPUT_AVI;
+  } else if (gst_structure_has_name (structure, "video/x-mp4-part")) {
+    schro_enc->output_format = GST_SCHRO_ENC_OUTPUT_MP4;
+  } else {
+    /* fix: this error path previously leaked 'caps' */
+    gst_caps_unref (caps);
+    return FALSE;
+  }
+
+  /* latency: the encoder may queue up to 2 * queue_depth frames */
+  gst_base_video_encoder_set_latency_fields (base_video_encoder,
+      2 * (int) schro_encoder_setting_get_double (schro_enc->encoder,
+          "queue_depth"));
+
+  gst_caps_unref (caps);
+  return TRUE;
+}
+
+/* Free the encoder on stop.
+ * NOTE(review): the encoder is created in init() and never re-created in
+ * start(), so a stop/start cycle leaves 'encoder' NULL — confirm whether
+ * the base class can restart an element after stop(). */
+static gboolean
+gst_schro_enc_stop (GstBaseVideoEncoder * base_video_encoder)
+{
+  GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder);
+
+  if (schro_enc->encoder) {
+    schro_encoder_free (schro_enc->encoder);
+    schro_enc->encoder = NULL;
+  }
+
+  return TRUE;
+}
+
+/* EOS handler: remember the frame for the end-of-stream output unit,
+ * tell the encoder the stream ended, and drain it.
+ * NOTE(review): the GstFlowReturn from gst_schro_enc_process is
+ * discarded here — errors at EOS are silently dropped. */
+static gboolean
+gst_schro_enc_finish (GstBaseVideoEncoder * base_video_encoder,
+    GstVideoFrame * frame)
+{
+  GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder);
+
+  GST_DEBUG ("finish");
+
+  schro_enc->eos_frame = frame;
+
+  schro_encoder_end_of_stream (schro_enc->encoder);
+  gst_schro_enc_process (schro_enc);
+
+  return TRUE;
+}
+
+/* Push one raw frame into the encoder (tagged with its GstVideoFrame)
+ * and drain any output.
+ * NOTE(review): declared gboolean but returns a GstFlowReturn; since
+ * GST_FLOW_OK == 0 this inverts success/failure if the caller treats the
+ * value as a boolean — check the base class vfunc signature. */
+static gboolean
+gst_schro_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
+    GstVideoFrame * frame)
+{
+  GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder);
+  SchroFrame *schro_frame;
+  GstFlowReturn ret;
+  const GstVideoState *state;
+
+  state = gst_base_video_encoder_get_state (base_video_encoder);
+
+  /* wraps (takes over) the sink buffer as a SchroFrame */
+  schro_frame = gst_schro_buffer_wrap (frame->sink_buffer,
+      state->format, state->width, state->height);
+
+  GST_DEBUG ("pushing frame %p", frame);
+  schro_encoder_push_frame_full (schro_enc->encoder, schro_frame, frame);
+
+  ret = gst_schro_enc_process (schro_enc);
+
+  return ret;
+}
+
+#if 0
+/* Append a list of buffers to the caps' "streamheader" field.
+ * Currently compiled out. */
+static void
+gst_caps_add_streamheader (GstCaps * caps, GList * list)
+{
+  GValue array = { 0 };
+  GValue value = { 0 };
+  GstBuffer *buf;
+  GList *g;
+
+  g_value_init (&array, GST_TYPE_ARRAY);
+
+  /* fix: loop advanced with g_list_next (list), which never changes, so
+   * this spun forever on the first element; advance 'g' instead */
+  for (g = g_list_first (list); g; g = g_list_next (g)) {
+    g_value_init (&value, GST_TYPE_BUFFER);
+    buf = gst_buffer_copy (GST_BUFFER (g->data));
+    gst_value_set_buffer (&value, buf);
+    gst_buffer_unref (buf);
+    gst_value_array_append_value (&array, &value);
+    g_value_unset (&value);
+  }
+  gst_structure_set_value (gst_caps_get_structure (caps, 0),
+      "streamheader", &array);
+  g_value_unset (&array);
+}
+#endif
+
+/* Build src caps for the negotiated container. For OGG output, a
+ * streamheader buffer (sequence header + end-of-sequence parse unit) is
+ * attached to the caps. */
+static GstCaps *
+gst_schro_enc_get_caps (GstBaseVideoEncoder * base_video_encoder)
+{
+  GstCaps *caps;
+  const GstVideoState *state;
+  GstSchroEnc *schro_enc;
+
+  schro_enc = GST_SCHRO_ENC (base_video_encoder);
+
+  state = gst_base_video_encoder_get_state (base_video_encoder);
+
+  if (schro_enc->output_format == GST_SCHRO_ENC_OUTPUT_OGG) {
+    caps = gst_caps_new_simple ("video/x-dirac",
+        "width", G_TYPE_INT, state->width,
+        "height", G_TYPE_INT, state->height,
+        "framerate", GST_TYPE_FRACTION, state->fps_n,
+        state->fps_d,
+        "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
+        state->par_d, NULL);
+
+    GST_BUFFER_FLAG_SET (schro_enc->seq_header_buffer, GST_BUFFER_FLAG_IN_CAPS);
+
+    {
+      GValue array = { 0 };
+      GValue value = { 0 };
+      GstBuffer *buf;
+      int size;
+
+      g_value_init (&array, GST_TYPE_ARRAY);
+      g_value_init (&value, GST_TYPE_BUFFER);
+      size = GST_BUFFER_SIZE (schro_enc->seq_header_buffer);
+      buf = gst_buffer_new_and_alloc (size + SCHRO_PARSE_HEADER_SIZE);
+      memcpy (GST_BUFFER_DATA (buf),
+          GST_BUFFER_DATA (schro_enc->seq_header_buffer), size);
+      /* append an end-of-sequence parse unit: 'BBCD' sync word, EOS parse
+       * code, next offset 0, prev offset = header size */
+      GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 0, 0x42424344);
+      GST_WRITE_UINT8 (GST_BUFFER_DATA (buf) + size + 4,
+          SCHRO_PARSE_CODE_END_OF_SEQUENCE);
+      GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 5, 0);
+      GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 9, size);
+      gst_value_set_buffer (&value, buf);
+      gst_buffer_unref (buf);
+      gst_value_array_append_value (&array, &value);
+      gst_structure_set_value (gst_caps_get_structure (caps, 0),
+          "streamheader", &array);
+      g_value_unset (&value);
+      g_value_unset (&array);
+    }
+  } else if (schro_enc->output_format == GST_SCHRO_ENC_OUTPUT_QUICKTIME) {
+    caps = gst_caps_new_simple ("video/x-qt-part",
+        "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
+        "width", G_TYPE_INT, state->width,
+        "height", G_TYPE_INT, state->height,
+        "framerate", GST_TYPE_FRACTION, state->fps_n,
+        state->fps_d,
+        "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
+        state->par_d, NULL);
+  } else if (schro_enc->output_format == GST_SCHRO_ENC_OUTPUT_AVI) {
+    caps = gst_caps_new_simple ("video/x-avi-part",
+        "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
+        "width", G_TYPE_INT, state->width,
+        "height", G_TYPE_INT, state->height,
+        "framerate", GST_TYPE_FRACTION, state->fps_n,
+        state->fps_d,
+        "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
+        state->par_d, NULL);
+  /* NOTE(review): MPEG_TS is never selected by start() and the src
+   * template has no video/x-mpegts-part — dead branch? */
+  } else if (schro_enc->output_format == GST_SCHRO_ENC_OUTPUT_MPEG_TS) {
+    caps = gst_caps_new_simple ("video/x-mpegts-part",
+        "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
+        "width", G_TYPE_INT, state->width,
+        "height", G_TYPE_INT, state->height,
+        "framerate", GST_TYPE_FRACTION, state->fps_n,
+        state->fps_d,
+        "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
+        state->par_d, NULL);
+  } else if (schro_enc->output_format == GST_SCHRO_ENC_OUTPUT_MP4) {
+    caps = gst_caps_new_simple ("video/x-mp4-part",
+        "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
+        "width", G_TYPE_INT, state->width,
+        "height", G_TYPE_INT, state->height,
+        "framerate", GST_TYPE_FRACTION, state->fps_n,
+        state->fps_d,
+        "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
+        state->par_d, NULL);
+  } else {
+    g_assert_not_reached ();
+  }
+
+  return caps;
+}
+
+
+
+
+/* Shape output for Dirac-in-Ogg: compute the two-part Dirac granulepos
+ * (picture numbers in half-frame units) and push the buffer. */
+static GstFlowReturn
+gst_schro_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder,
+    GstVideoFrame * frame)
+{
+  GstSchroEnc *schro_enc;
+  int delay;
+  int dist;
+  int pt;
+  int dt;
+  guint64 granulepos_hi;
+  guint64 granulepos_low;
+  GstBuffer *buf = frame->src_buffer;
+
+  schro_enc = GST_SCHRO_ENC (base_video_encoder);
+
+  /* fix: removed unused local 'dpn' (was assigned, never read) */
+  pt = frame->presentation_frame_number * 2;
+  dt = frame->decode_frame_number * 2;
+  delay = pt - dt;
+  dist = frame->distance_from_sync;
+
+  GST_DEBUG ("sys %d dpn %d pt %d dt %d delay %d dist %d",
+      (int) frame->system_frame_number,
+      (int) frame->decode_frame_number, pt, dt, delay, dist);
+
+  granulepos_hi = (((uint64_t) pt - delay) << 9) | ((dist >> 8));
+  granulepos_low = (delay << 9) | (dist & 0xff);
+  /* fix: %lld with guint64 arguments is non-portable; use GLib's
+   * 64-bit format macro */
+  GST_DEBUG ("granulepos %" G_GUINT64_FORMAT ":%" G_GUINT64_FORMAT,
+      granulepos_hi, granulepos_low);
+
+  if (frame->is_eos) {
+    /* the EOS unit repeats the last granulepos */
+    GST_BUFFER_OFFSET_END (buf) = schro_enc->last_granulepos;
+  } else {
+    schro_enc->last_granulepos = (granulepos_hi << 22) | (granulepos_low);
+    GST_BUFFER_OFFSET_END (buf) = schro_enc->last_granulepos;
+  }
+
+  gst_buffer_set_caps (buf, base_video_encoder->caps);
+
+  return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
+}
+
+/* Shape output for QuickTime muxing: stamp timestamp/duration from the
+ * presentation frame number, put the decode timestamp in OFFSET_END, and
+ * mark delta units. */
+static GstFlowReturn
+gst_schro_enc_shape_output_quicktime (GstBaseVideoEncoder * base_video_encoder,
+    GstVideoFrame * frame)
+{
+  GstBuffer *buf = frame->src_buffer;
+  const GstVideoState *state;
+
+  state = gst_base_video_encoder_get_state (base_video_encoder);
+
+  GST_BUFFER_TIMESTAMP (buf) = gst_video_state_get_timestamp (state,
+      frame->presentation_frame_number);
+  GST_BUFFER_DURATION (buf) = gst_video_state_get_timestamp (state,
+      frame->presentation_frame_number + 1) - GST_BUFFER_TIMESTAMP (buf);
+  GST_BUFFER_OFFSET_END (buf) = gst_video_state_get_timestamp (state,
+      frame->system_frame_number);
+  GST_BUFFER_OFFSET (buf) = GST_CLOCK_TIME_NONE;
+
+  /* keyframe only when it's a sync point with no reordering in flight */
+  if (frame->is_sync_point &&
+      frame->presentation_frame_number == frame->system_frame_number) {
+    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+  } else {
+    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+  }
+
+  gst_buffer_set_caps (buf, base_video_encoder->caps);
+
+  return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
+}
+
+/* Shape output for MP4 muxing: same stamping as the QuickTime path. */
+static GstFlowReturn
+gst_schro_enc_shape_output_mp4 (GstBaseVideoEncoder * base_video_encoder,
+    GstVideoFrame * frame)
+{
+  GstBuffer *buf = frame->src_buffer;
+  const GstVideoState *state;
+
+  state = gst_base_video_encoder_get_state (base_video_encoder);
+
+  GST_BUFFER_TIMESTAMP (buf) = gst_video_state_get_timestamp (state,
+      frame->presentation_frame_number);
+  GST_BUFFER_DURATION (buf) = gst_video_state_get_timestamp (state,
+      frame->presentation_frame_number + 1) - GST_BUFFER_TIMESTAMP (buf);
+  /* fix: OFFSET_END was assigned twice (first from decode_frame_number,
+   * then unconditionally overwritten); keep only the effective store */
+  GST_BUFFER_OFFSET_END (buf) = gst_video_state_get_timestamp (state,
+      frame->system_frame_number);
+  GST_BUFFER_OFFSET (buf) = GST_CLOCK_TIME_NONE;
+
+  /* keyframe only when it's a sync point with no reordering in flight */
+  if (frame->is_sync_point &&
+      frame->presentation_frame_number == frame->system_frame_number) {
+    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+  } else {
+    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+  }
+
+  gst_buffer_set_caps (buf, base_video_encoder->caps);
+
+  return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
+}
+
+/* Dispatch to the container-specific output shaper.
+ * NOTE(review): start() can select GST_SCHRO_ENC_OUTPUT_AVI, but there is
+ * no AVI case here, so AVI output hits g_assert_not_reached — confirm. */
+static GstFlowReturn
+gst_schro_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
+    GstVideoFrame * frame)
+{
+  GstSchroEnc *schro_enc;
+
+  schro_enc = GST_SCHRO_ENC (base_video_encoder);
+
+  switch (schro_enc->output_format) {
+    case GST_SCHRO_ENC_OUTPUT_OGG:
+      return gst_schro_enc_shape_output_ogg (base_video_encoder, frame);
+    case GST_SCHRO_ENC_OUTPUT_QUICKTIME:
+      return gst_schro_enc_shape_output_quicktime (base_video_encoder, frame);
+    case GST_SCHRO_ENC_OUTPUT_MP4:
+      return gst_schro_enc_shape_output_mp4 (base_video_encoder, frame);
+    default:
+      g_assert_not_reached ();
+      break;
+  }
+
+  return GST_FLOW_ERROR;
+}
+
+/* Drain the encoder: pull every finished output unit, attach it to the
+ * GstVideoFrame it was tagged with (or to eos_frame for the trailing EOS
+ * unit) and hand it to the base class. Returns when the encoder needs
+ * more input or reaches end of stream. */
+static GstFlowReturn
+gst_schro_enc_process (GstSchroEnc * schro_enc)
+{
+  SchroBuffer *encoded_buffer;
+  GstVideoFrame *frame;
+  GstFlowReturn ret;
+  int presentation_frame;
+  void *voidptr;
+  GstBaseVideoEncoder *base_video_encoder = GST_BASE_VIDEO_ENCODER (schro_enc);
+
+  GST_DEBUG ("process");
+
+  while (1) {
+    switch (schro_encoder_wait (schro_enc->encoder)) {
+      case SCHRO_STATE_NEED_FRAME:
+        return GST_FLOW_OK;
+      case SCHRO_STATE_END_OF_STREAM:
+        GST_DEBUG ("EOS");
+        return GST_FLOW_OK;
+      case SCHRO_STATE_HAVE_BUFFER:
+        voidptr = NULL;
+        encoded_buffer = schro_encoder_pull_full (schro_enc->encoder,
+            &presentation_frame, &voidptr);
+        frame = voidptr;
+        if (encoded_buffer == NULL) {
+          GST_DEBUG ("encoder_pull returned NULL");
+          /* FIXME This shouldn't happen */
+          return GST_FLOW_ERROR;
+        }
+
+        /* an untagged unit is the end-of-sequence unit; attach it to the
+         * frame stashed by finish() */
+        if (voidptr == NULL) {
+          GST_DEBUG ("got eos");
+          frame = schro_enc->eos_frame;
+        }
+
+        /* parse code at offset 4 of the parse-unit header */
+        if (SCHRO_PARSE_CODE_IS_SEQ_HEADER (encoded_buffer->data[4])) {
+          frame->is_sync_point = TRUE;
+        }
+
+        frame->src_buffer = gst_schro_wrap_schro_buffer (encoded_buffer);
+
+        ret = gst_base_video_encoder_finish_frame (base_video_encoder, frame);
+
+        if (ret != GST_FLOW_OK) {
+          GST_DEBUG ("pad_push returned %d", ret);
+          return ret;
+        }
+        break;
+      case SCHRO_STATE_AGAIN:
+        break;
+    }
+  }
+  /* unreachable: the while(1) only exits via return */
+  return GST_FLOW_OK;
+}
diff --git a/ext/schroedinger/gstschroparse.c b/ext/schroedinger/gstschroparse.c
new file mode 100644
index 00000000..d368a52b
--- /dev/null
+++ b/ext/schroedinger/gstschroparse.c
@@ -0,0 +1,610 @@
+/* Schrodinger
+ * Copyright (C) 2006 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/video/video.h>
+#include <gst/video/gstbasevideoparse.h>
+#include <string.h>
+#include <schroedinger/schro.h>
+#include <liboil/liboil.h>
+#include <math.h>
+
+#include <schroedinger/schroparse.h>
+
+
+GST_DEBUG_CATEGORY_EXTERN (schro_debug);
+#define GST_CAT_DEFAULT schro_debug
+
+/* Standard GObject type-check/cast macros for GstSchroParse. */
+#define GST_TYPE_SCHRO_PARSE \
+  (gst_schro_parse_get_type())
+#define GST_SCHRO_PARSE(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SCHRO_PARSE,GstSchroParse))
+#define GST_SCHRO_PARSE_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SCHRO_PARSE,GstSchroParseClass))
+#define GST_IS_SCHRO_PARSE(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SCHRO_PARSE))
+/* fix: parameter was named 'obj' but the expansion used 'klass', so any
+ * use of this macro would fail to compile */
+#define GST_IS_SCHRO_PARSE_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SCHRO_PARSE))
+
+typedef struct _GstSchroParse GstSchroParse;
+typedef struct _GstSchroParseClass GstSchroParseClass;
+
+/* Container formats the parser can target; selected in start(). */
+typedef enum
+{
+  GST_SCHRO_PARSE_OUTPUT_OGG,
+  GST_SCHRO_PARSE_OUTPUT_QUICKTIME,
+  GST_SCHRO_PARSE_OUTPUT_AVI,
+  GST_SCHRO_PARSE_OUTPUT_MPEG_TS,
+  GST_SCHRO_PARSE_OUTPUT_MP4
+} GstSchroParseOutputType;
+
+struct _GstSchroParse
+{
+  GstBaseVideoParse base_video_parse;
+
+  GstPad *sinkpad, *srcpad;
+
+  GstSchroParseOutputType output_format;
+
+  /* copy of the raw sequence-header parse unit */
+  GstBuffer *seq_header_buffer;
+
+  /* state */
+
+
+  gboolean have_picture;
+  int buf_picture_number;
+  int seq_hdr_picture_number;
+  int picture_number;
+
+  guint64 last_granulepos;
+
+  int bytes_per_picture;
+};
+
+struct _GstSchroParseClass
+{
+  GstBaseVideoParseClass base_video_parse_class;
+};
+
+/* GstSchroParse signals and args */
+enum
+{
+  LAST_SIGNAL
+};
+
+enum
+{
+  ARG_0
+};
+
+static void gst_schro_parse_finalize (GObject * object);
+
+static gboolean gst_schro_parse_start (GstBaseVideoParse * base_video_parse);
+static gboolean gst_schro_parse_stop (GstBaseVideoParse * base_video_parse);
+static gboolean gst_schro_parse_reset (GstBaseVideoParse * base_video_parse);
+static int gst_schro_parse_scan_for_sync (GstAdapter * adapter,
+ gboolean at_eos, int offset, int n);
+static gboolean gst_schro_parse_parse_data (GstBaseVideoParse *
+ base_video_parse, gboolean at_eos);
+static gboolean gst_schro_parse_shape_output (GstBaseVideoParse *
+ base_video_parse, GstVideoFrame * frame);
+static GstCaps *gst_schro_parse_get_caps (GstBaseVideoParse * base_video_parse);
+
+
+
+static GstStaticPadTemplate gst_schro_parse_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-dirac")
+ );
+
+static GstStaticPadTemplate gst_schro_parse_src_template =
+ GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS
+ ("video/x-dirac;video/x-qt-part;video/x-avi-part;video/x-mp4-part")
+ );
+
+GST_BOILERPLATE (GstSchroParse, gst_schro_parse, GstBaseVideoParse,
+ GST_TYPE_BASE_VIDEO_PARSE);
+
+/* Register pad templates and element metadata (GStreamer 0.10 base_init). */
+static void
+gst_schro_parse_base_init (gpointer g_class)
+{
+  static GstElementDetails compress_details =
+      GST_ELEMENT_DETAILS ("Dirac Parser",
+      "Codec/Parser/Video",
+      "Parse Dirac streams",
+      "David Schleef <ds@schleef.org>");
+  GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&gst_schro_parse_src_template));
+  gst_element_class_add_pad_template (element_class,
+      gst_static_pad_template_get (&gst_schro_parse_sink_template));
+
+  gst_element_class_set_details (element_class, &compress_details);
+}
+
+/* Class init: wire up GstBaseVideoParse virtual methods. */
+static void
+gst_schro_parse_class_init (GstSchroParseClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *element_class;
+  GstBaseVideoParseClass *base_video_parse_class;
+
+  gobject_class = G_OBJECT_CLASS (klass);
+  element_class = GST_ELEMENT_CLASS (klass);
+  base_video_parse_class = GST_BASE_VIDEO_PARSE_CLASS (klass);
+
+  gobject_class->finalize = gst_schro_parse_finalize;
+
+  base_video_parse_class->start = GST_DEBUG_FUNCPTR (gst_schro_parse_start);
+  base_video_parse_class->stop = GST_DEBUG_FUNCPTR (gst_schro_parse_stop);
+  base_video_parse_class->reset = GST_DEBUG_FUNCPTR (gst_schro_parse_reset);
+  base_video_parse_class->parse_data =
+      GST_DEBUG_FUNCPTR (gst_schro_parse_parse_data);
+  base_video_parse_class->shape_output =
+      GST_DEBUG_FUNCPTR (gst_schro_parse_shape_output);
+  base_video_parse_class->scan_for_sync =
+      GST_DEBUG_FUNCPTR (gst_schro_parse_scan_for_sync);
+  base_video_parse_class->get_caps =
+      GST_DEBUG_FUNCPTR (gst_schro_parse_get_caps);
+
+}
+
+/* Instance init: default to OGG output and tell the base class Dirac may
+ * reorder up to 2 frames. */
+static void
+gst_schro_parse_init (GstSchroParse * schro_parse, GstSchroParseClass * klass)
+{
+  GstBaseVideoParse *base_video_parse = GST_BASE_VIDEO_PARSE (schro_parse);
+
+  GST_DEBUG ("gst_schro_parse_init");
+
+  schro_parse->output_format = GST_SCHRO_PARSE_OUTPUT_OGG;
+
+  base_video_parse->reorder_depth = 2;
+}
+
+/* Reset vfunc: currently nothing to reset beyond the base class. */
+static gboolean
+gst_schro_parse_reset (GstBaseVideoParse * base_video_parse)
+{
+  GstSchroParse *schro_parse;
+
+  schro_parse = GST_SCHRO_PARSE (base_video_parse);
+
+  GST_DEBUG ("reset");
+
+  return TRUE;
+}
+
+/* Finalize: chains up only.
+ * NOTE(review): seq_header_buffer is never unreffed here — confirm where
+ * it is released. */
+static void
+gst_schro_parse_finalize (GObject * object)
+{
+  GstSchroParse *schro_parse;
+
+  g_return_if_fail (GST_IS_SCHRO_PARSE (object));
+  schro_parse = GST_SCHRO_PARSE (object);
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Negotiate the output container from the downstream-allowed caps. */
+static gboolean
+gst_schro_parse_start (GstBaseVideoParse * base_video_parse)
+{
+  GstSchroParse *schro_parse = GST_SCHRO_PARSE (base_video_parse);
+  GstCaps *caps;
+  GstStructure *structure;
+
+  GST_DEBUG ("start");
+  caps =
+      gst_pad_get_allowed_caps (GST_BASE_VIDEO_CODEC_SRC_PAD
+      (base_video_parse));
+
+  /* fix: gst_pad_get_allowed_caps can return NULL (unlinked pad); the old
+   * code passed NULL straight to gst_caps_is_empty */
+  if (caps == NULL || gst_caps_is_empty (caps)) {
+    if (caps != NULL)
+      gst_caps_unref (caps);
+    return FALSE;
+  }
+
+  structure = gst_caps_get_structure (caps, 0);
+
+  if (gst_structure_has_name (structure, "video/x-dirac")) {
+    schro_parse->output_format = GST_SCHRO_PARSE_OUTPUT_OGG;
+  } else if (gst_structure_has_name (structure, "video/x-qt-part")) {
+    schro_parse->output_format = GST_SCHRO_PARSE_OUTPUT_QUICKTIME;
+  } else if (gst_structure_has_name (structure, "video/x-avi-part")) {
+    schro_parse->output_format = GST_SCHRO_PARSE_OUTPUT_AVI;
+  } else if (gst_structure_has_name (structure, "video/x-mpegts-part")) {
+    schro_parse->output_format = GST_SCHRO_PARSE_OUTPUT_MPEG_TS;
+  } else if (gst_structure_has_name (structure, "video/x-mp4-part")) {
+    schro_parse->output_format = GST_SCHRO_PARSE_OUTPUT_MP4;
+  } else {
+    /* fix: this error path previously leaked 'caps' */
+    gst_caps_unref (caps);
+    return FALSE;
+  }
+
+  gst_caps_unref (caps);
+  return TRUE;
+}
+
+/* Stop vfunc: no parser-specific state to tear down. */
+static gboolean
+gst_schro_parse_stop (GstBaseVideoParse * base_video_parse)
+{
+  return TRUE;
+}
+
+/* Decode a Dirac sequence header: stash a copy of the raw parse unit and
+ * push frame rate / geometry / PAR into the base-class video state.
+ * 'data'/'size' cover the whole parse unit; the +13 skips the
+ * SCHRO_PARSE_HEADER_SIZE parse-unit header.
+ * NOTE(review): seq_header_buffer is overwritten without unreffing any
+ * previous buffer — repeated sequence headers would leak; confirm. */
+static void
+parse_sequence_header (GstSchroParse * schro_parse, guint8 * data, int size)
+{
+  SchroVideoFormat video_format;
+  int ret;
+  GstVideoState *state;
+
+  GST_DEBUG ("parse_sequence_header size=%d", size);
+
+  state = gst_base_video_parse_get_state (GST_BASE_VIDEO_PARSE (schro_parse));
+
+  schro_parse->seq_header_buffer = gst_buffer_new_and_alloc (size);
+  memcpy (GST_BUFFER_DATA (schro_parse->seq_header_buffer), data, size);
+
+  ret = schro_parse_decode_sequence_header (data + 13, size - 13,
+      &video_format);
+  if (ret) {
+    state->fps_n = video_format.frame_rate_numerator;
+    state->fps_d = video_format.frame_rate_denominator;
+    GST_DEBUG ("Frame rate is %d/%d", state->fps_n, state->fps_d);
+
+    state->width = video_format.width;
+    state->height = video_format.height;
+    GST_DEBUG ("Frame dimensions are %d x %d\n", state->width, state->height);
+
+    state->clean_width = video_format.clean_width;
+    state->clean_height = video_format.clean_height;
+    state->clean_offset_left = video_format.left_offset;
+    state->clean_offset_top = video_format.top_offset;
+
+    state->par_n = video_format.aspect_ratio_numerator;
+    state->par_d = video_format.aspect_ratio_denominator;
+    GST_DEBUG ("Pixel aspect ratio is %d/%d", state->par_n, state->par_d);
+
+    gst_base_video_parse_set_state (GST_BASE_VIDEO_PARSE (schro_parse), state);
+  } else {
+    GST_WARNING ("Failed to get frame rate from sequence header");
+  }
+
+}
+
+/* Scan the adapter for the Dirac sync word 'BBCD' (0x42424344) starting
+ * at 'offset', looking at up to 'n' candidate positions. Returns the
+ * offset of the sync word, or the number of bytes that can be skipped. */
+static int
+gst_schro_parse_scan_for_sync (GstAdapter * adapter, gboolean at_eos,
+    int offset, int n)
+{
+  int n_available = gst_adapter_available (adapter) - offset;
+
+  if (n_available < 4) {
+    if (at_eos) {
+      return n_available;
+    } else {
+      return 0;
+    }
+  }
+
+  /* a 4-byte sync word needs 3 bytes beyond each candidate offset */
+  n_available -= 3;
+
+  /* fix: n_available was already reduced by 3 above; subtracting 3 again
+   * made the scan window 3 bytes too short and could miss a sync word
+   * near the end of the adapter */
+  return gst_adapter_masked_scan_uint32 (adapter, 0xffffffff, 0x42424344,
+      offset, MIN (n, n_available));
+}
+
+/* GstBaseVideoParse::parse_data vfunc: consume one Dirac parse unit from
+ * the input adapter.  The 13-byte parse-info header is
+ *   "BBCD" | parse code (1) | next offset (4, BE) | prev offset (4, BE).
+ * Returns NEED_DATA until a whole unit is buffered, and finishes a frame
+ * on picture or end-of-sequence units. */
+static GstFlowReturn
+gst_schro_parse_parse_data (GstBaseVideoParse * base_video_parse,
+    gboolean at_eos)
+{
+  GstSchroParse *schro_parse;
+  unsigned char header[SCHRO_PARSE_HEADER_SIZE];
+  int next;
+  int prev;
+  int parse_code;
+
+  GST_DEBUG ("parse_data");
+
+  schro_parse = GST_SCHRO_PARSE (base_video_parse);
+
+  if (gst_adapter_available (base_video_parse->input_adapter) <
+      SCHRO_PARSE_HEADER_SIZE) {
+    return GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA;
+  }
+
+  GST_DEBUG ("available %d",
+      gst_adapter_available (base_video_parse->input_adapter));
+
+  /* peek (not flush) the parse-info header */
+  gst_adapter_copy (base_video_parse->input_adapter, header, 0,
+      SCHRO_PARSE_HEADER_SIZE);
+
+  parse_code = header[4];
+  next = GST_READ_UINT32_BE (header + 5);
+  prev = GST_READ_UINT32_BE (header + 9);
+
+  GST_DEBUG ("%08x %02x %08x %08x",
+      GST_READ_UINT32_BE (header), parse_code, next, prev);
+
+  /* sanity-check sync word and offsets; implausible offsets (top nibble
+   * set would mean a >256MB unit) are treated as loss of sync */
+  if (memcmp (header, "BBCD", 4) != 0 ||
+      (next & 0xf0000000) || (prev & 0xf0000000)) {
+    gst_base_video_parse_lost_sync (base_video_parse);
+    return GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA;
+  }
+
+  if (SCHRO_PARSE_CODE_IS_END_OF_SEQUENCE (parse_code)) {
+    GstVideoFrame *frame;
+
+    if (next != 0 && next != SCHRO_PARSE_HEADER_SIZE) {
+      GST_WARNING ("next is not 0 or 13 in EOS packet (%d)", next);
+    }
+
+    /* EOS units carry only the 13-byte header */
+    gst_base_video_parse_add_to_frame (base_video_parse,
+        SCHRO_PARSE_HEADER_SIZE);
+
+    frame = gst_base_video_parse_get_frame (base_video_parse);
+    frame->is_eos = TRUE;
+
+    SCHRO_DEBUG ("eos");
+
+    return gst_base_video_parse_finish_frame (base_video_parse);
+  }
+
+  /* wait until the whole unit ('next' bytes) is in the adapter */
+  if (gst_adapter_available (base_video_parse->input_adapter) < next) {
+    return GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA;
+  }
+
+  if (SCHRO_PARSE_CODE_IS_SEQ_HEADER (parse_code)) {
+    guint8 *data;
+
+    data = g_malloc (next);
+
+    gst_adapter_copy (base_video_parse->input_adapter, data, 0, next);
+    parse_sequence_header (schro_parse, data, next);
+
+    /* sequence headers mark seekable sync points */
+    base_video_parse->current_frame->is_sync_point = TRUE;
+
+    g_free (data);
+  }
+
+  /* drop everything seen before the first sequence header */
+  if (schro_parse->seq_header_buffer == NULL) {
+    gst_adapter_flush (base_video_parse->input_adapter, next);
+    return GST_FLOW_OK;
+  }
+
+  if (SCHRO_PARSE_CODE_IS_PICTURE (parse_code)) {
+    GstVideoFrame *frame;
+    guint8 tmp[4];
+
+    frame = gst_base_video_parse_get_frame (base_video_parse);
+
+#if 0
+    if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_TIMESTAMP (buf))) {
+      frame->presentation_timestamp = GST_BUFFER_TIMESTAMP (buf);
+    }
+#endif
+
+    /* picture number is the 4 bytes right after the parse-info header */
+    gst_adapter_copy (base_video_parse->input_adapter, tmp,
+        SCHRO_PARSE_HEADER_SIZE, 4);
+
+    frame->presentation_frame_number = GST_READ_UINT32_BE (tmp);
+
+    gst_base_video_parse_add_to_frame (base_video_parse, next);
+
+    /* a picture completes the current frame */
+    return gst_base_video_parse_finish_frame (base_video_parse);
+  } else {
+    /* auxiliary/padding units accumulate into the current frame */
+    gst_base_video_parse_add_to_frame (base_video_parse, next);
+  }
+
+  return GST_FLOW_OK;
+}
+
+/* Attach Ogg granulepos metadata to an outgoing Dirac packet.  The
+ * Dirac-in-Ogg mapping packs the granulepos as (hi << 22) | low, where
+ * hi encodes the presentation position and low the delay / distance
+ * from the last sync point. */
+static GstFlowReturn
+gst_schro_parse_shape_output_ogg (GstBaseVideoParse * base_video_parse,
+    GstVideoFrame * frame)
+{
+  GstSchroParse *schro_parse;
+  int delay;
+  int dist;
+  int pt;
+  int dt;
+  guint64 granulepos_hi;
+  guint64 granulepos_low;
+  GstBuffer *buf = frame->src_buffer;
+
+  schro_parse = GST_SCHRO_PARSE (base_video_parse);
+
+  /* presentation/decode positions in half-frame units, as the mapping
+   * requires (the unused 'dpn' local was dropped) */
+  pt = frame->presentation_frame_number * 2;
+  dt = frame->decode_frame_number * 2;
+  delay = pt - dt;
+  dist = frame->distance_from_sync;
+
+  GST_DEBUG ("sys %d dpn %d pt %d dt %d delay %d dist %d",
+      (int) frame->system_frame_number,
+      (int) frame->decode_frame_number, pt, dt, delay, dist);
+
+  granulepos_hi = (((guint64) pt - delay) << 9) | ((dist >> 8));
+  granulepos_low = (delay << 9) | (dist & 0xff);
+  /* G_GUINT64_FORMAT instead of %lld: %lld is wrong for guint64 on
+   * platforms where long long formatting differs (e.g. win32) */
+  GST_DEBUG ("granulepos %" G_GUINT64_FORMAT ":%" G_GUINT64_FORMAT,
+      granulepos_hi, granulepos_low);
+
+  if (frame->is_eos) {
+    /* EOS packets reuse the granulepos of the last real packet */
+    GST_BUFFER_OFFSET_END (buf) = schro_parse->last_granulepos;
+  } else {
+    schro_parse->last_granulepos = (granulepos_hi << 22) | (granulepos_low);
+    GST_BUFFER_OFFSET_END (buf) = schro_parse->last_granulepos;
+  }
+
+  return gst_base_video_parse_push (base_video_parse, buf);
+}
+
+/* Shape a frame for QuickTime muxing: stamp the end offset with the
+ * frame's timestamp and set/clear the delta-unit flag so the muxer can
+ * identify keyframes. */
+static GstFlowReturn
+gst_schro_parse_shape_output_quicktime (GstBaseVideoParse * base_video_parse,
+    GstVideoFrame * frame)
+{
+  GstBuffer *buf = frame->src_buffer;
+  const GstVideoState *state;
+
+  state = gst_base_video_parse_get_state (base_video_parse);
+
+  GST_BUFFER_OFFSET_END (buf) = gst_video_state_get_timestamp (state,
+      frame->system_frame_number);
+
+  /* only a sync point with no reordering counts as a keyframe here */
+  if (frame->is_sync_point &&
+      frame->presentation_frame_number == frame->system_frame_number) {
+    GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+    GST_DEBUG ("sync point");
+  } else {
+    GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+  }
+
+  return gst_base_video_parse_push (base_video_parse, buf);
+}
+
+/* Shape a frame for MPEG-TS muxing: no extra per-buffer metadata is
+ * needed, so the buffer is pushed as-is.  (The previous version fetched
+ * the codec state without using it, triggering a set-but-unused-variable
+ * warning; the call had no other effect and was removed.) */
+static GstFlowReturn
+gst_schro_parse_shape_output_mpeg_ts (GstBaseVideoParse * base_video_parse,
+    GstVideoFrame * frame)
+{
+  return gst_base_video_parse_push (base_video_parse, frame->src_buffer);
+}
+
+/* Dispatch to the container-specific shaping function selected by the
+ * negotiated output format.  AVI and MP4 outputs have no shaper yet and
+ * fall through to GST_FLOW_ERROR. */
+static GstFlowReturn
+gst_schro_parse_shape_output (GstBaseVideoParse * base_video_parse,
+    GstVideoFrame * frame)
+{
+  GstSchroParse *schro_parse;
+
+  schro_parse = GST_SCHRO_PARSE (base_video_parse);
+
+  switch (schro_parse->output_format) {
+    case GST_SCHRO_PARSE_OUTPUT_OGG:
+      return gst_schro_parse_shape_output_ogg (base_video_parse, frame);
+    case GST_SCHRO_PARSE_OUTPUT_QUICKTIME:
+      return gst_schro_parse_shape_output_quicktime (base_video_parse, frame);
+    case GST_SCHRO_PARSE_OUTPUT_MPEG_TS:
+      return gst_schro_parse_shape_output_mpeg_ts (base_video_parse, frame);
+    default:
+      break;
+  }
+
+  return GST_FLOW_ERROR;
+}
+
+/* Build the source caps for the negotiated output format.  For Ogg the
+ * stored sequence header (plus a synthesized end-of-sequence unit) is
+ * attached as the "streamheader" caps field; the other formats just
+ * describe the stream geometry. */
+static GstCaps *
+gst_schro_parse_get_caps (GstBaseVideoParse * base_video_parse)
+{
+  GstCaps *caps;
+  GstVideoState *state;
+  GstSchroParse *schro_parse;
+
+  schro_parse = GST_SCHRO_PARSE (base_video_parse);
+
+  state = gst_base_video_parse_get_state (base_video_parse);
+
+  if (schro_parse->output_format == GST_SCHRO_PARSE_OUTPUT_OGG) {
+    caps = gst_caps_new_simple ("video/x-dirac",
+        "width", G_TYPE_INT, state->width,
+        "height", G_TYPE_INT, state->height,
+        "framerate", GST_TYPE_FRACTION, state->fps_n,
+        state->fps_d,
+        "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
+        state->par_d, NULL);
+
+    GST_BUFFER_FLAG_SET (schro_parse->seq_header_buffer,
+        GST_BUFFER_FLAG_IN_CAPS);
+
+    {
+      GValue array = { 0 };
+      GValue value = { 0 };
+      GstBuffer *buf;
+      int size;
+
+      g_value_init (&array, GST_TYPE_ARRAY);
+      g_value_init (&value, GST_TYPE_BUFFER);
+      size = GST_BUFFER_SIZE (schro_parse->seq_header_buffer);
+      /* streamheader = sequence header + a synthesized 13-byte
+       * end-of-sequence parse unit ("BBCD", EOS code, next=0, prev=size) */
+      buf = gst_buffer_new_and_alloc (size + SCHRO_PARSE_HEADER_SIZE);
+      memcpy (GST_BUFFER_DATA (buf),
+          GST_BUFFER_DATA (schro_parse->seq_header_buffer), size);
+      GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 0, 0x42424344);
+      GST_WRITE_UINT8 (GST_BUFFER_DATA (buf) + size + 4,
+          SCHRO_PARSE_CODE_END_OF_SEQUENCE);
+      GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 5, 0);
+      GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 9, size);
+      gst_value_set_buffer (&value, buf);
+      gst_buffer_unref (buf);
+      gst_value_array_append_value (&array, &value);
+      gst_structure_set_value (gst_caps_get_structure (caps, 0),
+          "streamheader", &array);
+      g_value_unset (&value);
+      g_value_unset (&array);
+    }
+  } else if (schro_parse->output_format == GST_SCHRO_PARSE_OUTPUT_QUICKTIME) {
+    caps = gst_caps_new_simple ("video/x-qt-part",
+        "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
+        "width", G_TYPE_INT, state->width,
+        "height", G_TYPE_INT, state->height,
+        "framerate", GST_TYPE_FRACTION, state->fps_n,
+        state->fps_d,
+        "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
+        state->par_d, NULL);
+  } else if (schro_parse->output_format == GST_SCHRO_PARSE_OUTPUT_AVI) {
+    caps = gst_caps_new_simple ("video/x-avi-part",
+        "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
+        "width", G_TYPE_INT, state->width,
+        "height", G_TYPE_INT, state->height,
+        "framerate", GST_TYPE_FRACTION, state->fps_n,
+        state->fps_d,
+        "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
+        state->par_d, NULL);
+  } else if (schro_parse->output_format == GST_SCHRO_PARSE_OUTPUT_MPEG_TS) {
+    caps = gst_caps_new_simple ("video/x-mpegts-part",
+        "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
+        "width", G_TYPE_INT, state->width,
+        "height", G_TYPE_INT, state->height,
+        "framerate", GST_TYPE_FRACTION, state->fps_n,
+        state->fps_d,
+        "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
+        state->par_d, NULL);
+  } else if (schro_parse->output_format == GST_SCHRO_PARSE_OUTPUT_MP4) {
+    caps = gst_caps_new_simple ("video/x-mp4-part",
+        "format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
+        "width", G_TYPE_INT, state->width,
+        "height", G_TYPE_INT, state->height,
+        "framerate", GST_TYPE_FRACTION, state->fps_n,
+        state->fps_d,
+        "pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
+        state->par_d, NULL);
+  } else {
+    g_assert_not_reached ();
+  }
+
+  return caps;
+}
diff --git a/ext/schroedinger/gstschroutils.c b/ext/schroedinger/gstschroutils.c
new file mode 100644
index 00000000..9f6a5793
--- /dev/null
+++ b/ext/schroedinger/gstschroutils.c
@@ -0,0 +1,149 @@
+/* Schrodinger
+ * Copyright (C) 2008 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+//#define SCHRO_ENABLE_UNSTABLE_API
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <schroedinger/schro.h>
+#include <schroedinger/schrobitstream.h>
+#include <schroedinger/schrovirtframe.h>
+#include <math.h>
+#include <string.h>
+
+GST_DEBUG_CATEGORY_EXTERN (schro_debug);
+#define GST_CAT_DEFAULT schro_debug
+
+
+
+
+/* SchroFrame free callback: drop the ref on the GstBuffer that backs
+ * the frame's data (set up in gst_schro_buffer_wrap). */
+static void
+gst_schro_frame_free (SchroFrame * frame, void *priv)
+{
+  gst_buffer_unref (GST_BUFFER (priv));
+}
+
+/* Wrap a GstBuffer's raw video data in a SchroFrame without copying.
+ * Takes ownership of @buf: the frame's free callback unrefs it.
+ * Only the packed/planar YUV formats listed below are supported. */
+SchroFrame *
+gst_schro_buffer_wrap (GstBuffer * buf, GstVideoFormat format, int width,
+    int height)
+{
+  SchroFrame *frame;
+
+  switch (format) {
+    case GST_VIDEO_FORMAT_I420:
+      frame =
+          schro_frame_new_from_data_I420 (GST_BUFFER_DATA (buf), width, height);
+      break;
+    case GST_VIDEO_FORMAT_YV12:
+      frame =
+          schro_frame_new_from_data_YV12 (GST_BUFFER_DATA (buf), width, height);
+      break;
+    case GST_VIDEO_FORMAT_YUY2:
+      frame =
+          schro_frame_new_from_data_YUY2 (GST_BUFFER_DATA (buf), width, height);
+      break;
+    case GST_VIDEO_FORMAT_UYVY:
+      frame =
+          schro_frame_new_from_data_UYVY (GST_BUFFER_DATA (buf), width, height);
+      break;
+    case GST_VIDEO_FORMAT_AYUV:
+      frame =
+          schro_frame_new_from_data_AYUV (GST_BUFFER_DATA (buf), width, height);
+      break;
+#if 0
+    /* disabled ARGB path: convert via schro virtual frames to 4:2:0 */
+    case GST_VIDEO_FORMAT_ARGB:
+    {
+      SchroFrame *rgbframe =
+          schro_frame_new_from_data_AYUV (GST_BUFFER_DATA (buf), width, height);
+      SchroFrame *vframe1;
+      SchroFrame *vframe2;
+      SchroFrame *vframe3;
+
+      vframe1 = schro_virt_frame_new_unpack (rgbframe);
+      vframe2 = schro_virt_frame_new_color_matrix (vframe1);
+      vframe3 =
+          schro_virt_frame_new_subsample (vframe2, SCHRO_FRAME_FORMAT_U8_420);
+
+      frame = schro_frame_new_and_alloc (NULL, SCHRO_FRAME_FORMAT_U8_420,
+          width, height);
+      schro_virt_frame_render (vframe3, frame);
+      schro_frame_unref (vframe3);
+    }
+      break;
+#endif
+    default:
+      /* NOTE(review): if assertions are disabled (G_DISABLE_ASSERT),
+       * control falls through with 'frame' uninitialized — verify that
+       * callers never pass an unsupported format */
+      g_assert_not_reached ();
+  }
+  schro_frame_set_free_callback (frame, gst_schro_frame_free, buf);
+
+  return frame;
+}
+
+#ifdef GST_BUFFER_FREE_FUNC
+/* GstBuffer free function: release the SchroBuffer whose data the
+ * GstBuffer borrowed (see gst_schro_wrap_schro_buffer). */
+static void
+schro_buf_free_func (gpointer priv)
+{
+  SchroBuffer *buffer = (SchroBuffer *) priv;
+
+  schro_buffer_unref (buffer);
+}
+#endif
+
+/* Wrap a SchroBuffer in a GstBuffer; takes ownership of @buffer.
+ * When GST_BUFFER_FREE_FUNC is available the data is handed over
+ * zero-copy and the SchroBuffer is unreffed when the GstBuffer dies;
+ * otherwise the data is copied and the SchroBuffer is released here
+ * (the previous fallback path leaked it). */
+GstBuffer *
+gst_schro_wrap_schro_buffer (SchroBuffer * buffer)
+{
+  GstBuffer *gstbuf;
+
+#ifdef GST_BUFFER_FREE_FUNC
+  gstbuf = gst_buffer_new ();
+  GST_BUFFER_DATA (gstbuf) = buffer->data;
+  GST_BUFFER_SIZE (gstbuf) = buffer->length;
+  GST_BUFFER_MALLOCDATA (gstbuf) = (void *) buffer;
+  GST_BUFFER_FREE_FUNC (gstbuf) = schro_buf_free_func;
+#else
+  gstbuf = gst_buffer_new_and_alloc (buffer->length);
+  memcpy (GST_BUFFER_DATA (gstbuf), buffer->data, buffer->length);
+  /* the data was copied, so the SchroBuffer we own must be released */
+  schro_buffer_unref (buffer);
+#endif
+
+  return gstbuf;
+}
+
+/* SchroBuffer free callback: drop the ref on the wrapped GstBuffer
+ * (set up in gst_schro_wrap_gst_buffer). */
+static void
+gst_schro_buffer_free (SchroBuffer * buffer, void *priv)
+{
+  gst_buffer_unref (GST_BUFFER (priv));
+}
+
+/* Wrap a GstBuffer in a SchroBuffer without copying.  Takes ownership
+ * of @buffer: the SchroBuffer's free callback unrefs it. */
+SchroBuffer *
+gst_schro_wrap_gst_buffer (GstBuffer * buffer)
+{
+  SchroBuffer *schrobuf;
+
+  schrobuf = schro_buffer_new_with_data (GST_BUFFER_DATA (buffer),
+      GST_BUFFER_SIZE (buffer));
+  schrobuf->free = gst_schro_buffer_free;
+  schrobuf->priv = buffer;
+
+  return schrobuf;
+}
diff --git a/ext/schroedinger/gstschroutils.h b/ext/schroedinger/gstschroutils.h
new file mode 100644
index 00000000..4e8ca2de
--- /dev/null
+++ b/ext/schroedinger/gstschroutils.h
@@ -0,0 +1,34 @@
+/* Schrodinger
+ * Copyright (C) 2008 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef _GST_SCHRO_UTILS_H_
+#define _GST_SCHRO_UTILS_H_
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <schroedinger/schro.h>
+
+/* Zero-copy wrappers between GstBuffer and Schroedinger frame/buffer
+ * types; each function takes ownership of its input (see gstschroutils.c). */
+SchroFrame *
+gst_schro_buffer_wrap (GstBuffer *buf, GstVideoFormat format, int width,
+    int height);
+GstBuffer * gst_schro_wrap_schro_buffer (SchroBuffer *buffer);
+SchroBuffer * gst_schro_wrap_gst_buffer (GstBuffer *buffer);
+
+#endif
+
diff --git a/ext/sdl/sdlvideosink.c b/ext/sdl/sdlvideosink.c
index 85f26534..22c7aa5e 100644
--- a/ext/sdl/sdlvideosink.c
+++ b/ext/sdl/sdlvideosink.c
@@ -25,7 +25,9 @@
#include <signal.h>
#include <string.h>
+#ifdef HAVE_SYS_TIME_H
#include <sys/time.h>
+#endif
#include <stdlib.h>
#include <gst/interfaces/xoverlay.h>
diff --git a/gst-libs/gst/Makefile.am b/gst-libs/gst/Makefile.am
index 3471a759..b123a4c6 100644
--- a/gst-libs/gst/Makefile.am
+++ b/gst-libs/gst/Makefile.am
@@ -1,5 +1,5 @@
-SUBDIRS = interfaces signalprocessor
+SUBDIRS = interfaces signalprocessor video
noinst_HEADERS = gst-i18n-plugin.h gettext.h
-DIST_SUBDIRS = dshow interfaces signalprocessor
+DIST_SUBDIRS = dshow interfaces signalprocessor video
diff --git a/gst-libs/gst/video/Makefile.am b/gst-libs/gst/video/Makefile.am
new file mode 100644
index 00000000..6836a058
--- /dev/null
+++ b/gst-libs/gst/video/Makefile.am
@@ -0,0 +1,24 @@
+
+lib_LTLIBRARIES = libgstbasevideo-@GST_MAJORMINOR@.la
+
+CLEANFILES = $(BUILT_SOURCES)
+
+libgstbasevideo_@GST_MAJORMINOR@_la_SOURCES = \
+ gstbasevideoutils.c \
+ gstbasevideocodec.c \
+ gstbasevideodecoder.c \
+ gstbasevideoencoder.c \
+ gstbasevideoparse.c
+
+libgstbasevideo_@GST_MAJORMINOR@includedir = $(includedir)/gstreamer-@GST_MAJORMINOR@/gst/video
+libgstbasevideo_@GST_MAJORMINOR@include_HEADERS = \
+ gstbasevideoutils.h \
+ gstbasevideocodec.h \
+ gstbasevideodecoder.h \
+ gstbasevideoencoder.h \
+ gstbasevideoparse.h
+
+libgstbasevideo_@GST_MAJORMINOR@_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) -DGST_USE_UNSTABLE_API
+libgstbasevideo_@GST_MAJORMINOR@_la_LIBADD = $(GST_BASE_LIBS) $(GST_LIBS)
+libgstbasevideo_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIB_LDFLAGS) $(GST_ALL_LDFLAGS) $(GST_LT_LDFLAGS)
+
diff --git a/gst-libs/gst/video/gstbasevideocodec.c b/gst-libs/gst/video/gstbasevideocodec.c
new file mode 100644
index 00000000..9cddc3bf
--- /dev/null
+++ b/gst-libs/gst/video/gstbasevideocodec.c
@@ -0,0 +1,565 @@
+/* Schrodinger
+ * Copyright (C) 2006 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstbasevideocodec.h"
+
+#include <string.h>
+#include <math.h>
+
+GST_DEBUG_CATEGORY (basevideo_debug);
+#define GST_CAT_DEFAULT basevideo_debug
+
+/* GstBaseVideoCodec signals and args (none defined yet) */
+enum
+{
+  LAST_SIGNAL
+};
+
+enum
+{
+  ARG_0
+};
+
+static void gst_base_video_codec_finalize (GObject * object);
+
+static GstStateChangeReturn gst_base_video_codec_change_state (GstElement *
+    element, GstStateChange transition);
+
+
+/* abstract base class derived directly from GstElement */
+GST_BOILERPLATE (GstBaseVideoCodec, gst_base_video_codec, GstElement,
+    GST_TYPE_ELEMENT);
+
+/* Register the shared "basevideo" debug category once per class tree. */
+static void
+gst_base_video_codec_base_init (gpointer g_class)
+{
+  GST_DEBUG_CATEGORY_INIT (basevideo_debug, "basevideo", 0,
+      "Base Video Classes");
+
+}
+
+/* Hook up the GObject finalize and GstElement state-change vfuncs. */
+static void
+gst_base_video_codec_class_init (GstBaseVideoCodecClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *element_class;
+
+  gobject_class = G_OBJECT_CLASS (klass);
+  element_class = GST_ELEMENT_CLASS (klass);
+
+  gobject_class->finalize = gst_base_video_codec_finalize;
+
+  element_class->change_state = gst_base_video_codec_change_state;
+}
+
+/* Instance init: create sink and src pads from the subclass's pad
+ * templates (which must be named "sink" and "src") and allocate the
+ * input/output adapters. */
+static void
+gst_base_video_codec_init (GstBaseVideoCodec * base_video_codec,
+    GstBaseVideoCodecClass * klass)
+{
+  GstPadTemplate *pad_template;
+
+  GST_DEBUG ("gst_base_video_codec_init");
+
+  pad_template =
+      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "sink");
+  g_return_if_fail (pad_template != NULL);
+
+  base_video_codec->sinkpad = gst_pad_new_from_template (pad_template, "sink");
+  gst_element_add_pad (GST_ELEMENT (base_video_codec),
+      base_video_codec->sinkpad);
+
+  pad_template =
+      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "src");
+  g_return_if_fail (pad_template != NULL);
+
+  base_video_codec->srcpad = gst_pad_new_from_template (pad_template, "src");
+  gst_pad_use_fixed_caps (base_video_codec->srcpad);
+  gst_element_add_pad (GST_ELEMENT (base_video_codec),
+      base_video_codec->srcpad);
+
+  base_video_codec->input_adapter = gst_adapter_new ();
+  base_video_codec->output_adapter = gst_adapter_new ();
+
+}
+
+/* Reset streaming state: frame counter and both adapters.  Called on
+ * READY<->PAUSED transitions. */
+static void
+gst_base_video_codec_reset (GstBaseVideoCodec * base_video_codec)
+{
+  GST_DEBUG ("reset");
+
+  base_video_codec->system_frame_number = 0;
+
+  gst_adapter_clear (base_video_codec->input_adapter);
+  gst_adapter_clear (base_video_codec->output_adapter);
+
+}
+
+/* GObject finalize: release the adapters and chain up. */
+static void
+gst_base_video_codec_finalize (GObject * object)
+{
+  GstBaseVideoCodec *base_video_codec;
+
+  g_return_if_fail (GST_IS_BASE_VIDEO_CODEC (object));
+  base_video_codec = GST_BASE_VIDEO_CODEC (object);
+
+  if (base_video_codec->input_adapter) {
+    g_object_unref (base_video_codec->input_adapter);
+  }
+  if (base_video_codec->output_adapter) {
+    g_object_unref (base_video_codec->output_adapter);
+  }
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Currently unused (compiled out): query types the codec would answer. */
+#ifdef unused
+static const GstQueryType *
+gst_base_video_codec_get_query_types (GstPad * pad)
+{
+  static const GstQueryType query_types[] = {
+    GST_QUERY_POSITION,
+    GST_QUERY_DURATION,
+    GST_QUERY_CONVERT,
+    0
+  };
+
+  return query_types;
+}
+#endif
+
+/* Disabled format-conversion helpers (frames <-> time); kept for a
+ * future query implementation. */
+#if 0
+static gboolean
+gst_base_video_codec_src_convert (GstPad * pad,
+    GstFormat src_format, gint64 src_value,
+    GstFormat * dest_format, gint64 * dest_value)
+{
+  gboolean res;
+  GstBaseVideoCodec *dec;
+
+  if (src_format == *dest_format) {
+    *dest_value = src_value;
+    return TRUE;
+  }
+
+  dec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad));
+
+  if (src_format == GST_FORMAT_DEFAULT && *dest_format == GST_FORMAT_TIME) {
+    if (dec->fps_d != 0) {
+      *dest_value = gst_util_uint64_scale (granulepos_to_frame (src_value),
+          dec->fps_d * GST_SECOND, dec->fps_n);
+      res = TRUE;
+    } else {
+      res = FALSE;
+    }
+  } else {
+    GST_WARNING ("unhandled conversion from %d to %d", src_format,
+        *dest_format);
+    res = FALSE;
+  }
+
+  gst_object_unref (dec);
+
+  return res;
+}
+
+static gboolean
+gst_base_video_codec_sink_convert (GstPad * pad,
+    GstFormat src_format, gint64 src_value,
+    GstFormat * dest_format, gint64 * dest_value)
+{
+  gboolean res = TRUE;
+  GstBaseVideoCodec *dec;
+
+  if (src_format == *dest_format) {
+    *dest_value = src_value;
+    return TRUE;
+  }
+
+  dec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad));
+
+  /* FIXME: check if we are in a decoding state */
+
+  switch (src_format) {
+    case GST_FORMAT_DEFAULT:
+      switch (*dest_format) {
+        case GST_FORMAT_TIME:
+          *dest_value = gst_util_uint64_scale (src_value,
+              dec->fps_d * GST_SECOND, dec->fps_n);
+          break;
+        default:
+          res = FALSE;
+      }
+      break;
+    case GST_FORMAT_TIME:
+      switch (*dest_format) {
+        case GST_FORMAT_DEFAULT:
+        {
+          *dest_value = gst_util_uint64_scale (src_value,
+              dec->fps_n, dec->fps_d * GST_SECOND);
+          break;
+        }
+        default:
+          res = FALSE;
+          break;
+      }
+      break;
+    default:
+      res = FALSE;
+      break;
+  }
+
+  gst_object_unref (dec);
+
+  return res;
+}
+#endif
+
+/* Currently unused (compiled out): src-pad query handler for
+ * position/duration/convert queries. */
+#ifdef unused
+static gboolean
+gst_base_video_codec_src_query (GstPad * pad, GstQuery * query)
+{
+  GstBaseVideoCodec *base_codec;
+  gboolean res = FALSE;
+
+  base_codec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad));
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_POSITION:
+    {
+      GstFormat format;
+      gint64 time;
+      gint64 value;
+
+      gst_query_parse_position (query, &format, NULL);
+
+      time = gst_util_uint64_scale (base_codec->system_frame_number,
+          base_codec->state.fps_n, base_codec->state.fps_d);
+      time += base_codec->state.segment.time;
+      GST_DEBUG ("query position %lld", time);
+      res = gst_base_video_encoded_video_convert (&base_codec->state,
+          GST_FORMAT_TIME, time, &format, &value);
+      if (!res)
+        goto error;
+
+      gst_query_set_position (query, format, value);
+      break;
+    }
+    case GST_QUERY_DURATION:
+      res = gst_pad_query (GST_PAD_PEER (base_codec->sinkpad), query);
+      if (!res)
+        goto error;
+      break;
+    case GST_QUERY_CONVERT:
+    {
+      GstFormat src_fmt, dest_fmt;
+      gint64 src_val, dest_val;
+
+      GST_DEBUG ("query convert");
+
+      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
+      res = gst_base_video_encoded_video_convert (&base_codec->state,
+          src_fmt, src_val, &dest_fmt, &dest_val);
+      if (!res)
+        goto error;
+      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
+      break;
+    }
+    default:
+      res = gst_pad_query_default (pad, query);
+      break;
+  }
+done:
+  gst_object_unref (base_codec);
+
+  return res;
+error:
+  GST_DEBUG_OBJECT (base_codec, "query failed");
+  goto done;
+}
+#endif
+
+/* Currently unused (compiled out): sink-pad query handler (convert only). */
+#ifdef unused
+static gboolean
+gst_base_video_codec_sink_query (GstPad * pad, GstQuery * query)
+{
+  GstBaseVideoCodec *base_video_codec;
+  gboolean res = FALSE;
+
+  base_video_codec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad));
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_CONVERT:
+    {
+      GstFormat src_fmt, dest_fmt;
+      gint64 src_val, dest_val;
+
+      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
+      res = gst_base_video_encoded_video_convert (&base_video_codec->state,
+          src_fmt, src_val, &dest_fmt, &dest_val);
+      if (!res)
+        goto error;
+      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
+      break;
+    }
+    default:
+      res = gst_pad_query_default (pad, query);
+      break;
+  }
+done:
+  gst_object_unref (base_video_codec);
+
+  return res;
+error:
+  GST_DEBUG_OBJECT (base_video_codec, "query failed");
+  goto done;
+}
+#endif
+
+/* Currently unused (compiled out): src-pad event handler; converts SEEK
+ * events to TIME format before forwarding upstream. */
+#ifdef unused
+static gboolean
+gst_base_video_codec_src_event (GstPad * pad, GstEvent * event)
+{
+  GstBaseVideoCodec *base_video_codec;
+  gboolean res = FALSE;
+
+  base_video_codec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad));
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_SEEK:
+    {
+      GstFormat format, tformat;
+      gdouble rate;
+      GstEvent *real_seek;
+      GstSeekFlags flags;
+      GstSeekType cur_type, stop_type;
+      gint64 cur, stop;
+      gint64 tcur, tstop;
+
+      gst_event_parse_seek (event, &rate, &format, &flags, &cur_type,
+          &cur, &stop_type, &stop);
+      gst_event_unref (event);
+
+      tformat = GST_FORMAT_TIME;
+      res = gst_base_video_encoded_video_convert (&base_video_codec->state,
+          format, cur, &tformat, &tcur);
+      if (!res)
+        goto convert_error;
+      res = gst_base_video_encoded_video_convert (&base_video_codec->state,
+          format, stop, &tformat, &tstop);
+      if (!res)
+        goto convert_error;
+
+      real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME,
+          flags, cur_type, tcur, stop_type, tstop);
+
+      res = gst_pad_push_event (base_video_codec->sinkpad, real_seek);
+
+      break;
+    }
+#if 0
+    case GST_EVENT_QOS:
+    {
+      gdouble proportion;
+      GstClockTimeDiff diff;
+      GstClockTime timestamp;
+
+      gst_event_parse_qos (event, &proportion, &diff, &timestamp);
+
+      GST_OBJECT_LOCK (base_video_codec);
+      base_video_codec->proportion = proportion;
+      base_video_codec->earliest_time = timestamp + diff;
+      GST_OBJECT_UNLOCK (base_video_codec);
+
+      GST_DEBUG_OBJECT (base_video_codec,
+          "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT,
+          GST_TIME_ARGS (timestamp), diff);
+
+      res = gst_pad_push_event (base_video_codec->sinkpad, event);
+      break;
+    }
+#endif
+    default:
+      res = gst_pad_push_event (base_video_codec->sinkpad, event);
+      break;
+  }
+done:
+  gst_object_unref (base_video_codec);
+  return res;
+
+convert_error:
+  GST_DEBUG_OBJECT (base_video_codec, "could not convert format");
+  goto done;
+}
+#endif
+
+/* Currently unused (compiled out): sink-pad event handler for
+ * flush/EOS/newsegment events. */
+#ifdef unused
+static gboolean
+gst_base_video_codec_sink_event (GstPad * pad, GstEvent * event)
+{
+  GstBaseVideoCodec *base_video_codec;
+  gboolean ret = FALSE;
+
+  base_video_codec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad));
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_FLUSH_START:
+      ret = gst_pad_push_event (base_video_codec->srcpad, event);
+      break;
+    case GST_EVENT_FLUSH_STOP:
+      gst_base_video_codec_reset (base_video_codec);
+      ret = gst_pad_push_event (base_video_codec->srcpad, event);
+      break;
+    case GST_EVENT_EOS:
+      if (gst_base_video_codec_push_all (base_video_codec,
+              FALSE) == GST_FLOW_ERROR) {
+        gst_event_unref (event);
+        return FALSE;
+      }
+
+      ret = gst_pad_push_event (base_video_codec->srcpad, event);
+      break;
+    case GST_EVENT_NEWSEGMENT:
+    {
+      gboolean update;
+      GstFormat format;
+      gdouble rate;
+      gint64 start, stop, time;
+
+      gst_event_parse_new_segment (event, &update, &rate, &format, &start,
+          &stop, &time);
+
+      if (format != GST_FORMAT_TIME)
+        goto newseg_wrong_format;
+
+      if (rate <= 0.0)
+        goto newseg_wrong_rate;
+
+      GST_DEBUG ("newsegment %lld %lld", start, time);
+      gst_segment_set_newsegment (&base_video_codec->state.segment, update,
+          rate, format, start, stop, time);
+
+      ret = gst_pad_push_event (base_video_codec->srcpad, event);
+      break;
+    }
+    default:
+      ret = gst_pad_push_event (base_video_codec->srcpad, event);
+      break;
+  }
+done:
+  gst_object_unref (base_video_codec);
+  return ret;
+
+newseg_wrong_format:
+  GST_DEBUG_OBJECT (base_video_codec, "received non TIME newsegment");
+  gst_event_unref (event);
+  goto done;
+
+newseg_wrong_rate:
+  GST_DEBUG_OBJECT (base_video_codec, "negative rates not supported");
+  gst_event_unref (event);
+  goto done;
+}
+#endif
+
+
+/* GstElement::change_state: reset the codec state when entering PAUSED
+ * from READY and when leaving PAUSED for READY. */
+static GstStateChangeReturn
+gst_base_video_codec_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstBaseVideoCodec *base_video_codec = GST_BASE_VIDEO_CODEC (element);
+  GstStateChangeReturn ret;
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      gst_base_video_codec_reset (base_video_codec);
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+      break;
+    default:
+      break;
+  }
+
+  ret = parent_class->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      gst_base_video_codec_reset (base_video_codec);
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      break;
+    default:
+      break;
+  }
+
+  return ret;
+}
+
+/* Disabled: picture-number -> timestamp helper kept for reference. */
+#if 0
+guint64
+gst_base_video_codec_get_timestamp (GstBaseVideoCodec * base_video_codec,
+    int picture_number)
+{
+  if (picture_number < 0) {
+    return base_video_codec->timestamp_offset -
+        (gint64) gst_util_uint64_scale (-picture_number,
+        base_video_codec->state.fps_d * GST_SECOND,
+        base_video_codec->state.fps_n);
+  } else {
+    return base_video_codec->timestamp_offset +
+        gst_util_uint64_scale (picture_number,
+        base_video_codec->state.fps_d * GST_SECOND,
+        base_video_codec->state.fps_n);
+  }
+}
+#endif
+
+/* Allocate a zero-initialized GstVideoFrame and stamp it with the next
+ * monotonically increasing system frame number. */
+GstVideoFrame *
+gst_base_video_codec_new_frame (GstBaseVideoCodec * base_video_codec)
+{
+  GstVideoFrame *frame;
+
+  frame = g_malloc0 (sizeof (GstVideoFrame));
+
+  frame->system_frame_number = base_video_codec->system_frame_number;
+  base_video_codec->system_frame_number++;
+
+  return frame;
+}
+
+/* Free a GstVideoFrame, dropping its sink buffer reference.  The src
+ * buffer unref is deliberately disabled (#if 0) — presumably ownership
+ * of src_buffer passes to the pusher; verify against callers. */
+void
+gst_base_video_codec_free_frame (GstVideoFrame * frame)
+{
+  if (frame->sink_buffer) {
+    gst_buffer_unref (frame->sink_buffer);
+  }
+#if 0
+  if (frame->src_buffer) {
+    gst_buffer_unref (frame->src_buffer);
+  }
+#endif
+
+  g_free (frame);
+}
diff --git a/gst-libs/gst/video/gstbasevideocodec.h b/gst-libs/gst/video/gstbasevideocodec.h
new file mode 100644
index 00000000..b6acf62e
--- /dev/null
+++ b/gst-libs/gst/video/gstbasevideocodec.h
@@ -0,0 +1,150 @@
+/* GStreamer
+ * Copyright (C) 2008 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef _GST_BASE_VIDEO_CODEC_H_
+#define _GST_BASE_VIDEO_CODEC_H_
+
+#ifndef GST_USE_UNSTABLE_API
+#warning "GstBaseVideoCodec is unstable API and may change in future."
+#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
+#endif
+
+#include <gst/gst.h>
+#include <gst/video/gstbasevideoutils.h>
+
+G_BEGIN_DECLS
+
/* Standard GObject cast/type-check macros for GstBaseVideoCodec. */
#define GST_TYPE_BASE_VIDEO_CODEC \
  (gst_base_video_codec_get_type())
#define GST_BASE_VIDEO_CODEC(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BASE_VIDEO_CODEC,GstBaseVideoCodec))
#define GST_BASE_VIDEO_CODEC_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BASE_VIDEO_CODEC,GstBaseVideoCodecClass))
#define GST_BASE_VIDEO_CODEC_GET_CLASS(obj) \
  (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_BASE_VIDEO_CODEC,GstBaseVideoCodecClass))
#define GST_IS_BASE_VIDEO_CODEC(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BASE_VIDEO_CODEC))
/* FIX: the parameter was named 'obj' while the expansion referenced
 * 'klass', so any use of this macro failed to compile (or silently
 * picked up an unrelated 'klass' in scope). */
#define GST_IS_BASE_VIDEO_CODEC_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BASE_VIDEO_CODEC))
+
/**
 * GST_BASE_VIDEO_CODEC_SINK_NAME:
 *
 * The name of the templates for the sink pad.
 */
#define GST_BASE_VIDEO_CODEC_SINK_NAME    "sink"
/**
 * GST_BASE_VIDEO_CODEC_SRC_NAME:
 *
 * The name of the templates for the source pad.
 */
#define GST_BASE_VIDEO_CODEC_SRC_NAME     "src"

/**
 * GST_BASE_VIDEO_CODEC_SRC_PAD:
 * @obj: base video codec instance
 *
 * Gives the pointer to the source #GstPad object of the element.
 */
#define GST_BASE_VIDEO_CODEC_SRC_PAD(obj) (((GstBaseVideoCodec *) (obj))->srcpad)

/**
 * GST_BASE_VIDEO_CODEC_SINK_PAD:
 * @obj: base video codec instance
 *
 * Gives the pointer to the sink #GstPad object of the element.
 */
#define GST_BASE_VIDEO_CODEC_SINK_PAD(obj) (((GstBaseVideoCodec *) (obj))->sinkpad)

/**
 * GST_BASE_VIDEO_CODEC_FLOW_NEED_DATA:
 *
 * Custom (non-error) flow return used by subclass parse_data
 * implementations to signal that more input data must be accumulated
 * before a frame can be produced.
 */
#define GST_BASE_VIDEO_CODEC_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS
+
typedef struct _GstBaseVideoCodec GstBaseVideoCodec;
typedef struct _GstBaseVideoCodecClass GstBaseVideoCodecClass;

/**
 * GstBaseVideoCodec:
 *
 * Common base object for video encoders, decoders and parsers.  Owns the
 * sink/source pads and the adapters used to reassemble input data and to
 * accumulate output for the frame being built.  The fields are private;
 * subclasses access state through the API and the macros above.
 */
struct _GstBaseVideoCodec
{
  GstElement element;

  /*< private >*/
  GstPad *sinkpad;
  GstPad *srcpad;
  GstAdapter *input_adapter;    /* incoming bytes not yet consumed */
  GstAdapter *output_adapter;   /* bytes accumulated for the current frame */

#if 0
  /* FIXME need to move from subclasses */
  GstVideoState state;
#endif

  //int reorder_depth;

  //gboolean have_sync;
  //gboolean discont;
  //gboolean started;

  //GstVideoFrame *current_frame;
  //int distance_from_sync;

  //gboolean sink_clipping;

  //guint64 presentation_frame_number;
  /* next value handed out by gst_base_video_codec_new_frame() */
  guint64 system_frame_number;

  //GstCaps *caps;
  //gboolean set_output_caps;

  //GstClockTime buffer_timestamp;

  /* base timestamp from which frame timestamps are derived */
  GstClockTime timestamp_offset;
};
+
/**
 * GstBaseVideoCodecClass:
 * @start:         called when processing starts; set up codec state.
 * @stop:          called when processing stops; release codec state.
 * @reset:         clear per-stream state (e.g. on flush/discont).
 * @parse_data:    consume bytes from the input adapter; return
 *                 GST_BASE_VIDEO_CODEC_FLOW_NEED_DATA when more input
 *                 is required.
 * @scan_for_sync: locate the next sync pattern in @adapter between
 *                 @offset and @offset + @n; return its offset.
 * @shape_output:  push or reorder a finished frame downstream.
 * @get_caps:      produce the source caps for the current stream state.
 */
struct _GstBaseVideoCodecClass
{
  GstElementClass element_class;

  gboolean (*start) (GstBaseVideoCodec *codec);
  gboolean (*stop) (GstBaseVideoCodec *codec);
  gboolean (*reset) (GstBaseVideoCodec *codec);
  GstFlowReturn (*parse_data) (GstBaseVideoCodec *codec, gboolean at_eos);
  int (*scan_for_sync) (GstAdapter *adapter, gboolean at_eos,
      int offset, int n);
  GstFlowReturn (*shape_output) (GstBaseVideoCodec *codec, GstVideoFrame *frame);
  GstCaps *(*get_caps) (GstBaseVideoCodec *codec);

};
+
+GType gst_base_video_codec_get_type (void);
+
+#if 0
+guint64 gst_base_video_codec_get_timestamp (GstBaseVideoCodec *codec,
+ int picture_number);
+#endif
+
+GstVideoFrame * gst_base_video_codec_new_frame (GstBaseVideoCodec *base_video_codec);
+void gst_base_video_codec_free_frame (GstVideoFrame *frame);
+
+
+G_END_DECLS
+
+#endif
+
diff --git a/gst-libs/gst/video/gstbasevideodecoder.c b/gst-libs/gst/video/gstbasevideodecoder.c
new file mode 100644
index 00000000..9aa79eee
--- /dev/null
+++ b/gst-libs/gst/video/gstbasevideodecoder.c
@@ -0,0 +1,1173 @@
+/* GStreamer
+ * Copyright (C) 2008 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstbasevideodecoder.h"
+
+#include <string.h>
+
+GST_DEBUG_CATEGORY_EXTERN (basevideo_debug);
+#define GST_CAT_DEFAULT basevideo_debug
+
+static void gst_base_video_decoder_finalize (GObject * object);
+
+static gboolean gst_base_video_decoder_sink_setcaps (GstPad * pad,
+ GstCaps * caps);
+static gboolean gst_base_video_decoder_sink_event (GstPad * pad,
+ GstEvent * event);
+static gboolean gst_base_video_decoder_src_event (GstPad * pad,
+ GstEvent * event);
+static GstFlowReturn gst_base_video_decoder_chain (GstPad * pad,
+ GstBuffer * buf);
+static gboolean gst_base_video_decoder_sink_query (GstPad * pad,
+ GstQuery * query);
+static GstStateChangeReturn gst_base_video_decoder_change_state (GstElement *
+ element, GstStateChange transition);
+static const GstQueryType *gst_base_video_decoder_get_query_types (GstPad *
+ pad);
+static gboolean gst_base_video_decoder_src_query (GstPad * pad,
+ GstQuery * query);
+static gboolean gst_base_video_decoder_src_convert (GstPad * pad,
+ GstFormat src_format, gint64 src_value, GstFormat * dest_format,
+ gint64 * dest_value);
+static void gst_base_video_decoder_reset (GstBaseVideoDecoder *
+ base_video_decoder);
+
+static guint64
+gst_base_video_decoder_get_timestamp (GstBaseVideoDecoder * base_video_decoder,
+ int picture_number);
+static guint64
+gst_base_video_decoder_get_field_timestamp (GstBaseVideoDecoder *
+ base_video_decoder, int field_offset);
+static GstVideoFrame *gst_base_video_decoder_new_frame (GstBaseVideoDecoder *
+ base_video_decoder);
+static void gst_base_video_decoder_free_frame (GstVideoFrame * frame);
+
+GST_BOILERPLATE (GstBaseVideoDecoder, gst_base_video_decoder,
+ GstBaseVideoCodec, GST_TYPE_BASE_VIDEO_CODEC);
+
+static void
+gst_base_video_decoder_base_init (gpointer g_class)
+{
+
+}
+
/* GObject class-init: hook up finalize and the state-change handler. */
static void
gst_base_video_decoder_class_init (GstBaseVideoDecoderClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;

  gobject_class = G_OBJECT_CLASS (klass);
  gstelement_class = GST_ELEMENT_CLASS (klass);

  gobject_class->finalize = gst_base_video_decoder_finalize;

  gstelement_class->change_state = gst_base_video_decoder_change_state;

  /* NOTE(review): GST_BOILERPLATE's generated trampoline already assigns
   * parent_class; this explicit assignment looks redundant — confirm
   * against the macro before removing. */
  parent_class = g_type_class_peek_parent (klass);
}
+
/* Instance init: install pad functions on the pads created by the base
 * codec class, create the input/output adapters, and reset stream state.
 * The decoder starts out with sink clipping enabled. */
static void
gst_base_video_decoder_init (GstBaseVideoDecoder * base_video_decoder,
    GstBaseVideoDecoderClass * klass)
{
  GstPad *pad;

  GST_DEBUG ("gst_base_video_decoder_init");

  pad = GST_BASE_VIDEO_CODEC_SINK_PAD (base_video_decoder);

  gst_pad_set_chain_function (pad, gst_base_video_decoder_chain);
  gst_pad_set_event_function (pad, gst_base_video_decoder_sink_event);
  gst_pad_set_setcaps_function (pad, gst_base_video_decoder_sink_setcaps);
  gst_pad_set_query_function (pad, gst_base_video_decoder_sink_query);

  pad = GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder);

  gst_pad_set_event_function (pad, gst_base_video_decoder_src_event);
  gst_pad_set_query_type_function (pad, gst_base_video_decoder_get_query_types);
  gst_pad_set_query_function (pad, gst_base_video_decoder_src_query);

  base_video_decoder->input_adapter = gst_adapter_new ();
  base_video_decoder->output_adapter = gst_adapter_new ();

  /* Reset clears current_frame, so a fresh one is created afterwards. */
  gst_segment_init (&base_video_decoder->state.segment, GST_FORMAT_TIME);
  gst_base_video_decoder_reset (base_video_decoder);

  base_video_decoder->current_frame =
      gst_base_video_decoder_new_frame (base_video_decoder);

  base_video_decoder->sink_clipping = TRUE;
}
+
+static gboolean
+gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
+{
+ GstBaseVideoDecoder *base_video_decoder;
+ GstBaseVideoDecoderClass *base_video_decoder_class;
+ GstStructure *structure;
+ const GValue *codec_data;
+
+ base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
+ base_video_decoder_class =
+ GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
+
+ GST_DEBUG ("setcaps %" GST_PTR_FORMAT, caps);
+
+ if (base_video_decoder->codec_data) {
+ gst_buffer_unref (base_video_decoder->codec_data);
+ base_video_decoder->codec_data = NULL;
+ }
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ codec_data = gst_structure_get_value (structure, "codec_data");
+ if (codec_data && G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) {
+ base_video_decoder->codec_data = gst_value_get_buffer (codec_data);
+ }
+
+ if (base_video_decoder_class->start) {
+ base_video_decoder_class->start (base_video_decoder);
+ }
+
+ g_object_unref (base_video_decoder);
+
+ return TRUE;
+}
+
+static void
+gst_base_video_decoder_finalize (GObject * object)
+{
+ GstBaseVideoDecoder *base_video_decoder;
+ GstBaseVideoDecoderClass *base_video_decoder_class;
+
+ g_return_if_fail (GST_IS_BASE_VIDEO_DECODER (object));
+ base_video_decoder = GST_BASE_VIDEO_DECODER (object);
+ base_video_decoder_class = GST_BASE_VIDEO_DECODER_GET_CLASS (object);
+
+ gst_base_video_decoder_reset (base_video_decoder);
+
+ GST_DEBUG_OBJECT (object, "finalize");
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
/* Sink pad event handler.  EOS hands an empty frame to the subclass
 * finish() vfunc so buffered data gets drained; NEWSEGMENT (TIME only)
 * updates the output segment; everything else is forwarded downstream. */
static gboolean
gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  gboolean ret = FALSE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_EOS:
    {
      GstVideoFrame *frame;

      /* Queue a synthetic frame carrying the next presentation frame
       * number; the subclass drains into it before EOS is pushed. */
      frame = g_malloc0 (sizeof (GstVideoFrame));
      frame->presentation_frame_number =
          base_video_decoder->presentation_frame_number;
      frame->presentation_duration = 0;
      base_video_decoder->presentation_frame_number++;

      base_video_decoder->frames =
          g_list_append (base_video_decoder->frames, frame);
      if (base_video_decoder_class->finish) {
        base_video_decoder_class->finish (base_video_decoder, frame);
      }

      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
          event);
    }
      break;
    case GST_EVENT_NEWSEGMENT:
    {
      gboolean update;
      double rate;
      double applied_rate;
      GstFormat format;
      gint64 start;
      gint64 stop;
      gint64 position;

      gst_event_parse_new_segment_full (event, &update, &rate,
          &applied_rate, &format, &start, &stop, &position);

      /* Only TIME segments can be tracked; anything else is dropped. */
      if (format != GST_FORMAT_TIME)
        goto newseg_wrong_format;

      GST_DEBUG ("new segment %lld %lld", start, position);

      gst_segment_set_newsegment_full (&base_video_decoder->state.segment,
          update, rate, applied_rate, format, start, stop, position);

      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
          event);
    }
      break;
    default:
      /* FIXME this changes the order of events */
      ret =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
          event);
      break;
  }

done:
  gst_object_unref (base_video_decoder);
  return ret;

newseg_wrong_format:
  {
    GST_DEBUG_OBJECT (base_video_decoder, "received non TIME newsegment");
    gst_event_unref (event);
    goto done;
  }
}
+
/* Source pad event handler.  SEEK events are converted to TIME format
 * before being pushed upstream; QOS events update the proportion and
 * earliest-time fields used for QoS decisions; everything else is
 * forwarded upstream unchanged. */
static gboolean
gst_base_video_decoder_src_event (GstPad * pad, GstEvent * event)
{
  GstBaseVideoDecoder *base_video_decoder;
  gboolean res = FALSE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEEK:
    {
      GstFormat format, tformat;
      gdouble rate;
      GstEvent *real_seek;
      GstSeekFlags flags;
      GstSeekType cur_type, stop_type;
      gint64 cur, stop;
      gint64 tcur, tstop;

      /* The original event is consumed here; a new TIME-format seek is
       * built from the converted values. */
      gst_event_parse_seek (event, &rate, &format, &flags, &cur_type,
          &cur, &stop_type, &stop);
      gst_event_unref (event);

      tformat = GST_FORMAT_TIME;
      res =
          gst_base_video_decoder_src_convert (pad, format, cur, &tformat,
          &tcur);
      if (!res)
        goto convert_error;
      res =
          gst_base_video_decoder_src_convert (pad, format, stop, &tformat,
          &tstop);
      if (!res)
        goto convert_error;

      real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME,
          flags, cur_type, tcur, stop_type, tstop);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SINK_PAD
          (base_video_decoder), real_seek);

      break;
    }
    case GST_EVENT_QOS:
    {
      gdouble proportion;
      GstClockTimeDiff diff;
      GstClockTime timestamp;

      gst_event_parse_qos (event, &proportion, &diff, &timestamp);

      /* Guarded by the object lock: read from the streaming thread. */
      GST_OBJECT_LOCK (base_video_decoder);
      base_video_decoder->proportion = proportion;
      base_video_decoder->earliest_time = timestamp + diff;
      GST_OBJECT_UNLOCK (base_video_decoder);

      GST_DEBUG_OBJECT (base_video_decoder,
          "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT ", %g",
          GST_TIME_ARGS (timestamp), diff, proportion);

      res =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SINK_PAD
          (base_video_decoder), event);
      break;
    }
    default:
      res =
          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SINK_PAD
          (base_video_decoder), event);
      break;
  }
done:
  gst_object_unref (base_video_decoder);
  return res;

convert_error:
  GST_DEBUG_OBJECT (base_video_decoder, "could not convert format");
  goto done;
}
+
+
#if 0
/* Disabled: format conversion for the sink side (BYTES/DEFAULT to TIME
 * etc.).  NOTE(review): as written it never returns 'res' or unrefs
 * 'enc' — must be fixed before re-enabling. */
static gboolean
gst_base_video_decoder_sink_convert (GstPad * pad,
    GstFormat src_format, gint64 src_value,
    GstFormat * dest_format, gint64 * dest_value)
{
  gboolean res = TRUE;
  GstBaseVideoDecoder *enc;

  if (src_format == *dest_format) {
    *dest_value = src_value;
    return TRUE;
  }

  enc = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));

  /* FIXME: check if we are in a decoding state */

  switch (src_format) {
    case GST_FORMAT_BYTES:
      switch (*dest_format) {
#if 0
        case GST_FORMAT_DEFAULT:
          *dest_value = gst_util_uint64_scale_int (src_value, 1,
              enc->bytes_per_picture);
          break;
#endif
        case GST_FORMAT_TIME:
          /* seems like a rather silly conversion, implement me if you like */
        default:
          res = FALSE;
      }
      break;
    case GST_FORMAT_DEFAULT:
      switch (*dest_format) {
        case GST_FORMAT_TIME:
          *dest_value = gst_util_uint64_scale (src_value,
              GST_SECOND * enc->fps_d, enc->fps_n);
          break;
#if 0
        case GST_FORMAT_BYTES:
          *dest_value = gst_util_uint64_scale_int (src_value,
              enc->bytes_per_picture, 1);
          break;
#endif
        default:
          res = FALSE;
      }
      break;
    default:
      res = FALSE;
      break;
  }
}
#endif
+
/* Source pad format conversion.  Identity conversions succeed; all real
 * conversions are currently compiled out (#if 0), so anything else
 * returns FALSE.  Callers (seek handling, CONVERT query) treat FALSE as
 * "cannot convert". */
static gboolean
gst_base_video_decoder_src_convert (GstPad * pad,
    GstFormat src_format, gint64 src_value,
    GstFormat * dest_format, gint64 * dest_value)
{
  gboolean res = TRUE;
  GstBaseVideoDecoder *enc;

  if (src_format == *dest_format) {
    *dest_value = src_value;
    return TRUE;
  }

  enc = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));

  /* FIXME: check if we are in a encoding state */

  GST_DEBUG ("src convert");
  switch (src_format) {
#if 0
    case GST_FORMAT_DEFAULT:
      switch (*dest_format) {
        case GST_FORMAT_TIME:
          *dest_value = gst_util_uint64_scale (granulepos_to_frame (src_value),
              enc->fps_d * GST_SECOND, enc->fps_n);
          break;
        default:
          res = FALSE;
      }
      break;
    case GST_FORMAT_TIME:
      switch (*dest_format) {
        case GST_FORMAT_DEFAULT:
        {
          *dest_value = gst_util_uint64_scale (src_value,
              enc->fps_n, enc->fps_d * GST_SECOND);
          break;
        }
        default:
          res = FALSE;
          break;
      }
      break;
#endif
    default:
      res = FALSE;
      break;
  }

  gst_object_unref (enc);

  return res;
}
+
+static const GstQueryType *
+gst_base_video_decoder_get_query_types (GstPad * pad)
+{
+ static const GstQueryType query_types[] = {
+ GST_QUERY_CONVERT,
+ 0
+ };
+
+ return query_types;
+}
+
+static gboolean
+gst_base_video_decoder_src_query (GstPad * pad, GstQuery * query)
+{
+ GstBaseVideoDecoder *enc;
+ gboolean res;
+
+ enc = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
+
+ switch GST_QUERY_TYPE
+ (query) {
+ case GST_QUERY_CONVERT:
+ {
+ GstFormat src_fmt, dest_fmt;
+ gint64 src_val, dest_val;
+
+ gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
+ res =
+ gst_base_video_decoder_src_convert (pad, src_fmt, src_val, &dest_fmt,
+ &dest_val);
+ if (!res)
+ goto error;
+ gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
+ break;
+ }
+ default:
+ res = gst_pad_query_default (pad, query);
+ }
+ gst_object_unref (enc);
+ return res;
+
+error:
+ GST_DEBUG_OBJECT (enc, "query failed");
+ gst_object_unref (enc);
+ return res;
+}
+
/* Sink pad query handler.  CONVERT queries are answered from the raw
 * video state (size/framerate based conversion); other queries use the
 * default handler. */
static gboolean
gst_base_video_decoder_sink_query (GstPad * pad, GstQuery * query)
{
  GstBaseVideoDecoder *base_video_decoder;
  gboolean res = FALSE;

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (base_video_decoder, "sink query fps=%d/%d",
      base_video_decoder->state.fps_n, base_video_decoder->state.fps_d);
  switch (GST_QUERY_TYPE (query)) {
    case GST_QUERY_CONVERT:
    {
      GstFormat src_fmt, dest_fmt;
      gint64 src_val, dest_val;

      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
      res = gst_base_video_rawvideo_convert (&base_video_decoder->state,
          src_fmt, src_val, &dest_fmt, &dest_val);
      if (!res)
        goto error;
      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
      break;
    }
    default:
      res = gst_pad_query_default (pad, query);
      break;
  }
done:
  gst_object_unref (base_video_decoder);

  return res;
error:
  GST_DEBUG_OBJECT (base_video_decoder, "query failed");
  goto done;
}
+
+
#if 0
/* Disabled helper: TRUE if @pad has negotiated caps.  Unused because
 * requiring negotiation before chain() broke filesrc/oggdemux pipelines
 * (see the comment in gst_base_video_decoder_chain()). */
static gboolean
gst_pad_is_negotiated (GstPad * pad)
{
  GstCaps *caps;

  g_return_val_if_fail (pad != NULL, FALSE);

  caps = gst_pad_get_negotiated_caps (pad);
  if (caps) {
    gst_caps_unref (caps);
    return TRUE;
  }

  return FALSE;
}
#endif
+
/* Reset all per-stream decoding state: counters, timestamps, caps, the
 * frame being assembled and all pending frames.  Finally gives the
 * subclass a chance to reset its own state via the reset vfunc.
 * Called from init, finalize, on DISCONT buffers and on state changes. */
static void
gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder)
{
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GList *g;

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG ("reset");

  base_video_decoder->started = FALSE;

  base_video_decoder->discont = TRUE;
  base_video_decoder->have_sync = FALSE;

  /* Timestamp bookkeeping back to "unknown". */
  base_video_decoder->timestamp_offset = GST_CLOCK_TIME_NONE;
  base_video_decoder->system_frame_number = 0;
  base_video_decoder->presentation_frame_number = 0;
  base_video_decoder->last_sink_timestamp = GST_CLOCK_TIME_NONE;
  base_video_decoder->last_sink_offset_end = GST_CLOCK_TIME_NONE;
  base_video_decoder->base_picture_number = 0;
  base_video_decoder->last_timestamp = GST_CLOCK_TIME_NONE;

  base_video_decoder->offset = 0;

  if (base_video_decoder->caps) {
    gst_caps_unref (base_video_decoder->caps);
    base_video_decoder->caps = NULL;
  }

  /* chain() lazily recreates current_frame when it is NULL. */
  if (base_video_decoder->current_frame) {
    gst_base_video_decoder_free_frame (base_video_decoder->current_frame);
    base_video_decoder->current_frame = NULL;
  }

  base_video_decoder->have_src_caps = FALSE;

  /* Drop every frame still pending reorder/output. */
  for (g = g_list_first (base_video_decoder->frames); g; g = g_list_next (g)) {
    GstVideoFrame *frame = g->data;
    gst_base_video_decoder_free_frame (frame);
  }
  g_list_free (base_video_decoder->frames);
  base_video_decoder->frames = NULL;

  if (base_video_decoder_class->reset) {
    base_video_decoder_class->reset (base_video_decoder);
  }
}
+
/* Return a new reference to the first buffer inside @adapter, used to
 * peek at its timestamp.  NOTE(review): dereferences the GstAdapter
 * internals (adapter->buflist) unchecked — assumes the adapter is
 * non-empty; callers must have pushed data first.  See the FIXME at the
 * call site about using gst_adapter_prev_timestamp() instead. */
static GstBuffer *
gst_adapter_get_buffer (GstAdapter * adapter)
{
  return gst_buffer_ref (GST_BUFFER (adapter->buflist->data));

}
+
/* Sink pad chain function.  Accumulates input in the input adapter,
 * scans for a sync point if none was found yet, then repeatedly invokes
 * the subclass parse_data vfunc until it asks for more data (NEED_DATA)
 * or reports an error. */
static GstFlowReturn
gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *klass;
  GstBuffer *buffer;
  GstFlowReturn ret;

  GST_DEBUG ("chain %lld", GST_BUFFER_TIMESTAMP (buf));

#if 0
  /* requiring the pad to be negotiated makes it impossible to use
   * oggdemux or filesrc ! decoder */
  if (!gst_pad_is_negotiated (pad)) {
    GST_DEBUG ("not negotiated");
    return GST_FLOW_NOT_NEGOTIATED;
  }
#endif

  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
  klass = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  GST_DEBUG_OBJECT (base_video_decoder, "chain");

  /* A DISCONT after we already started means the stream was broken:
   * throw away all decoding state and start over. */
  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
    GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer");
    if (base_video_decoder->started) {
      gst_base_video_decoder_reset (base_video_decoder);
    }
  }

  if (!base_video_decoder->started) {
    klass->start (base_video_decoder);
    base_video_decoder->started = TRUE;
  }

  /* Remember the most recent upstream timestamp / granule for later
   * timestamp derivation. */
  if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
    GST_DEBUG ("timestamp %lld offset %lld", GST_BUFFER_TIMESTAMP (buf),
        base_video_decoder->offset);
    base_video_decoder->last_sink_timestamp = GST_BUFFER_TIMESTAMP (buf);
  }
  if (GST_BUFFER_OFFSET_END (buf) != -1) {
    GST_DEBUG ("gp %lld", GST_BUFFER_OFFSET_END (buf));
    base_video_decoder->last_sink_offset_end = GST_BUFFER_OFFSET_END (buf);
  }
  base_video_decoder->offset += GST_BUFFER_SIZE (buf);

#if 0
  if (base_video_decoder->timestamp_offset == GST_CLOCK_TIME_NONE &&
      GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
    GST_DEBUG ("got new offset %lld", GST_BUFFER_TIMESTAMP (buf));
    base_video_decoder->timestamp_offset = GST_BUFFER_TIMESTAMP (buf);
  }
#endif

  if (base_video_decoder->current_frame == NULL) {
    base_video_decoder->current_frame =
        gst_base_video_decoder_new_frame (base_video_decoder);
  }

  /* The adapter takes ownership of buf from here on. */
  gst_adapter_push (base_video_decoder->input_adapter, buf);

  if (!base_video_decoder->have_sync) {
    int n, m;

    GST_DEBUG ("no sync, scanning");

    n = gst_adapter_available (base_video_decoder->input_adapter);
    m = klass->scan_for_sync (base_video_decoder, FALSE, 0, n);

    if (m >= n) {
      g_warning ("subclass scanned past end %d >= %d", m, n);
    }

    /* Discard the bytes before the (possible) sync point. */
    gst_adapter_flush (base_video_decoder->input_adapter, m);

    if (m < n) {
      GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n);

      /* this is only "maybe" sync */
      base_video_decoder->have_sync = TRUE;
    }

    if (!base_video_decoder->have_sync) {
      gst_object_unref (base_video_decoder);
      return GST_FLOW_OK;
    }
  }

  /* FIXME: use gst_adapter_prev_timestamp() here instead? */
  buffer = gst_adapter_get_buffer (base_video_decoder->input_adapter);

  base_video_decoder->buffer_timestamp = GST_BUFFER_TIMESTAMP (buffer);
  gst_buffer_unref (buffer);

  /* Let the subclass consume data until it needs more input. */
  do {
    ret = klass->parse_data (base_video_decoder, FALSE);
  } while (ret == GST_FLOW_OK);

  if (ret == GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA) {
    gst_object_unref (base_video_decoder);
    return GST_FLOW_OK;
  }

  gst_object_unref (base_video_decoder);
  return ret;
}
+
/* Element state-change handler: chain up first, then on PAUSED->READY
 * let the subclass release its decoding resources via the stop vfunc. */
static GstStateChangeReturn
gst_base_video_decoder_change_state (GstElement * element,
    GstStateChange transition)
{
  GstBaseVideoDecoder *base_video_decoder;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstStateChangeReturn ret;

  base_video_decoder = GST_BASE_VIDEO_DECODER (element);
  base_video_decoder_class = GST_BASE_VIDEO_DECODER_GET_CLASS (element);

  /* No upward-transition work needed (yet); kept for symmetry. */
  switch (transition) {
    default:
      break;
  }

  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      if (base_video_decoder_class->stop) {
        base_video_decoder_class->stop (base_video_decoder);
      }
      break;
    default:
      break;
  }

  return ret;
}
+
+static void
+gst_base_video_decoder_free_frame (GstVideoFrame * frame)
+{
+ g_return_if_fail (frame != NULL);
+
+ if (frame->sink_buffer) {
+ gst_buffer_unref (frame->sink_buffer);
+ }
+#if 0
+ if (frame->src_buffer) {
+ gst_buffer_unref (frame->src_buffer);
+ }
+#endif
+
+ g_free (frame);
+}
+
+static GstVideoFrame *
+gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder)
+{
+ GstVideoFrame *frame;
+
+ frame = g_malloc0 (sizeof (GstVideoFrame));
+
+ frame->system_frame_number = base_video_decoder->system_frame_number;
+ base_video_decoder->system_frame_number++;
+
+ frame->decode_frame_number = frame->system_frame_number -
+ base_video_decoder->reorder_depth;
+
+ frame->decode_timestamp = -1;
+ frame->presentation_timestamp = -1;
+ frame->presentation_duration = -1;
+ frame->n_fields = 2;
+
+ return frame;
+}
+
+GstFlowReturn
+gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
+ GstVideoFrame * frame)
+{
+ GstBaseVideoDecoderClass *base_video_decoder_class;
+ GstBuffer *src_buffer;
+
+ GST_DEBUG ("finish frame");
+
+ base_video_decoder_class =
+ GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
+
+ GST_DEBUG ("finish frame sync=%d pts=%lld", frame->is_sync_point,
+ frame->presentation_timestamp);
+
+ if (frame->is_sync_point) {
+ if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) {
+ if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) {
+ GST_DEBUG ("sync timestamp %lld diff %lld",
+ frame->presentation_timestamp,
+ frame->presentation_timestamp -
+ base_video_decoder->state.segment.start);
+ base_video_decoder->timestamp_offset = frame->presentation_timestamp;
+ base_video_decoder->field_index = 0;
+ } else {
+ /* This case is for one initial timestamp and no others, e.g.,
+ * filesrc ! decoder ! xvimagesink */
+ GST_WARNING ("sync timestamp didn't change, ignoring");
+ frame->presentation_timestamp = GST_CLOCK_TIME_NONE;
+ }
+ } else {
+ GST_WARNING ("sync point doesn't have timestamp");
+ if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) {
+ GST_ERROR ("No base timestamp. Assuming frames start at 0");
+ base_video_decoder->timestamp_offset = 0;
+ base_video_decoder->field_index = 0;
+ }
+ }
+ }
+ frame->field_index = base_video_decoder->field_index;
+ base_video_decoder->field_index += frame->n_fields;
+
+ if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) {
+ frame->presentation_timestamp =
+ gst_base_video_decoder_get_field_timestamp (base_video_decoder,
+ frame->field_index);
+ frame->presentation_duration = GST_CLOCK_TIME_NONE;
+ frame->decode_timestamp =
+ gst_base_video_decoder_get_timestamp (base_video_decoder,
+ frame->decode_frame_number);
+ }
+ if (frame->presentation_duration == GST_CLOCK_TIME_NONE) {
+ frame->presentation_duration =
+ gst_base_video_decoder_get_field_timestamp (base_video_decoder,
+ frame->field_index + frame->n_fields) - frame->presentation_timestamp;
+ }
+
+ if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_timestamp)) {
+ if (frame->presentation_timestamp < base_video_decoder->last_timestamp) {
+ GST_WARNING ("decreasing timestamp (%lld < %lld)",
+ frame->presentation_timestamp, base_video_decoder->last_timestamp);
+ }
+ }
+ base_video_decoder->last_timestamp = frame->presentation_timestamp;
+
+ GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+ if (base_video_decoder->state.interlaced) {
+#ifndef GST_VIDEO_BUFFER_TFF
+#define GST_VIDEO_BUFFER_TFF (GST_MINI_OBJECT_FLAG_LAST << 5)
+#endif
+#ifndef GST_VIDEO_BUFFER_RFF
+#define GST_VIDEO_BUFFER_RFF (GST_MINI_OBJECT_FLAG_LAST << 6)
+#endif
+#ifndef GST_VIDEO_BUFFER_ONEFIELD
+#define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7)
+#endif
+ int tff = base_video_decoder->state.top_field_first;
+
+ if (frame->field_index & 1) {
+ tff ^= 1;
+ }
+ if (tff) {
+ GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_TFF);
+ } else {
+ GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_TFF);
+ }
+ GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_RFF);
+ GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
+ if (frame->n_fields == 3) {
+ GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_RFF);
+ } else if (frame->n_fields == 1) {
+ GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
+ }
+ }
+
+ GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp;
+ GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
+ GST_BUFFER_OFFSET (frame->src_buffer) = -1;
+ GST_BUFFER_OFFSET_END (frame->src_buffer) = -1;
+
+ GST_DEBUG ("pushing frame %lld", frame->presentation_timestamp);
+
+ base_video_decoder->frames =
+ g_list_remove (base_video_decoder->frames, frame);
+
+ gst_base_video_decoder_set_src_caps (base_video_decoder);
+
+ src_buffer = frame->src_buffer;
+ frame->src_buffer = NULL;
+
+ gst_base_video_decoder_free_frame (frame);
+
+ if (base_video_decoder->sink_clipping) {
+ gint64 start = GST_BUFFER_TIMESTAMP (src_buffer);
+ gint64 stop = GST_BUFFER_TIMESTAMP (src_buffer) +
+ GST_BUFFER_DURATION (src_buffer);
+
+ if (gst_segment_clip (&base_video_decoder->state.segment, GST_FORMAT_TIME,
+ start, stop, &start, &stop)) {
+ GST_BUFFER_TIMESTAMP (src_buffer) = start;
+ GST_BUFFER_DURATION (src_buffer) = stop - start;
+ } else {
+ GST_DEBUG ("dropping buffer outside segment");
+ gst_buffer_unref (src_buffer);
+ return GST_FLOW_OK;
+ }
+ }
+
+ return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
+ src_buffer);
+}
+
/* Accessor: the picture height from the decoder's video state. */
int
gst_base_video_decoder_get_height (GstBaseVideoDecoder * base_video_decoder)
{
  return base_video_decoder->state.height;
}
+
/* Accessor: the picture width from the decoder's video state. */
int
gst_base_video_decoder_get_width (GstBaseVideoDecoder * base_video_decoder)
{
  return base_video_decoder->state.width;
}
+
/* Push the final @buffer downstream at end of stream.  Takes ownership
 * of @buffer.  Pending frames are only warned about, not flushed. */
GstFlowReturn
gst_base_video_decoder_end_of_stream (GstBaseVideoDecoder * base_video_decoder,
    GstBuffer * buffer)
{

  if (base_video_decoder->frames) {
    GST_DEBUG ("EOS with frames left over");
  }

  return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
      buffer);
}
+
/* Move @n_bytes from the input adapter into the output adapter, i.e.
 * append them to the frame currently being assembled.  A zero byte
 * count is a no-op. */
void
gst_base_video_decoder_add_to_frame (GstBaseVideoDecoder * base_video_decoder,
    int n_bytes)
{
  GstBuffer *buf;

  GST_DEBUG ("add to frame");

#if 0
  if (gst_adapter_available (base_video_decoder->output_adapter) == 0) {
    GstBuffer *buffer;

    buffer =
        gst_adapter_get_orig_buffer_at_offset
        (base_video_decoder->input_adapter, 0);
    if (buffer) {
      base_video_decoder->current_frame->presentation_timestamp =
          GST_BUFFER_TIMESTAMP (buffer);
      gst_buffer_unref (buffer);
    }
  }
#endif

  if (n_bytes == 0)
    return;

  buf = gst_adapter_take_buffer (base_video_decoder->input_adapter, n_bytes);

  gst_adapter_push (base_video_decoder->output_adapter, buf);
}
+
/* Map @picture_number to a timestamp relative to timestamp_offset and
 * base_picture_number, using the stream frame rate.  Pictures before the
 * base picture yield timestamps before the offset (the magnitude is
 * scaled separately to keep gst_util_uint64_scale() unsigned-safe).
 * Returns -1 when the frame rate is unknown (fps_d == 0). */
static guint64
gst_base_video_decoder_get_timestamp (GstBaseVideoDecoder * base_video_decoder,
    int picture_number)
{
  if (base_video_decoder->state.fps_d == 0) {
    return -1;
  }
  if (picture_number < base_video_decoder->base_picture_number) {
    return base_video_decoder->timestamp_offset -
        (gint64) gst_util_uint64_scale (base_video_decoder->base_picture_number
        - picture_number, base_video_decoder->state.fps_d * GST_SECOND,
        base_video_decoder->state.fps_n);
  } else {
    return base_video_decoder->timestamp_offset +
        gst_util_uint64_scale (picture_number -
        base_video_decoder->base_picture_number,
        base_video_decoder->state.fps_d * GST_SECOND,
        base_video_decoder->state.fps_n);
  }
}
+
/* Map a field offset (two fields per frame, hence the fps_n * 2 in the
 * denominator) to a timestamp relative to timestamp_offset.  Returns
 * GST_CLOCK_TIME_NONE when the frame rate is unknown or the offset is
 * negative. */
static guint64
gst_base_video_decoder_get_field_timestamp (GstBaseVideoDecoder *
    base_video_decoder, int field_offset)
{
  if (base_video_decoder->state.fps_d == 0) {
    return GST_CLOCK_TIME_NONE;
  }
  if (field_offset < 0) {
    GST_WARNING ("field offset < 0");
    return GST_CLOCK_TIME_NONE;
  }
  return base_video_decoder->timestamp_offset +
      gst_util_uint64_scale (field_offset,
      base_video_decoder->state.fps_d * GST_SECOND,
      base_video_decoder->state.fps_n * 2);
}
+
+
/* Called when a complete encoded frame has been accumulated in the output
 * adapter.  Wraps the adapter contents in a buffer, attaches buffer flags,
 * hands the frame to the subclass' handle_frame vfunc and starts a fresh
 * current_frame. */
GstFlowReturn
gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder)
{
  GstVideoFrame *frame = base_video_decoder->current_frame;
  GstBuffer *buffer;
  GstBaseVideoDecoderClass *base_video_decoder_class;
  GstFlowReturn ret = GST_FLOW_OK;
  int n_available;

  GST_DEBUG ("have_frame");

  base_video_decoder_class =
      GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);

  /* Take everything accumulated so far; an empty buffer is still created
   * so handle_frame always receives a non-NULL sink_buffer. */
  n_available = gst_adapter_available (base_video_decoder->output_adapter);
  if (n_available) {
    buffer = gst_adapter_take_buffer (base_video_decoder->output_adapter,
        n_available);
  } else {
    buffer = gst_buffer_new_and_alloc (0);
  }

  frame->distance_from_sync = base_video_decoder->distance_from_sync;
  base_video_decoder->distance_from_sync++;

#if 0
  if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) {
    frame->presentation_timestamp =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->presentation_frame_number);
    frame->presentation_duration =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->presentation_frame_number + 1) - frame->presentation_timestamp;
    frame->decode_timestamp =
        gst_base_video_decoder_get_timestamp (base_video_decoder,
        frame->decode_frame_number);
  }
#endif

#if 0
  GST_BUFFER_TIMESTAMP (buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (buffer) = frame->presentation_duration;
  if (frame->decode_frame_number < 0) {
    GST_BUFFER_OFFSET (buffer) = 0;
  } else {
    GST_BUFFER_OFFSET (buffer) = frame->decode_timestamp;
  }
  GST_BUFFER_OFFSET_END (buffer) = GST_CLOCK_TIME_NONE;
#endif

  GST_DEBUG ("pts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->presentation_timestamp));
  GST_DEBUG ("dts %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->decode_timestamp));
  GST_DEBUG ("dist %d", frame->distance_from_sync);

  /* keyframes clear the delta-unit flag so downstream can sync on them */
  if (frame->is_sync_point) {
    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }
  /* propagate a pending discontinuity onto exactly one buffer */
  if (base_video_decoder->discont) {
    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
    base_video_decoder->discont = FALSE;
  }

  frame->sink_buffer = buffer;

  /* frame stays in the pending list until the subclass finishes it */
  base_video_decoder->frames = g_list_append (base_video_decoder->frames,
      frame);

  /* do something with frame */
  ret = base_video_decoder_class->handle_frame (base_video_decoder, frame);
  if (!GST_FLOW_IS_SUCCESS (ret)) {
    GST_DEBUG ("flow error!");
  }

  /* create new frame */
  base_video_decoder->current_frame =
      gst_base_video_decoder_new_frame (base_video_decoder);

  return ret;
}
+
+GstVideoState *
+gst_base_video_decoder_get_state (GstBaseVideoDecoder * base_video_decoder)
+{
+ return &base_video_decoder->state;
+
+}
+
+void
+gst_base_video_decoder_set_state (GstBaseVideoDecoder * base_video_decoder,
+ GstVideoState * state)
+{
+ memcpy (&base_video_decoder->state, state, sizeof (*state));
+
+}
+
+void
+gst_base_video_decoder_lost_sync (GstBaseVideoDecoder * base_video_decoder)
+{
+ g_return_if_fail (GST_IS_BASE_VIDEO_DECODER (base_video_decoder));
+
+ GST_DEBUG ("lost_sync");
+
+ if (gst_adapter_available (base_video_decoder->input_adapter) >= 1) {
+ gst_adapter_flush (base_video_decoder->input_adapter, 1);
+ }
+
+ base_video_decoder->have_sync = FALSE;
+}
+
+void
+gst_base_video_decoder_set_sync_point (GstBaseVideoDecoder * base_video_decoder)
+{
+ GST_DEBUG ("set_sync_point");
+
+ base_video_decoder->current_frame->is_sync_point = TRUE;
+ base_video_decoder->distance_from_sync = 0;
+
+ base_video_decoder->current_frame->presentation_timestamp =
+ base_video_decoder->last_sink_timestamp;
+
+
+}
+
+GstVideoFrame *
+gst_base_video_decoder_get_frame (GstBaseVideoDecoder * base_video_decoder,
+ int frame_number)
+{
+ GList *g;
+
+ for (g = g_list_first (base_video_decoder->frames); g; g = g_list_next (g)) {
+ GstVideoFrame *frame = g->data;
+
+ if (frame->system_frame_number == frame_number) {
+ return frame;
+ }
+ }
+
+ return NULL;
+}
+
+void
+gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder)
+{
+ GstCaps *caps;
+ GstVideoState *state = &base_video_decoder->state;
+
+ if (base_video_decoder->have_src_caps)
+ return;
+
+ caps = gst_video_format_new_caps (state->format,
+ state->width, state->height,
+ state->fps_n, state->fps_d, state->par_n, state->par_d);
+ gst_caps_set_simple (caps, "interlaced",
+ G_TYPE_BOOLEAN, state->interlaced, NULL);
+
+ GST_DEBUG ("setting caps %" GST_PTR_FORMAT, caps);
+
+ gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder), caps);
+
+ base_video_decoder->have_src_caps = TRUE;
+}
diff --git a/gst-libs/gst/video/gstbasevideodecoder.h b/gst-libs/gst/video/gstbasevideodecoder.h
new file mode 100644
index 00000000..1daf728c
--- /dev/null
+++ b/gst-libs/gst/video/gstbasevideodecoder.h
@@ -0,0 +1,166 @@
+/* GStreamer
+ * Copyright (C) 2008 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef _GST_BASE_VIDEO_DECODER_H_
+#define _GST_BASE_VIDEO_DECODER_H_
+
+#ifndef GST_USE_UNSTABLE_API
+#warning "GstBaseVideoDecoder is unstable API and may change in future."
+#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
+#endif
+
+#include <gst/video/gstbasevideocodec.h>
+
+G_BEGIN_DECLS
+
#define GST_TYPE_BASE_VIDEO_DECODER \
  (gst_base_video_decoder_get_type())
#define GST_BASE_VIDEO_DECODER(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoder))
#define GST_BASE_VIDEO_DECODER_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoderClass))
#define GST_BASE_VIDEO_DECODER_GET_CLASS(obj) \
  (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoderClass))
#define GST_IS_BASE_VIDEO_DECODER(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BASE_VIDEO_DECODER))
/* Fixed: the parameter was named "obj" while the expansion used "klass",
 * so any use of this macro failed to compile. */
#define GST_IS_BASE_VIDEO_DECODER_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BASE_VIDEO_DECODER))
+
+/**
+ * GST_BASE_VIDEO_DECODER_SINK_NAME:
+ *
+ * The name of the templates for the sink pad.
+ */
+#define GST_BASE_VIDEO_DECODER_SINK_NAME "sink"
+/**
+ * GST_BASE_VIDEO_DECODER_SRC_NAME:
+ *
+ * The name of the templates for the source pad.
+ */
+#define GST_BASE_VIDEO_DECODER_SRC_NAME "src"
+
+/**
+ * GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA:
+ *
+ * Custom GstFlowReturn value indicating that more data is needed.
+ */
+#define GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS
+
+
+typedef struct _GstBaseVideoDecoder GstBaseVideoDecoder;
+typedef struct _GstBaseVideoDecoderClass GstBaseVideoDecoderClass;
+
struct _GstBaseVideoDecoder
{
  GstBaseVideoCodec base_video_codec;

  /*< private >*/
  GstAdapter *input_adapter;    /* raw bytes received on the sink pad */
  GstAdapter *output_adapter;   /* bytes collected for the current frame */

  GList *frames;                /* pending GstVideoFrame* awaiting output */

  gboolean have_sync;           /* a sync point was found in the stream */
  gboolean discont;             /* flag next output buffer as DISCONT */
  gboolean started;

  GstVideoState state;          /* negotiated format/size/framerate */

  gboolean sink_clipping;       /* clip incoming buffers to the segment */

  guint64 presentation_frame_number;
  guint64 system_frame_number;  /* monotonically increasing counter */

  GstCaps *caps;
  gboolean have_src_caps;       /* src caps already configured */

  GstVideoFrame *current_frame; /* frame currently being accumulated */

  int distance_from_sync;       /* frames since the last sync point */
  int reorder_depth;

  GstClockTime buffer_timestamp;

  GstClockTime timestamp_offset; /* timestamp anchor for picture numbering */

  /* QoS state — presumably fed from QOS events; confirm in full file */
  gdouble proportion;
  GstClockTime earliest_time;

  GstBuffer *codec_data;

  guint64 offset;
  GstClockTime last_timestamp;

  GstClockTime last_sink_timestamp;
  GstClockTime last_sink_offset_end;
  guint64 base_picture_number;  /* picture number at timestamp_offset */

  int field_index;
};
+
struct _GstBaseVideoDecoderClass
{
  GstBaseVideoCodecClass base_video_codec_class;

  /* notify the subclass of the negotiated input format */
  gboolean (*set_format) (GstBaseVideoDecoder *coder, GstVideoFormat,
      int width, int height, int fps_n, int fps_d,
      int par_n, int par_d);
  gboolean (*start) (GstBaseVideoDecoder *coder);
  gboolean (*stop) (GstBaseVideoDecoder *coder);
  gboolean (*reset) (GstBaseVideoDecoder *coder);
  /* locate a sync point in the input adapter; semantics of the return
   * value are defined by the caller in the .c file — confirm there */
  int (*scan_for_sync) (GstBaseVideoDecoder *decoder, gboolean at_eos,
      int offset, int n);
  GstFlowReturn (*parse_data) (GstBaseVideoDecoder *decoder, gboolean at_eos);
  gboolean (*finish) (GstBaseVideoDecoder *coder, GstVideoFrame *frame);
  /* decode one complete frame; called from have_frame() */
  GstFlowReturn (*handle_frame) (GstBaseVideoDecoder *coder, GstVideoFrame *frame);
  GstFlowReturn (*shape_output) (GstBaseVideoDecoder *coder, GstVideoFrame *frame);
  GstCaps *(*get_caps) (GstBaseVideoDecoder *coder);

};
+
GType gst_base_video_decoder_get_type (void);

/* accessors for the negotiated video state */
int gst_base_video_decoder_get_width (GstBaseVideoDecoder *coder);
int gst_base_video_decoder_get_height (GstBaseVideoDecoder *coder);

guint64 gst_base_video_decoder_get_timestamp_offset (GstBaseVideoDecoder *coder);

/* frame bookkeeping used by subclasses while parsing/decoding */
GstVideoFrame *gst_base_video_decoder_get_frame (GstBaseVideoDecoder *coder,
    int frame_number);
void gst_base_video_decoder_add_to_frame (GstBaseVideoDecoder *base_video_decoder,
    int n_bytes);
GstFlowReturn gst_base_video_decoder_finish_frame (GstBaseVideoDecoder *base_video_decoder,
    GstVideoFrame *frame);
GstFlowReturn gst_base_video_decoder_end_of_stream (GstBaseVideoDecoder *base_video_decoder,
    GstBuffer *buffer);
GstFlowReturn
gst_base_video_decoder_have_frame (GstBaseVideoDecoder *base_video_decoder);
GstVideoState * gst_base_video_decoder_get_state (GstBaseVideoDecoder *base_video_decoder);
void gst_base_video_decoder_set_state (GstBaseVideoDecoder *base_video_decoder,
    GstVideoState *state);
/* sync handling */
void gst_base_video_decoder_lost_sync (GstBaseVideoDecoder *base_video_decoder);
void gst_base_video_decoder_set_sync_point (GstBaseVideoDecoder *base_video_decoder);

void gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder *base_video_decoder);
+
+
+G_END_DECLS
+
+#endif
+
diff --git a/gst-libs/gst/video/gstbasevideoencoder.c b/gst-libs/gst/video/gstbasevideoencoder.c
new file mode 100644
index 00000000..b45846c6
--- /dev/null
+++ b/gst-libs/gst/video/gstbasevideoencoder.c
@@ -0,0 +1,502 @@
+/* GStreamer
+ * Copyright (C) 2008 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstbasevideoencoder.h"
+#include "gstbasevideoutils.h"
+
+GST_DEBUG_CATEGORY_EXTERN (basevideo_debug);
+#define GST_CAT_DEFAULT basevideo_debug
+
+static void gst_base_video_encoder_finalize (GObject * object);
+
+static gboolean gst_base_video_encoder_sink_setcaps (GstPad * pad,
+ GstCaps * caps);
+static gboolean gst_base_video_encoder_sink_event (GstPad * pad,
+ GstEvent * event);
+static GstFlowReturn gst_base_video_encoder_chain (GstPad * pad,
+ GstBuffer * buf);
+static GstStateChangeReturn gst_base_video_encoder_change_state (GstElement *
+ element, GstStateChange transition);
+static const GstQueryType *gst_base_video_encoder_get_query_types (GstPad *
+ pad);
+static gboolean gst_base_video_encoder_src_query (GstPad * pad,
+ GstQuery * query);
+
+static void
+_do_init (GType object_type)
+{
+ const GInterfaceInfo preset_interface_info = {
+ NULL, /* interface_init */
+ NULL, /* interface_finalize */
+ NULL /* interface_data */
+ };
+
+ g_type_add_interface_static (object_type, GST_TYPE_PRESET,
+ &preset_interface_info);
+}
+
/* Register GstBaseVideoEncoder via the standard boilerplate; _do_init runs
 * during registration to attach the GstPreset interface. */
GST_BOILERPLATE_FULL (GstBaseVideoEncoder, gst_base_video_encoder,
    GstBaseVideoCodec, GST_TYPE_BASE_VIDEO_CODEC, _do_init);

static void
gst_base_video_encoder_base_init (gpointer g_class)
{
  /* nothing to do; subclasses install pad templates and details */
}
+
+static void
+gst_base_video_encoder_class_init (GstBaseVideoEncoderClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = G_OBJECT_CLASS (klass);
+ gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ gobject_class->finalize = gst_base_video_encoder_finalize;
+
+ gstelement_class->change_state = gst_base_video_encoder_change_state;
+
+ parent_class = g_type_class_peek_parent (klass);
+}
+
+static void
+gst_base_video_encoder_init (GstBaseVideoEncoder * base_video_encoder,
+ GstBaseVideoEncoderClass * klass)
+{
+ GstPad *pad;
+
+ GST_DEBUG ("gst_base_video_encoder_init");
+
+ pad = GST_BASE_VIDEO_CODEC_SINK_PAD (base_video_encoder);
+
+ gst_pad_set_chain_function (pad, gst_base_video_encoder_chain);
+ gst_pad_set_event_function (pad, gst_base_video_encoder_sink_event);
+ gst_pad_set_setcaps_function (pad, gst_base_video_encoder_sink_setcaps);
+
+ pad = GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder);
+
+ gst_pad_set_query_type_function (pad, gst_base_video_encoder_get_query_types);
+ gst_pad_set_query_function (pad, gst_base_video_encoder_src_query);
+}
+
+static gboolean
+gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
+{
+ GstBaseVideoEncoder *base_video_encoder;
+ GstBaseVideoEncoderClass *base_video_encoder_class;
+
+ base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
+ base_video_encoder_class =
+ GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);
+
+ GST_DEBUG ("setcaps");
+
+ gst_base_video_state_from_caps (&base_video_encoder->state, caps);
+
+ base_video_encoder_class->set_format (base_video_encoder,
+ &base_video_encoder->state);
+
+ base_video_encoder_class->start (base_video_encoder);
+
+ g_object_unref (base_video_encoder);
+
+ return TRUE;
+}
+
+static void
+gst_base_video_encoder_finalize (GObject * object)
+{
+ GstBaseVideoEncoder *base_video_encoder;
+ GstBaseVideoEncoderClass *base_video_encoder_class;
+
+ g_return_if_fail (GST_IS_BASE_VIDEO_ENCODER (object));
+ base_video_encoder = GST_BASE_VIDEO_ENCODER (object);
+ base_video_encoder_class = GST_BASE_VIDEO_ENCODER_GET_CLASS (object);
+
+ GST_DEBUG ("finalize");
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static gboolean
+gst_base_video_encoder_sink_event (GstPad * pad, GstEvent * event)
+{
+ GstBaseVideoEncoder *base_video_encoder;
+ GstBaseVideoEncoderClass *base_video_encoder_class;
+ gboolean ret = FALSE;
+
+ base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
+ base_video_encoder_class =
+ GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_EOS:
+ {
+ GstVideoFrame *frame;
+
+ frame = g_malloc0 (sizeof (GstVideoFrame));
+ frame->presentation_frame_number =
+ base_video_encoder->presentation_frame_number;
+ frame->presentation_duration = 0;
+ frame->is_eos = TRUE;
+ base_video_encoder->presentation_frame_number++;
+
+ base_video_encoder->frames =
+ g_list_append (base_video_encoder->frames, frame);
+ base_video_encoder_class->finish (base_video_encoder, frame);
+
+ ret =
+ gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
+ event);
+ }
+ break;
+ case GST_EVENT_NEWSEGMENT:
+ {
+ gboolean update;
+ double rate;
+ double applied_rate;
+ GstFormat format;
+ gint64 start;
+ gint64 stop;
+ gint64 position;
+
+ gst_event_parse_new_segment_full (event, &update, &rate,
+ &applied_rate, &format, &start, &stop, &position);
+
+ if (format != GST_FORMAT_TIME)
+ goto newseg_wrong_format;
+
+ GST_DEBUG ("new segment %lld %lld", start, position);
+
+ gst_segment_set_newsegment_full (&base_video_encoder->state.segment,
+ update, rate, applied_rate, format, start, stop, position);
+
+ ret =
+ gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
+ event);
+ }
+ break;
+ default:
+ /* FIXME this changes the order of events */
+ ret =
+ gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
+ event);
+ break;
+ }
+
+done:
+ gst_object_unref (base_video_encoder);
+ return ret;
+
+newseg_wrong_format:
+ {
+ GST_DEBUG_OBJECT (base_video_encoder, "received non TIME newsegment");
+ gst_event_unref (event);
+ goto done;
+ }
+}
+
+static const GstQueryType *
+gst_base_video_encoder_get_query_types (GstPad * pad)
+{
+ static const GstQueryType query_types[] = {
+ GST_QUERY_CONVERT,
+ GST_QUERY_LATENCY,
+ 0
+ };
+
+ return query_types;
+}
+
+static gboolean
+gst_base_video_encoder_src_query (GstPad * pad, GstQuery * query)
+{
+ GstBaseVideoEncoder *enc;
+ gboolean res;
+ GstPad *peerpad;
+
+ enc = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
+ peerpad = gst_pad_get_peer (GST_BASE_VIDEO_CODEC_SINK_PAD (enc));
+
+ switch GST_QUERY_TYPE
+ (query) {
+ case GST_QUERY_CONVERT:
+ {
+ GstFormat src_fmt, dest_fmt;
+ gint64 src_val, dest_val;
+
+ gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
+ res =
+ gst_base_video_encoded_video_convert (&enc->state, src_fmt, src_val,
+ &dest_fmt, &dest_val);
+ if (!res)
+ goto error;
+ gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
+ break;
+ }
+ case GST_QUERY_LATENCY:
+ {
+ gboolean live;
+ GstClockTime min_latency, max_latency;
+
+ res = gst_pad_query (peerpad, query);
+ if (res) {
+ gst_query_parse_latency (query, &live, &min_latency, &max_latency);
+
+ min_latency += enc->min_latency;
+ if (max_latency != GST_CLOCK_TIME_NONE) {
+ max_latency += enc->max_latency;
+ }
+
+ gst_query_set_latency (query, live, min_latency, max_latency);
+ }
+ }
+ break;
+ default:
+ res = gst_pad_query_default (pad, query);
+ }
+ gst_object_unref (peerpad);
+ gst_object_unref (enc);
+ return res;
+
+error:
+ GST_DEBUG_OBJECT (enc, "query failed");
+ gst_object_unref (peerpad);
+ gst_object_unref (enc);
+ return res;
+}
+
+static gboolean
+gst_pad_is_negotiated (GstPad * pad)
+{
+ GstCaps *caps;
+
+ g_return_val_if_fail (pad != NULL, FALSE);
+
+ caps = gst_pad_get_negotiated_caps (pad);
+ if (caps) {
+ gst_caps_unref (caps);
+ return TRUE;
+ }
+
+ return FALSE;
+}
+
+static GstFlowReturn
+gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf)
+{
+ GstBaseVideoEncoder *base_video_encoder;
+ GstBaseVideoEncoderClass *klass;
+ GstVideoFrame *frame;
+
+ if (!gst_pad_is_negotiated (pad)) {
+ return GST_FLOW_NOT_NEGOTIATED;
+ }
+
+ base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
+ klass = GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);
+
+ if (base_video_encoder->sink_clipping) {
+ gint64 start = GST_BUFFER_TIMESTAMP (buf);
+ gint64 stop = start + GST_BUFFER_DURATION (buf);
+ gint64 clip_start;
+ gint64 clip_stop;
+
+ if (!gst_segment_clip (&base_video_encoder->state.segment,
+ GST_FORMAT_TIME, start, stop, &clip_start, &clip_stop)) {
+ GST_DEBUG ("clipping to segment dropped frame");
+ goto done;
+ }
+ }
+
+ frame = g_malloc0 (sizeof (GstVideoFrame));
+ frame->sink_buffer = buf;
+ frame->presentation_timestamp = GST_BUFFER_TIMESTAMP (buf);
+ frame->presentation_duration = GST_BUFFER_DURATION (buf);
+ frame->presentation_frame_number =
+ base_video_encoder->presentation_frame_number;
+ base_video_encoder->presentation_frame_number++;
+
+ base_video_encoder->frames =
+ g_list_append (base_video_encoder->frames, frame);
+
+ klass->handle_frame (base_video_encoder, frame);
+
+done:
+ g_object_unref (base_video_encoder);
+
+ return GST_FLOW_OK;
+}
+
+static GstStateChangeReturn
+gst_base_video_encoder_change_state (GstElement * element,
+ GstStateChange transition)
+{
+ GstBaseVideoEncoder *base_video_encoder;
+ GstBaseVideoEncoderClass *base_video_encoder_class;
+ GstStateChangeReturn ret;
+
+ base_video_encoder = GST_BASE_VIDEO_ENCODER (element);
+ base_video_encoder_class = GST_BASE_VIDEO_ENCODER_GET_CLASS (element);
+
+ switch (transition) {
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ if (base_video_encoder_class->stop) {
+ base_video_encoder_class->stop (base_video_encoder);
+ }
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+}
+
/* Called by subclasses once a frame has been encoded into
 * frame->src_buffer: assigns decode numbering and timestamps, flags
 * keyframes, lazily sets the src caps, pushes the buffer (or hands it to
 * shape_output) and frees the frame. */
GstFlowReturn
gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
    GstVideoFrame * frame)
{
  GstFlowReturn ret;
  GstBaseVideoEncoderClass *base_video_encoder_class;

  base_video_encoder_class =
      GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);

  frame->system_frame_number = base_video_encoder->system_frame_number;
  base_video_encoder->system_frame_number++;

  /* keyframes reset the sync distance and clear the delta-unit flag */
  if (frame->is_sync_point) {
    base_video_encoder->distance_from_sync = 0;
    GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  } else {
    GST_BUFFER_FLAG_SET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  }

  frame->distance_from_sync = base_video_encoder->distance_from_sync;
  base_video_encoder->distance_from_sync++;

  frame->decode_frame_number = frame->system_frame_number - 1;
  if (frame->decode_frame_number < 0) {
    frame->decode_timestamp = 0;
  } else {
    frame->decode_timestamp = gst_util_uint64_scale (frame->decode_frame_number,
        GST_SECOND * base_video_encoder->state.fps_d,
        base_video_encoder->state.fps_n);
  }

  GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp;
  GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration;
  GST_BUFFER_OFFSET (frame->src_buffer) = frame->decode_timestamp;

  base_video_encoder->frames =
      g_list_remove (base_video_encoder->frames, frame);

  /* lazily configure src caps on the first finished frame */
  if (!base_video_encoder->set_output_caps) {
    GstCaps *caps;

    if (base_video_encoder_class->get_caps) {
      caps = base_video_encoder_class->get_caps (base_video_encoder);
    } else {
      caps = gst_caps_new_simple ("video/unknown", NULL);
    }
    /* NOTE(review): gst_caps_ref() here plus the reference presumably
     * returned by get_caps() looks like one reference too many — possible
     * caps leak; verify against get_caps' ownership convention. */
    base_video_encoder->caps = gst_caps_ref (caps);
    gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), caps);
    base_video_encoder->set_output_caps = TRUE;
  }

  if (base_video_encoder_class->shape_output) {
    ret = base_video_encoder_class->shape_output (base_video_encoder, frame);
  } else {
    ret =
        gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
        frame->src_buffer);
  }

  /* NOTE(review): frame->sink_buffer is not unreffed before the frame is
   * freed — possible buffer leak; confirm who owns sink_buffer here. */
  g_free (frame);

  return ret;
}
+
+int
+gst_base_video_encoder_get_height (GstBaseVideoEncoder * base_video_encoder)
+{
+ return base_video_encoder->state.height;
+}
+
+int
+gst_base_video_encoder_get_width (GstBaseVideoEncoder * base_video_encoder)
+{
+ return base_video_encoder->state.width;
+}
+
+const GstVideoState *
+gst_base_video_encoder_get_state (GstBaseVideoEncoder * base_video_encoder)
+{
+ return &base_video_encoder->state;
+}
+
+GstFlowReturn
+gst_base_video_encoder_end_of_stream (GstBaseVideoEncoder * base_video_encoder,
+ GstBuffer * buffer)
+{
+
+ if (base_video_encoder->frames) {
+ GST_WARNING ("EOS with frames left over");
+ }
+
+ return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder),
+ buffer);
+}
+
+void
+gst_base_video_encoder_set_latency (GstBaseVideoEncoder * base_video_encoder,
+ GstClockTime min_latency, GstClockTime max_latency)
+{
+ g_return_if_fail (min_latency >= 0);
+ g_return_if_fail (max_latency >= min_latency);
+
+ base_video_encoder->min_latency = min_latency;
+ base_video_encoder->max_latency = max_latency;
+}
+
+void
+gst_base_video_encoder_set_latency_fields (GstBaseVideoEncoder *
+ base_video_encoder, int n_fields)
+{
+ gint64 latency;
+
+ latency = gst_util_uint64_scale (n_fields,
+ base_video_encoder->state.fps_d * GST_SECOND,
+ 2 * base_video_encoder->state.fps_n);
+
+ gst_base_video_encoder_set_latency (base_video_encoder, latency, latency);
+
+}
diff --git a/gst-libs/gst/video/gstbasevideoencoder.h b/gst-libs/gst/video/gstbasevideoencoder.h
new file mode 100644
index 00000000..7b9f23b4
--- /dev/null
+++ b/gst-libs/gst/video/gstbasevideoencoder.h
@@ -0,0 +1,123 @@
+/* GStreamer
+ * Copyright (C) 2008 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef _GST_BASE_VIDEO_ENCODER_H_
+#define _GST_BASE_VIDEO_ENCODER_H_
+
+#ifndef GST_USE_UNSTABLE_API
+#warning "GstBaseVideoEncoder is unstable API and may change in future."
+#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
+#endif
+
+#include <gst/video/gstbasevideocodec.h>
+#include <gst/video/gstbasevideoutils.h>
+
+G_BEGIN_DECLS
+
#define GST_TYPE_BASE_VIDEO_ENCODER \
  (gst_base_video_encoder_get_type())
#define GST_BASE_VIDEO_ENCODER(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BASE_VIDEO_ENCODER,GstBaseVideoEncoder))
#define GST_BASE_VIDEO_ENCODER_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BASE_VIDEO_ENCODER,GstBaseVideoEncoderClass))
#define GST_BASE_VIDEO_ENCODER_GET_CLASS(obj) \
  (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_BASE_VIDEO_ENCODER,GstBaseVideoEncoderClass))
#define GST_IS_BASE_VIDEO_ENCODER(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BASE_VIDEO_ENCODER))
/* Fixed: the parameter was named "obj" while the expansion used "klass",
 * so any use of this macro failed to compile. */
#define GST_IS_BASE_VIDEO_ENCODER_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BASE_VIDEO_ENCODER))
+
+/**
+ * GST_BASE_VIDEO_ENCODER_SINK_NAME:
+ *
+ * The name of the templates for the sink pad.
+ */
+#define GST_BASE_VIDEO_ENCODER_SINK_NAME "sink"
+/**
+ * GST_BASE_VIDEO_ENCODER_SRC_NAME:
+ *
+ * The name of the templates for the source pad.
+ */
+#define GST_BASE_VIDEO_ENCODER_SRC_NAME "src"
+
+
+typedef struct _GstBaseVideoEncoder GstBaseVideoEncoder;
+typedef struct _GstBaseVideoEncoderClass GstBaseVideoEncoderClass;
+
struct _GstBaseVideoEncoder
{
  GstBaseVideoCodec base_video_codec;

  /*< private >*/
  GList *frames;                /* pending GstVideoFrame* being encoded */

  GstVideoState state;          /* negotiated input format/size/framerate */

  gboolean sink_clipping;       /* clip incoming buffers to the segment */

  guint64 presentation_frame_number;    /* counts buffers received */
  guint64 system_frame_number;  /* counts frames finished */
  int distance_from_sync;       /* frames since the last keyframe */

  GstCaps *caps;
  gboolean set_output_caps;     /* src caps already configured */

  /* latency reported via the LATENCY query, in nanoseconds */
  gint64 min_latency;
  gint64 max_latency;
};
+
struct _GstBaseVideoEncoderClass
{
  GstBaseVideoCodecClass base_video_codec_class;

  /* notify the subclass of the negotiated input state */
  gboolean (*set_format) (GstBaseVideoEncoder *coder, GstVideoState *state);
  gboolean (*start) (GstBaseVideoEncoder *coder);
  gboolean (*stop) (GstBaseVideoEncoder *coder);
  /* drain pending data at EOS; @frame is a synthetic is_eos frame */
  gboolean (*finish) (GstBaseVideoEncoder *coder, GstVideoFrame *frame);
  /* encode one raw frame; called from the chain function */
  gboolean (*handle_frame) (GstBaseVideoEncoder *coder, GstVideoFrame *frame);
  /* optional: custom output handling instead of the default gst_pad_push */
  GstFlowReturn (*shape_output) (GstBaseVideoEncoder *coder, GstVideoFrame *frame);
  GstCaps *(*get_caps) (GstBaseVideoEncoder *coder);

};
+
GType gst_base_video_encoder_get_type (void);

/* accessors for the negotiated video state */
int gst_base_video_encoder_get_width (GstBaseVideoEncoder *coder);
int gst_base_video_encoder_get_height (GstBaseVideoEncoder *coder);
const GstVideoState *gst_base_video_encoder_get_state (GstBaseVideoEncoder *coder);

guint64 gst_base_video_encoder_get_timestamp_offset (GstBaseVideoEncoder *coder);

/* frame bookkeeping and output used by subclasses */
GstVideoFrame *gst_base_video_encoder_get_frame (GstBaseVideoEncoder *coder,
    int frame_number);
GstFlowReturn gst_base_video_encoder_finish_frame (GstBaseVideoEncoder *base_video_encoder,
    GstVideoFrame *frame);
GstFlowReturn gst_base_video_encoder_end_of_stream (GstBaseVideoEncoder *base_video_encoder,
    GstBuffer *buffer);

/* latency reported through the LATENCY query */
void gst_base_video_encoder_set_latency (GstBaseVideoEncoder *base_video_encoder,
    GstClockTime min_latency, GstClockTime max_latency);
void gst_base_video_encoder_set_latency_fields (GstBaseVideoEncoder *base_video_encoder,
    int n_fields);
+
+
+G_END_DECLS
+
+#endif
+
diff --git a/gst-libs/gst/video/gstbasevideoparse.c b/gst-libs/gst/video/gstbasevideoparse.c
new file mode 100644
index 00000000..66695c7b
--- /dev/null
+++ b/gst-libs/gst/video/gstbasevideoparse.c
@@ -0,0 +1,870 @@
+/* Schrodinger
+ * Copyright (C) 2006 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstbasevideoparse.h"
+
+#include <string.h>
+#include <math.h>
+
+GST_DEBUG_CATEGORY_EXTERN (basevideo_debug);
+#define GST_CAT_DEFAULT basevideo_debug
+
+
+
+/* GstBaseVideoParse signals and args */
+enum
+{
+ LAST_SIGNAL
+};
+
+enum
+{
+ ARG_0
+};
+
+static void gst_base_video_parse_finalize (GObject * object);
+
+static const GstQueryType *gst_base_video_parse_get_query_types (GstPad * pad);
+static gboolean gst_base_video_parse_src_query (GstPad * pad, GstQuery * query);
+static gboolean gst_base_video_parse_sink_query (GstPad * pad,
+ GstQuery * query);
+static gboolean gst_base_video_parse_src_event (GstPad * pad, GstEvent * event);
+static gboolean gst_base_video_parse_sink_event (GstPad * pad,
+ GstEvent * event);
+static GstStateChangeReturn gst_base_video_parse_change_state (GstElement *
+ element, GstStateChange transition);
+static GstFlowReturn gst_base_video_parse_push_all (GstBaseVideoParse *
+ base_video_parse, gboolean at_eos);
+static GstFlowReturn gst_base_video_parse_chain (GstPad * pad, GstBuffer * buf);
+static void gst_base_video_parse_free_frame (GstVideoFrame * frame);
+static GstVideoFrame *gst_base_video_parse_new_frame (GstBaseVideoParse *
+ base_video_parse);
+
+
+GST_BOILERPLATE (GstBaseVideoParse, gst_base_video_parse,
+ GstBaseVideoCodec, GST_TYPE_BASE_VIDEO_CODEC);
+
+/* GObject base_init vfunc; this base class installs no pad templates or
+ * element details of its own, so there is nothing to do here. */
+static void
+gst_base_video_parse_base_init (gpointer g_class)
+{
+
+}
+
+/* Class init: hook up finalize for adapter cleanup and the state-change
+ * vfunc that resets the parser on READY<->PAUSED transitions. */
+static void
+gst_base_video_parse_class_init (GstBaseVideoParseClass * klass)
+{
+  GObjectClass *gobject_class;
+  GstElementClass *element_class;
+
+  gobject_class = G_OBJECT_CLASS (klass);
+  element_class = GST_ELEMENT_CLASS (klass);
+
+  gobject_class->finalize = gst_base_video_parse_finalize;
+
+  element_class->change_state = gst_base_video_parse_change_state;
+}
+
+/* Instance init: install chain/query/event handlers on the pads created
+ * by the GstBaseVideoCodec parent, create the two adapters (raw input
+ * bytes and per-frame output accumulation), and start the first frame. */
+static void
+gst_base_video_parse_init (GstBaseVideoParse * base_video_parse,
+    GstBaseVideoParseClass * klass)
+{
+  GstPad *pad;
+
+  GST_DEBUG ("gst_base_video_parse_init");
+
+  pad = GST_BASE_VIDEO_CODEC_SINK_PAD (base_video_parse);
+
+  gst_pad_set_chain_function (pad, gst_base_video_parse_chain);
+  gst_pad_set_query_function (pad, gst_base_video_parse_sink_query);
+  gst_pad_set_event_function (pad, gst_base_video_parse_sink_event);
+
+  pad = GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse);
+
+  gst_pad_set_query_type_function (pad, gst_base_video_parse_get_query_types);
+  gst_pad_set_query_function (pad, gst_base_video_parse_src_query);
+  gst_pad_set_event_function (pad, gst_base_video_parse_src_event);
+
+  base_video_parse->input_adapter = gst_adapter_new ();
+  base_video_parse->output_adapter = gst_adapter_new ();
+
+  /* default: decode numbers lag system numbers by one frame */
+  base_video_parse->reorder_depth = 1;
+
+  base_video_parse->current_frame =
+      gst_base_video_parse_new_frame (base_video_parse);
+}
+
+/* Return the parser to its initial state: drop sync, clear both adapters,
+ * reset frame counters, caps and the segment, and start a fresh current
+ * frame.  Called on READY->PAUSED, PAUSED->READY, FLUSH_STOP and on
+ * DISCONT input buffers. */
+static void
+gst_base_video_parse_reset (GstBaseVideoParse * base_video_parse)
+{
+  GST_DEBUG ("reset");
+
+  base_video_parse->discont = TRUE;
+  base_video_parse->have_sync = FALSE;
+
+  base_video_parse->system_frame_number = 0;
+  base_video_parse->presentation_frame_number = 0;
+
+  if (base_video_parse->caps) {
+    gst_caps_unref (base_video_parse->caps);
+    base_video_parse->caps = NULL;
+  }
+
+  gst_segment_init (&base_video_parse->state.segment, GST_FORMAT_TIME);
+  gst_adapter_clear (base_video_parse->input_adapter);
+  gst_adapter_clear (base_video_parse->output_adapter);
+
+  if (base_video_parse->current_frame) {
+    gst_base_video_parse_free_frame (base_video_parse->current_frame);
+  }
+  base_video_parse->current_frame =
+      gst_base_video_parse_new_frame (base_video_parse);
+
+}
+
+/* GObject finalize: release the two adapters created in _init.
+ * NOTE(review): current_frame is not freed here -- presumably relies on
+ * the PAUSED->READY reset; verify no leak on direct dispose. */
+static void
+gst_base_video_parse_finalize (GObject * object)
+{
+  GstBaseVideoParse *base_video_parse;
+
+  g_return_if_fail (GST_IS_BASE_VIDEO_PARSE (object));
+  base_video_parse = GST_BASE_VIDEO_PARSE (object);
+
+  if (base_video_parse->input_adapter) {
+    g_object_unref (base_video_parse->input_adapter);
+  }
+  if (base_video_parse->output_adapter) {
+    g_object_unref (base_video_parse->output_adapter);
+  }
+
+  G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+/* Advertise the src-pad query types we answer (zero-terminated list). */
+static const GstQueryType *
+gst_base_video_parse_get_query_types (GstPad * pad)
+{
+  static const GstQueryType query_types[] = {
+    GST_QUERY_POSITION,
+    GST_QUERY_DURATION,
+    GST_QUERY_CONVERT,
+    0
+  };
+
+  return query_types;
+}
+
+#if 0
+static gboolean
+gst_base_video_parse_src_convert (GstPad * pad,
+ GstFormat src_format, gint64 src_value,
+ GstFormat * dest_format, gint64 * dest_value)
+{
+ gboolean res;
+ GstBaseVideoParse *dec;
+
+ if (src_format == *dest_format) {
+ *dest_value = src_value;
+ return TRUE;
+ }
+
+ dec = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad));
+
+ if (src_format == GST_FORMAT_DEFAULT && *dest_format == GST_FORMAT_TIME) {
+ if (dec->fps_d != 0) {
+ *dest_value = gst_util_uint64_scale (granulepos_to_frame (src_value),
+ dec->fps_d * GST_SECOND, dec->fps_n);
+ res = TRUE;
+ } else {
+ res = FALSE;
+ }
+ } else {
+ GST_WARNING ("unhandled conversion from %d to %d", src_format,
+ *dest_format);
+ res = FALSE;
+ }
+
+ gst_object_unref (dec);
+
+ return res;
+}
+
+static gboolean
+gst_base_video_parse_sink_convert (GstPad * pad,
+ GstFormat src_format, gint64 src_value,
+ GstFormat * dest_format, gint64 * dest_value)
+{
+ gboolean res = TRUE;
+ GstBaseVideoParse *dec;
+
+ if (src_format == *dest_format) {
+ *dest_value = src_value;
+ return TRUE;
+ }
+
+ dec = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad));
+
+ /* FIXME: check if we are in a decoding state */
+
+ switch (src_format) {
+ case GST_FORMAT_DEFAULT:
+ switch (*dest_format) {
+ case GST_FORMAT_TIME:
+ *dest_value = gst_util_uint64_scale (src_value,
+ dec->fps_d * GST_SECOND, dec->fps_n);
+ break;
+ default:
+ res = FALSE;
+ }
+ break;
+ case GST_FORMAT_TIME:
+ switch (*dest_format) {
+ case GST_FORMAT_DEFAULT:
+ {
+ *dest_value = gst_util_uint64_scale (src_value,
+ dec->fps_n, dec->fps_d * GST_SECOND);
+ break;
+ }
+ default:
+ res = FALSE;
+ break;
+ }
+ break;
+ default:
+ res = FALSE;
+ break;
+ }
+
+ gst_object_unref (dec);
+
+ return res;
+}
+#endif
+
+/* Src-pad query handler: POSITION is derived from the current
+ * presentation frame number, DURATION is delegated to the upstream peer,
+ * and CONVERT goes through the encoded-video conversion helper. */
+static gboolean
+gst_base_video_parse_src_query (GstPad * pad, GstQuery * query)
+{
+  GstBaseVideoParse *base_parse;
+  gboolean res = FALSE;
+
+  base_parse = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad));
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_POSITION:
+    {
+      GstFormat format;
+      gint64 time;
+      gint64 value;
+
+      gst_query_parse_position (query, &format, NULL);
+
+      /* Fixed: frames -> nanoseconds is n_frames * fps_d * GST_SECOND /
+       * fps_n (as in gst_base_video_parse_get_timestamp); the original
+       * scaled by fps_n / fps_d, yielding a value that was neither a
+       * frame count nor a time. */
+      time = gst_util_uint64_scale (base_parse->presentation_frame_number,
+          base_parse->state.fps_d * GST_SECOND, base_parse->state.fps_n);
+      time += base_parse->state.segment.time;
+      /* use G_GINT64_FORMAT: "%lld" does not match gint64 on all ABIs */
+      GST_DEBUG ("query position %" G_GINT64_FORMAT, time);
+      res = gst_base_video_encoded_video_convert (&base_parse->state,
+          GST_FORMAT_TIME, time, &format, &value);
+      if (!res)
+        goto error;
+
+      gst_query_set_position (query, format, value);
+      break;
+    }
+    case GST_QUERY_DURATION:
+      /* we add no latency/duration of our own; ask upstream */
+      res =
+          gst_pad_query (GST_PAD_PEER (GST_BASE_VIDEO_CODEC_SINK_PAD
+              (base_parse)), query);
+      if (!res)
+        goto error;
+      break;
+    case GST_QUERY_CONVERT:
+    {
+      GstFormat src_fmt, dest_fmt;
+      gint64 src_val, dest_val;
+
+      GST_WARNING ("query convert");
+
+      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
+      res = gst_base_video_encoded_video_convert (&base_parse->state,
+          src_fmt, src_val, &dest_fmt, &dest_val);
+      if (!res)
+        goto error;
+      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
+      break;
+    }
+    default:
+      res = gst_pad_query_default (pad, query);
+      break;
+  }
+done:
+  gst_object_unref (base_parse);
+
+  return res;
+error:
+  GST_DEBUG_OBJECT (base_parse, "query failed");
+  goto done;
+}
+
+/* Sink-pad query handler: only CONVERT is answered locally (via the
+ * codec state); everything else takes the default path. */
+static gboolean
+gst_base_video_parse_sink_query (GstPad * pad, GstQuery * query)
+{
+  GstBaseVideoParse *base_video_parse;
+  gboolean res = FALSE;
+
+  base_video_parse = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad));
+
+  switch (GST_QUERY_TYPE (query)) {
+    case GST_QUERY_CONVERT:
+    {
+      GstFormat src_fmt, dest_fmt;
+      gint64 src_val, dest_val;
+
+      gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
+      res = gst_base_video_encoded_video_convert (&base_video_parse->state,
+          src_fmt, src_val, &dest_fmt, &dest_val);
+      if (!res)
+        goto error;
+      gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
+      break;
+    }
+    default:
+      res = gst_pad_query_default (pad, query);
+      break;
+  }
+done:
+  gst_object_unref (base_video_parse);
+
+  return res;
+error:
+  GST_DEBUG_OBJECT (base_video_parse, "query failed");
+  goto done;
+}
+
+/* Src-pad event handler.  SEEK events are converted to TIME format and
+ * re-issued upstream; all other events are forwarded to the sink pad
+ * unchanged.  (The QOS branch is disabled pending proportion/earliest
+ * bookkeeping in the instance struct.) */
+static gboolean
+gst_base_video_parse_src_event (GstPad * pad, GstEvent * event)
+{
+  GstBaseVideoParse *base_video_parse;
+  gboolean res = FALSE;
+
+  base_video_parse = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad));
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_SEEK:
+    {
+      GstFormat format, tformat;
+      gdouble rate;
+      GstEvent *real_seek;
+      GstSeekFlags flags;
+      GstSeekType cur_type, stop_type;
+      gint64 cur, stop;
+      gint64 tcur, tstop;
+
+      /* the incoming event is consumed here; a TIME-format replacement
+       * is built below */
+      gst_event_parse_seek (event, &rate, &format, &flags, &cur_type,
+          &cur, &stop_type, &stop);
+      gst_event_unref (event);
+
+      tformat = GST_FORMAT_TIME;
+      res = gst_base_video_encoded_video_convert (&base_video_parse->state,
+          format, cur, &tformat, &tcur);
+      if (!res)
+        goto convert_error;
+      res = gst_base_video_encoded_video_convert (&base_video_parse->state,
+          format, stop, &tformat, &tstop);
+      if (!res)
+        goto convert_error;
+
+      real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME,
+          flags, cur_type, tcur, stop_type, tstop);
+
+      res =
+          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SINK_PAD (base_video_parse),
+          real_seek);
+
+      break;
+    }
+#if 0
+    case GST_EVENT_QOS:
+    {
+      gdouble proportion;
+      GstClockTimeDiff diff;
+      GstClockTime timestamp;
+
+      gst_event_parse_qos (event, &proportion, &diff, &timestamp);
+
+      GST_OBJECT_LOCK (base_video_parse);
+      base_video_parse->proportion = proportion;
+      base_video_parse->earliest_time = timestamp + diff;
+      GST_OBJECT_UNLOCK (base_video_parse);
+
+      GST_DEBUG_OBJECT (base_video_parse,
+          "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT,
+          GST_TIME_ARGS (timestamp), diff);
+
+      res = gst_pad_push_event (base_video_parse->sinkpad, event);
+      break;
+    }
+#endif
+    default:
+      res =
+          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SINK_PAD (base_video_parse),
+          event);
+      break;
+  }
+done:
+  gst_object_unref (base_video_parse);
+  return res;
+
+convert_error:
+  GST_DEBUG_OBJECT (base_video_parse, "could not convert format");
+  goto done;
+}
+
+/* Sink-pad event handler.  FLUSH_STOP resets the parser, EOS drains
+ * pending data before forwarding, TIME newsegments are applied to the
+ * codec segment; all events are forwarded downstream. */
+static gboolean
+gst_base_video_parse_sink_event (GstPad * pad, GstEvent * event)
+{
+  GstBaseVideoParse *base_video_parse;
+  gboolean ret = FALSE;
+
+  base_video_parse = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad));
+
+  switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_FLUSH_START:
+      ret =
+          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
+          event);
+      break;
+    case GST_EVENT_FLUSH_STOP:
+      gst_base_video_parse_reset (base_video_parse);
+      ret =
+          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
+          event);
+      break;
+    case GST_EVENT_EOS:
+      /* Fixed: we are at EOS, so drain with at_eos = TRUE (the original
+       * passed FALSE, which would leave a trailing partial frame behind
+       * once push_all is implemented). */
+      if (gst_base_video_parse_push_all (base_video_parse,
+              TRUE) == GST_FLOW_ERROR) {
+        gst_event_unref (event);
+        /* Fixed: 'return FALSE' here leaked the ref taken by
+         * gst_pad_get_parent; route through done instead. */
+        ret = FALSE;
+        goto done;
+      }
+
+      ret =
+          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
+          event);
+      break;
+    case GST_EVENT_NEWSEGMENT:
+    {
+      gboolean update;
+      GstFormat format;
+      gdouble rate;
+      gint64 start, stop, time;
+
+      gst_event_parse_new_segment (event, &update, &rate, &format, &start,
+          &stop, &time);
+
+      if (format != GST_FORMAT_TIME)
+        goto newseg_wrong_format;
+
+      if (rate <= 0.0)
+        goto newseg_wrong_rate;
+
+      /* use G_GINT64_FORMAT: "%lld" does not match gint64 on all ABIs */
+      GST_DEBUG ("newsegment %" G_GINT64_FORMAT " %" G_GINT64_FORMAT,
+          start, time);
+      gst_segment_set_newsegment (&base_video_parse->state.segment, update,
+          rate, format, start, stop, time);
+
+      ret =
+          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
+          event);
+      break;
+    }
+    default:
+      ret =
+          gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
+          event);
+      break;
+  }
+done:
+  gst_object_unref (base_video_parse);
+  return ret;
+
+newseg_wrong_format:
+  GST_DEBUG_OBJECT (base_video_parse, "received non TIME newsegment");
+  gst_event_unref (event);
+  goto done;
+
+newseg_wrong_rate:
+  GST_DEBUG_OBJECT (base_video_parse, "negative rates not supported");
+  gst_event_unref (event);
+  goto done;
+}
+
+
+/* Element state-change vfunc: reset the parser when entering PAUSED
+ * from READY (upward) and when leaving PAUSED for READY (downward);
+ * everything else is delegated to the parent class. */
+static GstStateChangeReturn
+gst_base_video_parse_change_state (GstElement * element,
+    GstStateChange transition)
+{
+  GstBaseVideoParse *base_parse = GST_BASE_VIDEO_PARSE (element);
+  GstStateChangeReturn ret;
+
+  switch (transition) {
+    case GST_STATE_CHANGE_NULL_TO_READY:
+      break;
+    case GST_STATE_CHANGE_READY_TO_PAUSED:
+      gst_base_video_parse_reset (base_parse);
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+      break;
+    default:
+      break;
+  }
+
+  ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+  switch (transition) {
+    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+      break;
+    case GST_STATE_CHANGE_PAUSED_TO_READY:
+      gst_base_video_parse_reset (base_parse);
+      break;
+    case GST_STATE_CHANGE_READY_TO_NULL:
+      break;
+    default:
+      break;
+  }
+
+  return ret;
+}
+
+/* Map a picture number to a timestamp relative to timestamp_offset:
+ * offset + picture_number * fps_d * GST_SECOND / fps_n.  Negative
+ * picture numbers (possible for early decode numbers, see
+ * reorder_depth) are handled with an explicit branch because
+ * gst_util_uint64_scale operates on unsigned values. */
+static guint64
+gst_base_video_parse_get_timestamp (GstBaseVideoParse * base_video_parse,
+    int picture_number)
+{
+  if (picture_number < 0) {
+    return base_video_parse->timestamp_offset -
+        (gint64) gst_util_uint64_scale (-picture_number,
+        base_video_parse->state.fps_d * GST_SECOND,
+        base_video_parse->state.fps_n);
+  } else {
+    return base_video_parse->timestamp_offset +
+        gst_util_uint64_scale (picture_number,
+        base_video_parse->state.fps_d * GST_SECOND,
+        base_video_parse->state.fps_n);
+  }
+}
+
+/* Drain any buffered data downstream; at_eos indicates the final drain.
+ * Currently an unimplemented stub (see FIXME) that always reports OK. */
+static GstFlowReturn
+gst_base_video_parse_push_all (GstBaseVideoParse * base_video_parse,
+    gboolean at_eos)
+{
+  GstFlowReturn ret = GST_FLOW_OK;
+
+  /* FIXME do stuff */
+
+  return ret;
+}
+
+/* Return a ref to the first buffer queued in the adapter without
+ * consuming it.  NOTE(review): reaches into GstAdapter's private
+ * 'buflist' field and assumes it is non-empty -- fragile; the FIXME at
+ * the call site suggests gst_adapter_prev_timestamp() instead. */
+static GstBuffer *
+gst_adapter_get_buffer (GstAdapter * adapter)
+{
+  return gst_buffer_ref (GST_BUFFER (adapter->buflist->data));
+
+}
+
+/* Sink-pad chain function.  Lazily starts the subclass, resets on
+ * DISCONT, accumulates input in the adapter, scans for sync via the
+ * subclass's scan_for_sync, then loops the subclass's parse_data until
+ * it asks for more data or errors.  NEED_DATA is mapped to FLOW_OK so
+ * upstream keeps feeding us. */
+static GstFlowReturn
+gst_base_video_parse_chain (GstPad * pad, GstBuffer * buf)
+{
+  GstBaseVideoParse *base_video_parse;
+  GstBaseVideoParseClass *klass;
+  GstBuffer *buffer;
+  GstFlowReturn ret;
+
+  GST_DEBUG ("chain with %d bytes", GST_BUFFER_SIZE (buf));
+
+  base_video_parse = GST_BASE_VIDEO_PARSE (GST_PAD_PARENT (pad));
+  klass = GST_BASE_VIDEO_PARSE_GET_CLASS (base_video_parse);
+
+  if (!base_video_parse->started) {
+    klass->start (base_video_parse);
+    base_video_parse->started = TRUE;
+  }
+
+  if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) {
+    GST_DEBUG_OBJECT (base_video_parse, "received DISCONT buffer");
+    gst_base_video_parse_reset (base_video_parse);
+    /* NOTE(review): redundant -- reset() already sets both of these */
+    base_video_parse->discont = TRUE;
+    base_video_parse->have_sync = FALSE;
+  }
+
+  /* remember the most recent input timestamp; finish_frame() uses it to
+   * anchor timestamp_offset at sync points */
+  if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) {
+    base_video_parse->last_timestamp = GST_BUFFER_TIMESTAMP (buf);
+  }
+  gst_adapter_push (base_video_parse->input_adapter, buf);
+
+  if (!base_video_parse->have_sync) {
+    int n, m;
+
+    GST_DEBUG ("no sync, scanning");
+
+    n = gst_adapter_available (base_video_parse->input_adapter);
+    /* assumes scan_for_sync returns n when no sync is found -- TODO
+     * confirm this contract with subclasses */
+    m = klass->scan_for_sync (base_video_parse->input_adapter, FALSE, 0, n);
+
+    gst_adapter_flush (base_video_parse->input_adapter, m);
+
+    if (m < n) {
+      GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n);
+
+      /* this is only "maybe" sync */
+      base_video_parse->have_sync = TRUE;
+    }
+
+    if (!base_video_parse->have_sync) {
+      return GST_FLOW_OK;
+    }
+  }
+
+  /* FIXME: use gst_adapter_prev_timestamp() here instead? */
+  /* NOTE(review): this ref/unref pair is dead code -- the buffer is
+   * fetched and immediately released without being used */
+  buffer = gst_adapter_get_buffer (base_video_parse->input_adapter);
+
+  gst_buffer_unref (buffer);
+
+  /* FIXME check klass->parse_data */
+
+  do {
+    ret = klass->parse_data (base_video_parse, FALSE);
+  } while (ret == GST_FLOW_OK);
+
+  if (ret == GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA) {
+    return GST_FLOW_OK;
+  }
+  return ret;
+}
+
+/* Return the parser's video state.  Owned by the parser; do not free. */
+GstVideoState *
+gst_base_video_parse_get_state (GstBaseVideoParse * base_video_parse)
+{
+  return &base_video_parse->state;
+}
+
+/* Copy the given state into the parser (struct copy; caller keeps
+ * ownership of 'state').  Does not yet update the src caps (FIXME). */
+void
+gst_base_video_parse_set_state (GstBaseVideoParse * base_video_parse,
+    GstVideoState * state)
+{
+  GST_DEBUG ("set_state");
+
+  memcpy (&base_video_parse->state, state, sizeof (GstVideoState));
+
+  /* FIXME set caps */
+
+}
+
+
+/* Set the given caps on the source pad; returns FALSE if the peer
+ * rejects them.  Does not take ownership of 'caps'. */
+gboolean
+gst_base_video_parse_set_src_caps (GstBaseVideoParse * base_video_parse,
+    GstCaps * caps)
+{
+  g_return_val_if_fail (GST_IS_BASE_VIDEO_PARSE (base_video_parse), FALSE);
+
+  GST_DEBUG ("set_src_caps");
+
+  return gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
+      caps);
+}
+
+/* Called by subclasses when sync is lost: skip one byte so the next
+ * scan_for_sync makes forward progress, and mark us unsynchronized. */
+void
+gst_base_video_parse_lost_sync (GstBaseVideoParse * base_video_parse)
+{
+  g_return_if_fail (GST_IS_BASE_VIDEO_PARSE (base_video_parse));
+
+  GST_DEBUG ("lost_sync");
+
+  if (gst_adapter_available (base_video_parse->input_adapter) >= 1) {
+    gst_adapter_flush (base_video_parse->input_adapter, 1);
+  }
+
+  base_video_parse->have_sync = FALSE;
+}
+
+/* Return the frame currently being accumulated (owned by the parser). */
+GstVideoFrame *
+gst_base_video_parse_get_frame (GstBaseVideoParse * base_video_parse)
+{
+  g_return_val_if_fail (GST_IS_BASE_VIDEO_PARSE (base_video_parse), NULL);
+
+  return base_video_parse->current_frame;
+}
+
+/* Move n_bytes from the input adapter into the output adapter, i.e.
+ * append them to the frame under construction.  Caller must ensure at
+ * least n_bytes are available in the input adapter. */
+void
+gst_base_video_parse_add_to_frame (GstBaseVideoParse * base_video_parse,
+    int n_bytes)
+{
+  GstBuffer *buf;
+
+  GST_DEBUG ("add_to_frame");
+
+  buf = gst_adapter_take_buffer (base_video_parse->input_adapter, n_bytes);
+
+  gst_adapter_push (base_video_parse->output_adapter, buf);
+}
+
+/* Complete the frame under construction: take all accumulated bytes as
+ * one buffer, derive pts/dts/duration from the frame numbers (anchored
+ * at the last sync point via timestamp_offset), flag delta units, and
+ * hand the frame to the subclass's shape_output() for pushing.  Finally
+ * a fresh current_frame is started. */
+GstFlowReturn
+gst_base_video_parse_finish_frame (GstBaseVideoParse * base_video_parse)
+{
+  GstVideoFrame *frame = base_video_parse->current_frame;
+  GstBuffer *buffer;
+  GstBaseVideoParseClass *base_video_parse_class;
+  GstFlowReturn ret;
+
+  GST_DEBUG ("finish_frame");
+
+  base_video_parse_class = GST_BASE_VIDEO_PARSE_GET_CLASS (base_video_parse);
+
+  buffer = gst_adapter_take_buffer (base_video_parse->output_adapter,
+      gst_adapter_available (base_video_parse->output_adapter));
+
+  if (frame->is_sync_point) {
+    /* re-anchor timestamp_offset so frame numbers map onto the most
+     * recent input timestamp */
+    base_video_parse->timestamp_offset = base_video_parse->last_timestamp -
+        gst_util_uint64_scale (frame->presentation_frame_number,
+        base_video_parse->state.fps_d * GST_SECOND,
+        base_video_parse->state.fps_n);
+    base_video_parse->distance_from_sync = 0;
+  }
+
+  frame->distance_from_sync = base_video_parse->distance_from_sync;
+  base_video_parse->distance_from_sync++;
+
+  frame->presentation_timestamp =
+      gst_base_video_parse_get_timestamp (base_video_parse,
+      frame->presentation_frame_number);
+  /* duration = timestamp of the next frame minus this one */
+  frame->presentation_duration =
+      gst_base_video_parse_get_timestamp (base_video_parse,
+      frame->presentation_frame_number + 1) - frame->presentation_timestamp;
+  frame->decode_timestamp =
+      gst_base_video_parse_get_timestamp (base_video_parse,
+      frame->decode_frame_number);
+
+  GST_BUFFER_TIMESTAMP (buffer) = frame->presentation_timestamp;
+  GST_BUFFER_DURATION (buffer) = frame->presentation_duration;
+  if (frame->decode_frame_number < 0) {
+    GST_BUFFER_OFFSET (buffer) = 0;
+  } else {
+    GST_BUFFER_OFFSET (buffer) = frame->decode_timestamp;
+  }
+  GST_BUFFER_OFFSET_END (buffer) = GST_CLOCK_TIME_NONE;
+
+  GST_DEBUG ("pts %" GST_TIME_FORMAT,
+      GST_TIME_ARGS (frame->presentation_timestamp));
+  GST_DEBUG ("dts %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->decode_timestamp));
+  GST_DEBUG ("dist %d", frame->distance_from_sync);
+
+  if (frame->is_sync_point) {
+    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+  } else {
+    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+  }
+
+  frame->src_buffer = buffer;
+  ret = base_video_parse_class->shape_output (base_video_parse, frame);
+
+  gst_base_video_parse_free_frame (base_video_parse->current_frame);
+
+  /* create new frame */
+  base_video_parse->current_frame =
+      gst_base_video_parse_new_frame (base_video_parse);
+
+  return ret;
+}
+
+/* Free a frame and its input buffer.  src_buffer is deliberately not
+ * released here (disabled below); presumably ownership passes to
+ * shape_output/push -- TODO confirm. */
+static void
+gst_base_video_parse_free_frame (GstVideoFrame * frame)
+{
+  if (frame->sink_buffer) {
+    gst_buffer_unref (frame->sink_buffer);
+  }
+#if 0
+  if (frame->src_buffer) {
+    gst_buffer_unref (frame->src_buffer);
+  }
+#endif
+
+  g_free (frame);
+}
+
+/* Allocate a zero-initialized frame, assign it the next system frame
+ * number, and derive its decode number (system number minus
+ * reorder_depth -- negative for the first frame(s)). */
+static GstVideoFrame *
+gst_base_video_parse_new_frame (GstBaseVideoParse * base_video_parse)
+{
+  GstVideoFrame *frame;
+
+  frame = g_malloc0 (sizeof (GstVideoFrame));
+
+  frame->system_frame_number = base_video_parse->system_frame_number;
+  base_video_parse->system_frame_number++;
+
+  frame->decode_frame_number = frame->system_frame_number -
+      base_video_parse->reorder_depth;
+
+  return frame;
+}
+
+/* Mark the frame under construction as a sync point (keyframe) and
+ * restart the distance-from-sync counter. */
+void
+gst_base_video_parse_set_sync_point (GstBaseVideoParse * base_video_parse)
+{
+  GST_DEBUG ("set_sync_point");
+
+  base_video_parse->current_frame->is_sync_point = TRUE;
+
+  base_video_parse->distance_from_sync = 0;
+}
+
+/* Push an encoded buffer on the source pad.  On the first push the src
+ * caps are obtained from the subclass's get_caps() and negotiated; the
+ * DISCONT flag is applied to the first buffer after a reset. */
+GstFlowReturn
+gst_base_video_parse_push (GstBaseVideoParse * base_video_parse,
+    GstBuffer * buffer)
+{
+  GstBaseVideoParseClass *base_video_parse_class;
+
+  base_video_parse_class = GST_BASE_VIDEO_PARSE_GET_CLASS (base_video_parse);
+
+  if (base_video_parse->caps == NULL) {
+    gboolean ret;
+
+    base_video_parse->caps =
+        base_video_parse_class->get_caps (base_video_parse);
+
+    ret = gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse),
+        base_video_parse->caps);
+
+    if (!ret) {
+      GST_WARNING ("pad didn't accept caps");
+      return GST_FLOW_ERROR;
+    }
+  }
+  gst_buffer_set_caps (buffer, base_video_parse->caps);
+
+  /* Fixed: "%lld" with guint64/GstClockTime arguments is a
+   * format/argument mismatch on ILP32 platforms (undefined behavior);
+   * use GST_TIME_FORMAT / G_GUINT64_FORMAT instead. */
+  GST_DEBUG ("pushing ts=%" GST_TIME_FORMAT " dur=%" GST_TIME_FORMAT
+      " off=%" G_GUINT64_FORMAT " off_end=%" G_GUINT64_FORMAT,
+      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
+      GST_TIME_ARGS (GST_BUFFER_DURATION (buffer)),
+      GST_BUFFER_OFFSET (buffer), GST_BUFFER_OFFSET_END (buffer));
+
+  if (base_video_parse->discont) {
+    GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
+    base_video_parse->discont = FALSE;
+  } else {
+    GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT);
+  }
+
+  return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse), buffer);
+}
diff --git a/gst-libs/gst/video/gstbasevideoparse.h b/gst-libs/gst/video/gstbasevideoparse.h
new file mode 100644
index 00000000..cdbcd27a
--- /dev/null
+++ b/gst-libs/gst/video/gstbasevideoparse.h
@@ -0,0 +1,141 @@
+/* GStreamer
+ * Copyright (C) 2008 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef _GST_BASE_VIDEO_PARSE_H_
+#define _GST_BASE_VIDEO_PARSE_H_
+
+#ifndef GST_USE_UNSTABLE_API
+#warning "GstBaseVideoParse is unstable API and may change in future."
+#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
+#endif
+
+#include <gst/video/gstbasevideocodec.h>
+#include <gst/video/gstbasevideoutils.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_BASE_VIDEO_PARSE \
+  (gst_base_video_parse_get_type())
+#define GST_BASE_VIDEO_PARSE(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BASE_VIDEO_PARSE,GstBaseVideoParse))
+#define GST_BASE_VIDEO_PARSE_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BASE_VIDEO_PARSE,GstBaseVideoParseClass))
+#define GST_BASE_VIDEO_PARSE_GET_CLASS(obj) \
+  (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_BASE_VIDEO_PARSE,GstBaseVideoParseClass))
+#define GST_IS_BASE_VIDEO_PARSE(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BASE_VIDEO_PARSE))
+/* Fixed: parameter must be named 'klass' -- the expansion references
+ * 'klass', so the original '(obj)' form failed to compile when used. */
+#define GST_IS_BASE_VIDEO_PARSE_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BASE_VIDEO_PARSE))
+
+/**
+ * GST_BASE_VIDEO_PARSE_SINK_NAME:
+ *
+ * The name of the templates for the sink pad.
+ */
+#define GST_BASE_VIDEO_PARSE_SINK_NAME "sink"
+/**
+ * GST_BASE_VIDEO_PARSE_SRC_NAME:
+ *
+ * The name of the templates for the source pad.
+ */
+#define GST_BASE_VIDEO_PARSE_SRC_NAME "src"
+
+/**
+ * GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA:
+ *
+ */
+#define GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS
+
+typedef struct _GstBaseVideoParse GstBaseVideoParse;
+typedef struct _GstBaseVideoParseClass GstBaseVideoParseClass;
+
+struct _GstBaseVideoParse
+{
+  GstBaseVideoCodec base_video_codec;
+
+  /*< private >*/
+  GstAdapter *input_adapter;    /* raw bytes arriving on the sink pad */
+  GstAdapter *output_adapter;   /* bytes accumulated for the current frame */
+
+  int reorder_depth;            /* decode number lag behind system number */
+
+  gboolean have_sync;           /* scan_for_sync found a (possible) sync */
+  gboolean discont;             /* next pushed buffer gets DISCONT flag */
+  gboolean started;             /* klass->start() has been called */
+
+  GstVideoFrame *current_frame; /* frame being accumulated */
+  GstVideoState state;
+  int distance_from_sync;       /* frames since the last sync point */
+
+  gboolean sink_clipping;
+
+  guint64 presentation_frame_number;
+  guint64 system_frame_number;  /* incremented per created frame */
+
+  GstCaps *caps;                /* src caps, negotiated on first push */
+  gboolean set_output_caps;
+
+  GstClockTime last_timestamp;  /* timestamp of most recent input buffer */
+
+  gint64 timestamp_offset;      /* anchors frame numbers to input time */
+};
+
+/* Subclass vfuncs.  start/stop/reset manage the parsing session;
+ * scan_for_sync returns the byte offset of a possible sync code (or n
+ * when none is found); parse_data consumes the input adapter and
+ * returns GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA when starved;
+ * shape_output pushes a finished frame; get_caps supplies src caps. */
+struct _GstBaseVideoParseClass
+{
+  GstBaseVideoCodecClass base_video_codec_class;
+
+  gboolean (*start) (GstBaseVideoParse *parse);
+  gboolean (*stop) (GstBaseVideoParse *parse);
+  gboolean (*reset) (GstBaseVideoParse *parse);
+  GstFlowReturn (*parse_data) (GstBaseVideoParse *parse, gboolean at_eos);
+  int (*scan_for_sync) (GstAdapter *adapter, gboolean at_eos,
+      int offset, int n);
+  GstFlowReturn (*shape_output) (GstBaseVideoParse *parse, GstVideoFrame *frame);
+  GstCaps *(*get_caps) (GstBaseVideoParse *parse);
+
+};
+
+GType gst_base_video_parse_get_type (void);
+
+int gst_base_video_parse_get_width (GstBaseVideoParse *parse);
+int gst_base_video_parse_get_height (GstBaseVideoParse *parse);
+GstVideoState *gst_base_video_parse_get_state (GstBaseVideoParse *parse);
+void gst_base_video_parse_set_state (GstBaseVideoParse *parse,
+ GstVideoState *state);
+
+guint64 gst_base_video_parse_get_timestamp_offset (GstBaseVideoParse *parse);
+
+gboolean gst_base_video_parse_set_src_caps (GstBaseVideoParse *base_video_parse, GstCaps *caps);
+
+GstFlowReturn gst_base_video_parse_end_of_stream (GstBaseVideoParse *base_video_parse,
+ GstBuffer *buffer);
+
+void gst_base_video_parse_lost_sync (GstBaseVideoParse *base_video_parse);
+
+GstVideoFrame * gst_base_video_parse_get_frame (GstBaseVideoParse *base_video_parse);
+void gst_base_video_parse_add_to_frame (GstBaseVideoParse *base_video_parse, int n_bytes);
+GstFlowReturn gst_base_video_parse_finish_frame (GstBaseVideoParse *base_video_parse);
+void gst_base_video_parse_set_sync_point (GstBaseVideoParse *base_video_parse);
+GstFlowReturn gst_base_video_parse_push (GstBaseVideoParse *base_video_parse,
+ GstBuffer *buffer);
+
+G_END_DECLS
+
+#endif
+
diff --git a/gst-libs/gst/video/gstbasevideoutils.c b/gst-libs/gst/video/gstbasevideoutils.c
new file mode 100644
index 00000000..3de6038c
--- /dev/null
+++ b/gst-libs/gst/video/gstbasevideoutils.c
@@ -0,0 +1,162 @@
+/* GStreamer
+ * Copyright (C) 2008 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstbasevideoutils.h"
+
+#include <string.h>
+
+GST_DEBUG_CATEGORY_EXTERN (basevideo_debug);
+#define GST_CAT_DEFAULT basevideo_debug
+
+
+/* Convert a byte count to a frame count for raw video.
+ * NOTE(review): assumes state->bytes_per_picture is non-zero -- verify
+ * callers; gst_util_uint64_scale_int rejects a zero denominator. */
+guint64
+gst_base_video_convert_bytes_to_frames (GstVideoState * state, guint64 bytes)
+{
+  return gst_util_uint64_scale_int (bytes, 1, state->bytes_per_picture);
+}
+
+/* Convert a frame count to a byte count for raw video (fixed-size
+ * pictures assumed). */
+guint64
+gst_base_video_convert_frames_to_bytes (GstVideoState * state, guint64 frames)
+{
+  return frames * state->bytes_per_picture;
+}
+
+
+/* Convert between BYTES, DEFAULT (frames) and TIME for raw video.
+ * Returns FALSE for unsupported combinations (e.g. bytes <-> time, see
+ * FIXME) or when the needed state field is zero. */
+gboolean
+gst_base_video_rawvideo_convert (GstVideoState * state,
+    GstFormat src_format, gint64 src_value,
+    GstFormat * dest_format, gint64 * dest_value)
+{
+  gboolean res = FALSE;
+
+  if (src_format == *dest_format) {
+    *dest_value = src_value;
+    return TRUE;
+  }
+
+  if (src_format == GST_FORMAT_BYTES &&
+      *dest_format == GST_FORMAT_DEFAULT && state->bytes_per_picture != 0) {
+    /* convert bytes to frames */
+    *dest_value = gst_util_uint64_scale_int (src_value, 1,
+        state->bytes_per_picture);
+    res = TRUE;
+  } else if (src_format == GST_FORMAT_DEFAULT &&
+      *dest_format == GST_FORMAT_BYTES && state->bytes_per_picture != 0) {
+    /* convert frames to bytes */
+    *dest_value = src_value * state->bytes_per_picture;
+    res = TRUE;
+  } else if (src_format == GST_FORMAT_DEFAULT &&
+      *dest_format == GST_FORMAT_TIME && state->fps_n != 0) {
+    /* convert frames to time */
+    /* FIXME add segment time? */
+    *dest_value = gst_util_uint64_scale (src_value,
+        GST_SECOND * state->fps_d, state->fps_n);
+    res = TRUE;
+  } else if (src_format == GST_FORMAT_TIME &&
+      *dest_format == GST_FORMAT_DEFAULT && state->fps_d != 0) {
+    /* convert time to frames */
+    /* FIXME subtract segment time? */
+    *dest_value = gst_util_uint64_scale (src_value, state->fps_n,
+        GST_SECOND * state->fps_d);
+    res = TRUE;
+  }
+
+  /* FIXME add bytes <--> time */
+
+  return res;
+}
+
+/* Convert between formats for encoded video.
+ * NOTE(review): apart from the trivial same-format case this always
+ * returns FALSE -- the real conversion logic is disabled below with
+ * #if 0 (it still references an old 'dec' object and granulepos
+ * handling).  Callers such as the position/seek paths in
+ * gstbasevideoparse.c will therefore currently fail. */
+gboolean
+gst_base_video_encoded_video_convert (GstVideoState * state,
+    GstFormat src_format, gint64 src_value,
+    GstFormat * dest_format, gint64 * dest_value)
+{
+  gboolean res = FALSE;
+
+  if (src_format == *dest_format) {
+    *dest_value = src_value;
+    return TRUE;
+  }
+
+  GST_DEBUG ("src convert");
+
+#if 0
+  if (src_format == GST_FORMAT_DEFAULT && *dest_format == GST_FORMAT_TIME) {
+    if (dec->fps_d != 0) {
+      *dest_value = gst_util_uint64_scale (granulepos_to_frame (src_value),
+          dec->fps_d * GST_SECOND, dec->fps_n);
+      res = TRUE;
+    } else {
+      res = FALSE;
+    }
+  } else {
+    GST_WARNING ("unhandled conversion from %d to %d", src_format,
+        *dest_format);
+    res = FALSE;
+  }
+#endif
+
+  return res;
+}
+
+/* Fill a GstVideoState from raw-video caps: format, dimensions,
+ * framerate, pixel aspect ratio (defaulting to 1/1) and interlacing
+ * (defaulting to FALSE).  Clean-area fields are initialized to the full
+ * frame.  Always returns TRUE (see FIXME about error handling). */
+gboolean
+gst_base_video_state_from_caps (GstVideoState * state, GstCaps * caps)
+{
+
+  gst_video_format_parse_caps (caps, &state->format,
+      &state->width, &state->height);
+
+  gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d);
+
+  state->par_n = 1;
+  state->par_d = 1;
+  gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d);
+
+  {
+    GstStructure *structure = gst_caps_get_structure (caps, 0);
+    state->interlaced = FALSE;
+    gst_structure_get_boolean (structure, "interlaced", &state->interlaced);
+  }
+
+  state->clean_width = state->width;
+  state->clean_height = state->height;
+  state->clean_offset_left = 0;
+  state->clean_offset_top = 0;
+
+  /* FIXME need better error handling */
+  return TRUE;
+}
+
+/* Map a frame number to a timestamp relative to the segment start:
+ * start + frame_number * fps_d * GST_SECOND / fps_n.  Negative frame
+ * numbers are handled with an explicit branch because
+ * gst_util_uint64_scale is unsigned. */
+GstClockTime
+gst_video_state_get_timestamp (const GstVideoState * state, int frame_number)
+{
+  if (frame_number < 0) {
+    return state->segment.start -
+        (gint64) gst_util_uint64_scale (-frame_number,
+        state->fps_d * GST_SECOND, state->fps_n);
+  } else {
+    return state->segment.start +
+        gst_util_uint64_scale (frame_number,
+        state->fps_d * GST_SECOND, state->fps_n);
+  }
+}
diff --git a/gst-libs/gst/video/gstbasevideoutils.h b/gst-libs/gst/video/gstbasevideoutils.h
new file mode 100644
index 00000000..e832fe76
--- /dev/null
+++ b/gst-libs/gst/video/gstbasevideoutils.h
@@ -0,0 +1,95 @@
+/* GStreamer
+ * Copyright (C) 2008 David Schleef <ds@schleef.org>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef _GST_BASE_VIDEO_UTILS_H_
+#define _GST_BASE_VIDEO_UTILS_H_
+
+#ifndef GST_USE_UNSTABLE_API
+#warning "The base video utils API is unstable and may change in future."
+#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
+#endif
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+#include <gst/base/gstadapter.h>
+
+G_BEGIN_DECLS
+
+typedef struct _GstVideoState GstVideoState;
+typedef struct _GstVideoFrame GstVideoFrame;
+
+struct _GstVideoState
+{
+ GstVideoFormat format;
+ int width, height;
+ int fps_n, fps_d;
+ int par_n, par_d;
+ gboolean interlaced;
+ gboolean top_field_first;
+
+ int clean_width, clean_height;
+ int clean_offset_left, clean_offset_top;
+
+ int bytes_per_picture;
+
+ GstSegment segment;
+
+ int picture_number;
+};
+
+struct _GstVideoFrame
+{
+ guint64 decode_timestamp;
+ guint64 presentation_timestamp;
+ guint64 presentation_duration;
+
+ gint system_frame_number;
+ gint decode_frame_number;
+ gint presentation_frame_number;
+
+ int distance_from_sync;
+ gboolean is_sync_point;
+ gboolean is_eos;
+
+ GstBuffer *sink_buffer;
+ GstBuffer *src_buffer;
+
+ int field_index;
+ int n_fields;
+
+ void *coder_hook;
+};
+
+gboolean gst_base_video_rawvideo_convert (GstVideoState *state,
+ GstFormat src_format, gint64 src_value,
+ GstFormat * dest_format, gint64 *dest_value);
+gboolean gst_base_video_encoded_video_convert (GstVideoState *state,
+ GstFormat src_format, gint64 src_value,
+ GstFormat * dest_format, gint64 *dest_value);
+
+gboolean gst_base_video_state_from_caps (GstVideoState *state,
+ GstCaps *caps);
+
+GstClockTime gst_video_state_get_timestamp (const GstVideoState *state,
+ int frame_number);
+
+G_END_DECLS
+
+#endif
+
diff --git a/gst/camerabin/camerabinpreview.c b/gst/camerabin/camerabinpreview.c
index b64b2143..9b12bbac 100644
--- a/gst/camerabin/camerabinpreview.c
+++ b/gst/camerabin/camerabinpreview.c
@@ -183,7 +183,7 @@ gst_camerabin_preview_convert (GstCameraBin * camera, GstBuffer * buf)
if (!src || !sink) {
GST_WARNING ("pipeline doesn't have src / sink elements");
- goto no_pipeline;
+ goto missing_elements;
}
g_object_set (src, "size", (gint64) GST_BUFFER_SIZE (buf),
@@ -234,6 +234,7 @@ gst_camerabin_preview_convert (GstCameraBin * camera, GstBuffer * buf)
g_return_val_if_reached (NULL);
}
}
+ gst_message_unref (msg);
} else {
g_warning ("Could not make preview image: %s", "timeout during conversion");
result = NULL;
@@ -245,9 +246,21 @@ gst_camerabin_preview_convert (GstCameraBin * camera, GstBuffer * buf)
GST_BUFFER_FLAGS (buf) = bflags;
+done:
+ if (src)
+ gst_object_unref (src);
+ if (sink)
+ gst_object_unref (sink);
+
return result;
/* ERRORS */
+missing_elements:
+ {
+ g_warning ("Could not make preview image: %s",
+ "missing elements in pipeline (unknown error)");
+ goto done;
+ }
no_pipeline:
{
g_warning ("Could not make preview image: %s",
diff --git a/gst/camerabin/camerabinvideo.c b/gst/camerabin/camerabinvideo.c
index 2569772a..4bbc5057 100644
--- a/gst/camerabin/camerabinvideo.c
+++ b/gst/camerabin/camerabinvideo.c
@@ -398,6 +398,7 @@ camerabin_video_pad_tee_src0_have_buffer (GstPad * pad, GstBuffer * buffer,
GstEvent *event;
GstObject *tee;
GstPad *sinkpad;
+
vid->adjust_ts_video = GST_BUFFER_TIMESTAMP (buffer) - vid->last_ts_video;
vid->calculate_adjust_ts_video = FALSE;
event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME,
@@ -417,7 +418,6 @@ camerabin_video_pad_tee_src0_have_buffer (GstPad * pad, GstBuffer * buffer,
if (GST_BUFFER_DURATION_IS_VALID (buffer))
vid->last_ts_video += GST_BUFFER_DURATION (buffer);
-
GST_LOG ("buffer out with size %d ts %" GST_TIME_FORMAT,
GST_BUFFER_SIZE (buffer), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
return TRUE;
@@ -442,6 +442,7 @@ camerabin_video_pad_aud_src_have_buffer (GstPad * pad, GstBuffer * buffer,
if (vid->calculate_adjust_ts_aud) {
GstEvent *event;
GstPad *peerpad = NULL;
+
vid->adjust_ts_aud = GST_BUFFER_TIMESTAMP (buffer) - vid->last_ts_aud;
vid->calculate_adjust_ts_aud = FALSE;
event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME,
@@ -459,6 +460,7 @@ camerabin_video_pad_aud_src_have_buffer (GstPad * pad, GstBuffer * buffer,
vid->last_ts_aud = GST_BUFFER_TIMESTAMP (buffer);
if (GST_BUFFER_DURATION_IS_VALID (buffer))
vid->last_ts_aud += GST_BUFFER_DURATION (buffer);
+
GST_LOG ("buffer out with size %d ts %" GST_TIME_FORMAT,
GST_BUFFER_SIZE (buffer), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
return TRUE;
@@ -549,6 +551,7 @@ gst_camerabin_video_create_elements (GstCameraBinVideo * vid)
vid_sinkpad = gst_element_get_static_pad (vid->tee, "sink");
}
gst_ghost_pad_set_target (GST_GHOST_PAD (vid->sinkpad), vid_sinkpad);
+ gst_object_unref (vid_sinkpad);
/* Add queue element for video */
@@ -668,6 +671,7 @@ gst_camerabin_video_create_elements (GstCameraBinVideo * vid)
/* Never let video bin eos events reach view finder */
gst_pad_add_event_probe (vid_srcpad,
G_CALLBACK (gst_camerabin_drop_eos_probe), vid);
+ gst_object_unref (vid_srcpad);
pad = gst_element_get_static_pad (vid->aud_src, "src");
gst_pad_add_buffer_probe (pad,
@@ -702,10 +706,12 @@ gst_camerabin_video_destroy_elements (GstCameraBinVideo * vid)
/* Release tee request pads */
if (vid->tee_video_srcpad) {
gst_element_release_request_pad (vid->tee, vid->tee_video_srcpad);
+ gst_object_unref (vid->tee_video_srcpad);
vid->tee_video_srcpad = NULL;
}
if (vid->tee_vf_srcpad) {
gst_element_release_request_pad (vid->tee, vid->tee_vf_srcpad);
+ gst_object_unref (vid->tee_vf_srcpad);
vid->tee_vf_srcpad = NULL;
}
diff --git a/gst/camerabin/gstcamerabin.c b/gst/camerabin/gstcamerabin.c
index 0a110f50..9d8a14cc 100644
--- a/gst/camerabin/gstcamerabin.c
+++ b/gst/camerabin/gstcamerabin.c
@@ -500,8 +500,7 @@ camerabin_setup_src_elements (GstCameraBin * camera)
if (camera->fps_n > 0 && camera->fps_d > 0) {
if (camera->night_mode) {
- GST_WARNING_OBJECT (camera,
- "night mode, lowest allowed fps will be forced");
+ GST_INFO_OBJECT (camera, "night mode, lowest allowed fps will be forced");
camera->pre_night_fps_n = camera->fps_n;
camera->pre_night_fps_d = camera->fps_d;
detect_framerate = TRUE;
@@ -531,7 +530,9 @@ camerabin_setup_src_elements (GstCameraBin * camera)
g_object_set (camera->src_zoom_scale, "method",
CAMERABIN_DEFAULT_ZOOM_METHOD, NULL);
+ /* we always create new caps here and they take ownership of the structure st */
gst_caps_replace (&camera->view_finder_caps, new_caps);
+ gst_caps_unref (new_caps);
/* Set caps for view finder mode */
gst_camerabin_set_capsfilter_caps (camera, camera->view_finder_caps);
@@ -805,22 +806,27 @@ camerabin_destroy_elements (GstCameraBin * camera)
/* Release request pads */
if (camera->pad_view_vid) {
gst_element_release_request_pad (camera->view_in_sel, camera->pad_view_vid);
+ gst_object_unref (camera->pad_view_vid);
camera->pad_view_vid = NULL;
}
if (camera->pad_src_vid) {
gst_element_release_request_pad (camera->src_out_sel, camera->pad_src_vid);
+ gst_object_unref (camera->pad_src_vid);
camera->pad_src_vid = NULL;
}
if (camera->pad_src_img) {
gst_element_release_request_pad (camera->src_out_sel, camera->pad_src_img);
+ gst_object_unref (camera->pad_src_img);
camera->pad_src_img = NULL;
}
if (camera->pad_view_src) {
gst_element_release_request_pad (camera->view_in_sel, camera->pad_view_src);
+ /* don't unref, we have not requested it */
camera->pad_view_src = NULL;
}
if (camera->pad_src_view) {
gst_element_release_request_pad (camera->src_out_sel, camera->pad_src_view);
+ gst_object_unref (camera->pad_src_view);
camera->pad_src_view = NULL;
}
@@ -856,6 +862,8 @@ camerabin_destroy_elements (GstCameraBin * camera)
static void
camerabin_dispose_elements (GstCameraBin * camera)
{
+ GST_INFO ("cleaning");
+
if (camera->capture_mutex) {
g_mutex_free (camera->capture_mutex);
camera->capture_mutex = NULL;
@@ -878,24 +886,18 @@ camerabin_dispose_elements (GstCameraBin * camera)
camera->user_vid_src = NULL;
}
+ /* Free caps */
if (camera->image_capture_caps) {
- gst_caps_unref (camera->image_capture_caps);
- camera->image_capture_caps = NULL;
+ gst_caps_replace (&camera->image_capture_caps, NULL);
}
-
if (camera->view_finder_caps) {
- gst_caps_unref (camera->view_finder_caps);
- camera->view_finder_caps = NULL;
+ gst_caps_replace (&camera->view_finder_caps, NULL);
}
-
if (camera->allowed_caps) {
- gst_caps_unref (camera->allowed_caps);
- camera->allowed_caps = NULL;
+ gst_caps_replace (&camera->allowed_caps, NULL);
}
-
if (camera->preview_caps) {
- gst_caps_unref (camera->preview_caps);
- camera->preview_caps = NULL;
+ gst_caps_replace (&camera->preview_caps, NULL);
}
if (camera->event_tags) {
@@ -1685,6 +1687,7 @@ gst_camerabin_send_preview (GstCameraBin * camera, GstBuffer * buffer)
if (prev) {
s = gst_structure_new (PREVIEW_MESSAGE_NAME,
"buffer", GST_TYPE_BUFFER, prev, NULL);
+ gst_buffer_unref (prev);
msg = gst_message_new_element (GST_OBJECT (camera), s);
@@ -2202,30 +2205,32 @@ gst_camerabin_update_aspect_filter (GstCameraBin * camera, GstCaps * new_caps)
if (sink_pad) {
sink_caps = gst_pad_get_caps (sink_pad);
gst_object_unref (sink_pad);
- if (sink_caps && !gst_caps_is_any (sink_caps)) {
- GST_DEBUG_OBJECT (camera, "sink element caps %" GST_PTR_FORMAT,
- sink_caps);
- /* Get maximum resolution that view finder sink accepts */
- st = gst_caps_get_structure (sink_caps, 0);
- if (gst_structure_has_field_typed (st, "width", GST_TYPE_INT_RANGE)) {
- range = gst_structure_get_value (st, "width");
- sink_w = gst_value_get_int_range_max (range);
- }
- if (gst_structure_has_field_typed (st, "height", GST_TYPE_INT_RANGE)) {
- range = gst_structure_get_value (st, "height");
- sink_h = gst_value_get_int_range_max (range);
+ if (sink_caps) {
+ if (!gst_caps_is_any (sink_caps)) {
+ GST_DEBUG_OBJECT (camera, "sink element caps %" GST_PTR_FORMAT,
+ sink_caps);
+ /* Get maximum resolution that view finder sink accepts */
+ st = gst_caps_get_structure (sink_caps, 0);
+ if (gst_structure_has_field_typed (st, "width", GST_TYPE_INT_RANGE)) {
+ range = gst_structure_get_value (st, "width");
+ sink_w = gst_value_get_int_range_max (range);
+ }
+ if (gst_structure_has_field_typed (st, "height", GST_TYPE_INT_RANGE)) {
+ range = gst_structure_get_value (st, "height");
+ sink_h = gst_value_get_int_range_max (range);
+ }
+ GST_DEBUG_OBJECT (camera, "sink element accepts max %dx%d", sink_w,
+ sink_h);
+
+ /* Get incoming frames' resolution */
+ if (sink_h && sink_w) {
+ st = gst_caps_get_structure (new_caps, 0);
+ gst_structure_get_int (st, "width", &in_w);
+ gst_structure_get_int (st, "height", &in_h);
+ GST_DEBUG_OBJECT (camera, "new caps with %dx%d", in_w, in_h);
+ }
}
gst_caps_unref (sink_caps);
- GST_DEBUG_OBJECT (camera, "sink element accepts max %dx%d", sink_w,
- sink_h);
-
- /* Get incoming frames' resolution */
- if (sink_h && sink_w) {
- st = gst_caps_get_structure (new_caps, 0);
- gst_structure_get_int (st, "width", &in_w);
- gst_structure_get_int (st, "height", &in_h);
- GST_DEBUG_OBJECT (camera, "new caps with %dx%d", in_w, in_h);
- }
}
}
@@ -2251,13 +2256,14 @@ gst_camerabin_update_aspect_filter (GstCameraBin * camera, GstCaps * new_caps)
G_TYPE_INT, target_h, NULL);
} else {
GST_DEBUG_OBJECT (camera, "no scaling");
- ar_caps = gst_caps_ref (new_caps);
+ ar_caps = new_caps;
}
GST_DEBUG_OBJECT (camera, "aspect ratio filter caps %" GST_PTR_FORMAT,
ar_caps);
g_object_set (G_OBJECT (camera->aspect_filter), "caps", ar_caps, NULL);
- gst_caps_unref (ar_caps);
+ if (ar_caps != new_caps)
+ gst_caps_unref (ar_caps);
#endif
}
@@ -2742,7 +2748,6 @@ gst_camerabin_dispose (GObject * object)
gst_camerabin_preview_destroy_pipeline (camera);
camerabin_destroy_elements (camera);
-
camerabin_dispose_elements (camera);
G_OBJECT_CLASS (parent_class)->dispose (object);
@@ -2860,7 +2865,9 @@ gst_camerabin_set_property (GObject * object, guint prop_id,
if (camera->view_finder_caps) {
gst_caps_unref (camera->view_finder_caps);
}
- camera->view_finder_caps = gst_caps_copy (gst_value_get_caps (value));
+ /* just ref, we don't modify it inplace */
+ camera->view_finder_caps =
+ gst_caps_ref ((GstCaps *) gst_value_get_caps (value));
GST_OBJECT_UNLOCK (camera);
if (GST_STATE (camera) != GST_STATE_NULL) {
gst_camerabin_set_capsfilter_caps (camera, camera->view_finder_caps);
@@ -3139,6 +3146,8 @@ gst_camerabin_user_start (GstCameraBin * camera)
if (camera->capturing) {
GST_WARNING_OBJECT (camera, "capturing \"%s\" ongoing, set new filename",
camera->filename->str);
+ /* FIXME: we need to send something more to the app, so that it does not wait
+ * for img-done */
g_mutex_unlock (camera->capture_mutex);
return;
}
diff --git a/gst/debugutils/Makefile.am b/gst/debugutils/Makefile.am
index 3b93fa91..60fb794a 100644
--- a/gst/debugutils/Makefile.am
+++ b/gst/debugutils/Makefile.am
@@ -1,10 +1,10 @@
plugin_LTLIBRARIES = libgstdebugutilsbad.la
-libgstdebugutilsbad_la_SOURCES = fpsdisplaysink.c debugutilsbad.c
+libgstdebugutilsbad_la_SOURCES = fpsdisplaysink.c gstcapssetter.c debugutilsbad.c
libgstdebugutilsbad_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS)
libgstdebugutilsbad_la_LIBADD = $(GST_BASE_LIBS) $(GST_PLUGINS_BASE_LIBS) -lgstinterfaces-$(GST_MAJORMINOR)
libgstdebugutilsbad_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstdebugutilsbad_la_LIBTOOLFLAGS = --tag=disable-static
-noinst_HEADERS = fpsdisplaysink.h
+noinst_HEADERS = fpsdisplaysink.h gstcapssetter.h
diff --git a/gst/debugutils/debugutilsbad.c b/gst/debugutils/debugutilsbad.c
index b1fb6e05..d1b10263 100644
--- a/gst/debugutils/debugutilsbad.c
+++ b/gst/debugutils/debugutilsbad.c
@@ -24,12 +24,15 @@
#include <gst/gst.h>
GType fps_display_sink_get_type (void);
+GType gst_caps_setter_get_type (void);
static gboolean
plugin_init (GstPlugin * plugin)
{
return gst_element_register (plugin, "fpsdisplaysink", GST_RANK_NONE,
- fps_display_sink_get_type ());
+ fps_display_sink_get_type ()) &&
+ gst_element_register (plugin, "capssetter", GST_RANK_NONE,
+ gst_caps_setter_get_type ());
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
diff --git a/gst/debugutils/gstcapssetter.c b/gst/debugutils/gstcapssetter.c
new file mode 100644
index 00000000..12077c0d
--- /dev/null
+++ b/gst/debugutils/gstcapssetter.c
@@ -0,0 +1,350 @@
+/* GStreamer Element
+ * Copyright (C) 2006-2009 Mark Nauwelaerts <mnauw@users.sourceforge.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1307, USA.
+ */
+
+/**
+ * SECTION:element-capssetter
+ *
+ * <refsect2>
+ * <para>
+ * Sets or merges caps on a stream's buffers.
+ * That is, a buffer's caps are updated using (fields of)
+ * <link linkend="GstCapsSetter--caps">caps</link>. Note that this may
+ * contain multiple structures (though not likely recommended), but each
+ * of these must be fixed (or will otherwise be rejected).
+ * </para>
+ * <para>
+ * If <link linkend="GstCapsSetter--join">join</link>
+ * is TRUE, then the incoming caps' mime-type is compared to the mime-type(s)
+ * of provided caps and only matching structure(s) are considered for updating.
+ * </para>
+ * <para>
+ * If <link linkend="GstCapsSetter--replace">replace</link>
+ * is TRUE, then any caps update is preceded by clearing existing fields,
+ * making provided fields (as a whole) replace incoming ones.
+ * Otherwise, no clearing is performed, in which case provided fields are
+ * added/merged onto incoming caps
+ * </para>
+ * <para>
+ * Although this element might mainly serve as debug helper,
+ * it can also practically be used to correct a faulty pixel-aspect-ratio,
+ * or to modify a yuv fourcc value to effectively swap chroma components or such
+ * alike.
+ * </para>
+ * </refsect2>
+ *
+ */
+
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstcapssetter.h"
+
+#include <string.h>
+
+
+GST_DEBUG_CATEGORY_STATIC (caps_setter_debug);
+#define GST_CAT_DEFAULT caps_setter_debug
+
+
+/* signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+ PROP_CAPS,
+ PROP_JOIN,
+ PROP_REPLACE
+ /* FILL ME */
+};
+
+#define DEFAULT_JOIN TRUE
+#define DEFAULT_REPLACE FALSE
+
+static GstElementDetails caps_setter_details =
+GST_ELEMENT_DETAILS ("CapsSetter",
+ "Generic",
+ "Set/merge caps on stream",
+ "Mark Nauwelaerts <mnauw@users.sourceforge.net>");
+
+static GstStaticPadTemplate gst_caps_setter_src_template =
+GST_STATIC_PAD_TEMPLATE (GST_BASE_TRANSFORM_SRC_NAME,
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+static GstStaticPadTemplate gst_caps_setter_sink_template =
+GST_STATIC_PAD_TEMPLATE (GST_BASE_TRANSFORM_SINK_NAME,
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS_ANY);
+
+
+static gboolean gst_caps_setter_transform_size (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * caps, guint size,
+ GstCaps * othercaps, guint * othersize);
+static GstCaps *gst_caps_setter_transform_caps (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * caps);
+static GstFlowReturn gst_caps_setter_transform_ip (GstBaseTransform * btrans,
+ GstBuffer * in);
+
+static void gst_caps_setter_finalize (GObject * object);
+
+static void gst_caps_setter_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_caps_setter_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+GST_BOILERPLATE (GstCapsSetter, gst_caps_setter, GstBaseTransform,
+ GST_TYPE_BASE_TRANSFORM);
+
+static void
+gst_caps_setter_base_init (gpointer g_class)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+ gst_element_class_set_details (element_class, &caps_setter_details);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_caps_setter_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_caps_setter_src_template));
+}
+
+static void
+gst_caps_setter_class_init (GstCapsSetterClass * g_class)
+{
+ GObjectClass *gobject_class;
+ GstBaseTransformClass *trans_class;
+
+ gobject_class = G_OBJECT_CLASS (g_class);
+ trans_class = GST_BASE_TRANSFORM_CLASS (g_class);
+
+ GST_DEBUG_CATEGORY_INIT (caps_setter_debug, "capssetter", 0, "capssetter");
+
+ gobject_class->set_property = gst_caps_setter_set_property;
+ gobject_class->get_property = gst_caps_setter_get_property;
+
+ gobject_class->finalize = GST_DEBUG_FUNCPTR (gst_caps_setter_finalize);
+
+ g_object_class_install_property (gobject_class, PROP_CAPS,
+ g_param_spec_boxed ("caps", "Merge caps",
+ "Merge these caps (thereby overwriting) in the stream",
+ GST_TYPE_CAPS, G_PARAM_READWRITE));
+ g_object_class_install_property (gobject_class, PROP_JOIN,
+ g_param_spec_boolean ("join", "Join",
+ "Match incoming caps' mime-type to mime-type of provided caps",
+ DEFAULT_JOIN, G_PARAM_READWRITE));
+ g_object_class_install_property (gobject_class, PROP_REPLACE,
+ g_param_spec_boolean ("replace", "Replace",
+ "Drop fields of incoming caps", DEFAULT_REPLACE, G_PARAM_READWRITE));
+
+ trans_class->transform_size =
+ GST_DEBUG_FUNCPTR (gst_caps_setter_transform_size);
+ trans_class->transform_caps =
+ GST_DEBUG_FUNCPTR (gst_caps_setter_transform_caps);
+ /* dummy seems needed */
+ trans_class->transform_ip = GST_DEBUG_FUNCPTR (gst_caps_setter_transform_ip);
+}
+
+static void
+gst_caps_setter_init (GstCapsSetter * filter, GstCapsSetterClass * g_class)
+{
+ filter->caps = gst_caps_new_any ();
+ filter->join = DEFAULT_JOIN;
+ filter->replace = DEFAULT_REPLACE;
+}
+
+static void
+gst_caps_setter_finalize (GObject * object)
+{
+ GstCapsSetter *filter = GST_CAPS_SETTER (object);
+
+ gst_caps_replace (&filter->caps, NULL);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static gboolean
+gst_caps_setter_transform_size (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * caps, guint size,
+ GstCaps * othercaps, guint * othersize)
+{
+ *othersize = size;
+
+ return TRUE;
+}
+
+static GstCaps *
+gst_caps_setter_transform_caps (GstBaseTransform * trans,
+ GstPadDirection direction, GstCaps * caps)
+{
+ GstCapsSetter *filter;
+ GstCaps *ret, *filter_caps;
+ GstStructure *structure, *merge;
+ const gchar *name;
+ gint i, j;
+
+ filter = GST_CAPS_SETTER (trans);
+
+ GST_DEBUG_OBJECT (trans, "receiving caps: %" GST_PTR_FORMAT, caps);
+
+ ret = gst_caps_copy (caps);
+
+ /* this function is always called with a simple caps */
+ if (!GST_CAPS_IS_SIMPLE (ret) || direction != GST_PAD_SINK)
+ return ret;
+
+ structure = gst_caps_get_structure (ret, 0);
+ name = gst_structure_get_name (structure);
+
+ GST_OBJECT_LOCK (filter);
+ filter_caps = gst_caps_ref (filter->caps);
+ GST_OBJECT_UNLOCK (filter);
+
+ for (i = 0; i < gst_caps_get_size (filter_caps); ++i) {
+ merge = gst_caps_get_structure (filter_caps, i);
+ if (gst_structure_has_name (merge, name) || !filter->join) {
+
+ if (!filter->join)
+ gst_structure_set_name (structure, gst_structure_get_name (merge));
+
+ if (filter->replace)
+ gst_structure_remove_all_fields (structure);
+
+ for (j = 0; j < gst_structure_n_fields (merge); ++j) {
+ const gchar *fname;
+
+ fname = gst_structure_nth_field_name (merge, j);
+ gst_structure_set_value (structure, fname,
+ gst_structure_get_value (merge, fname));
+ }
+ }
+ }
+
+ GST_DEBUG_OBJECT (trans, "returning caps: %" GST_PTR_FORMAT, ret);
+
+ gst_caps_unref (filter_caps);
+
+ return ret;
+}
+
+static GstFlowReturn
+gst_caps_setter_transform_ip (GstBaseTransform * btrans, GstBuffer * in)
+{
+ return GST_FLOW_OK;
+}
+
+static gboolean
+gst_caps_is_fixed_foreach (GQuark field_id, const GValue * value,
+ gpointer unused)
+{
+ return gst_value_is_fixed (value);
+}
+
+static void
+gst_caps_setter_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstCapsSetter *filter;
+
+ g_return_if_fail (GST_IS_CAPS_SETTER (object));
+ filter = GST_CAPS_SETTER (object);
+
+ switch (prop_id) {
+ case PROP_CAPS:{
+ GstCaps *new_caps;
+ const GstCaps *new_caps_val = gst_value_get_caps (value);
+ gint i;
+
+ if (new_caps_val == NULL) {
+ new_caps = gst_caps_new_any ();
+ } else {
+ new_caps = gst_caps_copy (new_caps_val);
+ }
+
+ for (i = 0; new_caps && (i < gst_caps_get_size (new_caps)); ++i) {
+ GstStructure *s;
+
+ s = gst_caps_get_structure (new_caps, i);
+ if (!gst_structure_foreach (s, gst_caps_is_fixed_foreach, NULL)) {
+ GST_ERROR_OBJECT (filter, "rejected unfixed caps: %" GST_PTR_FORMAT,
+ new_caps);
+ gst_caps_unref (new_caps);
+ new_caps = NULL;
+ break;
+ }
+ }
+
+ if (new_caps) {
+ GST_OBJECT_LOCK (filter);
+ gst_caps_replace (&filter->caps, new_caps);
+ /* drop extra ref */
+ gst_caps_unref (new_caps);
+ GST_OBJECT_UNLOCK (filter);
+
+ GST_DEBUG_OBJECT (filter, "set new caps %" GST_PTR_FORMAT, new_caps);
+ }
+
+ /* try to activate these new caps next time around */
+ gst_base_transform_reconfigure (GST_BASE_TRANSFORM (filter));
+ break;
+ }
+ case PROP_JOIN:
+ filter->join = g_value_get_boolean (value);
+ break;
+ case PROP_REPLACE:
+ filter->replace = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_caps_setter_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstCapsSetter *filter;
+
+ g_return_if_fail (GST_IS_CAPS_SETTER (object));
+ filter = GST_CAPS_SETTER (object);
+
+ switch (prop_id) {
+ case PROP_CAPS:
+ gst_value_set_caps (value, filter->caps);
+ break;
+ case PROP_JOIN:
+ g_value_set_boolean (value, filter->join);
+ break;
+ case PROP_REPLACE:
+ g_value_set_boolean (value, filter->replace);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/gst/debugutils/gstcapssetter.h b/gst/debugutils/gstcapssetter.h
new file mode 100644
index 00000000..e792931c
--- /dev/null
+++ b/gst/debugutils/gstcapssetter.h
@@ -0,0 +1,62 @@
+/* GStreamer Element
+ * Copyright (C) 2006-2009 Mark Nauwelaerts <mnauw@users.sourceforge.net>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
+ * Boston, MA 02110-1307, USA.
+ */
+
+
+#ifndef __GST_CAPS_SETTER_H__
+#define __GST_CAPS_SETTER_H__
+
+#include <gst/base/gstbasetransform.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_CAPS_SETTER \
+ (gst_caps_setter_get_type())
+#define GST_CAPS_SETTER(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_CAPS_SETTER,GstCapsSetter))
+#define GST_CAPS_SETTER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_CAPS_SETTER,GstCapsSetterClass))
+#define GST_IS_CAPS_SETTER(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_CAPS_SETTER))
+#define GST_IS_CAPS_SETTER_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_CAPS_SETTER))
+
+GType gst_caps_setter_get_type (void);
+
+typedef struct _GstCapsSetter GstCapsSetter;
+typedef struct _GstCapsSetterClass GstCapsSetterClass;
+
+struct _GstCapsSetter
+{
+ GstBaseTransform parent;
+
+ /* properties */
+ GstCaps *caps;
+ gboolean join;
+ gboolean replace;
+};
+
+
+struct _GstCapsSetterClass
+{
+ GstBaseTransformClass parent_class;
+};
+
+G_END_DECLS
+
+#endif /* __GST_CAPS_SETTER_H__ */
diff --git a/gst/freeze/Makefile.am b/gst/freeze/Makefile.am
index 525e476e..9d07aa84 100644
--- a/gst/freeze/Makefile.am
+++ b/gst/freeze/Makefile.am
@@ -4,6 +4,7 @@ libgstfreeze_la_SOURCES = gstfreeze.c
libgstfreeze_la_CFLAGS = $(GST_CFLAGS)
libgstfreeze_la_LIBADD = ${GST_LIBS}
libgstfreeze_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
+libgstfreeze_la_LIBTOOLFLAGS = --tag=disable-static
noinst_HEADERS = gstfreeze.h
diff --git a/gst/frei0r/gstfrei0r.c b/gst/frei0r/gstfrei0r.c
index 0f7ba5c5..3cfc939b 100644
--- a/gst/frei0r/gstfrei0r.c
+++ b/gst/frei0r/gstfrei0r.c
@@ -86,51 +86,71 @@ gst_frei0r_klass_install_properties (GObjectClass * gobject_class,
case F0R_PARAM_BOOL:
g_object_class_install_property (gobject_class, count++,
g_param_spec_boolean (prop_name, param_info->name,
- param_info->explanation, FALSE,
+ param_info->explanation, properties[i].default_value.data.b,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE));
properties[i].n_prop_ids = 1;
break;
- case F0R_PARAM_DOUBLE:
+ case F0R_PARAM_DOUBLE:{
+ gdouble def = properties[i].default_value.data.d;
+
+ /* If the default is NAN or +-INF we use 0.0 */
+ if (!(def <= G_MAXDOUBLE && def >= -G_MAXDOUBLE))
+ def = 0.0;
+
g_object_class_install_property (gobject_class, count++,
g_param_spec_double (prop_name, param_info->name,
- param_info->explanation, -G_MAXDOUBLE, G_MAXDOUBLE, 0.0,
+ param_info->explanation, -G_MAXDOUBLE, G_MAXDOUBLE, def,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE));
properties[i].n_prop_ids = 1;
break;
+ }
case F0R_PARAM_STRING:
g_object_class_install_property (gobject_class, count++,
g_param_spec_string (prop_name, param_info->name,
- param_info->explanation, NULL,
+ param_info->explanation, properties[i].default_value.data.s,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE));
properties[i].n_prop_ids = 1;
break;
case F0R_PARAM_COLOR:{
gchar *prop_name_full;
gchar *prop_nick_full;
+ gdouble def;
+ def = properties[i].default_value.data.color.r;
+ /* If the default is out of range we use 0.0 */
+ if (!(def <= 1.0 && def >= 0.0))
+ def = 0.0;
prop_name_full = g_strconcat (prop_name, "-r", NULL);
prop_nick_full = g_strconcat (param_info->name, "-R", NULL);
g_object_class_install_property (gobject_class, count++,
g_param_spec_float (prop_name_full, prop_nick_full,
- param_info->explanation, 0.0, 1.0, 0.0,
+ param_info->explanation, 0.0, 1.0, def,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE));
g_free (prop_name_full);
g_free (prop_nick_full);
+ def = properties[i].default_value.data.color.g;
+ /* If the default is out of range we use 0.0 */
+ if (!(def <= 1.0 && def >= 0.0))
+ def = 0.0;
prop_name_full = g_strconcat (prop_name, "-g", NULL);
prop_nick_full = g_strconcat (param_info->name, "-G", NULL);
g_object_class_install_property (gobject_class, count++,
g_param_spec_float (prop_name_full, param_info->name,
- param_info->explanation, 0.0, 1.0, 0.0,
+ param_info->explanation, 0.0, 1.0, def,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE));
g_free (prop_name_full);
g_free (prop_nick_full);
+ def = properties[i].default_value.data.color.b;
+ /* If the default is out of range we use 0.0 */
+ if (!(def <= 1.0 && def >= 0.0))
+ def = 0.0;
prop_name_full = g_strconcat (prop_name, "-b", NULL);
prop_nick_full = g_strconcat (param_info->name, "-B", NULL);
g_object_class_install_property (gobject_class, count++,
g_param_spec_float (prop_name_full, param_info->name,
- param_info->explanation, 0.0, 1.0, 0.0,
+ param_info->explanation, 0.0, 1.0, def,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE));
g_free (prop_name_full);
g_free (prop_nick_full);
@@ -141,21 +161,30 @@ gst_frei0r_klass_install_properties (GObjectClass * gobject_class,
case F0R_PARAM_POSITION:{
gchar *prop_name_full;
gchar *prop_nick_full;
+ gdouble def;
+ def = properties[i].default_value.data.position.x;
+ /* If the default is out of range we use 0.0 */
+ if (!(def <= 1.0 && def >= 0.0))
+ def = 0.0;
prop_name_full = g_strconcat (prop_name, "-x", NULL);
prop_nick_full = g_strconcat (param_info->name, "-X", NULL);
g_object_class_install_property (gobject_class, count++,
g_param_spec_double (prop_name_full, param_info->name,
- param_info->explanation, 0.0, 1.0, 0.0,
+ param_info->explanation, 0.0, 1.0, def,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE));
g_free (prop_name_full);
g_free (prop_nick_full);
+ def = properties[i].default_value.data.position.y;
+ /* If the default is out of range we use 0.0 */
+ if (!(def <= 1.0 && def >= 0.0))
+ def = 0.0;
+ prop_name_full = g_strconcat (prop_name, "-y", NULL);
+ prop_nick_full = g_strconcat (param_info->name, "-Y", NULL);
g_object_class_install_property (gobject_class, count++,
g_param_spec_double (prop_name_full, param_info->name,
- param_info->explanation, 0.0, 1.0, 0.0,
+ param_info->explanation, 0.0, 1.0, def,
G_PARAM_READWRITE | GST_PARAM_CONTROLLABLE));
g_free (prop_name_full);
g_free (prop_nick_full);
diff --git a/gst/frei0r/gstfrei0rfilter.c b/gst/frei0r/gstfrei0rfilter.c
index 43d8fc67..eda78adf 100644
--- a/gst/frei0r/gstfrei0rfilter.c
+++ b/gst/frei0r/gstfrei0rfilter.c
@@ -40,22 +40,11 @@ gst_frei0r_filter_set_caps (GstBaseTransform * trans, GstCaps * incaps,
GstCaps * outcaps)
{
GstFrei0rFilter *self = GST_FREI0R_FILTER (trans);
- GstFrei0rFilterClass *klass = GST_FREI0R_FILTER_GET_CLASS (trans);
GstVideoFormat fmt;
- gint width, height;
- if (!gst_video_format_parse_caps (incaps, &fmt, &width, &height))
+ if (!gst_video_format_parse_caps (incaps, &fmt, &self->width, &self->height))
return FALSE;
- if (self->f0r_instance) {
- klass->ftable->destruct (self->f0r_instance);
- self->f0r_instance = NULL;
- }
-
- self->f0r_instance =
- gst_frei0r_instance_construct (klass->ftable, klass->properties,
- klass->n_properties, self->property_cache, width, height);
-
return TRUE;
}
@@ -70,6 +59,8 @@ gst_frei0r_filter_stop (GstBaseTransform * trans)
self->f0r_instance = NULL;
}
+ self->width = self->height = 0;
+
return TRUE;
}
@@ -81,9 +72,17 @@ gst_frei0r_filter_transform (GstBaseTransform * trans, GstBuffer * inbuf,
GstFrei0rFilterClass *klass = GST_FREI0R_FILTER_GET_CLASS (trans);
gdouble time;
- if (!self->f0r_instance)
+ if (G_UNLIKELY (self->width <= 0 || self->height <= 0))
return GST_FLOW_NOT_NEGOTIATED;
+ if (G_UNLIKELY (!self->f0r_instance)) {
+ self->f0r_instance =
+ gst_frei0r_instance_construct (klass->ftable, klass->properties,
+ klass->n_properties, self->property_cache, self->width, self->height);
+ if (G_UNLIKELY (!self->f0r_instance))
+ return GST_FLOW_ERROR;
+ }
+
time = ((gdouble) GST_BUFFER_TIMESTAMP (inbuf)) / GST_SECOND;
if (klass->ftable->update2)
@@ -195,6 +194,8 @@ gst_frei0r_filter_init (GstFrei0rFilter * self, GstFrei0rFilterClass * klass)
{
self->property_cache =
gst_frei0r_property_cache_init (klass->properties, klass->n_properties);
+ gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SINK_PAD (self));
+ gst_pad_use_fixed_caps (GST_BASE_TRANSFORM_SRC_PAD (self));
}
gboolean
diff --git a/gst/frei0r/gstfrei0rfilter.h b/gst/frei0r/gstfrei0rfilter.h
index ded2172b..b85c3f6e 100644
--- a/gst/frei0r/gstfrei0rfilter.h
+++ b/gst/frei0r/gstfrei0rfilter.h
@@ -42,6 +42,8 @@ typedef struct _GstFrei0rFilterClass GstFrei0rFilterClass;
struct _GstFrei0rFilter {
GstVideoFilter parent;
+ gint width, height;
+
f0r_instance_t *f0r_instance;
GstFrei0rPropertyValue *property_cache;
};
diff --git a/gst/frei0r/gstfrei0rmixer.c b/gst/frei0r/gstfrei0rmixer.c
index 745c330e..8f3ee4ba 100644
--- a/gst/frei0r/gstfrei0rmixer.c
+++ b/gst/frei0r/gstfrei0rmixer.c
@@ -45,8 +45,16 @@ gst_frei0r_mixer_reset (GstFrei0rMixer * self)
self->f0r_instance = NULL;
}
+ if (self->property_cache)
+ gst_frei0r_property_cache_free (klass->properties, self->property_cache,
+ klass->n_properties);
+ self->property_cache = NULL;
+
gst_caps_replace (&self->caps, NULL);
gst_event_replace (&self->newseg_event, NULL);
+
+ self->fmt = GST_VIDEO_FORMAT_UNKNOWN;
+ self->width = self->height = 0;
}
static void
@@ -195,7 +203,6 @@ static gboolean
gst_frei0r_mixer_set_caps (GstPad * pad, GstCaps * caps)
{
GstFrei0rMixer *self = GST_FREI0R_MIXER (gst_pad_get_parent (pad));
- GstFrei0rMixerClass *klass = GST_FREI0R_MIXER_GET_CLASS (self);
gboolean ret = TRUE;
gst_caps_replace (&self->caps, caps);
@@ -215,16 +222,6 @@ gst_frei0r_mixer_set_caps (GstPad * pad, GstCaps * caps)
ret = FALSE;
goto out;
}
-
- if (self->f0r_instance) {
- klass->ftable->destruct (self->f0r_instance);
- self->f0r_instance = NULL;
- }
-
- self->f0r_instance =
- gst_frei0r_instance_construct (klass->ftable, klass->properties,
- klass->n_properties, self->property_cache, self->width, self->height);
-
}
out:
@@ -543,9 +540,17 @@ gst_frei0r_mixer_collected (GstCollectPads * pads, GstFrei0rMixer * self)
GstFrei0rMixerClass *klass = GST_FREI0R_MIXER_GET_CLASS (self);
gdouble time;
- if (G_UNLIKELY (!self->f0r_instance))
+ if (G_UNLIKELY (self->width <= 0 || self->height <= 0))
return GST_FLOW_NOT_NEGOTIATED;
+ if (G_UNLIKELY (!self->f0r_instance)) {
+ self->f0r_instance =
+ gst_frei0r_instance_construct (klass->ftable, klass->properties,
+ klass->n_properties, self->property_cache, self->width, self->height);
+ if (G_UNLIKELY (!self->f0r_instance))
+ return GST_FLOW_ERROR;
+ }
+
if (self->newseg_event) {
gst_pad_push_event (self->src, self->newseg_event);
self->newseg_event = NULL;
diff --git a/gst/frei0r/gstfrei0rsrc.c b/gst/frei0r/gstfrei0rsrc.c
index a713e1e9..2d637f95 100644
--- a/gst/frei0r/gstfrei0rsrc.c
+++ b/gst/frei0r/gstfrei0rsrc.c
@@ -39,22 +39,12 @@ static gboolean
gst_frei0r_src_set_caps (GstBaseSrc * src, GstCaps * caps)
{
GstFrei0rSrc *self = GST_FREI0R_SRC (src);
- GstFrei0rSrcClass *klass = GST_FREI0R_SRC_GET_CLASS (src);
if (!gst_video_format_parse_caps (caps, &self->fmt, &self->width,
&self->height)
|| !gst_video_parse_caps_framerate (caps, &self->fps_n, &self->fps_d))
return FALSE;
- if (self->f0r_instance) {
- klass->ftable->destruct (self->f0r_instance);
- self->f0r_instance = NULL;
- }
-
- self->f0r_instance =
- gst_frei0r_instance_construct (klass->ftable, klass->properties,
- klass->n_properties, self->property_cache, self->width, self->height);
-
return TRUE;
}
@@ -80,9 +70,18 @@ gst_frei0r_src_create (GstPushSrc * src, GstBuffer ** buf)
*buf = NULL;
- if (G_UNLIKELY (!self->f0r_instance))
+ if (G_UNLIKELY (self->width <= 0 || self->height <= 0))
return GST_FLOW_NOT_NEGOTIATED;
+ if (G_UNLIKELY (!self->f0r_instance)) {
+ self->f0r_instance =
+ gst_frei0r_instance_construct (klass->ftable, klass->properties,
+ klass->n_properties, self->property_cache, self->width, self->height);
+
+ if (G_UNLIKELY (!self->f0r_instance))
+ return GST_FLOW_ERROR;
+ }
+
newsize = gst_video_format_get_size (self->fmt, self->width, self->height);
ret =
@@ -147,6 +146,11 @@ gst_frei0r_src_stop (GstBaseSrc * basesrc)
self->f0r_instance = NULL;
}
+ self->fmt = GST_VIDEO_FORMAT_UNKNOWN;
+ self->width = self->height = 0;
+ self->fps_n = self->fps_d = 0;
+ self->n_frames = 0;
+
return TRUE;
}
diff --git a/gst/mpegdemux/mpegtspacketizer.c b/gst/mpegdemux/mpegtspacketizer.c
index b986fa60..c9fc325f 100644
--- a/gst/mpegdemux/mpegtspacketizer.c
+++ b/gst/mpegdemux/mpegtspacketizer.c
@@ -1264,17 +1264,16 @@ mpegts_packetizer_parse_sdt (MpegTSPacketizer * packetizer,
service_id = GST_READ_UINT16_BE (data);
data += 2;
- /* reserved */
- data += 1;
+ EIT_schedule = ((*data & 0x02) == 2);
+ EIT_present_following = (*data & 0x01) == 1;
+ data += 1;
tmp = GST_READ_UINT16_BE (data);
- data += 2;
- EIT_schedule = (tmp >> 15);
- EIT_present_following = (tmp >> 14) & 0x01;
- running_status = (tmp >> 5) & 0x03;
- scrambled = (tmp >> 4) & 0x01;
+ running_status = (*data >> 5) & 0x07;
+ scrambled = (*data >> 4) & 0x01;
descriptors_loop_length = tmp & 0x0FFF;
+ data += 2;
/* TODO send tag event down relevant pad for channel name and provider */
service_name = g_strdup_printf ("service-%d", service_id);
@@ -1307,7 +1306,26 @@ mpegts_packetizer_parse_sdt (MpegTSPacketizer * packetizer,
(gchar *) DESC_DVB_SERVICE_name_text (service_descriptor);
if (servicename_length + serviceprovider_name_length + 2 <=
DESC_LENGTH (service_descriptor)) {
-
+ gchar *running_status_tmp;
+ switch (running_status) {
+ case 0:
+ running_status_tmp = "undefined";
+ break;
+ case 1:
+ running_status_tmp = "not running";
+ break;
+ case 2:
+ running_status_tmp = "starts in a few seconds";
+ break;
+ case 3:
+ running_status_tmp = "pausing";
+ break;
+ case 4:
+ running_status_tmp = "running";
+ break;
+ default:
+ running_status_tmp = "reserved";
+ }
servicename_tmp =
get_encoding_and_convert (servicename, servicename_length);
serviceprovider_name_tmp =
@@ -1317,7 +1335,8 @@ mpegts_packetizer_parse_sdt (MpegTSPacketizer * packetizer,
gst_structure_set (service,
"name", G_TYPE_STRING, servicename_tmp,
"provider-name", G_TYPE_STRING, serviceprovider_name_tmp,
- "scrambled", G_TYPE_BOOLEAN, scrambled, NULL);
+ "scrambled", G_TYPE_BOOLEAN, scrambled,
+ "running-status", G_TYPE_STRING, running_status_tmp, NULL);
g_free (servicename_tmp);
g_free (serviceprovider_name_tmp);
diff --git a/gst/rtpmanager/Makefile.am b/gst/rtpmanager/Makefile.am
index 2d53d63e..8080f303 100644
--- a/gst/rtpmanager/Makefile.am
+++ b/gst/rtpmanager/Makefile.am
@@ -12,7 +12,6 @@ BUILT_SOURCES = $(built_sources) $(built_headers)
libgstrtpmanager_la_SOURCES = gstrtpmanager.c \
gstrtpbin.c \
- gstrtpclient.c \
gstrtpjitterbuffer.c \
gstrtpptdemux.c \
gstrtpssrcdemux.c \
@@ -26,7 +25,6 @@ nodist_libgstrtpmanager_la_SOURCES = \
$(built_sources)
noinst_HEADERS = gstrtpbin.h \
- gstrtpclient.h \
gstrtpjitterbuffer.h \
gstrtpptdemux.h \
gstrtpssrcdemux.h \
diff --git a/gst/rtpmanager/gstrtpbin.c b/gst/rtpmanager/gstrtpbin.c
index 482cf017..c09b0ab9 100644
--- a/gst/rtpmanager/gstrtpbin.c
+++ b/gst/rtpmanager/gstrtpbin.c
@@ -238,26 +238,14 @@ enum
};
#define DEFAULT_LATENCY_MS 200
-#define DEFAULT_SDES_CNAME NULL
-#define DEFAULT_SDES_NAME NULL
-#define DEFAULT_SDES_EMAIL NULL
-#define DEFAULT_SDES_PHONE NULL
-#define DEFAULT_SDES_LOCATION NULL
-#define DEFAULT_SDES_TOOL NULL
-#define DEFAULT_SDES_NOTE NULL
+#define DEFAULT_SDES NULL
#define DEFAULT_DO_LOST FALSE
enum
{
PROP_0,
PROP_LATENCY,
- PROP_SDES_CNAME,
- PROP_SDES_NAME,
- PROP_SDES_EMAIL,
- PROP_SDES_PHONE,
- PROP_SDES_LOCATION,
- PROP_SDES_TOOL,
- PROP_SDES_NOTE,
+ PROP_SDES,
PROP_DO_LOST,
PROP_LAST
};
@@ -271,10 +259,6 @@ static guint gst_rtp_bin_signals[LAST_SIGNAL] = { 0 };
static GstCaps *pt_map_requested (GstElement * element, guint pt,
GstRtpBinSession * session);
-static const gchar *sdes_type_to_name (GstRTCPSDESType type);
-static void gst_rtp_bin_set_sdes_string (GstRtpBin * bin,
- GstRTCPSDESType type, const gchar * data);
-
static void free_stream (GstRtpBinStream * stream);
/* Manages the RTP stream for one SSRC.
@@ -304,10 +288,9 @@ struct _GstRtpBinStream
/* the PT demuxer of the SSRC */
GstElement *demux;
gulong demux_newpad_sig;
+ gulong demux_padremoved_sig;
gulong demux_ptreq_sig;
gulong demux_pt_change_sig;
- /* ghostpads from the ptdemuxer */
- GSList *pads;
/* if we have calculated a valid unix_delta for this stream */
gboolean have_sync;
@@ -515,7 +498,6 @@ create_session (GstRtpBin * rtpbin, gint id)
{
GstRtpBinSession *sess;
GstElement *session, *demux;
- gint i;
GstState target;
if (!(session = gst_element_factory_make ("gstrtpsession", NULL)))
@@ -538,9 +520,7 @@ create_session (GstRtpBin * rtpbin, gint id)
g_object_set (session, "ntp-ns-base", rtpbin->priv->ntp_ns_base, NULL);
/* configure SDES items */
GST_OBJECT_LOCK (rtpbin);
- for (i = GST_RTCP_SDES_CNAME; i < GST_RTCP_SDES_PRIV; i++) {
- g_object_set (session, sdes_type_to_name (i), rtpbin->sdes[i], NULL);
- }
+ g_object_set (session, "sdes", rtpbin->sdes, NULL);
GST_OBJECT_UNLOCK (rtpbin);
/* provide clock_rate to the session manager when needed */
@@ -1171,7 +1151,6 @@ static void
free_stream (GstRtpBinStream * stream)
{
GstRtpBinSession *session;
- GSList *walk;
session = stream->session;
@@ -1184,17 +1163,13 @@ free_stream (GstRtpBinStream * stream)
gst_element_set_state (stream->demux, GST_STATE_NULL);
gst_element_set_state (stream->buffer, GST_STATE_NULL);
+ /* now remove this signal; we need it while going to NULL because the
+ * demuxer does some cleanups during that state change */
+ g_signal_handler_disconnect (stream->demux, stream->demux_padremoved_sig);
+
gst_bin_remove (GST_BIN_CAST (session->bin), stream->buffer);
gst_bin_remove (GST_BIN_CAST (session->bin), stream->demux);
- for (walk = stream->pads; walk; walk = g_slist_next (walk)) {
- GstPad *gpad = GST_PAD_CAST (walk->data);
-
- gst_pad_set_active (gpad, FALSE);
- gst_element_remove_pad (GST_ELEMENT_CAST (session->bin), gpad);
- }
- g_slist_free (stream->pads);
-
g_free (stream);
}
@@ -1448,40 +1423,10 @@ gst_rtp_bin_class_init (GstRtpBinClass * klass)
NULL, NULL, gst_rtp_bin_marshal_VOID__UINT_UINT, G_TYPE_NONE, 2,
G_TYPE_UINT, G_TYPE_UINT);
- g_object_class_install_property (gobject_class, PROP_SDES_CNAME,
- g_param_spec_string ("sdes-cname", "SDES CNAME",
- "The CNAME to put in SDES messages of this session",
- DEFAULT_SDES_CNAME, G_PARAM_READWRITE));
-
- g_object_class_install_property (gobject_class, PROP_SDES_NAME,
- g_param_spec_string ("sdes-name", "SDES NAME",
- "The NAME to put in SDES messages of this session",
- DEFAULT_SDES_NAME, G_PARAM_READWRITE));
-
- g_object_class_install_property (gobject_class, PROP_SDES_EMAIL,
- g_param_spec_string ("sdes-email", "SDES EMAIL",
- "The EMAIL to put in SDES messages of this session",
- DEFAULT_SDES_EMAIL, G_PARAM_READWRITE));
-
- g_object_class_install_property (gobject_class, PROP_SDES_PHONE,
- g_param_spec_string ("sdes-phone", "SDES PHONE",
- "The PHONE to put in SDES messages of this session",
- DEFAULT_SDES_PHONE, G_PARAM_READWRITE));
-
- g_object_class_install_property (gobject_class, PROP_SDES_LOCATION,
- g_param_spec_string ("sdes-location", "SDES LOCATION",
- "The LOCATION to put in SDES messages of this session",
- DEFAULT_SDES_LOCATION, G_PARAM_READWRITE));
-
- g_object_class_install_property (gobject_class, PROP_SDES_TOOL,
- g_param_spec_string ("sdes-tool", "SDES TOOL",
- "The TOOL to put in SDES messages of this session",
- DEFAULT_SDES_TOOL, G_PARAM_READWRITE));
-
- g_object_class_install_property (gobject_class, PROP_SDES_NOTE,
- g_param_spec_string ("sdes-note", "SDES NOTE",
- "The NOTE to put in SDES messages of this session",
- DEFAULT_SDES_NOTE, G_PARAM_READWRITE));
+ g_object_class_install_property (gobject_class, PROP_SDES,
+ g_param_spec_boxed ("sdes", "SDES",
+ "The SDES items of this session",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE));
g_object_class_install_property (gobject_class, PROP_DO_LOST,
g_param_spec_boolean ("do-lost", "Do Lost",
@@ -1517,11 +1462,11 @@ gst_rtp_bin_init (GstRtpBin * rtpbin, GstRtpBinClass * klass)
/* some default SDES entries */
str = g_strdup_printf ("%s@%s", g_get_user_name (), g_get_host_name ());
- gst_rtp_bin_set_sdes_string (rtpbin, GST_RTCP_SDES_CNAME, str);
+ rtpbin->sdes = gst_structure_new ("application/x-rtp-source-sdes",
+ "cname", G_TYPE_STRING, str,
+ "name", G_TYPE_STRING, g_get_real_name (),
+ "tool", G_TYPE_STRING, "GStreamer", NULL);
g_free (str);
-
- gst_rtp_bin_set_sdes_string (rtpbin, GST_RTCP_SDES_NAME, g_get_real_name ());
- gst_rtp_bin_set_sdes_string (rtpbin, GST_RTCP_SDES_TOOL, "GStreamer");
}
static void
@@ -1547,12 +1492,11 @@ static void
gst_rtp_bin_finalize (GObject * object)
{
GstRtpBin *rtpbin;
- gint i;
rtpbin = GST_RTP_BIN (object);
- for (i = 0; i < 9; i++)
- g_free (rtpbin->sdes[i]);
+ if (rtpbin->sdes)
+ gst_structure_free (rtpbin->sdes);
g_mutex_free (rtpbin->priv->bin_lock);
g_mutex_free (rtpbin->priv->dyn_lock);
@@ -1560,77 +1504,37 @@ gst_rtp_bin_finalize (GObject * object)
G_OBJECT_CLASS (parent_class)->finalize (object);
}
-static const gchar *
-sdes_type_to_name (GstRTCPSDESType type)
-{
- const gchar *result;
-
- switch (type) {
- case GST_RTCP_SDES_CNAME:
- result = "sdes-cname";
- break;
- case GST_RTCP_SDES_NAME:
- result = "sdes-name";
- break;
- case GST_RTCP_SDES_EMAIL:
- result = "sdes-email";
- break;
- case GST_RTCP_SDES_PHONE:
- result = "sdes-phone";
- break;
- case GST_RTCP_SDES_LOC:
- result = "sdes-location";
- break;
- case GST_RTCP_SDES_TOOL:
- result = "sdes-tool";
- break;
- case GST_RTCP_SDES_NOTE:
- result = "sdes-note";
- break;
- case GST_RTCP_SDES_PRIV:
- result = "sdes-priv";
- break;
- default:
- result = NULL;
- break;
- }
- return result;
-}
static void
-gst_rtp_bin_set_sdes_string (GstRtpBin * bin, GstRTCPSDESType type,
- const gchar * data)
+gst_rtp_bin_set_sdes_struct (GstRtpBin * bin, const GstStructure * sdes)
{
GSList *item;
- const gchar *name;
- if (type < 0 || type > 8)
+ if (sdes == NULL)
return;
GST_RTP_BIN_LOCK (bin);
GST_OBJECT_LOCK (bin);
- g_free (bin->sdes[type]);
- bin->sdes[type] = g_strdup (data);
- name = sdes_type_to_name (type);
+ if (bin->sdes)
+ gst_structure_free (bin->sdes);
+ bin->sdes = gst_structure_copy (sdes);
+
/* store in all sessions */
for (item = bin->sessions; item; item = g_slist_next (item))
- g_object_set (item->data, name, bin->sdes[type], NULL);
+ g_object_set (item->data, "sdes", sdes, NULL);
GST_OBJECT_UNLOCK (bin);
GST_RTP_BIN_UNLOCK (bin);
}
-static gchar *
-gst_rtp_bin_get_sdes_string (GstRtpBin * bin, GstRTCPSDESType type)
+static GstStructure *
+gst_rtp_bin_get_sdes_struct (GstRtpBin * bin)
{
- gchar *result;
-
- if (type < 0 || type > 8)
- return NULL;
+ GstStructure *result;
GST_OBJECT_LOCK (bin);
- result = g_strdup (bin->sdes[type]);
+ result = gst_structure_copy (bin->sdes);
GST_OBJECT_UNLOCK (bin);
return result;
@@ -1652,33 +1556,8 @@ gst_rtp_bin_set_property (GObject * object, guint prop_id,
/* propegate the property down to the jitterbuffer */
gst_rtp_bin_propagate_property_to_jitterbuffer (rtpbin, "latency", value);
break;
- case PROP_SDES_CNAME:
- gst_rtp_bin_set_sdes_string (rtpbin, GST_RTCP_SDES_CNAME,
- g_value_get_string (value));
- break;
- case PROP_SDES_NAME:
- gst_rtp_bin_set_sdes_string (rtpbin, GST_RTCP_SDES_NAME,
- g_value_get_string (value));
- break;
- case PROP_SDES_EMAIL:
- gst_rtp_bin_set_sdes_string (rtpbin, GST_RTCP_SDES_EMAIL,
- g_value_get_string (value));
- break;
- case PROP_SDES_PHONE:
- gst_rtp_bin_set_sdes_string (rtpbin, GST_RTCP_SDES_PHONE,
- g_value_get_string (value));
- break;
- case PROP_SDES_LOCATION:
- gst_rtp_bin_set_sdes_string (rtpbin, GST_RTCP_SDES_LOC,
- g_value_get_string (value));
- break;
- case PROP_SDES_TOOL:
- gst_rtp_bin_set_sdes_string (rtpbin, GST_RTCP_SDES_TOOL,
- g_value_get_string (value));
- break;
- case PROP_SDES_NOTE:
- gst_rtp_bin_set_sdes_string (rtpbin, GST_RTCP_SDES_NOTE,
- g_value_get_string (value));
+ case PROP_SDES:
+ gst_rtp_bin_set_sdes_struct (rtpbin, g_value_get_boxed (value));
break;
case PROP_DO_LOST:
GST_RTP_BIN_LOCK (rtpbin);
@@ -1706,33 +1585,8 @@ gst_rtp_bin_get_property (GObject * object, guint prop_id,
g_value_set_uint (value, rtpbin->latency);
GST_RTP_BIN_UNLOCK (rtpbin);
break;
- case PROP_SDES_CNAME:
- g_value_take_string (value, gst_rtp_bin_get_sdes_string (rtpbin,
- GST_RTCP_SDES_CNAME));
- break;
- case PROP_SDES_NAME:
- g_value_take_string (value, gst_rtp_bin_get_sdes_string (rtpbin,
- GST_RTCP_SDES_NAME));
- break;
- case PROP_SDES_EMAIL:
- g_value_take_string (value, gst_rtp_bin_get_sdes_string (rtpbin,
- GST_RTCP_SDES_EMAIL));
- break;
- case PROP_SDES_PHONE:
- g_value_take_string (value, gst_rtp_bin_get_sdes_string (rtpbin,
- GST_RTCP_SDES_PHONE));
- break;
- case PROP_SDES_LOCATION:
- g_value_take_string (value, gst_rtp_bin_get_sdes_string (rtpbin,
- GST_RTCP_SDES_LOC));
- break;
- case PROP_SDES_TOOL:
- g_value_take_string (value, gst_rtp_bin_get_sdes_string (rtpbin,
- GST_RTCP_SDES_TOOL));
- break;
- case PROP_SDES_NOTE:
- g_value_take_string (value, gst_rtp_bin_get_sdes_string (rtpbin,
- GST_RTCP_SDES_NOTE));
+ case PROP_SDES:
+ g_value_take_boxed (value, gst_rtp_bin_get_sdes_struct (rtpbin));
break;
case PROP_DO_LOST:
GST_RTP_BIN_LOCK (rtpbin);
@@ -1884,13 +1738,11 @@ new_payload_found (GstElement * element, guint pt, GstPad * pad,
stream->session->id, stream->ssrc, pt);
gpad = gst_ghost_pad_new_from_template (padname, pad, templ);
g_free (padname);
+ g_object_set_data (G_OBJECT (pad), "GstRTPBin.ghostpad", gpad);
gst_pad_set_caps (gpad, GST_PAD_CAPS (pad));
gst_pad_set_active (gpad, TRUE);
gst_element_add_pad (GST_ELEMENT_CAST (rtpbin), gpad);
-
- stream->pads = g_slist_prepend (stream->pads, gpad);
-
GST_RTP_BIN_SHUTDOWN_UNLOCK (rtpbin);
return;
@@ -1902,6 +1754,27 @@ shutdown:
}
}
+static void
+payload_pad_removed (GstElement * element, GstPad * pad,
+ GstRtpBinStream * stream)
+{
+ GstRtpBin *rtpbin;
+ GstPad *gpad;
+
+ rtpbin = stream->bin;
+
+ GST_DEBUG ("payload pad removed");
+
+ GST_RTP_BIN_DYN_LOCK (rtpbin);
+ if ((gpad = g_object_get_data (G_OBJECT (pad), "GstRTPBin.ghostpad"))) {
+ g_object_set_data (G_OBJECT (pad), "GstRTPBin.ghostpad", NULL);
+
+ gst_pad_set_active (gpad, FALSE);
+ gst_element_remove_pad (GST_ELEMENT_CAST (rtpbin), gpad);
+ }
+ GST_RTP_BIN_DYN_UNLOCK (rtpbin);
+}
+
static GstCaps *
pt_map_requested (GstElement * element, guint pt, GstRtpBinSession * session)
{
@@ -2009,6 +1882,9 @@ new_ssrc_pad_found (GstElement * element, guint ssrc, GstPad * pad,
* new pad by ghosting it. */
stream->demux_newpad_sig = g_signal_connect (stream->demux,
"new-payload-type", (GCallback) new_payload_found, stream);
+ stream->demux_padremoved_sig = g_signal_connect (stream->demux,
+ "pad-removed", (GCallback) payload_pad_removed, stream);
+
/* connect to the request-pt-map signal. This signal will be emited by the
* demuxer so that it can apply a proper caps on the buffers for the
* depayloaders. */
diff --git a/gst/rtpmanager/gstrtpbin.h b/gst/rtpmanager/gstrtpbin.h
index f47048e5..bed6ad02 100644
--- a/gst/rtpmanager/gstrtpbin.h
+++ b/gst/rtpmanager/gstrtpbin.h
@@ -53,7 +53,7 @@ struct _GstRtpBin {
GSList *clients;
/* the default SDES items for sessions */
- gchar *sdes[9];
+ GstStructure *sdes;
/*< private >*/
GstRtpBinPrivate *priv;
diff --git a/gst/rtpmanager/gstrtpclient.c b/gst/rtpmanager/gstrtpclient.c
deleted file mode 100644
index 2fccbfd7..00000000
--- a/gst/rtpmanager/gstrtpclient.c
+++ /dev/null
@@ -1,484 +0,0 @@
-/* GStreamer
- * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-/**
- * SECTION:element-gstrtpclient
- * @see_also: gstrtpjitterbuffer, gstrtpbin, gstrtpsession
- *
- * This element handles RTP data from one client. It accepts multiple RTP streams that
- * should be synchronized together.
- *
- * Normally the SSRCs that map to the same CNAME (as given in the RTCP SDES messages)
- * should be synchronized.
- *
- * <refsect2>
- * <title>Example pipelines</title>
- * |[
- * FIXME: gst-launch
- * ]| FIXME: describe
- * </refsect2>
- *
- * Last reviewed on 2007-04-02 (0.10.5)
- */
-
-#ifdef HAVE_CONFIG_H
-#include "config.h"
-#endif
-
-#include <stdlib.h>
-#include <string.h>
-
-#include "gstrtpclient.h"
-
-/* elementfactory information */
-static const GstElementDetails rtpclient_details =
-GST_ELEMENT_DETAILS ("RTP Client",
- "Filter/Network/RTP",
- "Implement an RTP client",
- "Wim Taymans <wim.taymans@gmail.com>");
-
-/* sink pads */
-static GstStaticPadTemplate rtpclient_rtp_sink_template =
-GST_STATIC_PAD_TEMPLATE ("rtp_sink_%d",
- GST_PAD_SINK,
- GST_PAD_REQUEST,
- GST_STATIC_CAPS ("application/x-rtp")
- );
-
-static GstStaticPadTemplate rtpclient_sync_sink_template =
-GST_STATIC_PAD_TEMPLATE ("sync_sink_%d",
- GST_PAD_SINK,
- GST_PAD_REQUEST,
- GST_STATIC_CAPS ("application/x-rtcp")
- );
-
-/* src pads */
-static GstStaticPadTemplate rtpclient_rtp_src_template =
-GST_STATIC_PAD_TEMPLATE ("rtp_src_%d_%d",
- GST_PAD_SRC,
- GST_PAD_SOMETIMES,
- GST_STATIC_CAPS ("application/x-rtp")
- );
-
-#define GST_RTP_CLIENT_GET_PRIVATE(obj) \
- (G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_RTP_CLIENT, GstRtpClientPrivate))
-
-struct _GstRtpClientPrivate
-{
- gint foo;
-};
-
-/* all the info needed to handle the stream with SSRC */
-typedef struct
-{
- GstRtpClient *client;
-
- /* the SSRC of this stream */
- guint32 ssrc;
-
- /* RTP and RTCP in */
- GstPad *rtp_sink;
- GstPad *sync_sink;
-
- /* the jitterbuffer */
- GstElement *jitterbuffer;
- /* the payload demuxer */
- GstElement *ptdemux;
- /* the new-pad signal */
- gulong new_pad_sig;
-} GstRtpClientStream;
-
-/* the PT demuxer found a new payload type */
-static void
-new_pad (GstElement * element, GstPad * pad, GstRtpClientStream * stream)
-{
-}
-
-/* create a new stream for SSRC.
- *
- * We create a jitterbuffer and an payload demuxer for the SSRC. The sinkpad of
- * the jitterbuffer is ghosted to the bin. We connect a pad-added signal to
- * rtpptdemux so that we can ghost the payload pads outside.
- *
- * +-----------------+ +---------------+
- * | rtpjitterbuffer | | rtpptdemux |
- * +- sink src - sink |
- * / +-----------------+ +---------------+
- *
- */
-static GstRtpClientStream *
-create_stream (GstRtpClient * rtpclient, guint32 ssrc)
-{
- GstRtpClientStream *stream;
- gchar *name;
- GstPad *srcpad, *sinkpad;
- GstPadLinkReturn res;
-
- stream = g_new0 (GstRtpClientStream, 1);
- stream->ssrc = ssrc;
- stream->client = rtpclient;
-
- stream->jitterbuffer = gst_element_factory_make ("gstrtpjitterbuffer", NULL);
- if (!stream->jitterbuffer)
- goto no_jitterbuffer;
-
- stream->ptdemux = gst_element_factory_make ("gstrtpptdemux", NULL);
- if (!stream->ptdemux)
- goto no_ptdemux;
-
- /* add elements to bin */
- gst_bin_add (GST_BIN_CAST (rtpclient), stream->jitterbuffer);
- gst_bin_add (GST_BIN_CAST (rtpclient), stream->ptdemux);
-
- /* link jitterbuffer and PT demuxer */
- srcpad = gst_element_get_static_pad (stream->jitterbuffer, "src");
- sinkpad = gst_element_get_static_pad (stream->ptdemux, "sink");
- res = gst_pad_link (srcpad, sinkpad);
- gst_object_unref (srcpad);
- gst_object_unref (sinkpad);
-
- if (res != GST_PAD_LINK_OK)
- goto could_not_link;
-
- /* add stream to list */
- rtpclient->streams = g_list_prepend (rtpclient->streams, stream);
-
- /* ghost sinkpad */
- name = g_strdup_printf ("rtp_sink_%d", ssrc);
- sinkpad = gst_element_get_static_pad (stream->jitterbuffer, "sink");
- stream->rtp_sink = gst_ghost_pad_new (name, sinkpad);
- gst_object_unref (sinkpad);
- g_free (name);
- gst_element_add_pad (GST_ELEMENT_CAST (rtpclient), stream->rtp_sink);
-
- /* add signal to ptdemuxer */
- stream->new_pad_sig =
- g_signal_connect (G_OBJECT (stream->ptdemux), "pad-added",
- G_CALLBACK (new_pad), stream);
-
- return stream;
-
- /* ERRORS */
-no_jitterbuffer:
- {
- g_free (stream);
- g_warning ("gstrtpclient: could not create gstrtpjitterbuffer element");
- return NULL;
- }
-no_ptdemux:
- {
- gst_object_unref (stream->jitterbuffer);
- g_free (stream);
- g_warning ("gstrtpclient: could not create gstrtpptdemux element");
- return NULL;
- }
-could_not_link:
- {
- gst_bin_remove (GST_BIN_CAST (rtpclient), stream->jitterbuffer);
- gst_bin_remove (GST_BIN_CAST (rtpclient), stream->ptdemux);
- g_free (stream);
- g_warning ("gstrtpclient: could not link jitterbuffer and ptdemux element");
- return NULL;
- }
-}
-
-#if 0
-static void
-free_stream (GstRtpClientStream * stream)
-{
- gst_object_unref (stream->jitterbuffer);
- g_free (stream);
-}
-#endif
-
-/* find the stream for the given SSRC, return NULL if the stream did not exist
- */
-static GstRtpClientStream *
-find_stream_by_ssrc (GstRtpClient * client, guint32 ssrc)
-{
- GstRtpClientStream *stream;
- GList *walk;
-
- for (walk = client->streams; walk; walk = g_list_next (walk)) {
- stream = (GstRtpClientStream *) walk->data;
- if (stream->ssrc == ssrc)
- return stream;
- }
- return NULL;
-}
-
-/* signals and args */
-enum
-{
- /* FILL ME */
- LAST_SIGNAL
-};
-
-enum
-{
- PROP_0
-};
-
-/* GObject vmethods */
-static void gst_rtp_client_finalize (GObject * object);
-static void gst_rtp_client_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec);
-static void gst_rtp_client_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec);
-
-/* GstElement vmethods */
-static GstStateChangeReturn gst_rtp_client_change_state (GstElement * element,
- GstStateChange transition);
-static GstPad *gst_rtp_client_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name);
-static void gst_rtp_client_release_pad (GstElement * element, GstPad * pad);
-
-/*static guint gst_rtp_client_signals[LAST_SIGNAL] = { 0 }; */
-
-GST_BOILERPLATE (GstRtpClient, gst_rtp_client, GstBin, GST_TYPE_BIN);
-
-static void
-gst_rtp_client_base_init (gpointer klass)
-{
- GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
-
- /* sink pads */
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtpclient_rtp_sink_template));
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtpclient_sync_sink_template));
-
- /* src pads */
- gst_element_class_add_pad_template (element_class,
- gst_static_pad_template_get (&rtpclient_rtp_src_template));
-
- gst_element_class_set_details (element_class, &rtpclient_details);
-}
-
-static void
-gst_rtp_client_class_init (GstRtpClientClass * klass)
-{
- GObjectClass *gobject_class;
- GstElementClass *gstelement_class;
-
- gobject_class = (GObjectClass *) klass;
- gstelement_class = (GstElementClass *) klass;
-
- g_type_class_add_private (klass, sizeof (GstRtpClientPrivate));
-
- gobject_class->finalize = gst_rtp_client_finalize;
- gobject_class->set_property = gst_rtp_client_set_property;
- gobject_class->get_property = gst_rtp_client_get_property;
-
- gstelement_class->change_state =
- GST_DEBUG_FUNCPTR (gst_rtp_client_change_state);
- gstelement_class->request_new_pad =
- GST_DEBUG_FUNCPTR (gst_rtp_client_request_new_pad);
- gstelement_class->release_pad =
- GST_DEBUG_FUNCPTR (gst_rtp_client_release_pad);
-}
-
-static void
-gst_rtp_client_init (GstRtpClient * rtpclient, GstRtpClientClass * klass)
-{
- rtpclient->priv = GST_RTP_CLIENT_GET_PRIVATE (rtpclient);
-}
-
-static void
-gst_rtp_client_finalize (GObject * object)
-{
- GstRtpClient *rtpclient;
-
- rtpclient = GST_RTP_CLIENT (object);
-
- G_OBJECT_CLASS (parent_class)->finalize (object);
-}
-
-static void
-gst_rtp_client_set_property (GObject * object, guint prop_id,
- const GValue * value, GParamSpec * pspec)
-{
- GstRtpClient *rtpclient;
-
- rtpclient = GST_RTP_CLIENT (object);
-
- switch (prop_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
-
-static void
-gst_rtp_client_get_property (GObject * object, guint prop_id,
- GValue * value, GParamSpec * pspec)
-{
- GstRtpClient *rtpclient;
-
- rtpclient = GST_RTP_CLIENT (object);
-
- switch (prop_id) {
- default:
- G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
- break;
- }
-}
-
-static GstStateChangeReturn
-gst_rtp_client_change_state (GstElement * element, GstStateChange transition)
-{
- GstStateChangeReturn res;
- GstRtpClient *rtpclient;
-
- rtpclient = GST_RTP_CLIENT (element);
-
- switch (transition) {
- case GST_STATE_CHANGE_NULL_TO_READY:
- break;
- case GST_STATE_CHANGE_READY_TO_PAUSED:
- break;
- case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
- break;
- default:
- break;
- }
-
- res = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
-
- switch (transition) {
- case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
- break;
- case GST_STATE_CHANGE_PAUSED_TO_READY:
- break;
- case GST_STATE_CHANGE_READY_TO_NULL:
- break;
- default:
- break;
- }
- return res;
-}
-
-/* We have 2 request pads (rtp_sink_%d and sync_sink_%d), the %d is assumed to
- * be the SSRC of the stream.
- *
- * We require that the rtp pad is requested first for a particular SSRC, then
- * (optionaly) the sync pad can be requested. If no sync pad is requested, no
- * sync information can be exchanged for this stream.
- */
-static GstPad *
-gst_rtp_client_request_new_pad (GstElement * element,
- GstPadTemplate * templ, const gchar * name)
-{
- GstRtpClient *rtpclient;
- GstElementClass *klass;
- GstPadTemplate *rtp_sink_templ, *sync_sink_templ;
- guint32 ssrc;
- GstRtpClientStream *stream;
- GstPad *result;
-
- g_return_val_if_fail (templ != NULL, NULL);
- g_return_val_if_fail (GST_IS_RTP_CLIENT (element), NULL);
-
- if (templ->direction != GST_PAD_SINK)
- goto wrong_direction;
-
- rtpclient = GST_RTP_CLIENT (element);
- klass = GST_ELEMENT_GET_CLASS (element);
-
- /* figure out the template */
- rtp_sink_templ = gst_element_class_get_pad_template (klass, "rtp_sink_%d");
- sync_sink_templ = gst_element_class_get_pad_template (klass, "sync_sink_%d");
-
- if (templ != rtp_sink_templ && templ != sync_sink_templ)
- goto wrong_template;
-
- if (templ == rtp_sink_templ) {
- /* create new rtp sink pad. If a stream with the pad number already exists
- * we have an error, else we create the sinkpad, add a jitterbuffer and
- * ptdemuxer. */
- if (name == NULL || strlen (name) < 9)
- goto no_name;
-
- ssrc = atoi (&name[9]);
-
- /* see if a stream with that name exists, if so we have an error. */
- stream = find_stream_by_ssrc (rtpclient, ssrc);
- if (stream != NULL)
- goto stream_exists;
-
- /* ok, create new stream */
- stream = create_stream (rtpclient, ssrc);
- if (stream == NULL)
- goto stream_not_found;
-
- result = stream->rtp_sink;
- } else {
- /* create new rtp sink pad. We can only do this if the RTP pad was
- * requested before, meaning the session with the padnumber must exist. */
- if (name == NULL || strlen (name) < 10)
- goto no_name;
-
- ssrc = atoi (&name[10]);
-
- /* find stream */
- stream = find_stream_by_ssrc (rtpclient, ssrc);
- if (stream == NULL)
- goto stream_not_found;
-
- stream->sync_sink =
- gst_pad_new_from_static_template (&rtpclient_sync_sink_template, name);
- gst_element_add_pad (GST_ELEMENT_CAST (rtpclient), stream->sync_sink);
-
- result = stream->sync_sink;
- }
-
- return result;
-
- /* ERRORS */
-wrong_direction:
- {
- g_warning ("gstrtpclient: request pad that is not a SINK pad");
- return NULL;
- }
-wrong_template:
- {
- g_warning ("gstrtpclient: this is not our template");
- return NULL;
- }
-no_name:
- {
- g_warning ("gstrtpclient: no padname was specified");
- return NULL;
- }
-stream_exists:
- {
- g_warning ("gstrtpclient: stream with SSRC %d already registered", ssrc);
- return NULL;
- }
-stream_not_found:
- {
- g_warning ("gstrtpclient: stream with SSRC %d not yet registered", ssrc);
- return NULL;
- }
-}
-
-static void
-gst_rtp_client_release_pad (GstElement * element, GstPad * pad)
-{
-}
diff --git a/gst/rtpmanager/gstrtpclient.h b/gst/rtpmanager/gstrtpclient.h
deleted file mode 100644
index cb2f7753..00000000
--- a/gst/rtpmanager/gstrtpclient.h
+++ /dev/null
@@ -1,56 +0,0 @@
-/* GStreamer
- * Copyright (C) <2007> Wim Taymans <wim.taymans@gmail.com>
- *
- * This library is free software; you can redistribute it and/or
- * modify it under the terms of the GNU Library General Public
- * License as published by the Free Software Foundation; either
- * version 2 of the License, or (at your option) any later version.
- *
- * This library is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- * Library General Public License for more details.
- *
- * You should have received a copy of the GNU Library General Public
- * License along with this library; if not, write to the
- * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
- * Boston, MA 02111-1307, USA.
- */
-
-#ifndef __GST_RTP_CLIENT_H__
-#define __GST_RTP_CLIENT_H__
-
-#include <gst/gst.h>
-
-#define GST_TYPE_RTP_CLIENT \
- (gst_rtp_client_get_type())
-#define GST_RTP_CLIENT(obj) \
- (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_CLIENT,GstRtpClient))
-#define GST_RTP_CLIENT_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_CLIENT,GstRtpClientClass))
-#define GST_IS_RTP_CLIENT(obj) \
- (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_CLIENT))
-#define GST_IS_RTP_CLIENT_CLASS(klass) \
- (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_CLIENT))
-
-typedef struct _GstRtpClient GstRtpClient;
-typedef struct _GstRtpClientClass GstRtpClientClass;
-typedef struct _GstRtpClientPrivate GstRtpClientPrivate;
-
-struct _GstRtpClient {
- GstBin parent_bin;
-
- /* a list of streams from a client */
- GList *streams;
-
- /*< private >*/
- GstRtpClientPrivate *priv;
-};
-
-struct _GstRtpClientClass {
- GstBinClass parent_class;
-};
-
-GType gst_rtp_client_get_type (void);
-
-#endif /* __GST_RTP_CLIENT_H__ */
diff --git a/gst/rtpmanager/gstrtpmanager.c b/gst/rtpmanager/gstrtpmanager.c
index 99779522..f38a77a8 100644
--- a/gst/rtpmanager/gstrtpmanager.c
+++ b/gst/rtpmanager/gstrtpmanager.c
@@ -22,7 +22,6 @@
#endif
#include "gstrtpbin.h"
-#include "gstrtpclient.h"
#include "gstrtpjitterbuffer.h"
#include "gstrtpptdemux.h"
#include "gstrtpsession.h"
@@ -35,10 +34,6 @@ plugin_init (GstPlugin * plugin)
GST_TYPE_RTP_BIN))
return FALSE;
- if (!gst_element_register (plugin, "gstrtpclient", GST_RANK_NONE,
- GST_TYPE_RTP_CLIENT))
- return FALSE;
-
if (!gst_element_register (plugin, "gstrtpjitterbuffer", GST_RANK_NONE,
GST_TYPE_RTP_JITTER_BUFFER))
return FALSE;
diff --git a/gst/rtpmanager/gstrtpsession.c b/gst/rtpmanager/gstrtpsession.c
index 9407ee52..dcddb689 100644
--- a/gst/rtpmanager/gstrtpsession.c
+++ b/gst/rtpmanager/gstrtpsession.c
@@ -200,13 +200,7 @@ enum
#define DEFAULT_NTP_NS_BASE 0
#define DEFAULT_BANDWIDTH RTP_STATS_BANDWIDTH
#define DEFAULT_RTCP_FRACTION RTP_STATS_RTCP_BANDWIDTH
-#define DEFAULT_SDES_CNAME NULL
-#define DEFAULT_SDES_NAME NULL
-#define DEFAULT_SDES_EMAIL NULL
-#define DEFAULT_SDES_PHONE NULL
-#define DEFAULT_SDES_LOCATION NULL
-#define DEFAULT_SDES_TOOL NULL
-#define DEFAULT_SDES_NOTE NULL
+#define DEFAULT_SDES NULL
#define DEFAULT_NUM_SOURCES 0
#define DEFAULT_NUM_ACTIVE_SOURCES 0
@@ -216,13 +210,7 @@ enum
PROP_NTP_NS_BASE,
PROP_BANDWIDTH,
PROP_RTCP_FRACTION,
- PROP_SDES_CNAME,
- PROP_SDES_NAME,
- PROP_SDES_EMAIL,
- PROP_SDES_PHONE,
- PROP_SDES_LOCATION,
- PROP_SDES_TOOL,
- PROP_SDES_NOTE,
+ PROP_SDES,
PROP_NUM_SOURCES,
PROP_NUM_ACTIVE_SOURCES,
PROP_INTERNAL_SESSION,
@@ -555,40 +543,10 @@ gst_rtp_session_class_init (GstRtpSessionClass * klass)
"The fraction of the bandwidth used for RTCP",
0.0, G_MAXDOUBLE, DEFAULT_RTCP_FRACTION, G_PARAM_READWRITE));
- g_object_class_install_property (gobject_class, PROP_SDES_CNAME,
- g_param_spec_string ("sdes-cname", "SDES CNAME",
- "The CNAME to put in SDES messages of this session",
- DEFAULT_SDES_CNAME, G_PARAM_READWRITE));
-
- g_object_class_install_property (gobject_class, PROP_SDES_NAME,
- g_param_spec_string ("sdes-name", "SDES NAME",
- "The NAME to put in SDES messages of this session",
- DEFAULT_SDES_NAME, G_PARAM_READWRITE));
-
- g_object_class_install_property (gobject_class, PROP_SDES_EMAIL,
- g_param_spec_string ("sdes-email", "SDES EMAIL",
- "The EMAIL to put in SDES messages of this session",
- DEFAULT_SDES_EMAIL, G_PARAM_READWRITE));
-
- g_object_class_install_property (gobject_class, PROP_SDES_PHONE,
- g_param_spec_string ("sdes-phone", "SDES PHONE",
- "The PHONE to put in SDES messages of this session",
- DEFAULT_SDES_PHONE, G_PARAM_READWRITE));
-
- g_object_class_install_property (gobject_class, PROP_SDES_LOCATION,
- g_param_spec_string ("sdes-location", "SDES LOCATION",
- "The LOCATION to put in SDES messages of this session",
- DEFAULT_SDES_LOCATION, G_PARAM_READWRITE));
-
- g_object_class_install_property (gobject_class, PROP_SDES_TOOL,
- g_param_spec_string ("sdes-tool", "SDES TOOL",
- "The TOOL to put in SDES messages of this session",
- DEFAULT_SDES_TOOL, G_PARAM_READWRITE));
-
- g_object_class_install_property (gobject_class, PROP_SDES_NOTE,
- g_param_spec_string ("sdes-note", "SDES NOTE",
- "The NOTE to put in SDES messages of this session",
- DEFAULT_SDES_NOTE, G_PARAM_READWRITE));
+ g_object_class_install_property (gobject_class, PROP_SDES,
+ g_param_spec_boxed ("sdes", "SDES",
+ "The SDES items of this session",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE));
g_object_class_install_property (gobject_class, PROP_NUM_SOURCES,
g_param_spec_uint ("num-sources", "Num Sources",
@@ -704,33 +662,8 @@ gst_rtp_session_set_property (GObject * object, guint prop_id,
case PROP_RTCP_FRACTION:
rtp_session_set_rtcp_fraction (priv->session, g_value_get_double (value));
break;
- case PROP_SDES_CNAME:
- rtp_session_set_sdes_string (priv->session, GST_RTCP_SDES_CNAME,
- g_value_get_string (value));
- break;
- case PROP_SDES_NAME:
- rtp_session_set_sdes_string (priv->session, GST_RTCP_SDES_NAME,
- g_value_get_string (value));
- break;
- case PROP_SDES_EMAIL:
- rtp_session_set_sdes_string (priv->session, GST_RTCP_SDES_EMAIL,
- g_value_get_string (value));
- break;
- case PROP_SDES_PHONE:
- rtp_session_set_sdes_string (priv->session, GST_RTCP_SDES_PHONE,
- g_value_get_string (value));
- break;
- case PROP_SDES_LOCATION:
- rtp_session_set_sdes_string (priv->session, GST_RTCP_SDES_LOC,
- g_value_get_string (value));
- break;
- case PROP_SDES_TOOL:
- rtp_session_set_sdes_string (priv->session, GST_RTCP_SDES_TOOL,
- g_value_get_string (value));
- break;
- case PROP_SDES_NOTE:
- rtp_session_set_sdes_string (priv->session, GST_RTCP_SDES_NOTE,
- g_value_get_string (value));
+ case PROP_SDES:
+ rtp_session_set_sdes_struct (priv->session, g_value_get_boxed (value));
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
@@ -760,33 +693,8 @@ gst_rtp_session_get_property (GObject * object, guint prop_id,
case PROP_RTCP_FRACTION:
g_value_set_double (value, rtp_session_get_rtcp_fraction (priv->session));
break;
- case PROP_SDES_CNAME:
- g_value_take_string (value, rtp_session_get_sdes_string (priv->session,
- GST_RTCP_SDES_CNAME));
- break;
- case PROP_SDES_NAME:
- g_value_take_string (value, rtp_session_get_sdes_string (priv->session,
- GST_RTCP_SDES_NAME));
- break;
- case PROP_SDES_EMAIL:
- g_value_take_string (value, rtp_session_get_sdes_string (priv->session,
- GST_RTCP_SDES_EMAIL));
- break;
- case PROP_SDES_PHONE:
- g_value_take_string (value, rtp_session_get_sdes_string (priv->session,
- GST_RTCP_SDES_PHONE));
- break;
- case PROP_SDES_LOCATION:
- g_value_take_string (value, rtp_session_get_sdes_string (priv->session,
- GST_RTCP_SDES_LOC));
- break;
- case PROP_SDES_TOOL:
- g_value_take_string (value, rtp_session_get_sdes_string (priv->session,
- GST_RTCP_SDES_TOOL));
- break;
- case PROP_SDES_NOTE:
- g_value_take_string (value, rtp_session_get_sdes_string (priv->session,
- GST_RTCP_SDES_NOTE));
+ case PROP_SDES:
+ g_value_take_boxed (value, rtp_session_get_sdes_struct (priv->session));
break;
case PROP_NUM_SOURCES:
g_value_set_uint (value, rtp_session_get_num_sources (priv->session));
diff --git a/gst/rtpmanager/rtpsession.c b/gst/rtpmanager/rtpsession.c
index cda04182..fa46f501 100644
--- a/gst/rtpmanager/rtpsession.c
+++ b/gst/rtpmanager/rtpsession.c
@@ -49,13 +49,7 @@ enum
#define DEFAULT_BANDWIDTH RTP_STATS_BANDWIDTH
#define DEFAULT_RTCP_FRACTION RTP_STATS_RTCP_BANDWIDTH
#define DEFAULT_RTCP_MTU 1400
-#define DEFAULT_SDES_CNAME NULL
-#define DEFAULT_SDES_NAME NULL
-#define DEFAULT_SDES_EMAIL NULL
-#define DEFAULT_SDES_PHONE NULL
-#define DEFAULT_SDES_LOCATION NULL
-#define DEFAULT_SDES_TOOL NULL
-#define DEFAULT_SDES_NOTE NULL
+#define DEFAULT_SDES NULL
#define DEFAULT_NUM_SOURCES 0
#define DEFAULT_NUM_ACTIVE_SOURCES 0
#define DEFAULT_SOURCES NULL
@@ -68,13 +62,7 @@ enum
PROP_BANDWIDTH,
PROP_RTCP_FRACTION,
PROP_RTCP_MTU,
- PROP_SDES_CNAME,
- PROP_SDES_NAME,
- PROP_SDES_EMAIL,
- PROP_SDES_PHONE,
- PROP_SDES_LOCATION,
- PROP_SDES_TOOL,
- PROP_SDES_NOTE,
+ PROP_SDES,
PROP_NUM_SOURCES,
PROP_NUM_ACTIVE_SOURCES,
PROP_SOURCES,
@@ -273,40 +261,10 @@ rtp_session_class_init (RTPSessionClass * klass)
16, G_MAXINT16, DEFAULT_RTCP_MTU,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
- g_object_class_install_property (gobject_class, PROP_SDES_CNAME,
- g_param_spec_string ("sdes-cname", "SDES CNAME",
- "The CNAME to put in SDES messages of this session",
- DEFAULT_SDES_CNAME, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
- g_object_class_install_property (gobject_class, PROP_SDES_NAME,
- g_param_spec_string ("sdes-name", "SDES NAME",
- "The NAME to put in SDES messages of this session",
- DEFAULT_SDES_NAME, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
- g_object_class_install_property (gobject_class, PROP_SDES_EMAIL,
- g_param_spec_string ("sdes-email", "SDES EMAIL",
- "The EMAIL to put in SDES messages of this session",
- DEFAULT_SDES_EMAIL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
- g_object_class_install_property (gobject_class, PROP_SDES_PHONE,
- g_param_spec_string ("sdes-phone", "SDES PHONE",
- "The PHONE to put in SDES messages of this session",
- DEFAULT_SDES_PHONE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
- g_object_class_install_property (gobject_class, PROP_SDES_LOCATION,
- g_param_spec_string ("sdes-location", "SDES LOCATION",
- "The LOCATION to put in SDES messages of this session",
- DEFAULT_SDES_LOCATION, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
- g_object_class_install_property (gobject_class, PROP_SDES_TOOL,
- g_param_spec_string ("sdes-tool", "SDES TOOL",
- "The TOOL to put in SDES messages of this session",
- DEFAULT_SDES_TOOL, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
- g_object_class_install_property (gobject_class, PROP_SDES_NOTE,
- g_param_spec_string ("sdes-note", "SDES NOTE",
- "The NOTE to put in SDES messages of this session",
- DEFAULT_SDES_NOTE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
+ g_object_class_install_property (gobject_class, PROP_SDES,
+ g_param_spec_boxed ("sdes", "SDES",
+ "The SDES items of this session",
+ GST_TYPE_STRUCTURE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_NUM_SOURCES,
g_param_spec_uint ("num-sources", "Num Sources",
@@ -426,6 +384,7 @@ copy_source (gpointer key, RTPSource * source, GValueArray * arr)
g_value_init (&value, RTP_TYPE_SOURCE);
g_value_take_object (&value, source);
+ /* copies the value */
g_value_array_append (arr, &value);
}
@@ -469,33 +428,8 @@ rtp_session_set_property (GObject * object, guint prop_id,
case PROP_RTCP_MTU:
sess->mtu = g_value_get_uint (value);
break;
- case PROP_SDES_CNAME:
- rtp_session_set_sdes_string (sess, GST_RTCP_SDES_CNAME,
- g_value_get_string (value));
- break;
- case PROP_SDES_NAME:
- rtp_session_set_sdes_string (sess, GST_RTCP_SDES_NAME,
- g_value_get_string (value));
- break;
- case PROP_SDES_EMAIL:
- rtp_session_set_sdes_string (sess, GST_RTCP_SDES_EMAIL,
- g_value_get_string (value));
- break;
- case PROP_SDES_PHONE:
- rtp_session_set_sdes_string (sess, GST_RTCP_SDES_PHONE,
- g_value_get_string (value));
- break;
- case PROP_SDES_LOCATION:
- rtp_session_set_sdes_string (sess, GST_RTCP_SDES_LOC,
- g_value_get_string (value));
- break;
- case PROP_SDES_TOOL:
- rtp_session_set_sdes_string (sess, GST_RTCP_SDES_TOOL,
- g_value_get_string (value));
- break;
- case PROP_SDES_NOTE:
- rtp_session_set_sdes_string (sess, GST_RTCP_SDES_NOTE,
- g_value_get_string (value));
+ case PROP_SDES:
+ rtp_session_set_sdes_struct (sess, g_value_get_boxed (value));
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
@@ -527,33 +461,8 @@ rtp_session_get_property (GObject * object, guint prop_id,
case PROP_RTCP_MTU:
g_value_set_uint (value, sess->mtu);
break;
- case PROP_SDES_CNAME:
- g_value_take_string (value, rtp_session_get_sdes_string (sess,
- GST_RTCP_SDES_CNAME));
- break;
- case PROP_SDES_NAME:
- g_value_take_string (value, rtp_session_get_sdes_string (sess,
- GST_RTCP_SDES_NAME));
- break;
- case PROP_SDES_EMAIL:
- g_value_take_string (value, rtp_session_get_sdes_string (sess,
- GST_RTCP_SDES_EMAIL));
- break;
- case PROP_SDES_PHONE:
- g_value_take_string (value, rtp_session_get_sdes_string (sess,
- GST_RTCP_SDES_PHONE));
- break;
- case PROP_SDES_LOCATION:
- g_value_take_string (value, rtp_session_get_sdes_string (sess,
- GST_RTCP_SDES_LOC));
- break;
- case PROP_SDES_TOOL:
- g_value_take_string (value, rtp_session_get_sdes_string (sess,
- GST_RTCP_SDES_TOOL));
- break;
- case PROP_SDES_NOTE:
- g_value_take_string (value, rtp_session_get_sdes_string (sess,
- GST_RTCP_SDES_NOTE));
+ case PROP_SDES:
+ g_value_take_boxed (value, rtp_session_get_sdes_struct (sess));
break;
case PROP_NUM_SOURCES:
g_value_set_uint (value, rtp_session_get_num_sources (sess));
@@ -957,6 +866,45 @@ rtp_session_get_sdes_string (RTPSession * sess, GstRTCPSDESType type)
return result;
}
+/**
+ * rtp_session_get_sdes_struct:
+ * @sess: an #RTSPSession
+ *
+ * Get the SDES data as a #GstStructure
+ *
+ * Returns: a GstStructure with SDES items for @sess.
+ */
+GstStructure *
+rtp_session_get_sdes_struct (RTPSession * sess)
+{
+ GstStructure *result;
+
+ g_return_val_if_fail (RTP_IS_SESSION (sess), NULL);
+
+ RTP_SESSION_LOCK (sess);
+ result = rtp_source_get_sdes_struct (sess->source);
+ RTP_SESSION_UNLOCK (sess);
+
+ return result;
+}
+
+/**
+ * rtp_session_set_sdes_struct:
+ * @sess: an #RTSPSession
+ * @sdes: a #GstStructure
+ *
+ * Set the SDES data as a #GstStructure.
+ */
+void
+rtp_session_set_sdes_struct (RTPSession * sess, const GstStructure * sdes)
+{
+ g_return_if_fail (RTP_IS_SESSION (sess));
+
+ RTP_SESSION_LOCK (sess);
+ rtp_source_set_sdes_struct (sess->source, sdes);
+ RTP_SESSION_UNLOCK (sess);
+}
+
static GstFlowReturn
source_push_rtp (RTPSource * source, gpointer data, RTPSession * session)
{
@@ -1392,7 +1340,6 @@ rtp_session_create_new_ssrc (RTPSession * sess)
GINT_TO_POINTER (ssrc)) == NULL)
break;
}
-
return ssrc;
}
diff --git a/gst/rtpmanager/rtpsession.h b/gst/rtpmanager/rtpsession.h
index 6312f1c1..25e228b0 100644
--- a/gst/rtpmanager/rtpsession.h
+++ b/gst/rtpmanager/rtpsession.h
@@ -267,6 +267,9 @@ gboolean rtp_session_set_sdes_string (RTPSession *sess, GstRTCPSDE
const gchar *cname);
gchar* rtp_session_get_sdes_string (RTPSession *sess, GstRTCPSDESType type);
+GstStructure * rtp_session_get_sdes_struct (RTPSession *sess);
+void rtp_session_set_sdes_struct (RTPSession *sess, const GstStructure *sdes);
+
/* handling sources */
RTPSource* rtp_session_get_internal_source (RTPSession *sess);
diff --git a/gst/rtpmanager/rtpsource.c b/gst/rtpmanager/rtpsource.c
index 209c17b5..40cdd238 100644
--- a/gst/rtpmanager/rtpsource.c
+++ b/gst/rtpmanager/rtpsource.c
@@ -188,52 +188,13 @@ rtp_source_finalize (GObject * object)
G_OBJECT_CLASS (rtp_source_parent_class)->finalize (object);
}
-#define MAX_ADDRESS 64
-static void
-make_address_string (GstNetAddress * addr, gchar * dest, gulong n)
-{
- switch (gst_netaddress_get_net_type (addr)) {
- case GST_NET_TYPE_IP4:
- {
- guint32 address;
- guint16 port;
-
- gst_netaddress_get_ip4_address (addr, &address, &port);
- address = g_ntohl (address);
-
- g_snprintf (dest, n, "%d.%d.%d.%d:%d", (address >> 24) & 0xff,
- (address >> 16) & 0xff, (address >> 8) & 0xff, address & 0xff,
- g_ntohs (port));
- break;
- }
- case GST_NET_TYPE_IP6:
- {
- guint8 address[16];
- guint16 port;
-
- gst_netaddress_get_ip6_address (addr, address, &port);
-
- g_snprintf (dest, n, "[%04x:%04x:%04x:%04x:%04x:%04x:%04x:%04x]:%d",
- (address[0] << 8) | address[1], (address[2] << 8) | address[3],
- (address[4] << 8) | address[5], (address[6] << 8) | address[7],
- (address[8] << 8) | address[9], (address[10] << 8) | address[11],
- (address[12] << 8) | address[13], (address[14] << 8) | address[15],
- g_ntohs (port));
- break;
- }
- default:
- dest[0] = 0;
- break;
- }
-}
-
static GstStructure *
rtp_source_create_stats (RTPSource * src)
{
GstStructure *s;
gboolean is_sender = src->is_sender;
gboolean internal = src->internal;
- gchar address_str[MAX_ADDRESS];
+ gchar address_str[GST_NETADDRESS_MAX_LEN];
/* common data for all types of sources */
s = gst_structure_new ("application/x-rtp-source-stats",
@@ -246,11 +207,13 @@ rtp_source_create_stats (RTPSource * src)
/* add address and port */
if (src->have_rtp_from) {
- make_address_string (&src->rtp_from, address_str, sizeof (address_str));
+ gst_netaddress_to_string (&src->rtp_from, address_str,
+ sizeof (address_str));
gst_structure_set (s, "rtp-from", G_TYPE_STRING, address_str, NULL);
}
if (src->have_rtcp_from) {
- make_address_string (&src->rtcp_from, address_str, sizeof (address_str));
+ gst_netaddress_to_string (&src->rtcp_from, address_str,
+ sizeof (address_str));
gst_structure_set (s, "rtcp-from", G_TYPE_STRING, address_str, NULL);
}
@@ -316,8 +279,16 @@ rtp_source_create_stats (RTPSource * src)
return s;
}
-static GstStructure *
-rtp_source_create_sdes (RTPSource * src)
+/**
+ * rtp_source_get_sdes_struct:
+ * @src: an #RTSPSource
+ *
+ * Get the SDES data as a GstStructure
+ *
+ * Returns: a GstStructure with SDES items for @src.
+ */
+GstStructure *
+rtp_source_get_sdes_struct (RTPSource * src)
{
GstStructure *s;
gchar *str;
@@ -356,6 +327,44 @@ rtp_source_create_sdes (RTPSource * src)
return s;
}
+/**
+ * rtp_source_set_sdes_struct:
+ * @src: an #RTSPSource
+ * @sdes: a #GstStructure with SDES info
+ *
+ * Set the SDES items from @sdes.
+ */
+void
+rtp_source_set_sdes_struct (RTPSource * src, const GstStructure * sdes)
+{
+ const gchar *str;
+
+ if (!gst_structure_has_name (sdes, "application/x-rtp-source-sdes"))
+ return;
+
+ if ((str = gst_structure_get_string (sdes, "cname"))) {
+ rtp_source_set_sdes_string (src, GST_RTCP_SDES_CNAME, str);
+ }
+ if ((str = gst_structure_get_string (sdes, "name"))) {
+ rtp_source_set_sdes_string (src, GST_RTCP_SDES_NAME, str);
+ }
+ if ((str = gst_structure_get_string (sdes, "email"))) {
+ rtp_source_set_sdes_string (src, GST_RTCP_SDES_EMAIL, str);
+ }
+ if ((str = gst_structure_get_string (sdes, "phone"))) {
+ rtp_source_set_sdes_string (src, GST_RTCP_SDES_PHONE, str);
+ }
+ if ((str = gst_structure_get_string (sdes, "location"))) {
+ rtp_source_set_sdes_string (src, GST_RTCP_SDES_LOC, str);
+ }
+ if ((str = gst_structure_get_string (sdes, "tool"))) {
+ rtp_source_set_sdes_string (src, GST_RTCP_SDES_TOOL, str);
+ }
+ if ((str = gst_structure_get_string (sdes, "note"))) {
+ rtp_source_set_sdes_string (src, GST_RTCP_SDES_NOTE, str);
+ }
+}
+
static void
rtp_source_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
@@ -396,7 +405,7 @@ rtp_source_get_property (GObject * object, guint prop_id,
g_value_set_boolean (value, rtp_source_is_sender (src));
break;
case PROP_SDES:
- g_value_take_boxed (value, rtp_source_create_sdes (src));
+ g_value_take_boxed (value, rtp_source_get_sdes_struct (src));
break;
case PROP_STATS:
g_value_take_boxed (value, rtp_source_create_stats (src));
diff --git a/gst/rtpmanager/rtpsource.h b/gst/rtpmanager/rtpsource.h
index 8286f2ec..8355bc0c 100644
--- a/gst/rtpmanager/rtpsource.h
+++ b/gst/rtpmanager/rtpsource.h
@@ -187,6 +187,9 @@ gboolean rtp_source_get_sdes (RTPSource *src, GstRTCPSDESType
guint8 **data, guint *len);
gchar* rtp_source_get_sdes_string (RTPSource *src, GstRTCPSDESType type);
+GstStructure * rtp_source_get_sdes_struct (RTPSource * src);
+void rtp_source_set_sdes_struct (RTPSource * src, const GstStructure *sdes);
+
/* handling network address */
void rtp_source_set_rtp_from (RTPSource *src, GstNetAddress *address);
void rtp_source_set_rtcp_from (RTPSource *src, GstNetAddress *address);
diff --git a/gst/shapewipe/gstshapewipe.c b/gst/shapewipe/gstshapewipe.c
index 4029c494..655cfc50 100644
--- a/gst/shapewipe/gstshapewipe.c
+++ b/gst/shapewipe/gstshapewipe.c
@@ -93,10 +93,10 @@ enum
};
static GstStaticPadTemplate video_sink_pad_template =
-GST_STATIC_PAD_TEMPLATE ("video_sink",
+ GST_STATIC_PAD_TEMPLATE ("video_sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV")));
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") " ; " GST_VIDEO_CAPS_ARGB));
static GstStaticPadTemplate mask_sink_pad_template =
GST_STATIC_PAD_TEMPLATE ("mask_sink",
@@ -112,8 +112,8 @@ static GstStaticPadTemplate mask_sink_pad_template =
"height = " GST_VIDEO_SIZE_RANGE ", " "framerate = 0/1"));
static GstStaticPadTemplate src_pad_template =
-GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
- GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV")));
+ GST_STATIC_PAD_TEMPLATE ("src", GST_PAD_SRC, GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("AYUV") " ; " GST_VIDEO_CAPS_ARGB));
GST_DEBUG_CATEGORY_STATIC (gst_shape_wipe_debug);
#define GST_CAT_DEFAULT gst_shape_wipe_debug
@@ -273,6 +273,7 @@ gst_shape_wipe_reset (GstShapeWipe * self)
g_cond_signal (self->mask_cond);
+ self->fmt = GST_VIDEO_FORMAT_UNKNOWN;
self->width = self->height = 0;
self->mask_position = 0.0;
self->mask_border = 0.0;
@@ -309,6 +310,7 @@ gst_shape_wipe_video_sink_setcaps (GstPad * pad, GstCaps * caps)
GstShapeWipe *self = GST_SHAPE_WIPE (gst_pad_get_parent (pad));
gboolean ret = TRUE;
GstStructure *s;
+ GstVideoFormat fmt;
gint width, height;
gint fps_n, fps_d;
@@ -316,13 +318,13 @@ gst_shape_wipe_video_sink_setcaps (GstPad * pad, GstCaps * caps)
s = gst_caps_get_structure (caps, 0);
- if (!gst_structure_get_int (s, "width", &width) ||
- !gst_structure_get_int (s, "height", &height) ||
+ if (!gst_video_format_parse_caps (caps, &fmt, &width, &height) ||
!gst_structure_get_fraction (s, "framerate", &fps_n, &fps_d)) {
ret = FALSE;
goto done;
}
+ self->fmt = fmt;
if (self->width != width || self->height != height) {
g_mutex_lock (self->mask_mutex);
self->width = width;
@@ -402,13 +404,20 @@ gst_shape_wipe_video_sink_getcaps (GstPad * pad)
n = gst_caps_get_size (tmp);
+ tmp2 = gst_caps_new_empty ();
for (i = 0; i < n; i++) {
GstStructure *s = gst_caps_get_structure (tmp, i);
+ GstStructure *c;
- gst_structure_remove_fields (s, "bpp", "depth", "endianness", "framerate",
+ gst_structure_remove_fields (s, "format", "bpp", "depth", "endianness",
+ "framerate", "red_mask", "green_mask", "blue_mask", "alpha_mask",
NULL);
gst_structure_set_name (s, "video/x-raw-yuv");
+ c = gst_structure_copy (s);
+ gst_structure_set_name (c, "video/x-raw-rgb");
+ gst_caps_append_structure (tmp2, c);
}
+ gst_caps_append (tmp, tmp2);
intersection = gst_caps_intersect (tmp, ret);
gst_caps_unref (tmp);
@@ -498,7 +507,9 @@ gst_shape_wipe_mask_sink_getcaps (GstPad * pad)
GstStructure *t;
gst_structure_set_name (s, "video/x-raw-gray");
- gst_structure_remove_fields (s, "format", "framerate", NULL);
+ gst_structure_remove_fields (s, "format", "framerate", "bpp", "depth",
+ "endianness", "framerate", "red_mask", "green_mask", "blue_mask",
+ "alpha_mask", NULL);
if (self->width && self->height)
gst_structure_set (s, "width", G_TYPE_INT, self->width, "height",
@@ -514,7 +525,7 @@ gst_shape_wipe_mask_sink_getcaps (GstPad * pad)
gst_caps_append_structure (tmp, t);
}
- gst_caps_merge (ret, tmp);
+ gst_caps_append (ret, tmp);
tmp = gst_pad_peer_get_caps (pad);
if (tmp) {
@@ -591,13 +602,20 @@ gst_shape_wipe_src_getcaps (GstPad * pad)
tmp = intersection;
n = gst_caps_get_size (tmp);
+ tmp2 = gst_caps_new_empty ();
for (i = 0; i < n; i++) {
GstStructure *s = gst_caps_get_structure (tmp, i);
+ GstStructure *c;
- gst_structure_remove_fields (s, "bpp", "depth", "endianness", "framerate",
+ gst_structure_remove_fields (s, "format", "bpp", "depth", "endianness",
+ "framerate", "red_mask", "green_mask", "blue_mask", "alpha_mask",
NULL);
gst_structure_set_name (s, "video/x-raw-yuv");
+ c = gst_structure_copy (s);
+
+ gst_caps_append_structure (tmp2, c);
}
+ gst_caps_append (tmp, tmp2);
intersection = gst_caps_intersect (tmp, ret);
gst_caps_unref (tmp);
@@ -729,118 +747,128 @@ gst_shape_wipe_do_qos (GstShapeWipe * self, GstClockTime timestamp)
return TRUE;
}
-static GstFlowReturn
-gst_shape_wipe_blend_16 (GstShapeWipe * self, GstBuffer * inbuf,
- GstBuffer * maskbuf, GstBuffer * outbuf)
-{
- const guint16 *mask = (const guint16 *) GST_BUFFER_DATA (maskbuf);
- const guint8 *input = (const guint8 *) GST_BUFFER_DATA (inbuf);
- guint8 *output = (guint8 *) GST_BUFFER_DATA (outbuf);
- guint i, j;
- guint mask_increment = GST_ROUND_UP_2 (self->width) - self->width;
- gfloat position = self->mask_position;
- gfloat low = position - (self->mask_border / 2.0f);
- gfloat high = position + (self->mask_border / 2.0f);
-
- if (low < 0.0f) {
- high = 0.0f;
- low = 0.0f;
- }
-
- if (high > 1.0f) {
- low = 1.0f;
- high = 1.0f;
- }
-
- for (i = 0; i < self->height; i++) {
- for (j = 0; j < self->width; j++) {
- gfloat in = *mask / 65536.0f;
-
- if (in < low) {
- output[0] = 0x00; /* A */
- output[1] = 0x00; /* Y */
- output[2] = 0x80; /* U */
- output[3] = 0x80; /* V */
- } else if (in >= high) {
- output[0] = 0xff; /* A */
- output[1] = input[1]; /* Y */
- output[2] = input[2]; /* U */
- output[3] = input[3]; /* V */
- } else {
- gfloat val = 255.0f * ((in - low) / (high - low));
-
- output[0] = CLAMP (val, 0, 255); /* A */
- output[1] = input[1]; /* Y */
- output[2] = input[2]; /* U */
- output[3] = input[3]; /* V */
- }
-
- mask++;
- input += 4;
- output += 4;
- }
- mask += mask_increment;
- }
-
- return GST_FLOW_OK;
+#define CREATE_AYUV_FUNCTIONS(depth, scale) \
+static GstFlowReturn \
+gst_shape_wipe_blend_ayuv_##depth (GstShapeWipe * self, GstBuffer * inbuf, \
+ GstBuffer * maskbuf, GstBuffer * outbuf) \
+{ \
+ const guint##depth *mask = (const guint##depth *) GST_BUFFER_DATA (maskbuf); \
+ const guint8 *input = (const guint8 *) GST_BUFFER_DATA (inbuf); \
+ guint8 *output = (guint8 *) GST_BUFFER_DATA (outbuf); \
+ guint i, j; \
+ guint mask_increment = ((depth == 16) ? GST_ROUND_UP_2 (self->width) : \
+ GST_ROUND_UP_4 (self->width)) - self->width; \
+ gfloat position = self->mask_position; \
+ gfloat low = position - (self->mask_border / 2.0f); \
+ gfloat high = position + (self->mask_border / 2.0f); \
+ \
+ if (low < 0.0f) { \
+ high = 0.0f; \
+ low = 0.0f; \
+ } \
+ \
+ if (high > 1.0f) { \
+ low = 1.0f; \
+ high = 1.0f; \
+ } \
+ \
+ for (i = 0; i < self->height; i++) { \
+ for (j = 0; j < self->width; j++) { \
+ gfloat in = *mask / scale; \
+ \
+ if (in < low) { \
+ output[0] = 0x00; /* A */ \
+ output[1] = 0x00; /* Y */ \
+ output[2] = 0x80; /* U */ \
+ output[3] = 0x80; /* V */ \
+ } else if (in >= high) { \
+ output[0] = 0xff; /* A */ \
+ output[1] = input[1]; /* Y */ \
+ output[2] = input[2]; /* U */ \
+ output[3] = input[3]; /* V */ \
+ } else { \
+ gfloat val = 255.0f * ((in - low) / (high - low)); \
+ \
+ output[0] = CLAMP (val, 0, 255); /* A */ \
+ output[1] = input[1]; /* Y */ \
+ output[2] = input[2]; /* U */ \
+ output[3] = input[3]; /* V */ \
+ } \
+ \
+ mask++; \
+ input += 4; \
+ output += 4; \
+ } \
+ mask += mask_increment; \
+ } \
+ \
+ return GST_FLOW_OK; \
}
-static GstFlowReturn
-gst_shape_wipe_blend_8 (GstShapeWipe * self, GstBuffer * inbuf,
- GstBuffer * maskbuf, GstBuffer * outbuf)
-{
- const guint8 *mask = (const guint8 *) GST_BUFFER_DATA (maskbuf);
- const guint8 *input = (const guint8 *) GST_BUFFER_DATA (inbuf);
- guint8 *output = (guint8 *) GST_BUFFER_DATA (outbuf);
- guint i, j;
- guint mask_increment = GST_ROUND_UP_4 (self->width) - self->width;
- gfloat position = self->mask_position;
- gfloat low = position - (self->mask_border / 2.0f);
- gfloat high = position + (self->mask_border / 2.0f);
-
- if (low < 0.0f) {
- high = 0.0f;
- low = 0.0f;
- }
-
- if (high > 1.0f) {
- low = 1.0f;
- high = 1.0f;
- }
-
- for (i = 0; i < self->height; i++) {
- for (j = 0; j < self->width; j++) {
- gfloat in = *mask / 256.0f;
-
- if (in < low) {
- output[0] = 0x00; /* A */
- output[1] = 0x00; /* Y */
- output[2] = 0x80; /* U */
- output[3] = 0x80; /* V */
- } else if (in >= high) {
- output[0] = 0xff; /* A */
- output[1] = input[1]; /* Y */
- output[2] = input[2]; /* U */
- output[3] = input[3]; /* V */
- } else {
- gfloat val = 255.0f * ((in - low) / (high - low));
-
- output[0] = CLAMP (val, 0, 255); /* A */
- output[1] = input[1]; /* Y */
- output[2] = input[2]; /* U */
- output[3] = input[3]; /* V */
- }
-
- mask++;
- input += 4;
- output += 4;
- }
- mask += mask_increment;
- }
-
- return GST_FLOW_OK;
+CREATE_AYUV_FUNCTIONS (16, 65536.0f);
+CREATE_AYUV_FUNCTIONS (8, 256.0f);
+
+#define CREATE_ARGB_FUNCTIONS(depth, scale) \
+static GstFlowReturn \
+gst_shape_wipe_blend_argb_##depth (GstShapeWipe * self, GstBuffer * inbuf, \
+ GstBuffer * maskbuf, GstBuffer * outbuf) \
+{ \
+ const guint##depth *mask = (const guint##depth *) GST_BUFFER_DATA (maskbuf); \
+ const guint8 *input = (const guint8 *) GST_BUFFER_DATA (inbuf); \
+ guint8 *output = (guint8 *) GST_BUFFER_DATA (outbuf); \
+ guint i, j; \
+ guint mask_increment = ((depth == 16) ? GST_ROUND_UP_2 (self->width) : \
+ GST_ROUND_UP_4 (self->width)) - self->width; \
+ gfloat position = self->mask_position; \
+ gfloat low = position - (self->mask_border / 2.0f); \
+ gfloat high = position + (self->mask_border / 2.0f); \
+ \
+ if (low < 0.0f) { \
+ high = 0.0f; \
+ low = 0.0f; \
+ } \
+ \
+ if (high > 1.0f) { \
+ low = 1.0f; \
+ high = 1.0f; \
+ } \
+ \
+ for (i = 0; i < self->height; i++) { \
+ for (j = 0; j < self->width; j++) { \
+ gfloat in = *mask / scale; \
+ \
+ if (in < low) { \
+ output[0] = 0x00; /* A */ \
+ output[1] = 0x00; /* R */ \
+ output[2] = 0x00; /* G */ \
+ output[3] = 0x00; /* B */ \
+ } else if (in >= high) { \
+ output[0] = 0xff; /* A */ \
+ output[1] = input[1]; /* R */ \
+ output[2] = input[2]; /* G */ \
+ output[3] = input[3]; /* B */ \
+ } else { \
+ gfloat val = 255.0f * ((in - low) / (high - low)); \
+ \
+ output[0] = CLAMP (val, 0, 255); /* A */ \
+ output[1] = input[1]; /* R */ \
+ output[2] = input[2]; /* G */ \
+ output[3] = input[3]; /* B */ \
+ } \
+ \
+ mask++; \
+ input += 4; \
+ output += 4; \
+ } \
+ mask += mask_increment; \
+ } \
+ \
+ return GST_FLOW_OK; \
}
+CREATE_ARGB_FUNCTIONS (16, 65536.0f);
+CREATE_ARGB_FUNCTIONS (8, 256.0f);
+
static GstFlowReturn
gst_shape_wipe_video_sink_chain (GstPad * pad, GstBuffer * buffer)
{
@@ -850,6 +878,9 @@ gst_shape_wipe_video_sink_chain (GstPad * pad, GstBuffer * buffer)
GstClockTime timestamp;
gboolean new_outbuf = FALSE;
+ if (G_UNLIKELY (self->fmt == GST_VIDEO_FORMAT_UNKNOWN))
+ return GST_FLOW_NOT_NEGOTIATED;
+
timestamp = GST_BUFFER_TIMESTAMP (buffer);
timestamp =
gst_segment_to_stream_time (&self->segment, GST_FORMAT_TIME, timestamp);
@@ -898,10 +929,16 @@ gst_shape_wipe_video_sink_chain (GstPad * pad, GstBuffer * buffer)
outbuf = buffer;
}
- if (self->mask_bpp == 16)
- ret = gst_shape_wipe_blend_16 (self, buffer, mask, outbuf);
+ if (self->fmt == GST_VIDEO_FORMAT_AYUV && self->mask_bpp == 16)
+ ret = gst_shape_wipe_blend_ayuv_16 (self, buffer, mask, outbuf);
+ else if (self->fmt == GST_VIDEO_FORMAT_AYUV)
+ ret = gst_shape_wipe_blend_ayuv_8 (self, buffer, mask, outbuf);
+ else if (self->fmt == GST_VIDEO_FORMAT_ARGB && self->mask_bpp == 16)
+ ret = gst_shape_wipe_blend_argb_16 (self, buffer, mask, outbuf);
+ else if (self->fmt == GST_VIDEO_FORMAT_ARGB)
+ ret = gst_shape_wipe_blend_argb_8 (self, buffer, mask, outbuf);
else
- ret = gst_shape_wipe_blend_8 (self, buffer, mask, outbuf);
+ g_assert_not_reached ();
gst_buffer_unref (mask);
if (new_outbuf)
diff --git a/gst/shapewipe/gstshapewipe.h b/gst/shapewipe/gstshapewipe.h
index 2cc44831..f6196c69 100644
--- a/gst/shapewipe/gstshapewipe.h
+++ b/gst/shapewipe/gstshapewipe.h
@@ -59,6 +59,7 @@ struct _GstShapeWipe
GCond *mask_cond;
gint mask_bpp;
+ GstVideoFormat fmt;
gint width, height;
gdouble proportion;
diff --git a/tests/check/elements/rtpbin.c b/tests/check/elements/rtpbin.c
index bc30c918..8764da54 100644
--- a/tests/check/elements/rtpbin.c
+++ b/tests/check/elements/rtpbin.c
@@ -306,6 +306,89 @@ GST_START_TEST (test_cleanup_recv)
GST_END_TEST;
+GST_START_TEST (test_cleanup_recv2)
+{
+ GstElement *rtpbin;
+ GstPad *rtp_sink;
+ CleanupData data;
+ GstStateChangeReturn ret;
+ GstFlowReturn res;
+ GstBuffer *buffer;
+ gint count = 2;
+
+ init_data (&data);
+
+ rtpbin = gst_element_factory_make ("gstrtpbin", "rtpbin");
+
+ g_signal_connect (rtpbin, "pad-added", (GCallback) pad_added_cb, &data);
+ g_signal_connect (rtpbin, "pad-removed", (GCallback) pad_removed_cb, &data);
+
+ ret = gst_element_set_state (rtpbin, GST_STATE_PLAYING);
+ fail_unless (ret == GST_STATE_CHANGE_SUCCESS);
+
+ /* request session 0 */
+ rtp_sink = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_0");
+ fail_unless (rtp_sink != NULL);
+ ASSERT_OBJECT_REFCOUNT (rtp_sink, "rtp_sink", 2);
+
+ while (count--) {
+ /* no sourcepads are created yet */
+ fail_unless (rtpbin->numsinkpads == 1);
+ fail_unless (rtpbin->numsrcpads == 0);
+
+ buffer = make_rtp_packet (&data);
+ res = gst_pad_chain (rtp_sink, buffer);
+ GST_DEBUG ("res %d, %s\n", res, gst_flow_get_name (res));
+ fail_unless (res == GST_FLOW_OK);
+
+ buffer = make_rtp_packet (&data);
+ res = gst_pad_chain (rtp_sink, buffer);
+ GST_DEBUG ("res %d, %s\n", res, gst_flow_get_name (res));
+ fail_unless (res == GST_FLOW_OK);
+
+ /* we wait for the new pad to appear now */
+ g_mutex_lock (data.lock);
+ while (!data.pad_added)
+ g_cond_wait (data.cond, data.lock);
+ g_mutex_unlock (data.lock);
+
+ /* sourcepad created now */
+ fail_unless (rtpbin->numsinkpads == 1);
+ fail_unless (rtpbin->numsrcpads == 1);
+
+ /* change state */
+ ret = gst_element_set_state (rtpbin, GST_STATE_NULL);
+ fail_unless (ret == GST_STATE_CHANGE_SUCCESS);
+
+ /* pad should be gone now */
+ g_mutex_lock (data.lock);
+ while (data.pad_added)
+ g_cond_wait (data.cond, data.lock);
+ g_mutex_unlock (data.lock);
+
+ /* back to playing for the next round */
+ ret = gst_element_set_state (rtpbin, GST_STATE_PLAYING);
+ fail_unless (ret == GST_STATE_CHANGE_SUCCESS);
+ }
+
+ /* remove the session */
+ gst_element_release_request_pad (rtpbin, rtp_sink);
+ gst_object_unref (rtp_sink);
+
+ /* nothing left anymore now */
+ fail_unless (rtpbin->numsinkpads == 0);
+ fail_unless (rtpbin->numsrcpads == 0);
+
+ ret = gst_element_set_state (rtpbin, GST_STATE_NULL);
+ fail_unless (ret == GST_STATE_CHANGE_SUCCESS);
+
+ gst_object_unref (rtpbin);
+
+ clean_data (&data);
+}
+
+GST_END_TEST;
+
Suite *
gstrtpbin_suite (void)
{
@@ -315,6 +398,7 @@ gstrtpbin_suite (void)
suite_add_tcase (s, tc_chain);
tcase_add_test (tc_chain, test_cleanup_send);
tcase_add_test (tc_chain, test_cleanup_recv);
+ tcase_add_test (tc_chain, test_cleanup_recv2);
return s;
}
diff --git a/tests/examples/Makefile.am b/tests/examples/Makefile.am
index 45d94378..42ffa481 100644
--- a/tests/examples/Makefile.am
+++ b/tests/examples/Makefile.am
@@ -11,4 +11,4 @@ DIRECTFB_DIR=
endif
SUBDIRS= $(DIRECTFB_DIR) $(GTK_EXAMPLES) shapewipe switch
-DIST_SUBDIRS= directfb mxf scaletempo shapewipe switch
+DIST_SUBDIRS= camerabin directfb mxf scaletempo shapewipe switch
diff --git a/tests/examples/camerabin/gst-camera-perf.c b/tests/examples/camerabin/gst-camera-perf.c
index b6424181..858e60cd 100644
--- a/tests/examples/camerabin/gst-camera-perf.c
+++ b/tests/examples/camerabin/gst-camera-perf.c
@@ -105,7 +105,9 @@ static guint test_ix = 0;
static gboolean signal_sink = FALSE;
static gboolean signal_shot = FALSE;
static gboolean signal_cont = FALSE;
-//static gboolean signal_save = FALSE;
+
+static gboolean have_img_captured = FALSE;
+static gboolean have_img_done = FALSE;
/* time samples and test results */
static GstClockTime t_initial = G_GUINT64_CONSTANT (0);
@@ -129,14 +131,14 @@ static GstClockTime target[TEST_CASES] = {
static const gchar *test_names[TEST_CASES] = {
"Camera OFF to VF on",
- "(3A latency)",
+ "(3A latency)", /* time to get AF? */
"Shot to snapshot",
"Shot to shot",
"Serial shooting",
"(Shutter lag)",
"Image saved",
"Mode change",
- "(Video recording)"
+ "(Video recording)" /* time to get videobin to PLAYING? or first buffer reaching filesink? */
};
/*
@@ -199,7 +201,14 @@ img_capture_done (GstElement * camera, GString * fname, gpointer user_data)
GST_INFO ("%2d cont new filename '%s'", test_ix, filename->str);
g_object_set (camera_bin, "filename", filename->str, NULL);
// FIXME: is burst capture broken? new filename and return TRUE should be enough
- g_signal_emit_by_name (camera_bin, "user-start", NULL);
+ // As a workaround, kick off the next image capture from here —
+ // but this must be synchronized: the "image-captured" message has to have been received first.
+ if (have_img_captured) {
+ have_img_captured = FALSE;
+ g_signal_emit_by_name (camera_bin, "user-start", NULL);
+ } else {
+ have_img_done = TRUE;
+ }
ret = TRUE;
} else {
GstClockTime max = 0;
@@ -276,6 +285,25 @@ bus_callback (GstBus * bus, GstMessage * message, gpointer data)
g_main_loop_quit (loop);
break;
}
+ case GST_MESSAGE_STATE_CHANGED:
+ if (GST_MESSAGE_SRC (message) == GST_OBJECT (camera_bin)) {
+ GstState oldstate, newstate;
+
+ gst_message_parse_state_changed (message, &oldstate, &newstate, NULL);
+ GST_INFO ("state-changed: %s -> %s",
+ gst_element_state_get_name (oldstate),
+ gst_element_state_get_name (newstate));
+ if (GST_STATE_TRANSITION (oldstate,
+ newstate) == GST_STATE_CHANGE_PAUSED_TO_PLAYING) {
+ GET_TIME (t_final[0]);
+ DIFF_TIME (t_final[0], t_initial, diff);
+
+ result.avg = result.min = result.max = diff;
+ print_result ();
+ g_idle_add ((GSourceFunc) run_test, NULL);
+ }
+ }
+ break;
case GST_MESSAGE_EOS:
/* end-of-stream */
g_main_loop_quit (loop);
@@ -291,6 +319,15 @@ bus_callback (GstBus * bus, GstMessage * message, gpointer data)
DIFF_TIME (t_final[num_pics_cont], t_initial, diff);
result.avg = result.min = result.max = diff;
break;
+ case 4:
+ // This message must have been received before the next capture can be started.
+ if (have_img_done) {
+ have_img_done = FALSE;
+ g_signal_emit_by_name (camera_bin, "user-start", NULL);
+ } else {
+ have_img_captured = TRUE;
+ }
+ break;
}
} else if (gst_structure_has_name (st, "preview-image")) {
GST_INFO ("%2d preview-image", test_ix);
@@ -318,8 +355,8 @@ static void
cleanup_pipeline (void)
{
if (camera_bin) {
+ GST_INFO_OBJECT (camera_bin, "stopping and destroying");
gst_element_set_state (camera_bin, GST_STATE_NULL);
- gst_element_get_state (camera_bin, NULL, NULL, GST_CLOCK_TIME_NONE);
gst_object_unref (camera_bin);
camera_bin = NULL;
}
@@ -446,6 +483,7 @@ setup_pipeline (void)
g_warning ("can't set camerabin to playing\n");
goto error;
}
+ GST_INFO_OBJECT (camera_bin, "created and started");
return TRUE;
error:
cleanup_pipeline ();
@@ -466,19 +504,25 @@ error:
static gboolean
test_01 (void)
{
+ gboolean res;
+
GET_TIME (t_initial);
if (setup_pipeline ()) {
/* MAKE SURE THE PIPELINE IS IN PLAYING STATE BEFORE START TAKING PICTURES
AND SO ON (otherwise it will deadlock) */
- gst_element_get_state (camera_bin, NULL, NULL, GST_CLOCK_TIME_NONE);
- }
- GET_TIME (t_final[0]);
- DIFF_TIME (t_final[0], t_initial, diff);
+ //gst_element_get_state (camera_bin, NULL, NULL, GST_CLOCK_TIME_NONE);
+ /* the actual results are fetched in bus_callback::state-changed */
+ res = FALSE;
+ } else {
+ GET_TIME (t_final[0]);
+ DIFF_TIME (t_final[0], t_initial, diff);
- result.avg = result.min = result.max = diff;
+ result.avg = result.min = result.max = diff;
+ res = TRUE;
+ }
result.times = 1;
- return TRUE;
+ return res;
}
@@ -528,6 +572,7 @@ static gboolean
test_05 (void)
{
signal_cont = TRUE;
+ have_img_captured = have_img_done = FALSE;
GET_TIME (t_initial);
g_signal_emit_by_name (camera_bin, "user-start", 0);
@@ -545,7 +590,6 @@ test_05 (void)
static gboolean
test_07 (void)
{
- // signal_save = TRUE;
signal_shot = TRUE;
GET_TIME (t_initial);