author     Thiago Santos <thiagoss@embedded.ufcg.edu.br>   2009-07-24 14:52:28 -0300
committer  Thiago Santos <thiagoss@embedded.ufcg.edu.br>   2009-07-24 14:52:28 -0300
commit     2641cd9d94433820796c613d17c9a6227c2a8f27 (patch)
tree       732052db9f0013361b4720ca26c09829e07f7b43
parent     30feab574a5bab8fcfd88644a2f2eb126e18d0df (diff)
asfmux: Adds new plugin asfmux
Adds the brand new asfmux plugin, containing 3 elements: asfmux, rtpasfpay and asfparse. This plugin was developed as a GSoC 2009 project, with David Schleef as the mentor and Thiago Santos as the student.
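For a quick check of the new elements, a minimal muxing pipeline along the lines of the asfmux element documentation included in this patch could be used (this is only a sketch; it assumes the ffenc_wmv2 and ffenc_wmav2 encoders from gst-ffmpeg are available, as in the examples further down):

gst-launch videotestsrc num-buffers=250 ! ffenc_wmv2 ! asfmux name=mux ! filesink location=test.asf \
    audiotestsrc num-buffers=440 ! audioconvert ! ffenc_wmav2 ! mux.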
-rw-r--r--  configure.ac               |    2
-rw-r--r--  gst/asfmux/Makefile.am     |   24
-rw-r--r--  gst/asfmux/gstasf.c        |   51
-rw-r--r--  gst/asfmux/gstasfmux.c     | 2259
-rw-r--r--  gst/asfmux/gstasfmux.h     |  159
-rw-r--r--  gst/asfmux/gstasfobjects.c |  803
-rw-r--r--  gst/asfmux/gstasfobjects.h |  190
-rw-r--r--  gst/asfmux/gstasfparse.c   |  623
-rw-r--r--  gst/asfmux/gstasfparse.h   |   88
-rw-r--r--  gst/asfmux/gstrtpasfpay.c  |  446
-rw-r--r--  gst/asfmux/gstrtpasfpay.h  |   87
11 files changed, 4732 insertions, 0 deletions
diff --git a/configure.ac b/configure.ac
index 1f180beb..7fa190ea 100644
--- a/configure.ac
+++ b/configure.ac
@@ -254,6 +254,7 @@ AG_GST_CHECK_PLUGIN(aacparse)
AG_GST_CHECK_PLUGIN(adpcmdec)
AG_GST_CHECK_PLUGIN(aiffparse)
AG_GST_CHECK_PLUGIN(amrparse)
+AG_GST_CHECK_PLUGIN(asfmux)
AG_GST_CHECK_PLUGIN(autoconvert)
AG_GST_CHECK_PLUGIN(camerabin)
AG_GST_CHECK_PLUGIN(legacyresample)
@@ -1626,6 +1627,7 @@ gst/aacparse/Makefile
gst/adpcmdec/Makefile
gst/aiffparse/Makefile
gst/amrparse/Makefile
+gst/asfmux/Makefile
gst/autoconvert/Makefile
gst/bayer/Makefile
gst/camerabin/Makefile
diff --git a/gst/asfmux/Makefile.am b/gst/asfmux/Makefile.am
new file mode 100644
index 00000000..0246102b
--- /dev/null
+++ b/gst/asfmux/Makefile.am
@@ -0,0 +1,24 @@
+# plugindir is set in configure
+
+plugin_LTLIBRARIES = libgstasfmux.la
+
+# sources used to compile this plug-in
+libgstasfmux_la_SOURCES = gstasfmux.c \
+ gstasfobjects.c \
+ gstasfparse.c \
+ gstrtpasfpay.c \
+ gstasf.c
+
+# flags used to compile this plugin
+# add other _CFLAGS and _LIBS as needed
+libgstasfmux_la_CFLAGS = $(GST_CFLAGS) $(GST_BASE_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS)
+libgstasfmux_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) $(GST_PLUGINS_BASE_LIBS) \
+ -lgstrtp-@GST_MAJORMINOR@
+libgstasfmux_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
+#libgstasfmux_la_LIBTOOLFLAGS = --tag=disable-static
+
+# headers we need but don't want installed
+noinst_HEADERS = gstasfmux.h \
+ gstasfobjects.h \
+ gstasfparse.h \
+ gstrtpasfpay.h
diff --git a/gst/asfmux/gstasf.c b/gst/asfmux/gstasf.c
new file mode 100644
index 00000000..fb935172
--- /dev/null
+++ b/gst/asfmux/gstasf.c
@@ -0,0 +1,51 @@
+/* GStreamer
+ * Copyright (C) 2009 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ *
+ * gstasf.c: plugin registering
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+
+#include "gstasfmux.h"
+#include "gstrtpasfpay.h"
+#include "gstasfparse.h"
+
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ if (!gst_asf_mux_plugin_init (plugin)) {
+ return FALSE;
+ }
+ if (!gst_rtp_asf_pay_plugin_init (plugin)) {
+ return FALSE;
+ }
+ if (!gst_asf_parse_plugin_init (plugin)) {
+ return FALSE;
+ }
+ return TRUE;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ "asfmux",
+ "ASF Muxer Plugin",
+ plugin_init, VERSION, "LGPL", "gsoc2009 package", "embedded.ufcg.edu.br")
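Once the plugin is built and installed, the three elements registered by plugin_init above should show up in the registry; for example (the inspection tool may be installed with a version suffix such as gst-inspect-0.10, depending on the distribution):

gst-inspect asfmux
gst-inspect rtpasfpay
gst-inspect asfparse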
diff --git a/gst/asfmux/gstasfmux.c b/gst/asfmux/gstasfmux.c
new file mode 100644
index 00000000..9e8218df
--- /dev/null
+++ b/gst/asfmux/gstasfmux.c
@@ -0,0 +1,2259 @@
+/* ASF muxer plugin for GStreamer
+ * Copyright (C) 2009 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/* based on:
+ * - avimux (by Ronald Bultje and Mark Nauwelaerts)
+ * - qtmux (by Thiago Santos and Mark Nauwelaerts)
+ */
+
+/**
+ * SECTION:element-asfmux
+ *
+ * Muxes Windows Media content into an ASF file/stream.
+ *
+ * <refsect2>
+ * <title>Example launch lines</title>
+ * <para>(write everything in one line, without the backslash characters)</para>
+ * |[
+ * gst-launch videotestsrc num-buffers=250 \
+ * ! "video/x-raw-yuv,format=(fourcc)I420,framerate=(fraction)25/1" ! ffenc_wmv2 \
+ * ! asfmux name=mux ! filesink location=test.asf \
+ * audiotestsrc num-buffers=440 ! audioconvert \
+ * ! "audio/x-raw-int,rate=44100" ! ffenc_wmav2 ! mux.
+ * ]| This creates an ASF file containing a WMV video stream
+ * with a test picture and a WMA audio stream with a test sound.
+ *
+ * <title>Live streaming</title>
+ * asfmux and rtpasfpay are capable of generating a live ASF stream.
+ * asfmux must have its 'is-live' property set to true; in this
+ * mode it won't try to seek back to the start of the file to replace
+ * fields that couldn't be known at the file start. In this mode
+ * it also won't send indexes after the data packets (the actual
+ * media content).
+ * The following pipelines are an example of this usage.
+ * <para>(write everything in one line, without the backslash characters)</para>
+ * Server (sender)
+ * |[
+ * gst-launch -ve videotestsrc ! ffenc_wmv2 ! asfmux name=mux is-live=true \
+ * ! rtpasfpay ! udpsink host=127.0.0.1 port=3333 \
+ * audiotestsrc ! ffenc_wmav2 ! mux.
+ * ]|
+ * Client (receiver)
+ * |[
+ * gst-launch udpsrc port=3333 ! "caps_from_rtpasfpay_at_sender" \
+ * ! rtpasfdepay ! decodebin2 name=d ! queue \
+ * ! ffmpegcolorspace ! autovideosink \
+ * d. ! queue ! audioconvert ! autoaudiosink
+ * ]|
+ * </refsect2>
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include "gstasfmux.h"
+
+#define DEFAULT_SIMPLE_INDEX_TIME_INTERVAL G_GUINT64_CONSTANT (10000000)
+#define MAX_PAYLOADS_IN_A_PACKET 63
+
+GST_DEBUG_CATEGORY_STATIC (asfmux_debug);
+#define GST_CAT_DEFAULT asfmux_debug
+
+enum
+{
+ PROP_0,
+ PROP_PACKET_SIZE,
+ PROP_PREROLL,
+ PROP_MERGE_STREAM_TAGS,
+ PROP_PADDING,
+ PROP_IS_LIVE
+};
+
+/* Stores a tag list for the available/known tags
+ * in an ASF file
+ * Also stores the sizes those entries would use in a
+ * content description object and extended content
+ * description object
+ */
+typedef struct
+{
+ GstTagList *tags;
+ guint64 cont_desc_size;
+ guint64 ext_cont_desc_size;
+} GstAsfTags;
+
+/* Helper struct to be used as user data
+ * in gst_tag_foreach function for writing
+ * each tag for the metadata objects
+ *
+ * stream_num is used only for stream dependent tags
+ */
+typedef struct
+{
+ GstAsfMux *asfmux;
+ guint8 *buf;
+ guint16 count;
+ guint64 size;
+ guint16 stream_num;
+} GstAsfExtContDescData;
+
+typedef GstAsfExtContDescData GstAsfMetadataObjData;
+
+#define DEFAULT_PACKET_SIZE 4800
+#define DEFAULT_PREROLL 5000
+#define DEFAULT_MERGE_STREAM_TAGS TRUE
+#define DEFAULT_PADDING 0
+#define DEFAULT_IS_LIVE FALSE
+
+static const GstElementDetails gst_asf_mux_details =
+GST_ELEMENT_DETAILS ("ASF muxer",
+ "Codec/Muxer",
+ "Muxes audio and video into an ASF stream",
+ "Thiago Santos <thiagoss@embedded.ufcg.edu.br>");
+
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-ms-asf, " "parsed = (boolean) true")
+ );
+
+static GstStaticPadTemplate video_sink_factory =
+GST_STATIC_PAD_TEMPLATE ("video_%d",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("video/x-wmv, wmvversion = (int) [1,3]"));
+
+static GstStaticPadTemplate audio_sink_factory =
+ GST_STATIC_PAD_TEMPLATE ("audio_%d",
+ GST_PAD_SINK,
+ GST_PAD_REQUEST,
+ GST_STATIC_CAPS ("audio/x-wma, wmaversion = (int) [1,3]; "
+ "audio/mpeg, layer = (int) 3, mpegversion = (int) 1, "
+ "channels = (int) [1,2], rate = (int) [8000,96000]"));
+
+static void gst_asf_mux_base_init (gpointer g_class);
+static void gst_asf_mux_class_init (GstAsfMuxClass * klass);
+static void gst_asf_mux_init (GstAsfMux * asfmux);
+
+static GstPad *gst_asf_mux_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name);
+static void gst_asf_mux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_asf_mux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+static GstStateChangeReturn gst_asf_mux_change_state (GstElement * element,
+ GstStateChange transition);
+
+static gboolean gst_asf_mux_sink_event (GstPad * pad, GstEvent * event);
+
+static void gst_asf_mux_pad_reset (GstAsfPad * data);
+static GstFlowReturn gst_asf_mux_collected (GstCollectPads * collect,
+ gpointer data);
+
+static GstElementClass *parent_class = NULL;
+
+GType
+gst_asf_mux_get_type (void)
+{
+ static GType asfmux_type = 0;
+
+ if (!asfmux_type) {
+ static const GTypeInfo asfmux_info = {
+ sizeof (GstAsfMuxClass),
+ gst_asf_mux_base_init,
+ NULL,
+ (GClassInitFunc) gst_asf_mux_class_init,
+ NULL,
+ NULL,
+ sizeof (GstAsfMux),
+ 0,
+ (GInstanceInitFunc) gst_asf_mux_init,
+ };
+ static const GInterfaceInfo tag_setter_info = {
+ NULL,
+ NULL,
+ NULL
+ };
+
+ asfmux_type =
+ g_type_register_static (GST_TYPE_ELEMENT, "GstAsfMux", &asfmux_info, 0);
+ g_type_add_interface_static (asfmux_type, GST_TYPE_TAG_SETTER,
+ &tag_setter_info);
+ }
+ return asfmux_type;
+}
+
+static void
+gst_asf_mux_reset (GstAsfMux * asfmux)
+{
+ asfmux->state = GST_ASF_MUX_STATE_NONE;
+ asfmux->stream_number = 0;
+ asfmux->data_object_size = 0;
+ asfmux->data_object_position = 0;
+ asfmux->file_properties_object_position = 0;
+ asfmux->total_data_packets = 0;
+ asfmux->file_size = 0;
+ asfmux->packet_size = 0;
+
+ if (asfmux->payloads) {
+ GSList *walk;
+ for (walk = asfmux->payloads; walk; walk = g_slist_next (walk)) {
+ gst_asf_payload_free ((AsfPayload *) walk->data);
+ walk->data = NULL;
+ }
+ g_slist_free (asfmux->payloads);
+ }
+ asfmux->payloads = NULL;
+ asfmux->payload_data_size = 0;
+
+ asfmux->file_id.v1 = 0;
+ asfmux->file_id.v2 = 0;
+ asfmux->file_id.v3 = 0;
+ asfmux->file_id.v4 = 0;
+
+ gst_tag_setter_reset_tags (GST_TAG_SETTER (asfmux));
+}
+
+static void
+gst_asf_mux_base_init (gpointer g_class)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&audio_sink_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&video_sink_factory));
+
+ gst_element_class_set_details (element_class, &gst_asf_mux_details);
+
+ GST_DEBUG_CATEGORY_INIT (asfmux_debug, "asfmux", 0, "Muxer for ASF streams");
+}
+
+static void
+gst_asf_mux_finalize (GObject * object)
+{
+ GstAsfMux *asfmux;
+
+ asfmux = GST_ASF_MUX (object);
+
+ gst_asf_mux_reset (asfmux);
+ gst_object_unref (asfmux->collect);
+
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_asf_mux_class_init (GstAsfMuxClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->get_property = gst_asf_mux_get_property;
+ gobject_class->set_property = gst_asf_mux_set_property;
+ gobject_class->finalize = gst_asf_mux_finalize;
+
+ g_object_class_install_property (gobject_class, PROP_PACKET_SIZE,
+ g_param_spec_uint ("packet-size", "Packet size",
+ "The ASF packets size (bytes)",
+ ASF_MULTIPLE_PAYLOAD_HEADER_SIZE + 1, G_MAXUINT32,
+ DEFAULT_PACKET_SIZE, G_PARAM_READWRITE | G_PARAM_CONSTRUCT));
+ g_object_class_install_property (gobject_class, PROP_PREROLL,
+ g_param_spec_uint64 ("preroll", "Preroll",
+ "The preroll time (milisecs)",
+ 0, G_MAXUINT64,
+ DEFAULT_PREROLL, G_PARAM_READWRITE | G_PARAM_CONSTRUCT));
+ g_object_class_install_property (gobject_class, PROP_MERGE_STREAM_TAGS,
+ g_param_spec_boolean ("merge-stream-tags", "Merge Stream Tags",
+ "If the stream metadata (received as events in the sink) should be "
+ "merged to the main file metadata.",
+ DEFAULT_MERGE_STREAM_TAGS, G_PARAM_READWRITE | G_PARAM_CONSTRUCT));
+ g_object_class_install_property (gobject_class, PROP_PADDING,
+ g_param_spec_uint64 ("padding", "Padding",
+ "Size of the padding object to be added to the end of the header. "
+ "If this less than 24 (the smaller size of an ASF object), "
+ "no padding is added.",
+ 0, G_MAXUINT64,
+ DEFAULT_PADDING, G_PARAM_READWRITE | G_PARAM_CONSTRUCT));
+ g_object_class_install_property (gobject_class, PROP_IS_LIVE,
+ g_param_spec_boolean ("is-live", "Is Live",
+ "If this stream should be threated as a live, meaning that it "
+ "doesn't need indexes nor late update of headers.",
+ DEFAULT_IS_LIVE, G_PARAM_READWRITE | G_PARAM_CONSTRUCT));
+
+ gstelement_class->request_new_pad =
+ GST_DEBUG_FUNCPTR (gst_asf_mux_request_new_pad);
+ gstelement_class->change_state = GST_DEBUG_FUNCPTR (gst_asf_mux_change_state);
+}
+
+static void
+gst_asf_mux_init (GstAsfMux * asfmux)
+{
+ GstCaps *caps;
+
+ asfmux->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
+ caps = gst_caps_copy (gst_pad_get_pad_template_caps (asfmux->srcpad));
+ gst_pad_set_caps (asfmux->srcpad, caps);
+ gst_caps_unref (caps);
+ gst_pad_use_fixed_caps (asfmux->srcpad);
+ gst_element_add_pad (GST_ELEMENT (asfmux), asfmux->srcpad);
+
+ asfmux->collect = gst_collect_pads_new ();
+ gst_collect_pads_set_function (asfmux->collect,
+ (GstCollectPadsFunction) GST_DEBUG_FUNCPTR (gst_asf_mux_collected),
+ asfmux);
+
+ asfmux->payloads = NULL;
+ asfmux->prop_packet_size = DEFAULT_PACKET_SIZE;
+ asfmux->prop_preroll = DEFAULT_PREROLL;
+ asfmux->prop_merge_stream_tags = DEFAULT_MERGE_STREAM_TAGS;
+ asfmux->prop_padding = DEFAULT_PADDING;
+ asfmux->prop_is_live = DEFAULT_IS_LIVE;
+ gst_asf_mux_reset (asfmux);
+}
+
+static gboolean
+gst_asf_mux_sink_event (GstPad * pad, GstEvent * event)
+{
+ gboolean ret;
+ GstAsfMux *asfmux;
+ GstAsfPad *asfpad = (GstAsfPad *) gst_pad_get_element_private (pad);
+
+ asfmux = GST_ASF_MUX_CAST (gst_pad_get_parent (pad));
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_TAG:{
+ GST_DEBUG_OBJECT (asfmux, "received tag event");
+ /* we discard tag events that come after we started
+ * writing the headers, because tags are to be in
+ * the headers
+ */
+ if (asfmux->state == GST_ASF_MUX_STATE_STARTED) {
+ GstTagList *list = NULL;
+ gst_event_parse_tag (event, &list);
+ if (asfmux->merge_stream_tags) {
+ GstTagSetter *setter = GST_TAG_SETTER (asfmux);
+ const GstTagMergeMode mode =
+ gst_tag_setter_get_tag_merge_mode (setter);
+ gst_tag_setter_merge_tags (setter, list, mode);
+ } else {
+ if (asfpad->taglist == NULL) {
+ asfpad->taglist = gst_tag_list_new ();
+ }
+ gst_tag_list_insert (asfpad->taglist, list, GST_TAG_MERGE_REPLACE);
+ }
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ ret = asfmux->collect_event (pad, event);
+ gst_object_unref (asfmux);
+ return ret;
+}
+
+/**
+ * gst_asf_mux_push_buffer:
+ * @asfmux: #GstAsfMux that should push the buffer
+ * @buf: #GstBuffer to be pushed
+ *
+ * Pushes a buffer downstream and adds its size to the total file size
+ *
+ * Returns: the result of #gst_pad_push on the buffer
+ */
+static GstFlowReturn
+gst_asf_mux_push_buffer (GstAsfMux * asfmux, GstBuffer * buf)
+{
+ GstFlowReturn ret;
+ gst_buffer_set_caps (buf, GST_PAD_CAPS (asfmux->srcpad));
+ ret = gst_pad_push (asfmux->srcpad, buf);
+ if (ret == GST_FLOW_OK)
+ asfmux->file_size += GST_BUFFER_SIZE (buf);
+ return ret;
+}
+
+/**
+ * content_description_calc_size_for_tag:
+ * @taglist: the #GstTagList that contains the tag
+ * @tag: the tag's name
+ * @user_data: a #GstAsfTags struct for putting the results
+ *
+ * Function that has the #GstTagForeachFunc signature and
+ * is used to calculate the size in bytes for each tag
+ * that can be contained in asf's content description object
+ * and extended content description object. This size is added
+ * to the total size for each of those objects in the #GstAsfTags
+ * struct passed in the user_data pointer.
+ */
+static void
+content_description_calc_size_for_tag (const GstTagList * taglist,
+ const gchar * tag, gpointer user_data)
+{
+ const gchar *asftag = gst_asf_get_asf_tag (tag);
+ GValue value = { 0 };
+ guint type;
+ GstAsfTags *asftags = (GstAsfTags *) user_data;
+ guint content_size;
+
+ if (asftag == NULL)
+ return;
+
+ if (!gst_tag_list_copy_value (&value, taglist, tag)) {
+ return;
+ }
+ type = gst_asf_get_tag_field_type (&value);
+ switch (type) {
+ case ASF_TAG_TYPE_UNICODE_STR:
+ {
+ const gchar *text;
+
+ text = g_value_get_string (&value);
+ /* +1 -> because of the \0 at the end
+ * 2* -> because we have utf8, and ASF demands UTF-16
+ */
+ content_size = 2 * (1 + g_utf8_strlen (text, -1));
+
+ if (gst_asf_tag_present_in_content_description (tag)) {
+ asftags->cont_desc_size += content_size;
+ }
+ }
+ break;
+ case ASF_TAG_TYPE_DWORD:
+ content_size = 4;
+ break;
+ default:
+ GST_WARNING ("Unhandled asf tag field type %u for tag %s", type, tag);
+ g_value_reset (&value);
+ return;
+ }
+ if (asftag) {
+ /* size of the tag content in utf16 +
+ * size of the tag name +
+ * 3 uint16 (size of the tag name string,
+ * size of the tag content string and
+ * type of content)
+ */
+ asftags->ext_cont_desc_size += content_size +
+ (g_utf8_strlen (asftag, -1) + 1) * 2 + 6;
+ }
+ gst_tag_list_add_value (asftags->tags, GST_TAG_MERGE_REPLACE, tag, &value);
+ g_value_reset (&value);
+}
+
+/* FIXME
+ * it is awful to keep track of the size here
+ * and get the same tags in the writing function */
+/**
+ * gst_asf_mux_get_content_description_tags:
+ * @asfmux: #GstAsfMux to have its tags processed
+ * @asftags: #GstAsfTags to hold the results
+ *
+ * Inspects the tags received by the GstTagSetter interface
+ * or possibly by sink tag events and calculates the total
+ * size needed for the default and extended content description objects.
+ * These results and a copy of the #GstTagList
+ * are stored in the #GstAsfTags. We store a copy so that
+ * the sizes estimated here remain the same until they are
+ * written to the ASF file.
+ */
+static void
+gst_asf_mux_get_content_description_tags (GstAsfMux * asfmux,
+ GstAsfTags * asftags)
+{
+ const GstTagList *tags;
+ GstTagList *taglist = NULL;
+
+ tags = gst_tag_setter_get_tag_list (GST_TAG_SETTER (asfmux));
+ if (tags && !gst_tag_list_is_empty (tags)) {
+ if (asftags->tags != NULL) {
+ gst_tag_list_free (asftags->tags);
+ }
+ asftags->tags = gst_tag_list_new ();
+ asftags->cont_desc_size = 0;
+ asftags->ext_cont_desc_size = 0;
+ taglist = asftags->tags;
+
+ GST_DEBUG_OBJECT (asfmux, "Processing tags");
+ gst_tag_list_foreach (tags, content_description_calc_size_for_tag, asftags);
+ } else {
+ GST_DEBUG_OBJECT (asfmux, "No tags received");
+ }
+
+ if (asftags->cont_desc_size > 0) {
+ asftags->cont_desc_size += ASF_CONTENT_DESCRIPTION_OBJECT_SIZE;
+ }
+ if (asftags->ext_cont_desc_size > 0) {
+ asftags->ext_cont_desc_size += ASF_EXT_CONTENT_DESCRIPTION_OBJECT_SIZE;
+ }
+}
+
+/**
+ * add_metadata_tag_size:
+ * @taglist: #GstTagList
+ * @tag: tag name
+ * @user_data: pointer to a guint to store the result
+ *
+ * GstTagForeachFunc implementation that accounts for the size of
+ * each tag in the taglist and adds it to the guint pointed
+ * to by user_data
+ */
+static void
+add_metadata_tag_size (const GstTagList * taglist, const gchar * tag,
+ gpointer user_data)
+{
+ const gchar *asftag = gst_asf_get_asf_tag (tag);
+ GValue value = { 0 };
+ guint type;
+ guint content_size;
+ guint *total_size = (guint *) user_data;
+
+ if (asftag == NULL)
+ return;
+
+ if (!gst_tag_list_copy_value (&value, taglist, tag)) {
+ return;
+ }
+ type = gst_asf_get_tag_field_type (&value);
+ switch (type) {
+ case ASF_TAG_TYPE_UNICODE_STR:
+ {
+ const gchar *text;
+
+ text = g_value_get_string (&value);
+ /* +1 -> because of the \0 at the end
+ * 2* -> because we have utf8, and ASF demands UTF-16
+ */
+ content_size = 2 * (1 + g_utf8_strlen (text, -1));
+ }
+ break;
+ case ASF_TAG_TYPE_DWORD:
+ content_size = 4;
+ break;
+ default:
+ GST_WARNING ("Unhandled asf tag field type %u for tag %s", type, tag);
+ g_value_reset (&value);
+ return;
+ }
+ /* size of reserved (2) +
+ * size of stream number (2) +
+ * size of the tag content in utf16 +
+ * size of the tag name +
+ * 2 uint16 (size of the tag name string and type of content) +
+ * 1 uint32 (size of the data)
+ */
+ *total_size +=
+ 4 + content_size + (g_utf8_strlen (asftag, -1) + 1) * 2 + 4 + 4;
+ g_value_reset (&value);
+}
+
+/**
+ * gst_asf_mux_get_metadata_object_size:
+ * @asfmux: #GstAsfMux
+ * @asfpad: pad for which the metadata object size should be calculated
+ *
+ * Calculates the size of the metadata object for the tags of the stream
+ * handled by the asfpad passed in the parameter
+ *
+ * Returns: The size calculated
+ */
+static guint
+gst_asf_mux_get_metadata_object_size (GstAsfMux * asfmux, GstAsfPad * asfpad)
+{
+ guint size = ASF_METADATA_OBJECT_SIZE;
+ if (asfpad->taglist == NULL || gst_tag_list_is_empty (asfpad->taglist))
+ return 0;
+
+ gst_tag_list_foreach (asfpad->taglist, add_metadata_tag_size, &size);
+ return size;
+}
+
+/**
+ * gst_asf_mux_get_headers_size:
+ * @asfmux: #GstAsfMux
+ *
+ * Calculates the size of the headers of the asf stream
+ * to be generated by this #GstAsfMux.
+ * It's used for determining the size of the buffer to allocate
+ * to exactly fit the headers in.
+ * Padding and metadata object sizes are not included.
+ *
+ * Returns: the calculated size
+ */
+static guint
+gst_asf_mux_get_headers_size (GstAsfMux * asfmux)
+{
+ GSList *walk;
+ gint stream_num = 0;
+ guint size = ASF_HEADER_OBJECT_SIZE +
+ ASF_FILE_PROPERTIES_OBJECT_SIZE + ASF_HEADER_EXTENSION_OBJECT_SIZE;
+
+ /* per stream data */
+ for (walk = asfmux->collect->data; walk; walk = g_slist_next (walk)) {
+ GstAsfPad *asfpad = (GstAsfPad *) walk->data;
+
+ if (asfpad->is_audio)
+ size += ASF_AUDIO_SPECIFIC_DATA_SIZE;
+ else
+ size += ASF_VIDEO_SPECIFIC_DATA_SIZE;
+
+ if (asfpad->codec_data)
+ size += GST_BUFFER_SIZE (asfpad->codec_data);
+
+ stream_num++;
+ }
+ size += stream_num * (ASF_STREAM_PROPERTIES_OBJECT_SIZE +
+ ASF_EXTENDED_STREAM_PROPERTIES_OBJECT_SIZE);
+
+ return size;
+}
+
+/**
+ * gst_asf_mux_write_header_object:
+ * @asfmux:
+ * @buf: pointer to the data pointer
+ * @size: size of the header object
+ * @child_objects: number of child objects inside the main header object
+ *
+ * Writes the main asf header object start. The buffer pointer
+ * is incremented to the next writing position.
+ */
+static void
+gst_asf_mux_write_header_object (GstAsfMux * asfmux, guint8 ** buf,
+ guint64 size, guint32 child_objects)
+{
+ gst_asf_put_guid (*buf, guids[ASF_HEADER_OBJECT_INDEX]);
+ GST_WRITE_UINT64_LE (*buf + 16, size); /* object size */
+ GST_WRITE_UINT32_LE (*buf + 24, child_objects); /* # of child objects */
+ GST_WRITE_UINT8 (*buf + 28, 0x01); /* reserved */
+ GST_WRITE_UINT8 (*buf + 29, 0x02); /* reserved */
+ *buf += ASF_HEADER_OBJECT_SIZE;
+}
+
+/**
+ * gst_asf_mux_write_file_properties:
+ * @asfmux:
+ * @buf: pointer to the data pointer
+ *
+ * Writes the file properties object to the buffer. The buffer pointer
+ * is incremented to the next writing position.
+ */
+static void
+gst_asf_mux_write_file_properties (GstAsfMux * asfmux, guint8 ** buf)
+{
+ gst_asf_put_guid (*buf, guids[ASF_FILE_PROPERTIES_OBJECT_INDEX]);
+ GST_WRITE_UINT64_LE (*buf + 16, ASF_FILE_PROPERTIES_OBJECT_SIZE); /* object size */
+ gst_asf_put_guid (*buf + 24, asfmux->file_id);
+ GST_WRITE_UINT64_LE (*buf + 40, 0); /* file size - needs update */
+ gst_asf_put_time (*buf + 48, gst_asf_get_current_time ()); /* creation time */
+ GST_WRITE_UINT64_LE (*buf + 56, 0); /* data packets - needs update */
+ GST_WRITE_UINT64_LE (*buf + 64, 0); /* play duration - needs update */
+ GST_WRITE_UINT64_LE (*buf + 72, 0); /* send duration - needs update */
+ GST_WRITE_UINT64_LE (*buf + 80, 0); /* preroll */
+ GST_WRITE_UINT32_LE (*buf + 88, 0x1); /* flags - broadcast on */
+ GST_WRITE_UINT32_LE (*buf + 92, asfmux->packet_size); /* minimum data packet size */
+ GST_WRITE_UINT32_LE (*buf + 96, asfmux->packet_size); /* maximum data packet size */
+ GST_WRITE_UINT32_LE (*buf + 100, 0); /* maximum bitrate TODO */
+
+ *buf += ASF_FILE_PROPERTIES_OBJECT_SIZE;
+}
+
+/**
+ * gst_asf_mux_write_stream_properties:
+ * @asfmux:
+ * @buf: pointer to the data pointer
+ * @asfpad: Pad that handles the stream
+ *
+ * Writes the stream properties object in the buffer
+ * for the stream handled by the #GstAsfPad passed.
+ * The pointer is incremented to the next writing position
+ */
+static void
+gst_asf_mux_write_stream_properties (GstAsfMux * asfmux, guint8 ** buf,
+ GstAsfPad * asfpad)
+{
+ guint32 codec_data_length = 0;
+ guint32 media_specific_data_length = 0;
+ guint16 flags = 0;
+
+ /* codec specific data length */
+ if (asfpad->codec_data)
+ codec_data_length = GST_BUFFER_SIZE (asfpad->codec_data);
+ if (asfpad->is_audio)
+ media_specific_data_length = ASF_AUDIO_SPECIFIC_DATA_SIZE;
+ else
+ media_specific_data_length = ASF_VIDEO_SPECIFIC_DATA_SIZE;
+
+ GST_DEBUG_OBJECT (asfmux, "Stream %" G_GUINT16_FORMAT " codec data length: %"
+ G_GUINT32_FORMAT ", media specific data length: %" G_GUINT32_FORMAT,
+ (guint16) asfpad->stream_number, codec_data_length,
+ media_specific_data_length);
+
+ gst_asf_put_guid (*buf, guids[ASF_STREAM_PROPERTIES_OBJECT_INDEX]);
+ GST_WRITE_UINT64_LE (*buf + 16, ASF_STREAM_PROPERTIES_OBJECT_SIZE + codec_data_length + media_specific_data_length); /* object size */
+
+ /* stream type */
+ if (asfpad->is_audio)
+ gst_asf_put_guid (*buf + 24, guids[ASF_AUDIO_MEDIA_INDEX]);
+ else
+ gst_asf_put_guid (*buf + 24, guids[ASF_VIDEO_MEDIA_INDEX]);
+ /* error correction */
+ if (asfpad->is_audio) {
+ gst_asf_put_guid (*buf + 40, guids[ASF_NO_ERROR_CORRECTION_INDEX]); /* TODO - use audio spread */
+ } else {
+ gst_asf_put_guid (*buf + 40, guids[ASF_NO_ERROR_CORRECTION_INDEX]);
+ }
+ GST_WRITE_UINT64_LE (*buf + 56, 0); /* time offset */
+
+ GST_WRITE_UINT32_LE (*buf + 64, codec_data_length + media_specific_data_length); /* type specific data length */
+ GST_WRITE_UINT32_LE (*buf + 68, 0); /* error correction data length */
+
+ flags = (asfpad->stream_number & 0x7F);
+ GST_WRITE_UINT16_LE (*buf + 72, flags);
+ GST_WRITE_UINT32_LE (*buf + 74, 0); /* reserved */
+
+ *buf += ASF_STREAM_PROPERTIES_OBJECT_SIZE;
+ /* audio specific data */
+ if (asfpad->is_audio) {
+ GstAsfAudioPad *audiopad = (GstAsfAudioPad *) asfpad;
+ GST_WRITE_UINT16_LE (*buf, audiopad->audioinfo.format);
+ GST_WRITE_UINT16_LE (*buf + 2, audiopad->audioinfo.channels);
+ GST_WRITE_UINT32_LE (*buf + 4, audiopad->audioinfo.rate);
+ GST_WRITE_UINT32_LE (*buf + 8, audiopad->audioinfo.av_bps);
+ GST_WRITE_UINT16_LE (*buf + 12, audiopad->audioinfo.blockalign);
+ GST_WRITE_UINT16_LE (*buf + 14, audiopad->audioinfo.size);
+ GST_WRITE_UINT16_LE (*buf + 16, codec_data_length);
+
+ GST_DEBUG_OBJECT (asfmux,
+ "wave formatex values: codec_id=%" G_GUINT16_FORMAT ", channels=%"
+ G_GUINT16_FORMAT ", rate=%" G_GUINT32_FORMAT ", bytes_per_sec=%"
+ G_GUINT32_FORMAT ", block_alignment=%" G_GUINT16_FORMAT
+ ", bits_per_sample=%" G_GUINT16_FORMAT ", codec_data_length=%"
+ G_GUINT16_FORMAT, audiopad->audioinfo.format,
+ audiopad->audioinfo.channels, audiopad->audioinfo.rate,
+ audiopad->audioinfo.av_bps, audiopad->audioinfo.blockalign,
+ audiopad->audioinfo.size, codec_data_length);
+
+
+ *buf += ASF_AUDIO_SPECIFIC_DATA_SIZE;
+ } else {
+ GstAsfVideoPad *videopad = (GstAsfVideoPad *) asfpad;
+ GST_WRITE_UINT32_LE (*buf, (guint32) videopad->vidinfo.width);
+ GST_WRITE_UINT32_LE (*buf + 4, (guint32) videopad->vidinfo.height);
+ GST_WRITE_UINT8 (*buf + 8, 2);
+
+ /* the BITMAPINFOHEADER size + codec_data size */
+ GST_WRITE_UINT16_LE (*buf + 9,
+ ASF_VIDEO_SPECIFIC_DATA_SIZE + codec_data_length - 11);
+
+ /* BITMAPINFOHEADER */
+ GST_WRITE_UINT32_LE (*buf + 11,
+ ASF_VIDEO_SPECIFIC_DATA_SIZE + codec_data_length - 11);
+ gst_asf_put_i32 (*buf + 15, videopad->vidinfo.width);
+ gst_asf_put_i32 (*buf + 19, videopad->vidinfo.height);
+ GST_WRITE_UINT16_LE (*buf + 23, 1); /* reserved */
+ GST_WRITE_UINT16_LE (*buf + 25, videopad->vidinfo.bit_cnt);
+ GST_WRITE_UINT32_LE (*buf + 27, videopad->vidinfo.compression);
+ GST_WRITE_UINT32_LE (*buf + 31, videopad->vidinfo.width *
+ videopad->vidinfo.height * videopad->vidinfo.bit_cnt);
+ GST_WRITE_UINT32_LE (*buf + 35, videopad->vidinfo.xpels_meter);
+ GST_WRITE_UINT32_LE (*buf + 39, videopad->vidinfo.ypels_meter);
+ GST_WRITE_UINT32_LE (*buf + 41, videopad->vidinfo.num_colors);
+ GST_WRITE_UINT32_LE (*buf + 45, videopad->vidinfo.num_colors);
+
+ *buf += ASF_VIDEO_SPECIFIC_DATA_SIZE;
+ }
+
+ if (codec_data_length > 0)
+ memcpy (*buf, GST_BUFFER_DATA (asfpad->codec_data), codec_data_length);
+
+ *buf += codec_data_length;
+ /* TODO - error correction for audio */
+}
+
+/**
+ * gst_asf_mux_write_header_extension:
+ * @asfmux:
+ * @buf: pointer to the buffer pointer
+ * @extension_size: size of the extensions
+ *
+ * Writes the header of the header extension object. The buffer pointer
+ * is incremented to the next writing position (the header extension object
+ * children should be written from that point)
+ */
+static void
+gst_asf_mux_write_header_extension (GstAsfMux * asfmux, guint8 ** buf,
+ guint64 extension_size)
+{
+ gst_asf_put_guid (*buf, guids[ASF_HEADER_EXTENSION_OBJECT_INDEX]);
+ GST_WRITE_UINT64_LE (*buf + 16, ASF_HEADER_EXTENSION_OBJECT_SIZE + extension_size); /* object size */
+ gst_asf_put_guid (*buf + 24, guids[ASF_RESERVED_1_INDEX]); /* reserved */
+ GST_WRITE_UINT16_LE (*buf + 40, 6); /* reserved */
+ GST_WRITE_UINT32_LE (*buf + 42, extension_size); /* header extension data size */
+ *buf += ASF_HEADER_EXTENSION_OBJECT_SIZE;
+}
+
+/**
+ * gst_asf_mux_write_extended_stream_properties:
+ * @asfmux:
+ * @buf: pointer to the buffer pointer
+ * @asfpad: Pad that handles the stream of the properties to be written
+ *
+ * Writes the extended stream properties object (that is part of the
+ * header extension objects) for the stream handled by asfpad
+ */
+static void
+gst_asf_mux_write_extended_stream_properties (GstAsfMux * asfmux, guint8 ** buf,
+ GstAsfPad * asfpad)
+{
+ gst_asf_put_guid (*buf, guids[ASF_EXTENDED_STREAM_PROPERTIES_OBJECT_INDEX]);
+ GST_WRITE_UINT64_LE (*buf + 16, ASF_EXTENDED_STREAM_PROPERTIES_OBJECT_SIZE);
+ GST_WRITE_UINT64_LE (*buf + 24, 0); /* start time */
+ GST_WRITE_UINT64_LE (*buf + 32, 0); /* end time */
+ GST_WRITE_UINT32_LE (*buf + 40, asfpad->bitrate); /* bitrate */
+ GST_WRITE_UINT32_LE (*buf + 44, 0); /* buffer size */
+ GST_WRITE_UINT32_LE (*buf + 48, 0); /* initial buffer fullness */
+ GST_WRITE_UINT32_LE (*buf + 52, asfpad->bitrate); /* alternate data bitrate */
+ GST_WRITE_UINT32_LE (*buf + 56, 0); /* alternate buffer size */
+ GST_WRITE_UINT32_LE (*buf + 60, 0); /* alternate initial buffer fullness */
+ GST_WRITE_UINT32_LE (*buf + 64, 0); /* maximum object size */
+
+ /* flags */
+ GST_WRITE_UINT32_LE (*buf + 68, 0x0); /* TODO check if seekable */
+
+ GST_WRITE_UINT16_LE (*buf + 72, asfpad->stream_number);
+ GST_WRITE_UINT16_LE (*buf + 74, 0); /* language index */
+ GST_WRITE_UINT64_LE (*buf + 76, 0); /* avg time per frame */
+ GST_WRITE_UINT16_LE (*buf + 84, 0); /* stream name count */
+ GST_WRITE_UINT16_LE (*buf + 86, 0); /* payload extension count */
+
+ *buf += ASF_EXTENDED_STREAM_PROPERTIES_OBJECT_SIZE;
+}
+
+/**
+ * gst_asf_mux_write_string_with_size:
+ * @asfmux:
+ * @size_buf: pointer to the memory position to write the size of the string
+ * @str_buf: pointer to the memory position to write the string
+ * @str: the string to be written (in UTF-8)
+ * @use32: if the string size should be written with 32 bits (if true)
+ * or with 16 (if false)
+ *
+ * Writes a string with its size, as needed in many asf objects.
+ * The size is written to size_buf as a WORD field if use32 is false, and
+ * as a DWORD if use32 is true. The string is written to str_buf in UTF-16LE.
+ * The string should be passed in UTF-8.
+ *
+ * The string size in UTF-16LE is returned.
+ */
+static guint64
+gst_asf_mux_write_string_with_size (GstAsfMux * asfmux,
+ guint8 * size_buf, guint8 * str_buf, const gchar * str, gboolean use32)
+{
+ GError *error = NULL;
+ gsize str_size = 0;
+ gchar *str_utf16 = NULL;
+
+ GST_LOG_OBJECT (asfmux, "Writing extended content description string: "
+ "%s", str);
+
+ /*
+ * Convert the string to UTF-16.
+ * Also force the last bytes to be null terminated;
+ * tags showed up with extra weird characters without it.
+ */
+ str_utf16 = g_convert (str, -1, "UTF-16LE", "UTF-8", NULL, &str_size, &error);
+ str_utf16[str_size + 1] = '\0';
+ str_utf16[str_size + 2] = '\0';
+
+ /* sum up the null terminating char */
+ str_size += 2;
+
+ if (use32)
+ GST_WRITE_UINT32_LE (size_buf, str_size);
+ else
+ GST_WRITE_UINT16_LE (size_buf, str_size);
+ if (error) {
+ GST_WARNING_OBJECT (asfmux, "Error converting string "
+ "to UTF-16: %s - %s", str, error->message);
+ g_error_free (error);
+ memset (str_buf, 0, str_size);
+ } else {
+ memcpy (str_buf, str_utf16, str_size);
+ }
+ g_free (str_utf16);
+ return str_size;
+}
+
+/**
+ * gst_asf_mux_write_content_description_entry:
+ * @asfmux:
+ * @tags:
+ * @tagname:
+ * @size_buf:
+ * @data_buf:
+ *
+ * Checks if a string tag with tagname exists in the taglist. If it
+ * exists it is writen as an UTF-16LE to data_buf and its size in bytes
+ * is writen to size_buf. It is used for writing content description
+ * object fields.
+ *
+ * Returns: the size of the string
+ */
+static guint16
+gst_asf_mux_write_content_description_entry (GstAsfMux * asfmux,
+ const GstTagList * tags, const gchar * tagname,
+ guint8 * size_buf, guint8 * data_buf)
+{
+ gchar *text = NULL;
+ guint16 text_size = 0;
+ if (gst_tag_list_get_string (tags, tagname, &text)) {
+ text_size = gst_asf_mux_write_string_with_size (asfmux, size_buf,
+ data_buf, text, FALSE);
+ g_free (text);
+ } else {
+ GST_WRITE_UINT16_LE (size_buf, 0);
+ }
+ return text_size;
+}
+
+static guint64
+gst_asf_mux_write_ext_content_description_dword_entry (GstAsfMux * asfmux,
+ guint8 * buf, const gchar * asf_tag, const guint32 value)
+{
+ guint64 tag_size;
+ GST_DEBUG_OBJECT (asfmux, "Writing extended content description tag: "
+ "%s (%u)", asf_tag, value);
+
+ tag_size = gst_asf_mux_write_string_with_size (asfmux, buf, buf + 2,
+ asf_tag, FALSE);
+ buf += tag_size + 2;
+ GST_WRITE_UINT16_LE (buf, ASF_TAG_TYPE_DWORD);
+ GST_WRITE_UINT16_LE (buf + 2, 4);
+ GST_WRITE_UINT32_LE (buf + 4, value);
+
+ /* tagsize -> string size
+ * 2 -> string size field size
+ * 4 -> dword entry
+ * 4 -> type of entry + entry size
+ */
+ return tag_size + 2 + 4 + 4;
+}
+
+static guint64
+gst_asf_mux_write_ext_content_description_string_entry (GstAsfMux * asfmux,
+ guint8 * buf, const gchar * asf_tag, const gchar * text)
+{
+ guint64 tag_size = 0;
+ guint64 text_size = 0;
+
+ GST_DEBUG_OBJECT (asfmux, "Writing extended content description tag: "
+ "%s (%s)", asf_tag, text);
+
+ tag_size = gst_asf_mux_write_string_with_size (asfmux,
+ buf, buf + 2, asf_tag, FALSE);
+ GST_WRITE_UINT16_LE (buf + tag_size + 2, ASF_TAG_TYPE_UNICODE_STR);
+ buf += tag_size + 2 + 2;
+ text_size = gst_asf_mux_write_string_with_size (asfmux,
+ buf, buf + 2, text, FALSE);
+
+ /* the size of the strings in utf16-le plus the 3 WORD fields */
+ return tag_size + text_size + 6;
+}
+
+static void
+gst_asf_mux_write_content_description (GstAsfMux * asfmux, guint8 ** buf,
+ const GstTagList * tags)
+{
+ guint8 *values = (*buf) + ASF_CONTENT_DESCRIPTION_OBJECT_SIZE;
+ guint64 size = 0;
+
+ GST_DEBUG_OBJECT (asfmux, "Writing content description object");
+
+ gst_asf_put_guid (*buf, guids[ASF_CONTENT_DESCRIPTION_INDEX]);
+
+ values += gst_asf_mux_write_content_description_entry (asfmux, tags,
+ GST_TAG_TITLE, *buf + 24, values);
+ values += gst_asf_mux_write_content_description_entry (asfmux, tags,
+ GST_TAG_ARTIST, *buf + 26, values);
+ values += gst_asf_mux_write_content_description_entry (asfmux, tags,
+ GST_TAG_COPYRIGHT, *buf + 28, values);
+ values += gst_asf_mux_write_content_description_entry (asfmux, tags,
+ GST_TAG_DESCRIPTION, *buf + 30, values);
+
+ /* rating is currently not present in gstreamer tags, so we put 0 */
+ GST_WRITE_UINT16_LE (*buf + 32, 0);
+
+ size += values - *buf;
+ GST_WRITE_UINT64_LE (*buf + 16, size);
+ *buf += size;
+}
+
+static void
+write_ext_content_description_tag (const GstTagList * taglist,
+ const gchar * tag, gpointer user_data)
+{
+ const gchar *asftag = gst_asf_get_asf_tag (tag);
+ GValue value = { 0 };
+ guint type;
+ GstAsfExtContDescData *data = (GstAsfExtContDescData *) user_data;
+
+ if (asftag == NULL)
+ return;
+
+ if (!gst_tag_list_copy_value (&value, taglist, tag)) {
+ return;
+ }
+
+ type = gst_asf_get_tag_field_type (&value);
+ switch (type) {
+ case ASF_TAG_TYPE_UNICODE_STR:
+ {
+ const gchar *text;
+ text = g_value_get_string (&value);
+ data->size +=
+ gst_asf_mux_write_ext_content_description_string_entry (data->asfmux,
+ data->buf + data->size, asftag, text);
+ }
+ break;
+ case ASF_TAG_TYPE_DWORD:
+ {
+ guint num = g_value_get_uint (&value);
+ data->size +=
+ gst_asf_mux_write_ext_content_description_dword_entry (data->asfmux,
+ data->buf + data->size, asftag, num);
+ }
+ break;
+ default:
+ GST_WARNING_OBJECT (data->asfmux,
+ "Unhandled asf tag field type %u for tag %s", type, tag);
+ g_value_reset (&value);
+ return;
+ }
+ data->count++;
+ g_value_reset (&value);
+}
+
+static void
+gst_asf_mux_write_ext_content_description (GstAsfMux * asfmux, guint8 ** buf,
+ GstTagList * tags)
+{
+ GstAsfExtContDescData extContDesc;
+ extContDesc.asfmux = asfmux;
+ extContDesc.buf = *buf;
+ extContDesc.count = 0;
+ extContDesc.size = ASF_EXT_CONTENT_DESCRIPTION_OBJECT_SIZE;
+
+ GST_DEBUG_OBJECT (asfmux, "Writing extended content description object");
+ gst_asf_put_guid (*buf, guids[ASF_EXT_CONTENT_DESCRIPTION_INDEX]);
+
+ gst_tag_list_foreach (tags, write_ext_content_description_tag, &extContDesc);
+
+ GST_WRITE_UINT64_LE (*buf + 16, extContDesc.size);
+ GST_WRITE_UINT16_LE (*buf + 24, extContDesc.count);
+
+ *buf += extContDesc.size;
+}
+
+static void
+write_metadata_tag (const GstTagList * taglist, const gchar * tag,
+ gpointer user_data)
+{
+ const gchar *asftag = gst_asf_get_asf_tag (tag);
+ GValue value = { 0 };
+ guint type;
+ GstAsfMetadataObjData *data = (GstAsfMetadataObjData *) user_data;
+ guint16 tag_size;
+ guint32 content_size;
+
+ if (asftag == NULL)
+ return;
+
+ if (!gst_tag_list_copy_value (&value, taglist, tag)) {
+ return;
+ }
+
+ type = gst_asf_get_tag_field_type (&value);
+ switch (type) {
+ case ASF_TAG_TYPE_UNICODE_STR:
+ {
+ const gchar *text;
+ text = g_value_get_string (&value);
+ GST_WRITE_UINT16_LE (data->buf + data->size, 0);
+ GST_WRITE_UINT16_LE (data->buf + data->size + 2, data->stream_num);
+ data->size += 4;
+
+ tag_size = gst_asf_mux_write_string_with_size (data->asfmux,
+ data->buf + data->size, data->buf + data->size + 8, asftag, FALSE);
+ data->size += 2;
+
+ GST_WRITE_UINT16_LE (data->buf + data->size, type);
+ data->size += 2;
+
+ content_size = gst_asf_mux_write_string_with_size (data->asfmux,
+ data->buf + data->size, data->buf + data->size + tag_size + 4, text,
+ TRUE);
+ data->size += tag_size + content_size + 4;
+ }
+ break;
+ case ASF_TAG_TYPE_DWORD:
+ {
+ guint num = g_value_get_uint (&value);
+ GST_WRITE_UINT16_LE (data->buf + data->size, 0);
+ GST_WRITE_UINT16_LE (data->buf + data->size + 2, data->stream_num);
+ data->size += 4;
+
+ tag_size = gst_asf_mux_write_string_with_size (data->asfmux,
+ data->buf + data->size, data->buf + data->size + 8, asftag, FALSE);
+ data->size += 2;
+
+ GST_WRITE_UINT16_LE (data->buf + data->size, type);
+ data->size += 2;
+ /* dword length */
+ GST_WRITE_UINT32_LE (data->buf + data->size, 4);
+ data->size += 4 + tag_size;
+
+ GST_WRITE_UINT32_LE (data->buf + data->size, num);
+ data->size += 4;
+ }
+ break;
+ default:
+ GST_WARNING_OBJECT (data->asfmux,
+ "Unhandled asf tag field type %u for tag %s", type, tag);
+ g_value_reset (&value);
+ return;
+ }
+
+ data->count++;
+ g_value_reset (&value);
+}
+
+static void
+gst_asf_mux_write_metadata_object (GstAsfMux * asfmux, guint8 ** buf,
+ GstAsfPad * asfpad)
+{
+ GstAsfMetadataObjData metaObjData;
+ metaObjData.asfmux = asfmux;
+ metaObjData.buf = *buf;
+ metaObjData.count = 0;
+ metaObjData.size = ASF_METADATA_OBJECT_SIZE;
+ metaObjData.stream_num = asfpad->stream_number;
+
+ if (asfpad->taglist == NULL || gst_tag_list_is_empty (asfpad->taglist))
+ return;
+
+ GST_DEBUG_OBJECT (asfmux, "Writing metadata object");
+ gst_asf_put_guid (*buf, guids[ASF_METADATA_OBJECT_INDEX]);
+
+ gst_tag_list_foreach (asfpad->taglist, write_metadata_tag, &metaObjData);
+
+ GST_WRITE_UINT64_LE (*buf + 16, metaObjData.size);
+ GST_WRITE_UINT16_LE (*buf + 24, metaObjData.count);
+
+ *buf += metaObjData.size;
+}
+
+static void
+gst_asf_mux_write_padding_object (GstAsfMux * asfmux, guint8 ** buf,
+ guint64 padding)
+{
+ if (padding < ASF_PADDING_OBJECT_SIZE) {
+ return;
+ }
+
+ GST_DEBUG_OBJECT (asfmux, "Writing padding object of size %" G_GUINT64_FORMAT,
+ padding);
+ gst_asf_put_guid (*buf, guids[ASF_PADDING_OBJECT_INDEX]);
+ GST_WRITE_UINT64_LE (*buf + 16, padding);
+ memset (*buf + 24, 0, padding - ASF_PADDING_OBJECT_SIZE);
+ *buf += padding;
+}
+
+static void
+gst_asf_mux_write_data_object (GstAsfMux * asfmux, guint8 ** buf)
+{
+ gst_asf_put_guid (*buf, guids[ASF_DATA_OBJECT_INDEX]);
+ GST_WRITE_UINT64_LE (*buf + 16, 0); /* object size - needs updating */
+ gst_asf_put_guid (*buf + 24, asfmux->file_id);
+ GST_WRITE_UINT64_LE (*buf + 40, 0); /* total data packets */
+ GST_WRITE_UINT16_LE (*buf + 48, 0x0101); /* reserved */
+ *buf += ASF_DATA_OBJECT_SIZE;
+}
+
+/**
+ * gst_asf_mux_start_file:
+ * @asfmux: #GstAsfMux
+ *
+ * Starts the asf file/stream by creating and pushing
+ * the headers downstream.
+ */
+static GstFlowReturn
+gst_asf_mux_start_file (GstAsfMux * asfmux)
+{
+ GstBuffer *buf = NULL;
+ guint8 *bufdata = NULL;
+ GSList *walk;
+ guint stream_num = g_slist_length (asfmux->collect->data);
+ guint metadata_obj_size = 0;
+ GstAsfTags *asftags;
+ guint64 padding = asfmux->prop_padding;
+ if (padding < ASF_PADDING_OBJECT_SIZE)
+ padding = 0;
+
+ /* from this point we started writing the headers */
+ GST_INFO_OBJECT (asfmux, "Writing headers");
+ asfmux->state = GST_ASF_MUX_STATE_HEADERS;
+
+ /* let downstream know we think in BYTES and expect to do seeking later */
+ gst_pad_push_event (asfmux->srcpad,
+ gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES, 0, -1, 0));
+
+ asfmux->file_id = gst_asf_generate_file_id ();
+
+ /* Get the metadata for content description object.
+ * We store our own taglist because it might get changed from now
+ * to the time we actually add its contents to the file, changing
+ * the size of the data we already calculated here.
+ */
+ asftags = g_new0 (GstAsfTags, 1);
+ gst_asf_mux_get_content_description_tags (asfmux, asftags);
+
+ /* get the total metadata objects size */
+ for (walk = asfmux->collect->data; walk; walk = g_slist_next (walk)) {
+ metadata_obj_size += gst_asf_mux_get_metadata_object_size (asfmux,
+ (GstAsfPad *) walk->data);
+ }
+
+ /* alloc a buffer for all header objects */
+ buf = gst_buffer_new_and_alloc (gst_asf_mux_get_headers_size (asfmux) +
+ asftags->cont_desc_size +
+ asftags->ext_cont_desc_size +
+ metadata_obj_size + padding + ASF_DATA_OBJECT_SIZE);
+ bufdata = GST_BUFFER_DATA (buf);
+
+ gst_asf_mux_write_header_object (asfmux, &bufdata, GST_BUFFER_SIZE (buf) -
+ ASF_DATA_OBJECT_SIZE, 2 + stream_num);
+
+ /* get the position of the file properties object for
+ * updating it in gst_asf_mux_stop_file */
+ asfmux->file_properties_object_position = bufdata - GST_BUFFER_DATA (buf);
+ gst_asf_mux_write_file_properties (asfmux, &bufdata);
+
+ for (walk = asfmux->collect->data; walk; walk = g_slist_next (walk)) {
+ gst_asf_mux_write_stream_properties (asfmux, &bufdata,
+ (GstAsfPad *) walk->data);
+ }
+
+ if (asftags->cont_desc_size) {
+ gst_asf_mux_write_content_description (asfmux, &bufdata, asftags->tags);
+ }
+ if (asftags->ext_cont_desc_size) {
+ gst_asf_mux_write_ext_content_description (asfmux, &bufdata, asftags->tags);
+ }
+
+ if (asftags) {
+ if (asftags->tags)
+ gst_tag_list_free (asftags->tags);
+ g_free (asftags);
+ }
+
+ /* writing header extension objects */
+ gst_asf_mux_write_header_extension (asfmux, &bufdata, stream_num *
+ ASF_EXTENDED_STREAM_PROPERTIES_OBJECT_SIZE + metadata_obj_size);
+ for (walk = asfmux->collect->data; walk; walk = g_slist_next (walk)) {
+ gst_asf_mux_write_extended_stream_properties (asfmux, &bufdata,
+ (GstAsfPad *) walk->data);
+ }
+ for (walk = asfmux->collect->data; walk; walk = g_slist_next (walk)) {
+ gst_asf_mux_write_metadata_object (asfmux, &bufdata,
+ (GstAsfPad *) walk->data);
+ }
+
+ gst_asf_mux_write_padding_object (asfmux, &bufdata, padding);
+
+ /* store data object position for later updating some fields */
+ asfmux->data_object_position = bufdata - GST_BUFFER_DATA (buf);
+ gst_asf_mux_write_data_object (asfmux, &bufdata);
+
+ g_assert (bufdata - GST_BUFFER_DATA (buf) == GST_BUFFER_SIZE (buf));
+ return gst_asf_mux_push_buffer (asfmux, buf);
+}
+
+/**
+ * gst_asf_mux_add_simple_index_entry:
+ * @asfmux:
+ * @videopad:
+ *
+ * Adds a new entry to the simple index of the stream handler by videopad.
+ * This functions doesn't check if the time ellapsed
+ * is larger than the established time interval between entries. The caller
+ * is responsible for verifying this.
+ */
+static void
+gst_asf_mux_add_simple_index_entry (GstAsfMux * asfmux,
+ GstAsfVideoPad * videopad)
+{
+ SimpleIndexEntry *entry = NULL;
+ GST_DEBUG_OBJECT (asfmux, "Adding new simple index entry "
+ "packet number:%" G_GUINT32_FORMAT ", "
+ "packet count:%" G_GUINT16_FORMAT,
+ videopad->last_keyframe_packet, videopad->last_keyframe_packet_count);
+ entry = g_malloc0 (sizeof (SimpleIndexEntry));
+ entry->packet_number = videopad->last_keyframe_packet;
+ entry->packet_count = videopad->last_keyframe_packet_count;
+ if (entry->packet_count > videopad->max_keyframe_packet_count)
+ videopad->max_keyframe_packet_count = entry->packet_count;
+ videopad->simple_index = g_slist_append (videopad->simple_index, entry);
+}
+
+/**
+ * gst_asf_mux_send_packet:
+ * @asfmux:
+ * @buf: The asf data packet
+ *
+ * Pushes an asf data packet downstream. The total number
+ * of packets and bytes of the stream are incremented.
+ *
+ * Returns: the result of pushing the buffer downstream
+ */
+static GstFlowReturn
+gst_asf_mux_send_packet (GstAsfMux * asfmux, GstBuffer * buf)
+{
+ g_assert (GST_BUFFER_SIZE (buf) == asfmux->packet_size);
+ asfmux->total_data_packets++;
+ GST_LOG_OBJECT (asfmux,
+ "Pushing a packet of size %u and timestamp %" G_GUINT64_FORMAT,
+ GST_BUFFER_SIZE (buf), GST_BUFFER_TIMESTAMP (buf));
+ return gst_asf_mux_push_buffer (asfmux, buf);
+}
+
+/**
+ * gst_asf_mux_flush_payloads:
+ * @asfmux: #GstAsfMux to flush the payloads from
+ *
+ * Fills an asf packet with asfmux queued payloads and
+ * pushes it downstream.
+ *
+ * Returns: The result of pushing the packet
+ */
+static GstFlowReturn
+gst_asf_mux_flush_payloads (GstAsfMux * asfmux)
+{
+ GstBuffer *buf;
+ guint8 payloads_count = 0; /* we only use 6 bits, max is 63 */
+ guint i;
+ GstClockTime send_ts = GST_CLOCK_TIME_NONE;
+ guint64 size_left;
+ guint8 *data;
+ GSList *walk;
+ GstAsfPad *pad;
+ gboolean has_keyframe;
+ AsfPayload *payload;
+ guint32 payload_size;
+
+ if (asfmux->payloads == NULL)
+ return GST_FLOW_OK; /* nothing to send is ok */
+
+ GST_DEBUG_OBJECT (asfmux, "Flushing payloads");
+
+ buf = gst_buffer_new_and_alloc (asfmux->packet_size);
+ memset (GST_BUFFER_DATA (buf), 0, asfmux->packet_size);
+
+ /* 1 for the multiple payload flags */
+ data = GST_BUFFER_DATA (buf) + ASF_PAYLOAD_PARSING_INFO_SIZE + 1;
+ size_left = asfmux->packet_size - ASF_PAYLOAD_PARSING_INFO_SIZE - 1;
+
+ has_keyframe = FALSE;
+ walk = asfmux->payloads;
+ while (walk && payloads_count < MAX_PAYLOADS_IN_A_PACKET) {
+ payload = (AsfPayload *) walk->data;
+ pad = (GstAsfPad *) payload->pad;
+ payload_size = gst_asf_payload_get_size (payload);
+ if (size_left < payload_size) {
+ break; /* next payload doesn't fit fully */
+ }
+
+ if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (send_ts))) {
+ send_ts = GST_BUFFER_TIMESTAMP (payload->data);
+ }
+
+ /* adding new simple index entry (if needed) */
+ if (!pad->is_audio
+ && GST_CLOCK_TIME_IS_VALID (GST_BUFFER_TIMESTAMP (payload->data))) {
+ GstAsfVideoPad *videopad = (GstAsfVideoPad *) pad;
+ if (videopad->has_keyframe) {
+ for (; videopad->next_index_time <=
+ ASF_MILI_TO_100NANO (payload->presentation_time);
+ videopad->next_index_time += videopad->time_interval) {
+ gst_asf_mux_add_simple_index_entry (asfmux, videopad);
+ }
+ }
+ }
+
+ /* serialize our payload */
+ GST_DEBUG_OBJECT (asfmux, "Serializing a payload into the packet. "
+ "Stream number:%" G_GUINT16_FORMAT
+ ", media object number:%" G_GUINT16_FORMAT
+ ", offset into media object:%" G_GUINT32_FORMAT
+ ", replicated data length:%" G_GUINT16_FORMAT
+ ", media object size:%" G_GUINT32_FORMAT
+ ", presentation time:%" G_GUINT32_FORMAT
+ ", payload size:%" G_GUINT16_FORMAT,
+ payload->stream_number & 0x7F,
+ (guint16) payload->media_obj_num, payload->offset_in_media_obj,
+ (guint16) payload->replicated_data_length,
+ payload->media_object_size,
+ payload->presentation_time, (guint16) GST_BUFFER_SIZE (payload->data));
+ gst_asf_put_payload (data, payload);
+ if (!payload->has_packet_info) {
+ payload->has_packet_info = TRUE;
+ payload->packet_number = asfmux->total_data_packets;
+ }
+
+ if (ASF_PAYLOAD_IS_KEYFRAME (payload)) {
+ has_keyframe = TRUE;
+ if (!pad->is_audio) {
+ GstAsfVideoPad *videopad = (GstAsfVideoPad *) pad;
+ videopad->last_keyframe_packet = payload->packet_number;
+ videopad->last_keyframe_packet_count = payload->packet_count;
+ videopad->has_keyframe = TRUE;
+ }
+ }
+
+ /* update our variables */
+ data += payload_size;
+ size_left -= payload_size;
+ payloads_count++;
+ walk = g_slist_next (walk);
+ }
+
+ /* remove flushed payloads */
+ GST_LOG_OBJECT (asfmux, "Freeing already used payloads");
+ for (i = 0; i < payloads_count; i++) {
+ GSList *aux = g_slist_nth (asfmux->payloads, 0);
+ AsfPayload *payload;
+ g_assert (aux);
+ payload = (AsfPayload *) aux->data;
+ asfmux->payloads = g_slist_remove (asfmux->payloads, payload);
+ asfmux->payload_data_size -=
+ (GST_BUFFER_SIZE (payload->data) + ASF_MULTIPLE_PAYLOAD_HEADER_SIZE);
+ gst_asf_payload_free (payload);
+ }
+
+ /* check if we can add part of the next payload */
+ if (asfmux->payloads && size_left > ASF_MULTIPLE_PAYLOAD_HEADER_SIZE) {
+ AsfPayload *payload =
+ (AsfPayload *) g_slist_nth (asfmux->payloads, 0)->data;
+ guint16 bytes_writen;
+ GST_DEBUG_OBJECT (asfmux, "Adding part of a payload to a packet");
+
+ if (ASF_PAYLOAD_IS_KEYFRAME (payload))
+ has_keyframe = TRUE;
+
+ if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (send_ts))) {
+ send_ts = GST_BUFFER_TIMESTAMP (payload->data);
+ }
+
+ bytes_writen = gst_asf_put_subpayload (data, payload, size_left);
+ if (!payload->has_packet_info) {
+ payload->has_packet_info = TRUE;
+ payload->packet_number = asfmux->total_data_packets;
+ }
+ asfmux->payload_data_size -= bytes_writen;
+ size_left -= (bytes_writen + ASF_MULTIPLE_PAYLOAD_HEADER_SIZE);
+ payloads_count++;
+ }
+
+ GST_LOG_OBJECT (asfmux, "Payload data size: %" G_GUINT32_FORMAT,
+ asfmux->payload_data_size);
+
+ /* fill payload parsing info */
+ data = GST_BUFFER_DATA (buf);
+ /* flags */
+ GST_WRITE_UINT8 (data, (0x0 << 7) | /* no error correction */
+ (ASF_FIELD_TYPE_DWORD << 5) | /* packet length type */
+ (ASF_FIELD_TYPE_DWORD << 3) | /* padding length type */
+ (ASF_FIELD_TYPE_NONE << 1) | /* sequence type type */
+ 0x1); /* multiple payloads */
+
+ /* property flags - according to the spec, this should not change */
+ GST_WRITE_UINT8 (data + 1, (ASF_FIELD_TYPE_BYTE << 6) | /* stream number length type */
+ (ASF_FIELD_TYPE_BYTE << 4) | /* media obj number length type */
+ (ASF_FIELD_TYPE_DWORD << 2) | /* offset into media object length type */
+ (ASF_FIELD_TYPE_BYTE)); /* replicated data length type */
+
+ GST_WRITE_UINT32_LE (data + 2, asfmux->packet_size);
+ GST_WRITE_UINT32_LE (data + 6, size_left); /* padding size */
+
+ /* packet send time */
+ if (GST_CLOCK_TIME_IS_VALID (send_ts)) {
+ GST_WRITE_UINT32_LE (data + 10, (send_ts / GST_MSECOND));
+ GST_BUFFER_TIMESTAMP (buf) = send_ts;
+ }
+
+ /* packet duration */
+ GST_WRITE_UINT16_LE (data + 14, 0); /* FIXME send duration needs to be estimated */
+
+ /* multiple payloads flags */
+ GST_WRITE_UINT8 (data + 16, 0x2 << 6 | payloads_count);
+
+ if (payloads_count == 0) {
+ GST_WARNING_OBJECT (asfmux, "Sending packet without any payload");
+ }
+ asfmux->data_object_size += GST_BUFFER_SIZE (buf);
+ if (!has_keyframe)
+ GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
+ return gst_asf_mux_send_packet (asfmux, buf);
+}
+
+/**
+ * stream_number_compare:
+ * @a: a #GstAsfPad
+ * @b: another #GstAsfPad
+ *
+ * Utility function to compare #GstAsfPad by their stream numbers
+ *
+ * Returns: The difference between their stream numbers
+ */
+static gint
+stream_number_compare (gconstpointer a, gconstpointer b)
+{
+ GstAsfPad *pad_a = (GstAsfPad *) a;
+ GstAsfPad *pad_b = (GstAsfPad *) b;
+ return pad_b->stream_number - pad_a->stream_number;
+}
+
+static GstFlowReturn
+gst_asf_mux_push_simple_index (GstAsfMux * asfmux, GstAsfVideoPad * pad)
+{
+ guint64 object_size = ASF_SIMPLE_INDEX_OBJECT_SIZE +
+ g_slist_length (pad->simple_index) * ASF_SIMPLE_INDEX_ENTRY_SIZE;
+ GstBuffer *buf = gst_buffer_new_and_alloc (object_size);
+ GSList *walk;
+ guint8 *data = GST_BUFFER_DATA (buf);
+ guint32 entries_count = g_slist_length (pad->simple_index);
+
+ gst_asf_put_guid (data, guids[ASF_SIMPLE_INDEX_OBJECT_INDEX]);
+ GST_WRITE_UINT64_LE (data + 16, object_size);
+ gst_asf_put_guid (data + 24, asfmux->file_id);
+ GST_WRITE_UINT64_LE (data + 40, pad->time_interval);
+ GST_WRITE_UINT32_LE (data + 48, pad->max_keyframe_packet_count);
+ GST_WRITE_UINT32_LE (data + 52, entries_count);
+ data += ASF_SIMPLE_INDEX_OBJECT_SIZE;
+
+ GST_DEBUG_OBJECT (asfmux,
+ "Simple index object values - size:%" G_GUINT64_FORMAT ", time interval:%"
+ G_GUINT64_FORMAT ", max packet count:%" G_GUINT32_FORMAT ", entries:%"
+ G_GUINT16_FORMAT, object_size, pad->time_interval,
+ pad->max_keyframe_packet_count, entries_count);
+
+ for (walk = pad->simple_index; walk; walk = g_slist_next (walk)) {
+ SimpleIndexEntry *entry = (SimpleIndexEntry *) walk->data;
+ GST_DEBUG_OBJECT (asfmux, "Simple index entry: packet_number:%"
+ G_GUINT32_FORMAT " packet_count:%" G_GUINT16_FORMAT,
+ entry->packet_number, entry->packet_count);
+ GST_WRITE_UINT32_LE (data, entry->packet_number);
+ GST_WRITE_UINT16_LE (data + 4, entry->packet_count);
+ data += ASF_SIMPLE_INDEX_ENTRY_SIZE;
+ }
+
+ GST_DEBUG_OBJECT (asfmux, "Pushing the simple index");
+ g_assert (data - GST_BUFFER_DATA (buf) == object_size);
+ return gst_asf_mux_push_buffer (asfmux, buf);
+}
+
+static GstFlowReturn
+gst_asf_mux_write_indexes (GstAsfMux * asfmux)
+{
+ GSList *ordered_pads;
+ GSList *walker;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ /* write simple indexes for the video streams */
+ ordered_pads =
+ g_slist_sort (asfmux->collect->data,
+ (GCompareFunc) stream_number_compare);
+ for (walker = ordered_pads; walker; walker = g_slist_next (walker)) {
+ GstAsfPad *pad = (GstAsfPad *) walker->data;
+ if (!pad->is_audio) {
+ ret = gst_asf_mux_push_simple_index (asfmux, (GstAsfVideoPad *) pad);
+ if (ret != GST_FLOW_OK) {
+ GST_ERROR_OBJECT (asfmux, "Failed to write simple index for stream %"
+ G_GUINT16_FORMAT, (guint16) pad->stream_number);
+ return ret;
+ }
+ }
+ }
+ return ret;
+}
+
+/**
+ * gst_asf_mux_stop_file:
+ * @asfmux: #GstAsfMux
+ *
+ * Finalizes the asf stream by pushing the indexes after
+ * the data object. It also seeks back to the headers to rewrite
+ * fields such as the total number of bytes of the file and any
+ * other values that couldn't be known when the headers were
+ * first generated.
+ *
+ * Returns: GST_FLOW_OK on success
+ */
+static GstFlowReturn
+gst_asf_mux_stop_file (GstAsfMux * asfmux)
+{
+ GstEvent *event;
+ GstBuffer *buf;
+ GstFlowReturn ret = GST_FLOW_OK;
+ GSList *walk;
+ GstClockTime play_duration = 0;
+ guint32 bitrate = 0;
+
+ /* write indexes */
+ ret = gst_asf_mux_write_indexes (asfmux);
+ if (ret != GST_FLOW_OK) {
+ GST_ERROR_OBJECT (asfmux, "Failed to write indexes");
+ return ret;
+ }
+
+ /* find max stream duration and bitrate */
+ for (walk = asfmux->collect->data; walk; walk = g_slist_next (walk)) {
+ GstAsfPad *pad = (GstAsfPad *) walk->data;
+ bitrate += pad->bitrate;
+ if (pad->play_duration > play_duration)
+ play_duration = pad->play_duration;
+ }
+
+ /* going back to file properties object to fill in
+ * values we didn't know back then */
+ GST_DEBUG_OBJECT (asfmux,
+ "Sending new segment to file properties object position");
+ event =
+ gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
+ asfmux->file_properties_object_position + 40, GST_CLOCK_TIME_NONE, 0);
+ if (!gst_pad_push_event (asfmux->srcpad, event)) {
+ GST_ERROR_OBJECT (asfmux, "Failed to update file properties object");
+ return GST_FLOW_ERROR;
+ }
+ /* All file properties fields except the first 40 bytes */
+ buf = gst_buffer_new_and_alloc (ASF_FILE_PROPERTIES_OBJECT_SIZE - 40);
+
+ GST_WRITE_UINT64_LE (GST_BUFFER_DATA (buf), asfmux->file_size);
+ gst_asf_put_time (GST_BUFFER_DATA (buf) + 8, gst_asf_get_current_time ());
+ GST_WRITE_UINT64_LE (GST_BUFFER_DATA (buf) + 16, asfmux->total_data_packets);
+ GST_WRITE_UINT64_LE (GST_BUFFER_DATA (buf) + 24, (play_duration / 100) +
+ ASF_MILI_TO_100NANO (asfmux->preroll));
+ GST_WRITE_UINT64_LE (GST_BUFFER_DATA (buf) + 32, (play_duration / 100)); /* TODO send duration */
+ GST_WRITE_UINT64_LE (GST_BUFFER_DATA (buf) + 40, asfmux->preroll);
+ GST_WRITE_UINT32_LE (GST_BUFFER_DATA (buf) + 48, 0x2); /* flags - seekable */
+ GST_WRITE_UINT32_LE (GST_BUFFER_DATA (buf) + 52, asfmux->packet_size);
+ GST_WRITE_UINT32_LE (GST_BUFFER_DATA (buf) + 56, asfmux->packet_size);
+ /* FIXME - we want the max instantaneous bitrate, for vbr streams, we can't
+ * get it this way, this would be the average, right? */
+ GST_WRITE_UINT32_LE (GST_BUFFER_DATA (buf) + 60, bitrate); /* max bitrate */
+
+ /* we don't use gst_asf_mux_push_buffer because we are overwriting
+ * already sent data */
+ ret = gst_pad_push (asfmux->srcpad, buf);
+ if (ret != GST_FLOW_OK) {
+ GST_ERROR_OBJECT (asfmux, "Failed to update file properties object");
+ return ret;
+ }
+
+ GST_DEBUG_OBJECT (asfmux, "Seeking back to data object");
+
+ /* seek back to the data object */
+ event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_BYTES,
+ asfmux->data_object_position + 16, GST_CLOCK_TIME_NONE, 0);
+
+ if (!gst_pad_push_event (asfmux->srcpad, event)) {
+ GST_ERROR_OBJECT (asfmux, "Seek to update data object failed");
+ return GST_FLOW_ERROR;
+ }
+
+ buf = gst_buffer_new_and_alloc (32); /* qword+guid+qword */
+ GST_WRITE_UINT64_LE (GST_BUFFER_DATA (buf), asfmux->data_object_size +
+ ASF_DATA_OBJECT_SIZE);
+ gst_asf_put_guid (GST_BUFFER_DATA (buf) + 8, asfmux->file_id);
+ GST_WRITE_UINT64_LE (GST_BUFFER_DATA (buf) + 24, asfmux->total_data_packets);
+
+ return gst_pad_push (asfmux->srcpad, buf);
+}
+
+/**
+ * gst_asf_mux_process_buffer:
+ * @asfmux: the #GstAsfMux
+ * @pad: the stream pad of the buffer
+ * @buf: the buffer to be processed
+ *
+ * Processes the buffer by wrapping it into an asf payload and
+ * queueing it up to be added to an asf packet and pushed later.
+ *
+ * Returns: a #GstFlowReturn
+ */
+static GstFlowReturn
+gst_asf_mux_process_buffer (GstAsfMux * asfmux, GstAsfPad * pad,
+ GstBuffer * buf)
+{
+ guint8 keyframe;
+ AsfPayload *payload;
+
+ payload = g_malloc0 (sizeof (AsfPayload));
+ payload->pad = (GstCollectData *) pad;
+ payload->data = buf;
+
+ GST_LOG_OBJECT (asfmux,
+ "Processing payload data for stream number %" G_GUINT16_FORMAT,
+ pad->stream_number);
+
+ /* keyframe flag (bit 7) and stream number */
+ if (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT)) {
+ keyframe = 0;
+ } else {
+ keyframe = 0x1 << 7;
+ }
+ payload->stream_number = keyframe | pad->stream_number;
+
+ payload->media_obj_num = pad->media_object_number;
+ payload->offset_in_media_obj = 0;
+ payload->replicated_data_length = 8;
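+ /* the 8 bytes of replicated data are the media object size (4 bytes)
+ * followed by the presentation time in milliseconds (4 bytes),
+ * filled in below */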
+
+ /* replicated data - 1) media object size */
+ payload->media_object_size = GST_BUFFER_SIZE (buf);
+ /* replicated data - 2) presentation time */
+ if (!GST_CLOCK_TIME_IS_VALID (GST_BUFFER_TIMESTAMP (buf))) {
+ GST_ERROR_OBJECT (asfmux, "Received buffer without timestamp");
+ gst_asf_payload_free (payload);
+ return GST_FLOW_ERROR;
+ }
+ payload->presentation_time = asfmux->preroll +
+ (GST_BUFFER_TIMESTAMP (buf) / GST_MSECOND);
+
+ /* update counting values */
+ pad->media_object_number = (pad->media_object_number + 1) % 256;
+ if (GST_BUFFER_DURATION (buf) != GST_CLOCK_TIME_NONE) {
+ pad->play_duration += GST_BUFFER_DURATION (buf);
+ } else {
+ GST_WARNING_OBJECT (asfmux, "Received buffer without duration, it will not "
+ "be accounted for in the total file time");
+ }
+
+ asfmux->payloads = g_slist_append (asfmux->payloads, payload);
+ asfmux->payload_data_size +=
+ GST_BUFFER_SIZE (buf) + ASF_MULTIPLE_PAYLOAD_HEADER_SIZE;
+ GST_LOG_OBJECT (asfmux, "Payload data size: %" G_GUINT32_FORMAT,
+ asfmux->payload_data_size);
+
+ while (asfmux->payload_data_size + ASF_PAYLOAD_PARSING_INFO_SIZE >=
+ asfmux->packet_size) {
+ GstFlowReturn ret = gst_asf_mux_flush_payloads (asfmux);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ }
+
+ return GST_FLOW_OK;
+}
+
+static GstFlowReturn
+gst_asf_mux_collected (GstCollectPads * collect, gpointer data)
+{
+ GstAsfMux *asfmux = GST_ASF_MUX_CAST (data);
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstAsfPad *best_pad = NULL;
+ GstClockTime best_time = GST_CLOCK_TIME_NONE;
+ GstBuffer *buf = NULL;
+ GSList *walk;
+
+ if (G_UNLIKELY (asfmux->state == GST_ASF_MUX_STATE_STARTED)) {
+ ret = gst_asf_mux_start_file (asfmux);
+ if (ret != GST_FLOW_OK) {
+ GST_WARNING_OBJECT (asfmux, "Failed to send headers");
+ return ret;
+ } else {
+ asfmux->state = GST_ASF_MUX_STATE_DATA;
+ }
+ }
+
+ if (G_UNLIKELY (asfmux->state == GST_ASF_MUX_STATE_EOS))
+ return GST_FLOW_UNEXPECTED;
+
+ /* select the earliest buffer */
+ walk = asfmux->collect->data;
+ while (walk) {
+ GstAsfPad *pad;
+ GstCollectData *data;
+ GstClockTime time;
+
+ data = (GstCollectData *) walk->data;
+ pad = (GstAsfPad *) data;
+
+ walk = g_slist_next (walk);
+
+ buf = gst_collect_pads_peek (collect, data);
+ if (buf == NULL) {
+ GST_LOG_OBJECT (asfmux, "Pad %s has no buffers",
+ GST_PAD_NAME (pad->collect.pad));
+ continue;
+ }
+ time = GST_BUFFER_TIMESTAMP (buf);
+ gst_buffer_unref (buf);
+
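+ /* prefer pads whose pending buffer has no valid timestamp; otherwise
+ * pick the pad with the earliest timestamp */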
+ if (best_pad == NULL || !GST_CLOCK_TIME_IS_VALID (time) ||
+ (GST_CLOCK_TIME_IS_VALID (best_time) && time < best_time)) {
+ best_pad = pad;
+ best_time = time;
+ }
+ }
+
+ if (best_pad != NULL) {
+ /* we have data */
+ GST_LOG_OBJECT (asfmux, "selected pad %s with time %" GST_TIME_FORMAT,
+ GST_PAD_NAME (best_pad->collect.pad), GST_TIME_ARGS (best_time));
+ buf = gst_collect_pads_pop (collect, &best_pad->collect);
+ ret = gst_asf_mux_process_buffer (asfmux, best_pad, buf);
+ } else {
+ /* no data, let's finish it up */
+ while (asfmux->payloads) {
+ ret = gst_asf_mux_flush_payloads (asfmux);
+ if (ret != GST_FLOW_OK) {
+ return ret;
+ }
+ }
+ g_assert (asfmux->payloads == NULL);
+ g_assert (asfmux->payload_data_size == 0);
+ /* in 'is-live' mode we don't need to push indexes
+ * or update the headers */
+ if (!asfmux->prop_is_live) {
+ ret = gst_asf_mux_stop_file (asfmux);
+ }
+ if (ret == GST_FLOW_OK) {
+ gst_pad_push_event (asfmux->srcpad, gst_event_new_eos ());
+ ret = GST_FLOW_UNEXPECTED;
+ }
+ asfmux->state = GST_ASF_MUX_STATE_EOS;
+ }
+
+ return ret;
+}
+
+static void
+gst_asf_mux_pad_reset (GstAsfPad * pad)
+{
+ pad->stream_number = 0;
+ pad->media_object_number = 0;
+ pad->play_duration = (GstClockTime) 0;
+ pad->bitrate = 0;
+ if (pad->codec_data)
+ gst_buffer_unref (pad->codec_data);
+ pad->codec_data = NULL;
+ if (pad->taglist)
+ gst_tag_list_free (pad->taglist);
+ pad->taglist = NULL;
+
+ if (pad->is_audio) {
+ GstAsfAudioPad *audiopad = (GstAsfAudioPad *) pad;
+ audiopad->audioinfo.rate = 0;
+ audiopad->audioinfo.channels = 0;
+ audiopad->audioinfo.format = 0;
+ audiopad->audioinfo.av_bps = 0;
+ audiopad->audioinfo.blockalign = 0;
+ audiopad->audioinfo.size = 0;
+ } else {
+ GstAsfVideoPad *videopad = (GstAsfVideoPad *) pad;
+ videopad->vidinfo.size = 0;
+ videopad->vidinfo.width = 0;
+ videopad->vidinfo.height = 0;
+ videopad->vidinfo.planes = 1;
+ videopad->vidinfo.bit_cnt = 0;
+ videopad->vidinfo.compression = 0;
+ videopad->vidinfo.image_size = 0;
+ videopad->vidinfo.xpels_meter = 0;
+ videopad->vidinfo.ypels_meter = 0;
+ videopad->vidinfo.num_colors = 0;
+ videopad->vidinfo.imp_colors = 0;
+
+ videopad->last_keyframe_packet = 0;
+ videopad->has_keyframe = FALSE;
+ videopad->last_keyframe_packet_count = 0;
+ videopad->max_keyframe_packet_count = 0;
+ videopad->next_index_time = 0;
+ videopad->time_interval = DEFAULT_SIMPLE_INDEX_TIME_INTERVAL;
+ if (videopad->simple_index) {
+ GSList *walk;
+ for (walk = videopad->simple_index; walk; walk = g_slist_next (walk)) {
+ g_free (walk->data);
+ walk->data = NULL;
+ }
+ g_slist_free (videopad->simple_index);
+ }
+ videopad->simple_index = NULL;
+ }
+}
+
+static gboolean
+gst_asf_mux_audio_set_caps (GstPad * pad, GstCaps * caps)
+{
+ GstAsfMux *asfmux;
+ GstAsfAudioPad *audiopad;
+ GstStructure *structure;
+ const gchar *caps_name;
+ gint channels, rate;
+ gchar *aux;
+ const GValue *codec_data;
+
+ asfmux = GST_ASF_MUX (gst_pad_get_parent (pad));
+
+ audiopad = (GstAsfAudioPad *) gst_pad_get_element_private (pad);
+ g_assert (audiopad);
+
+ aux = gst_caps_to_string (caps);
+ GST_DEBUG_OBJECT (asfmux, "%s:%s, caps=%s", GST_DEBUG_PAD_NAME (pad), aux);
+ g_free (aux);
+
+ structure = gst_caps_get_structure (caps, 0);
+ caps_name = gst_structure_get_name (structure);
+
+ if (!gst_structure_get_int (structure, "channels", &channels) ||
+ !gst_structure_get_int (structure, "rate", &rate))
+ goto refuse_caps;
+
+ audiopad->audioinfo.channels = (guint16) channels;
+ audiopad->audioinfo.rate = (guint32) rate;
+
+ /* taken from avimux
+ * codec initialization data, if any
+ */
+ codec_data = gst_structure_get_value (structure, "codec_data");
+ if (codec_data) {
+ audiopad->pad.codec_data = gst_value_get_buffer (codec_data);
+ gst_buffer_ref (audiopad->pad.codec_data);
+ }
+
+ if (strcmp (caps_name, "audio/x-wma") == 0) {
+ gint version;
+ gint block_align = 0;
+ gint bitrate = 0;
+
+ if (!gst_structure_get_int (structure, "wmaversion", &version)) {
+ goto refuse_caps;
+ }
+
+ if (gst_structure_get_int (structure, "block_align", &block_align)) {
+ audiopad->audioinfo.blockalign = (guint16) block_align;
+ }
+ if (gst_structure_get_int (structure, "bitrate", &bitrate)) {
+ audiopad->pad.bitrate = (guint32) bitrate;
+ audiopad->audioinfo.av_bps = bitrate / 8;
+ }
+
+ if (version == 1) {
+ audiopad->audioinfo.format = GST_RIFF_WAVE_FORMAT_WMAV1;
+ } else if (version == 2) {
+ audiopad->audioinfo.format = GST_RIFF_WAVE_FORMAT_WMAV2;
+ } else if (version == 3) {
+ audiopad->audioinfo.format = GST_RIFF_WAVE_FORMAT_WMAV3;
+ } else {
+ goto refuse_caps;
+ }
+ } else if (strcmp (caps_name, "audio/mpeg") == 0) {
+ gint version;
+ gint layer;
+
+ if (!gst_structure_get_int (structure, "mpegversion", &version) ||
+ !gst_structure_get_int (structure, "layer", &layer)) {
+ goto refuse_caps;
+ }
+ if (version != 1 || layer != 3) {
+ goto refuse_caps;
+ }
+
+ audiopad->audioinfo.format = GST_RIFF_WAVE_FORMAT_MPEGL3;
+ } else {
+ goto refuse_caps;
+ }
+
+ gst_object_unref (asfmux);
+ return TRUE;
+
+refuse_caps:
+ GST_WARNING_OBJECT (asfmux, "pad %s refused caps %" GST_PTR_FORMAT,
+ GST_PAD_NAME (pad), caps);
+ gst_object_unref (asfmux);
+ return FALSE;
+}
+
+/* TODO Read pixel aspect ratio */
+static gboolean
+gst_asf_mux_video_set_caps (GstPad * pad, GstCaps * caps)
+{
+ GstAsfMux *asfmux;
+ GstAsfVideoPad *videopad;
+ GstStructure *structure;
+ const gchar *caps_name;
+ gint width, height;
+ gchar *aux;
+ const GValue *codec_data;
+
+ asfmux = GST_ASF_MUX (gst_pad_get_parent (pad));
+
+ videopad = (GstAsfVideoPad *) gst_pad_get_element_private (pad);
+ g_assert (videopad);
+
+ aux = gst_caps_to_string (caps);
+ GST_DEBUG_OBJECT (asfmux, "%s:%s, caps=%s", GST_DEBUG_PAD_NAME (pad), aux);
+ g_free (aux);
+
+ structure = gst_caps_get_structure (caps, 0);
+ caps_name = gst_structure_get_name (structure);
+
+ if (!gst_structure_get_int (structure, "width", &width) ||
+ !gst_structure_get_int (structure, "height", &height))
+ goto refuse_caps;
+
+ videopad->vidinfo.width = (gint32) width;
+ videopad->vidinfo.height = (gint32) height;
+
+ /* taken from avimux
+ * codec initialization data, if any
+ */
+ codec_data = gst_structure_get_value (structure, "codec_data");
+ if (codec_data) {
+ videopad->pad.codec_data = gst_value_get_buffer (codec_data);
+ gst_buffer_ref (videopad->pad.codec_data);
+ }
+
+ if (strcmp (caps_name, "video/x-wmv") == 0) {
+ gint version;
+
+ if (!gst_structure_get_int (structure, "wmvversion", &version))
+ goto refuse_caps;
+
+ videopad->vidinfo.bit_cnt = 24;
+ if (version == 2) {
+ videopad->vidinfo.compression = GST_MAKE_FOURCC ('W', 'M', 'V', '2');
+ } else if (version == 1) {
+ videopad->vidinfo.compression = GST_MAKE_FOURCC ('W', 'M', 'V', '1');
+ } else if (version == 3) {
+ videopad->vidinfo.compression = GST_MAKE_FOURCC ('W', 'M', 'V', '3');
+ } else {
+ goto refuse_caps;
+ }
+ } else {
+ goto refuse_caps;
+ }
+
+ gst_object_unref (asfmux);
+ return TRUE;
+
+refuse_caps:
+ GST_WARNING_OBJECT (asfmux, "pad %s refused caps %" GST_PTR_FORMAT,
+ GST_PAD_NAME (pad), caps);
+ gst_object_unref (asfmux);
+ return FALSE;
+}
+
+static GstPad *
+gst_asf_mux_request_new_pad (GstElement * element,
+ GstPadTemplate * templ, const gchar * name)
+{
+ GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
+ GstAsfMux *asfmux = GST_ASF_MUX_CAST (element);
+ GstPad *newpad;
+ GstAsfPad *collect_pad;
+ gboolean is_audio;
+ guint collect_size = 0;
+
+ GST_DEBUG_OBJECT (asfmux, "Requested pad: %s", GST_STR_NULL (name));
+
+ if (asfmux->state != GST_ASF_MUX_STATE_NONE) {
+ GST_WARNING_OBJECT (asfmux, "Not providing request pad after the element "
+ "has reached the paused/playing state");
+ return NULL;
+ }
+
+ if (templ == gst_element_class_get_pad_template (klass, "audio_%d")) {
+ is_audio = TRUE;
+ newpad = gst_pad_new_from_template (templ, name);
+ gst_pad_set_setcaps_function (newpad,
+ GST_DEBUG_FUNCPTR (gst_asf_mux_audio_set_caps));
+ } else if (templ == gst_element_class_get_pad_template (klass, "video_%d")) {
+ is_audio = FALSE;
+ newpad = gst_pad_new_from_template (templ, name);
+ gst_pad_set_setcaps_function (newpad,
+ GST_DEBUG_FUNCPTR (gst_asf_mux_video_set_caps));
+ } else {
+ GST_WARNING_OBJECT (asfmux, "This is not our template!");
+ return NULL;
+ }
+
+ /* add pad to collections */
+ if (is_audio) {
+ collect_size = sizeof (GstAsfAudioPad);
+ } else {
+ collect_size = sizeof (GstAsfVideoPad);
+ }
+ collect_pad = (GstAsfPad *)
+ gst_collect_pads_add_pad_full (asfmux->collect, newpad, collect_size,
+ (GstCollectDataDestroyNotify) (gst_asf_mux_pad_reset));
+
+ /* set up pad */
+ collect_pad->is_audio = is_audio;
+ if (!is_audio)
+ ((GstAsfVideoPad *) collect_pad)->simple_index = NULL;
+ collect_pad->taglist = NULL;
+ gst_asf_mux_pad_reset (collect_pad);
+
+ /* set pad stream number */
+ asfmux->stream_number += 1;
+ collect_pad->stream_number = asfmux->stream_number;
+
+ /* FIXME: hacked way to override/extend the event function of
+ * GstCollectPads, which sets its own event function and thus gives
+ * the element no access to events.
+ */
+ asfmux->collect_event = (GstPadEventFunction) GST_PAD_EVENTFUNC (newpad);
+ gst_pad_set_event_function (newpad,
+ GST_DEBUG_FUNCPTR (gst_asf_mux_sink_event));
+
+ gst_pad_set_active (newpad, TRUE);
+ gst_element_add_pad (element, newpad);
+
+ return newpad;
+}
+
+static void
+gst_asf_mux_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstAsfMux *asfmux;
+
+ asfmux = GST_ASF_MUX (object);
+ switch (prop_id) {
+ case PROP_PACKET_SIZE:
+ g_value_set_uint (value, asfmux->prop_packet_size);
+ break;
+ case PROP_PREROLL:
+ g_value_set_uint64 (value, asfmux->prop_preroll);
+ break;
+ case PROP_MERGE_STREAM_TAGS:
+ g_value_set_boolean (value, asfmux->prop_merge_stream_tags);
+ break;
+ case PROP_PADDING:
+ g_value_set_uint64 (value, asfmux->prop_padding);
+ break;
+ case PROP_IS_LIVE:
+ g_value_set_boolean (value, asfmux->prop_is_live);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_asf_mux_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstAsfMux *asfmux;
+
+ asfmux = GST_ASF_MUX (object);
+ switch (prop_id) {
+ case PROP_PACKET_SIZE:
+ asfmux->prop_packet_size = g_value_get_uint (value);
+ break;
+ case PROP_PREROLL:
+ asfmux->prop_preroll = g_value_get_uint64 (value);
+ break;
+ case PROP_MERGE_STREAM_TAGS:
+ asfmux->prop_merge_stream_tags = g_value_get_boolean (value);
+ break;
+ case PROP_PADDING:
+ asfmux->prop_padding = g_value_get_uint64 (value);
+ break;
+ case PROP_IS_LIVE:
+ asfmux->prop_is_live = g_value_get_boolean (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static GstStateChangeReturn
+gst_asf_mux_change_state (GstElement * element, GstStateChange transition)
+{
+ GstAsfMux *asfmux;
+ GstStateChangeReturn ret;
+
+ asfmux = GST_ASF_MUX (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ /* TODO - check if it is possible to mux 2 files without going
+ * through here */
+ asfmux->packet_size = asfmux->prop_packet_size;
+ asfmux->preroll = asfmux->prop_preroll;
+ asfmux->merge_stream_tags = asfmux->prop_merge_stream_tags;
+ gst_collect_pads_start (asfmux->collect);
+ asfmux->state = GST_ASF_MUX_STATE_STARTED;
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_collect_pads_stop (asfmux->collect);
+ asfmux->state = GST_ASF_MUX_STATE_NONE;
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ goto done;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ break;
+ default:
+ break;
+ }
+
+done:
+ return ret;
+}
+
+gboolean
+gst_asf_mux_plugin_init (GstPlugin * plugin)
+{
+ return gst_element_register (plugin, "asfmux",
+ GST_RANK_NONE, GST_TYPE_ASF_MUX);
+}
diff --git a/gst/asfmux/gstasfmux.h b/gst/asfmux/gstasfmux.h
new file mode 100644
index 00000000..280b6753
--- /dev/null
+++ b/gst/asfmux/gstasfmux.h
@@ -0,0 +1,159 @@
+/* ASF muxer plugin for GStreamer
+ * Copyright (C) 2009 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+#ifndef __GST_ASF_MUX_H__
+#define __GST_ASF_MUX_H__
+
+
+#include <gst/gst.h>
+#include <gst/base/gstcollectpads.h>
+#include <gst/riff/riff-media.h>
+
+#include "gstasfobjects.h"
+
+G_BEGIN_DECLS
+#define GST_TYPE_ASF_MUX \
+ (gst_asf_mux_get_type())
+#define GST_ASF_MUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_ASF_MUX,GstAsfMux))
+#define GST_ASF_MUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_ASF_MUX,GstAsfMuxClass))
+#define GST_IS_ASF_MUX(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_ASF_MUX))
+#define GST_IS_ASF_MUX_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_ASF_MUX))
+#define GST_ASF_MUX_CAST(obj) ((GstAsfMux*)(obj))
+typedef struct _GstAsfMux GstAsfMux;
+typedef struct _GstAsfMuxClass GstAsfMuxClass;
+typedef struct _GstAsfPad GstAsfPad;
+typedef struct _GstAsfAudioPad GstAsfAudioPad;
+typedef struct _GstAsfVideoPad GstAsfVideoPad;
+typedef enum _GstAsfMuxState GstAsfMuxState;
+
+enum _GstAsfMuxState
+{
+ GST_ASF_MUX_STATE_NONE,
+ GST_ASF_MUX_STATE_STARTED,
+ GST_ASF_MUX_STATE_HEADERS,
+ GST_ASF_MUX_STATE_DATA,
+ GST_ASF_MUX_STATE_EOS
+};
+
+struct _GstAsfPad
+{
+ GstCollectData collect;
+
+ gboolean is_audio;
+ guint8 stream_number;
+ guint8 media_object_number;
+ guint32 bitrate;
+
+ GstClockTime play_duration;
+
+ GstBuffer *codec_data;
+
+ /* stream only metadata */
+ GstTagList *taglist;
+};
+
+struct _GstAsfAudioPad
+{
+ GstAsfPad pad;
+
+ gst_riff_strf_auds audioinfo;
+};
+
+struct _GstAsfVideoPad
+{
+ GstAsfPad pad;
+
+ gst_riff_strf_vids vidinfo;
+
+ /* Simple Index Entries */
+ GSList *simple_index;
+ gboolean has_keyframe; /* whether we have received at least one keyframe */
+ guint32 last_keyframe_packet;
+ guint16 last_keyframe_packet_count;
+ guint16 max_keyframe_packet_count;
+ GstClockTime next_index_time;
+ guint64 time_interval;
+};
+
+struct _GstAsfMux
+{
+ GstElement element;
+
+ /* output stream state */
+ GstAsfMuxState state;
+
+ /* counter to assign stream numbers */
+ guint8 stream_number;
+
+ /* counting variables */
+ guint64 file_size;
+ guint64 data_object_size;
+ guint64 total_data_packets;
+
+ /*
+ * positions of the data object and file properties object,
+ * needed for updating them when finishing the file
+ */
+ guint64 data_object_position;
+ guint64 file_properties_object_position;
+
+ /* payloads still to be sent in a packet */
+ guint32 payload_data_size;
+ GSList *payloads;
+
+ Guid file_id;
+
+ /* properties */
+ guint32 prop_packet_size;
+ guint64 prop_preroll;
+ gboolean prop_merge_stream_tags;
+ guint64 prop_padding;
+ gboolean prop_is_live;
+
+ /* same as properties, but those are stored here to be
+ * used without modification while muxing a single file */
+ guint32 packet_size;
+ guint64 preroll; /* milliseconds */
+ gboolean merge_stream_tags;
+
+ /* pads */
+ GstPad *srcpad;
+
+ /* sinkpads, video first */
+ GSList *sinkpads;
+
+ GstCollectPads *collect;
+ GstPadEventFunction collect_event;
+};
+
+struct _GstAsfMuxClass
+{
+ GstElementClass parent_class;
+};
+
+GType gst_asf_mux_get_type (void);
+gboolean gst_asf_mux_plugin_init (GstPlugin * plugin);
+
+G_END_DECLS
+#endif /* __GST_ASF_MUX_H__ */
diff --git a/gst/asfmux/gstasfobjects.c b/gst/asfmux/gstasfobjects.c
new file mode 100644
index 00000000..27c71f5e
--- /dev/null
+++ b/gst/asfmux/gstasfobjects.c
@@ -0,0 +1,803 @@
+/* ASF muxer plugin for GStreamer
+ * Copyright (C) 2009 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include "gstasfobjects.h"
+#include <string.h>
+
+/* Guids */
+const Guid guids[] = {
+ /* asf header object */
+ {0x75B22630, 0x668E, 0x11CF, G_GUINT64_CONSTANT (0xA6D900AA0062CE6C)},
+ /* asf file properties object */
+ {0x8CABDCA1, 0xA947, 0x11CF, G_GUINT64_CONSTANT (0x8EE400C00C205365)},
+ /* asf stream properties object */
+ {0xB7DC0791, 0xA9B7, 0x11CF, G_GUINT64_CONSTANT (0x8EE600C00C205365)},
+ /* asf audio media */
+ {0xF8699E40, 0x5B4D, 0x11CF, G_GUINT64_CONSTANT (0xA8FD00805F5C442B)},
+ /* asf no error correction */
+ {0x20FB5700, 0x5B55, 0x11CF, G_GUINT64_CONSTANT (0xA8FD00805F5C442B)},
+ /* asf audio spread */
+ {0xBFC3CD50, 0x618F, 0x11CF, G_GUINT64_CONSTANT (0x8BB200AA00B4E220)},
+ /* asf header extension object */
+ {0x5FBF03B5, 0xA92E, 0x11CF, G_GUINT64_CONSTANT (0x8EE300C00C205365)},
+ /* asf reserved 1 */
+ {0xABD3D211, 0xA9BA, 0x11CF, G_GUINT64_CONSTANT (0x8EE600C00C205365)},
+ /* asf data object */
+ {0x75B22636, 0x668E, 0x11CF, G_GUINT64_CONSTANT (0xA6D900AA0062CE6C)},
+ /* asf extended stream properties object */
+ {0x14E6A5CB, 0xC672, 0x4332, G_GUINT64_CONSTANT (0x8399A96952065B5A)},
+ /* asf video media */
+ {0xBC19EFC0, 0x5B4D, 0x11CF, G_GUINT64_CONSTANT (0xA8FD00805F5C442B)},
+ /* asf simple index object */
+ {0x33000890, 0xE5B1, 0x11CF, G_GUINT64_CONSTANT (0x89F400A0C90349CB)},
+ /* asf content description */
+ {0x75B22633, 0x668E, 0x11CF, G_GUINT64_CONSTANT (0xA6D900AA0062CE6C)},
+ /* asf extended content description */
+ {0xD2D0A440, 0xE307, 0x11D2, G_GUINT64_CONSTANT (0x97F000A0C95EA850)},
+ /* asf metadata object */
+ {0xC5F8CBEA, 0x5BAF, 0x4877, G_GUINT64_CONSTANT (0x8467AA8C44FA4CCA)},
+ /* asf padding object */
+ {0x1806D474, 0xCADF, 0x4509, G_GUINT64_CONSTANT (0xA4BA9AABCB96AAE8)}
+};
+
+/**
+ * gst_asf_generate_file_id:
+ *
+ * Generates a random GUID
+ *
+ * Returns: The generated GUID
+ */
+Guid
+gst_asf_generate_file_id ()
+{
+ guint32 aux;
+ Guid guid;
+ guid.v1 = g_random_int ();
+ aux = g_random_int ();
+ guid.v2 = (guint16) (aux & 0x0000FFFF);
+ guid.v3 = (guint16) (aux >> 16);
+ guid.v4 = (((guint64) g_random_int ()) << 32) | (guint64) g_random_int ();
+
+ return guid;
+}
+
+/**
+ * gst_byte_reader_get_asf_var_size_field:
+ * @reader: A #GstByteReader
+ * @field_type: an asf field type
+ * @var: pointer to store the result
+ *
+ * Reads the proper data from the #GstByteReader according to the
+ * asf field type and stores it in var
+ *
+ * Returns: True on success, false otherwise
+ */
+gboolean
+gst_byte_reader_get_asf_var_size_field (GstByteReader * reader,
+ guint8 field_type, guint32 * var)
+{
+ guint8 aux8 = 0;
+ guint16 aux16 = 0;
+ guint32 aux32 = 0;
+ gboolean ret;
+
+ switch (field_type) {
+ case ASF_FIELD_TYPE_DWORD:
+ ret = gst_byte_reader_get_uint32_le (reader, &aux32);
+ *var = aux32;
+ break;
+ case ASF_FIELD_TYPE_WORD:
+ ret = gst_byte_reader_get_uint16_le (reader, &aux16);
+ *var = aux16;
+ break;
+ case ASF_FIELD_TYPE_BYTE:
+ ret = gst_byte_reader_get_uint8 (reader, &aux8);
+ *var = aux8;
+ break;
+ case ASF_FIELD_TYPE_NONE:
+ ret = TRUE;
+ *var = 0;
+ break;
+ default:
+ return FALSE;
+ }
+ return ret;
+}
+
+/**
+ * gst_asf_read_var_size_field:
+ * @data: pointer to the data to be read
+ * @field_type: the asf field type pointed by data
+ *
+ * Reads and returns the value read from the data, according to the
+ * field type given
+ *
+ * Returns: The value read
+ */
+guint32
+gst_asf_read_var_size_field (guint8 * data, guint8 field_type)
+{
+ switch (field_type) {
+ case ASF_FIELD_TYPE_DWORD:
+ return GST_READ_UINT32_LE (data);
+ case ASF_FIELD_TYPE_WORD:
+ return GST_READ_UINT16_LE (data);
+ case ASF_FIELD_TYPE_BYTE:
+ return data[0];
+ default:
+ return 0;
+ }
+}
+
+/**
+ * gst_asf_get_var_size_field_len:
+ * @field_type: the asf field type
+ *
+ * Returns: the size in bytes of a variable of field_type type
+ */
+guint
+gst_asf_get_var_size_field_len (guint8 field_type)
+{
+ switch (field_type) {
+ case ASF_FIELD_TYPE_DWORD:
+ return 4;
+ case ASF_FIELD_TYPE_WORD:
+ return 2;
+ case ASF_FIELD_TYPE_BYTE:
+ return 1;
+ default:
+ return 0;
+ }
+}
+
+/**
+ * gst_asf_file_info_new:
+ *
+ * Creates a new #GstAsfFileInfo
+ *
+ * Returns: the created struct
+ */
+GstAsfFileInfo *
+gst_asf_file_info_new ()
+{
+ return g_new0 (GstAsfFileInfo, 1);
+}
+
+/**
+ * gst_asf_file_info_reset:
+ * @info: the #GstAsfFileInfo to be reset
+ *
+ * Resets the data of a #GstAsfFileInfo
+ */
+void
+gst_asf_file_info_reset (GstAsfFileInfo * info)
+{
+ info->packet_size = 0;
+ info->packets_count = 0;
+ info->broadcast = FALSE;
+}
+
+/**
+ * gst_asf_file_info_free:
+ * @info: the #GstAsfFileInfo to be freed
+ *
+ * Releases memory associated with this #GstAsfFileInfo
+ */
+void
+gst_asf_file_info_free (GstAsfFileInfo * info)
+{
+ g_free (info);
+}
+
+/**
+ * gst_asf_payload_get_size:
+ * @payload: the payload to get the size from
+ *
+ * Returns: the size of an asf payload of the data represented by this
+ * #AsfPayload
+ */
+guint32
+gst_asf_payload_get_size (AsfPayload * payload)
+{
+ return ASF_MULTIPLE_PAYLOAD_HEADER_SIZE + GST_BUFFER_SIZE (payload->data);
+}
+
+/**
+ * gst_asf_payload_free:
+ * @payload: the #AsfPayload to be freed
+ *
+ * Releases the memory associated with this payload
+ */
+void
+gst_asf_payload_free (AsfPayload * payload)
+{
+ gst_buffer_unref (payload->data);
+ g_free (payload);
+}
+
+/**
+ * gst_asf_get_current_time:
+ *
+ * Gets the current system time in the ASF time unit
+ * (100-nanosecond intervals since Jan 1st, 1601)
+ *
+ * Returns: the current time in 100-nanosecond units
+ */
+guint64
+gst_asf_get_current_time ()
+{
+ GTimeVal timeval;
+ guint64 secs;
+ guint64 usecs;
+
+ g_get_current_time (&timeval);
+
+ secs = (guint64) timeval.tv_sec;
+ usecs = (guint64) timeval.tv_usec;
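+ /* convert seconds and microseconds to 100-nanosecond units; the
+ * constant below is intended as the offset between the Unix epoch
+ * (1970) and the ASF/FILETIME epoch (1601) in 100-nanosecond units */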
+ return secs * G_GUINT64_CONSTANT (10000000) + usecs * 10
+ + G_GUINT64_CONSTANT (116444628000000000);
+}
+
+/**
+ * gst_asf_match_guid:
+ * @data: pointer to the guid to be tested
+ * @guid: guid to match against data
+ *
+ * Checks if the guid pointed by data is the same
+ * as the guid parameter
+ *
+ * Returns: True if they are the same, false otherwise
+ */
+gboolean
+gst_asf_match_guid (const guint8 * data, const Guid * guid)
+{
+ Guid g;
+ g.v1 = GST_READ_UINT32_LE (data);
+ g.v2 = GST_READ_UINT16_LE (data + 4);
+ g.v3 = GST_READ_UINT16_LE (data + 6);
+ g.v4 = GST_READ_UINT64_BE (data + 8);
+
+ return g.v1 == guid->v1 &&
+ g.v2 == guid->v2 && g.v3 == guid->v3 && g.v4 == guid->v4;
+}
+
+/**
+ * gst_asf_put_i32:
+ * @buf: the memory to write data to
+ * @data: the value to be written
+ *
+ * Writes a 32 bit signed integer to memory
+ */
+void
+gst_asf_put_i32 (guint8 * buf, gint32 data)
+{
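+ /* note: unlike the other put helpers, this writes in host byte order
+ * and assumes buf is suitably aligned for a gint32 store */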
+ *(gint32 *) buf = data;
+}
+
+/**
+ * gst_asf_put_time:
+ * @buf: pointer to the buffer to write the value to
+ * @time: value to be written
+ *
+ * Writes an asf time value to the buffer
+ */
+void
+gst_asf_put_time (guint8 * buf, guint64 time)
+{
+ GST_WRITE_UINT64_LE (buf, time);
+}
+
+/**
+ * gst_asf_put_guid:
+ * @buf: the buffer to write the guid to
+ * @guid: the guid to be written
+ *
+ * Writes a GUID to the buffer
+ */
+void
+gst_asf_put_guid (guint8 * buf, Guid guid)
+{
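+ /* v1, v2 and v3 are stored little-endian while v4 is stored
+ * big-endian, matching the layout read back by gst_asf_match_guid() */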
+ guint32 *aux32 = (guint32 *) buf;
+ guint16 *aux16 = (guint16 *) & (buf[4]);
+ guint64 *aux64 = (guint64 *) & (buf[8]);
+ *aux32 = GUINT32_TO_LE (guid.v1);
+ *aux16 = GUINT16_TO_LE (guid.v2);
+ aux16 = (guint16 *) & (buf[6]);
+ *aux16 = GUINT16_TO_LE (guid.v3);
+ *aux64 = GUINT64_TO_BE (guid.v4);
+}
+
+/**
+ * gst_asf_put_payload:
+ * @buf: memory to write the payload to
+ * @payload: #AsfPayload to be written
+ *
+ * Writes the asf payload to the buffer. The #AsfPayload
+ * packet count is incremented.
+ */
+void
+gst_asf_put_payload (guint8 * buf, AsfPayload * payload)
+{
+ GST_WRITE_UINT8 (buf, payload->stream_number);
+ GST_WRITE_UINT8 (buf + 1, payload->media_obj_num);
+ GST_WRITE_UINT32_LE (buf + 2, payload->offset_in_media_obj);
+ GST_WRITE_UINT8 (buf + 6, payload->replicated_data_length);
+ GST_WRITE_UINT32_LE (buf + 7, payload->media_object_size);
+ GST_WRITE_UINT32_LE (buf + 11, payload->presentation_time);
+ GST_WRITE_UINT16_LE (buf + 15, (guint16) GST_BUFFER_SIZE (payload->data));
+ memcpy (buf + 17, GST_BUFFER_DATA (payload->data),
+ GST_BUFFER_SIZE (payload->data));
+
+ payload->packet_count++;
+}
+
+/**
+ * gst_asf_put_subpayload:
+ * @buf: buffer to write the payload to
+ * @payload: the payload to be written
+ * @size: maximum total size in bytes to write (header plus data)
+ *
+ * Serializes part of a payload to a buffer.
+ * The payload header is written, followed by as much payload data as
+ * fits in the remaining space, and the amount of data written is
+ * returned.
+ *
+ * It also updates the payload to point at the remaining data.
+ * In case there is not enough space to even write the headers,
+ * nothing is done.
+ *
+ * Returns: the number of payload data bytes written (excluding the header)
+ */
+guint16
+gst_asf_put_subpayload (guint8 * buf, AsfPayload * payload, guint16 size)
+{
+ guint16 payload_size;
+ GstBuffer *newbuf;
+ if (size <= ASF_MULTIPLE_PAYLOAD_HEADER_SIZE) {
+ return 0; /* do nothing if there is not enough space */
+ }
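+ /* e.g. with size == 100 and 200 bytes of payload data left, the 17 byte
+ * header plus 83 bytes of data are written below and the payload is
+ * advanced by 83 bytes for the next packet */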
+ GST_WRITE_UINT8 (buf, payload->stream_number);
+ GST_WRITE_UINT8 (buf + 1, payload->media_obj_num);
+ GST_WRITE_UINT32_LE (buf + 2, payload->offset_in_media_obj);
+ GST_WRITE_UINT8 (buf + 6, payload->replicated_data_length);
+ GST_WRITE_UINT32_LE (buf + 7, payload->media_object_size);
+ GST_WRITE_UINT32_LE (buf + 11, payload->presentation_time);
+ size -= ASF_MULTIPLE_PAYLOAD_HEADER_SIZE;
+ payload_size = size < GST_BUFFER_SIZE (payload->data) ?
+ size : GST_BUFFER_SIZE (payload->data);
+ GST_WRITE_UINT16_LE (buf + 15, payload_size);
+ memcpy (buf + 17, GST_BUFFER_DATA (payload->data), payload_size);
+
+ /* updates the payload to the remaining data */
+ payload->offset_in_media_obj += payload_size;
+ newbuf = gst_buffer_create_sub (payload->data, payload_size,
+ GST_BUFFER_SIZE (payload->data) - payload_size);
+ gst_buffer_copy_metadata (payload->data, newbuf, GST_BUFFER_COPY_FLAGS |
+ GST_BUFFER_COPY_CAPS);
+ GST_BUFFER_TIMESTAMP (newbuf) = GST_BUFFER_TIMESTAMP (payload->data);
+ gst_buffer_unref (payload->data);
+ payload->data = newbuf;
+
+ payload->packet_count++;
+
+ return payload_size;
+}
+
+/**
+ * gst_asf_match_and_peek_obj_size:
+ * @data: data to be peeked at
+ * @guid: pointer to a guid
+ *
+ * Compares the first bytes of data against the guid parameter and,
+ * if they match, gets the object size (which is stored right after
+ * the guid in asf objects).
+ *
+ * In case the guids don't match, 0 is returned.
+ * If the guid is NULL the match is assumed to be true.
+ *
+ * Returns: The size of the object in case the guid matches, 0 otherwise
+ */
+guint64
+gst_asf_match_and_peek_obj_size (const guint8 * data, const Guid * guid)
+{
+ g_assert (data);
+ if (guid && !gst_asf_match_guid (data, guid)) {
+ /* this is not the expected object */
+ return 0;
+ }
+ /* return the object size */
+ return GST_READ_UINT64_LE (data + ASF_GUID_SIZE);
+}
+
+/**
+ * gst_asf_parse_mult_payload:
+ * @reader: a #GstByteReader ready to read the multiple payload data
+ * @has_keyframe: pointer to return the result
+ *
+ * Parses a multiple payload section of an asf data packet
+ * to see if any of the payloads has a keyframe
+ *
+ * Notice that the #GstByteReader might not be positioned after
+ * this section when this function returns, because this section
+ * is the last one in an asf packet and the remaining data
+ * is probably uninteresting to the application.
+ *
+ * Returns: true on success, false if some error occurs
+ */
+static gboolean
+gst_asf_parse_mult_payload (GstByteReader * reader, gboolean * has_keyframe)
+{
+ guint payloads;
+ guint8 payload_len_type;
+ guint8 rep_data_len;
+ guint32 payload_len;
+ guint8 stream_num;
+ guint8 aux;
+ guint i;
+
+ if (!gst_byte_reader_get_uint8 (reader, &aux))
+ return FALSE;
+
+ payloads = (aux & 0x3F);
+ payload_len_type = (aux & 0xC0) >> 6;
+
+ *has_keyframe = FALSE;
+ for (i = 0; i < payloads; i++) {
+ GST_LOG ("Parsing payload %u/%u", i + 1, payloads);
+ if (!gst_byte_reader_get_uint8 (reader, &stream_num))
+ goto error;
+ if ((stream_num & 0x80) != 0) {
+ GST_LOG ("Keyframe found, stopping parse of payloads");
+ *has_keyframe = TRUE;
+ return TRUE;
+ }
+ /* skip to replicated data length */
+ if (!gst_byte_reader_skip (reader, 5))
+ goto error;
+ if (!gst_byte_reader_get_uint8 (reader, &rep_data_len))
+ goto error;
+ if (!gst_byte_reader_skip (reader, rep_data_len))
+ goto error;
+ if (!gst_byte_reader_get_asf_var_size_field (reader, payload_len_type,
+ &payload_len))
+ goto error;
+ if (!gst_byte_reader_skip (reader, payload_len))
+ goto error;
+ }
+
+ /* we do not skip the rest of the payload bytes as
+ this is the last data to be parsed on the buffer */
+ return TRUE;
+error:
+ GST_WARNING ("Error while parsing payloads");
+ return FALSE;
+}
+
+/**
+ * gst_asf_parse_single_payload:
+ * @reader: a #GstByteReader ready to read the single payload data
+ * @has_keyframe: pointer to return the result
+ *
+ * Parses a single payload section of an asf data packet
+ * to see if the payload has a keyframe
+ *
+ * Notice that the #GstByteReader might not be positioned after
+ * this section when this function returns, because this section
+ * is the last one in an asf packet and the remaining data
+ * is probably uninteresting to the application.
+ *
+ * Returns: true on success, false if some error occurs
+ */
+static gboolean
+gst_asf_parse_single_payload (GstByteReader * reader, gboolean * has_keyframe)
+{
+ guint8 stream_num;
+ if (!gst_byte_reader_get_uint8 (reader, &stream_num))
+ return FALSE;
+ *has_keyframe = (stream_num & 0x80) != 0;
+
+ /* we do not skip the rest of the payload bytes as
+ this is the last data to be parsed on the buffer */
+ return TRUE;
+}
+
+gboolean
+gst_asf_parse_packet (GstBuffer * buffer, GstAsfPacketInfo * packet,
+ gboolean trust_delta_flag)
+{
+ GstByteReader *reader;
+ gboolean ret = TRUE;
+ guint8 first;
+ guint8 aux;
+ guint8 packet_len_type;
+ guint8 padding_len_type;
+ guint8 seq_len_type;
+ guint8 rep_data_len_type;
+ guint8 mo_number_len_type;
+ guint8 mo_offset_type;
+ gboolean mult_payloads;
+ guint32 packet_len;
+ guint32 padd_len;
+ guint32 send_time;
+ guint16 duration;
+ gboolean has_keyframe;
+
+ reader = gst_byte_reader_new_from_buffer (buffer);
+
+ GST_LOG ("Starting packet parsing, size: %u", GST_BUFFER_SIZE (buffer));
+ if (!gst_byte_reader_get_uint8 (reader, &first))
+ goto error;
+
+ if (first & 0x80) { /* error correction present */
+ guint8 err_cor_len;
+ GST_DEBUG ("Packet contains error correction");
+ if (first & 0x60) {
+ GST_ERROR ("Error correction data length should be "
+ "set to 0 and is reserved for future use.");
+ return FALSE;
+ }
+ err_cor_len = (first & 0x0F);
+ GST_DEBUG ("Error correction data length: %d", (gint) err_cor_len);
+ if (!gst_byte_reader_skip (reader, err_cor_len))
+ goto error;
+ if (!gst_byte_reader_get_uint8 (reader, &aux))
+ goto error;
+ } else {
+ aux = first;
+ }
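+ /* aux holds the length type flags: bit 0 = multiple payloads present,
+ * bits 1-2 = sequence type, bits 3-4 = padding length type,
+ * bits 5-6 = packet length type, bit 7 = error correction present */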
+ mult_payloads = (aux & 0x1) != 0;
+
+ packet_len_type = (aux >> 5) & 0x3;
+ padding_len_type = (aux >> 3) & 0x3;
+ seq_len_type = (aux >> 1) & 0x3;
+ GST_LOG ("Field sizes: packet length type: %u "
+ ", padding length type: %u, sequence length type: %u",
+ gst_asf_get_var_size_field_len (packet_len_type),
+ gst_asf_get_var_size_field_len (padding_len_type),
+ gst_asf_get_var_size_field_len (seq_len_type));
+
+ if (mult_payloads) {
+ GST_DEBUG ("Packet contains multiple payloads");
+ }
+
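+ /* property flags byte: replicated data length type in bits 0-1,
+ * media object offset type in bits 2-3, media object number type in
+ * bits 4-5 and stream number type in bits 6-7, mirroring what the
+ * muxer writes */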
+ if (!gst_byte_reader_get_uint8 (reader, &aux))
+ goto error;
+ rep_data_len_type = aux & 0x3;
+ mo_offset_type = (aux >> 2) & 0x3;
+ mo_number_len_type = (aux >> 4) & 0x3;
+
+ /* gets the fields lengths */
+ GST_LOG ("Getting packet and padding length");
+ if (!gst_byte_reader_get_asf_var_size_field (reader,
+ packet_len_type, &packet_len))
+ goto error;
+ if (!gst_byte_reader_skip (reader,
+ gst_asf_get_var_size_field_len (seq_len_type)))
+ goto error;
+ if (!gst_byte_reader_get_asf_var_size_field (reader,
+ padding_len_type, &padd_len))
+ goto error;
+
+ if (packet_len_type != ASF_FIELD_TYPE_NONE &&
+ packet_len != GST_BUFFER_SIZE (buffer)) {
+ GST_WARNING ("ASF packets should be aligned with buffers");
+ ret = FALSE;
+ goto end;
+ }
+
+ GST_LOG ("Getting send time and duration");
+ if (!gst_byte_reader_get_uint32_le (reader, &send_time))
+ goto error;
+ if (!gst_byte_reader_get_uint16_le (reader, &duration))
+ goto error;
+
+ has_keyframe = FALSE;
+ GST_LOG ("Checking for keyframes");
+ if (trust_delta_flag) {
+ has_keyframe = GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+ } else {
+ if (mult_payloads) {
+ ret = gst_asf_parse_mult_payload (reader, &has_keyframe);
+ } else {
+ ret = gst_asf_parse_single_payload (reader, &has_keyframe);
+ }
+ }
+
+ if (!ret) {
+ GST_WARNING ("Failed to parse payloads");
+ goto end;
+ }
+ GST_DEBUG ("Received packet of length %" G_GUINT32_FORMAT
+ ", padding %" G_GUINT32_FORMAT ", send time %" G_GUINT32_FORMAT
+ ", duration %" G_GUINT16_FORMAT " and %s keyframe(s)",
+ packet_len, padd_len, send_time, duration,
+ (has_keyframe) ? "with" : "without");
+
+ packet->packet_size = packet_len;
+ packet->padding = padd_len;
+ packet->send_time = send_time;
+ packet->duration = duration;
+ packet->has_keyframe = has_keyframe;
+
+ gst_byte_reader_free (reader);
+ return ret;
+
+error:
+ ret = FALSE;
+ GST_WARNING ("Error while parsing data packet");
+end:
+ gst_byte_reader_free (reader);
+ return ret;
+}
+
+static gboolean
+gst_asf_parse_file_properties_obj (GstByteReader * reader,
+ GstAsfFileInfo * asfinfo)
+{
+ guint32 min_ps;
+ guint32 max_ps;
+ guint64 packets;
+ guint32 flags;
+ GST_DEBUG ("ASF: Parsing file properties object");
+
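+ /* the guid and object size were already consumed by the caller;
+ * the 32 byte skip below covers the file id (16), file size (8) and
+ * creation date (8) fields preceding the data packets count, and the
+ * later 24 byte skip covers play duration, send duration and preroll
+ * (8 bytes each) */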
+ /* skip until data packets count */
+ if (!gst_byte_reader_skip (reader, 32))
+ return FALSE;
+ if (!gst_byte_reader_get_uint64_le (reader, &packets))
+ return FALSE;
+ asfinfo->packets_count = packets;
+ GST_DEBUG ("ASF: packets count %" G_GUINT64_FORMAT, packets);
+
+ /* skip until flags */
+ if (!gst_byte_reader_skip (reader, 24))
+ return FALSE;
+
+ if (!gst_byte_reader_get_uint32_le (reader, &flags))
+ return FALSE;
+ asfinfo->broadcast = (flags & 0x1) == 1;
+ GST_DEBUG ("ASF: broadcast flag: %s", asfinfo->broadcast ? "true" : "false");
+ if (!gst_byte_reader_get_uint32_le (reader, &min_ps))
+ return FALSE;
+ if (!gst_byte_reader_get_uint32_le (reader, &max_ps))
+ return FALSE;
+
+ if (min_ps != max_ps) {
+ GST_WARNING ("Minimum and maximum packet size differ "
+ "%" G_GUINT32_FORMAT " and %" G_GUINT32_FORMAT ", "
+ "ASF spec states they should be the same", min_ps, max_ps);
+ return FALSE;
+ }
+
+ GST_DEBUG ("ASF: Packet size: %" G_GUINT32_FORMAT, min_ps);
+ asfinfo->packet_size = min_ps;
+ if (!gst_byte_reader_skip (reader, 4))
+ return FALSE;
+
+ return TRUE;
+}
+
+gboolean
+gst_asf_parse_headers (GstBuffer * buffer, GstAsfFileInfo * file_info)
+{
+ gboolean ret = TRUE;
+ guint32 header_objects;
+ guint32 i;
+ GstByteReader *reader;
+ guint64 object_size;
+
+ object_size = gst_asf_match_and_peek_obj_size (GST_BUFFER_DATA (buffer),
+ &(guids[ASF_HEADER_OBJECT_INDEX]));
+ if (object_size == 0) {
+ GST_WARNING ("ASF: Cannot parse, header guid not found at the beginning "
+ "of data");
+ return FALSE;
+ }
+
+ reader = gst_byte_reader_new_from_buffer (buffer);
+
+ if (!gst_byte_reader_skip (reader, ASF_GUID_OBJSIZE_SIZE))
+ goto error;
+ if (!gst_byte_reader_get_uint32_le (reader, &header_objects))
+ goto error;
+ GST_DEBUG ("ASF: Header has %" G_GUINT32_FORMAT " child"
+ " objects", header_objects);
+ /* skip reserved bytes */
+ if (!gst_byte_reader_skip (reader, 2))
+ goto error;
+
+ /* iterate through children of header object */
+ for (i = 0; i < header_objects; i++) {
+ const guint8 *guid = NULL;
+ guint64 obj_size;
+ if (!gst_byte_reader_get_data (reader, ASF_GUID_SIZE, &guid))
+ goto error;
+ if (!gst_byte_reader_get_uint64_le (reader, &obj_size))
+ goto error;
+
+ if (gst_asf_match_guid (guid, &guids[ASF_FILE_PROPERTIES_OBJECT_INDEX])) {
+ ret = gst_asf_parse_file_properties_obj (reader, file_info);
+ } else {
+ /* we don't know/care about this object */
+ if (!gst_byte_reader_skip (reader, obj_size - ASF_GUID_OBJSIZE_SIZE))
+ goto error;
+ }
+
+ if (!ret)
+ goto end;
+ }
+ goto end;
+
+error:
+ ret = FALSE;
+ GST_WARNING ("ASF: Error while parsing headers");
+end:
+ gst_byte_reader_free (reader);
+ return ret;
+}
+
+#define MAP_GST_TO_ASF_TAG(tag, gst, asf) \
+ if (strcmp (tag, gst) == 0) \
+ return asf
+
+/**
+ * gst_asf_get_asf_tag:
+ * @gsttag: a gstreamer tag
+ *
+ * Maps gstreamer tags to asf tags
+ *
+ * Returns: the corresponding tag name used in asf files, or NULL if it is not mapped
+ */
+const gchar *
+gst_asf_get_asf_tag (const gchar * gsttag)
+{
+ g_return_val_if_fail (gsttag != NULL, NULL);
+
+ MAP_GST_TO_ASF_TAG (gsttag, GST_TAG_TITLE, ASF_TAG_TITLE);
+ MAP_GST_TO_ASF_TAG (gsttag, GST_TAG_TITLE_SORTNAME, ASF_TAG_TITLE_SORTNAME);
+ MAP_GST_TO_ASF_TAG (gsttag, GST_TAG_ARTIST, ASF_TAG_ARTIST);
+ MAP_GST_TO_ASF_TAG (gsttag, GST_TAG_ARTIST_SORTNAME, ASF_TAG_ARTIST_SORTNAME);
+ MAP_GST_TO_ASF_TAG (gsttag, GST_TAG_ALBUM, ASF_TAG_ALBUM_TITLE);
+ MAP_GST_TO_ASF_TAG (gsttag, GST_TAG_ALBUM_SORTNAME,
+ ASF_TAG_ALBUM_TITLE_SORTNAME);
+ MAP_GST_TO_ASF_TAG (gsttag, GST_TAG_GENRE, ASF_TAG_GENRE);
+ MAP_GST_TO_ASF_TAG (gsttag, GST_TAG_COPYRIGHT, ASF_TAG_COPYRIGHT);
+ MAP_GST_TO_ASF_TAG (gsttag, GST_TAG_COMPOSER, ASF_TAG_COMPOSER);
+ MAP_GST_TO_ASF_TAG (gsttag, GST_TAG_COMMENT, ASF_TAG_COMMENT);
+ MAP_GST_TO_ASF_TAG (gsttag, GST_TAG_TRACK_NUMBER, ASF_TAG_TRACK_NUMBER);
+
+ return NULL;
+}
+
+guint
+gst_asf_get_tag_field_type (GValue * value)
+{
+ if (G_VALUE_HOLDS_STRING (value))
+ return ASF_TAG_TYPE_UNICODE_STR;
+ if (G_VALUE_HOLDS_UINT (value))
+ return ASF_TAG_TYPE_DWORD;
+
+ return -1;
+}
+
+gboolean
+gst_asf_tag_present_in_content_description (const gchar * tag)
+{
+ return strcmp (tag, GST_TAG_TITLE) == 0 ||
+ strcmp (tag, GST_TAG_ARTIST) == 0 ||
+ strcmp (tag, GST_TAG_COPYRIGHT) == 0 ||
+ strcmp (tag, GST_TAG_DESCRIPTION) == 0;
+ /* FIXME we have no tag for rating */
+}
diff --git a/gst/asfmux/gstasfobjects.h b/gst/asfmux/gstasfobjects.h
new file mode 100644
index 00000000..f91e1e11
--- /dev/null
+++ b/gst/asfmux/gstasfobjects.h
@@ -0,0 +1,190 @@
+/* ASF muxer plugin for GStreamer
+ * Copyright (C) 2009 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+#ifndef __GST_ASF_OBJECTS_H__
+#define __GST_ASF_OBJECTS_H__
+
+#include <glib.h>
+#include <gst/gst.h>
+#include <gst/base/gstbytereader.h>
+#include <gst/base/gstcollectpads.h>
+
+#define ASF_PAYLOAD_IS_KEYFRAME(pay) (((pay)->stream_number & 0x80) != 0)
+#define ASF_MILI_TO_100NANO(v) ((v) * 10000)
+#define ASF_GUID_SIZE 16
+#define ASF_GUID_OBJSIZE_SIZE 24
+
+typedef struct _Guid
+{
+ guint32 v1;
+ guint16 v2;
+ guint16 v3;
+ guint64 v4;
+} Guid;
+
+typedef struct _GstAsfFileInfo
+{
+ guint64 packets_count;
+ guint32 packet_size;
+ gboolean broadcast;
+} GstAsfFileInfo;
+
+typedef struct _GstAsfPacketInfo
+{
+ guint32 packet_size;
+ guint32 padding;
+ guint32 send_time;
+ guint16 duration;
+ gboolean has_keyframe;
+} GstAsfPacketInfo;
+
+typedef struct _SimpleIndexEntry
+{
+ guint32 packet_number;
+ guint16 packet_count;
+} SimpleIndexEntry;
+
+typedef struct _AsfPayload
+{
+ guint8 stream_number;
+ guint8 media_obj_num;
+ guint32 offset_in_media_obj;
+ guint8 replicated_data_length;
+ guint32 media_object_size;
+ guint32 presentation_time;
+ GstBuffer *data;
+
+ GstCollectData *pad;
+
+ /* simple index info */
+ gboolean has_packet_info;
+ guint32 packet_number;
+ guint16 packet_count;
+} AsfPayload;
+
+Guid gst_asf_generate_file_id ();
+
+gboolean gst_byte_reader_get_asf_var_size_field (GstByteReader * reader,
+ guint8 field_type, guint32 * var);
+guint32 gst_asf_read_var_size_field (guint8 * data, guint8 field_type);
+guint gst_asf_get_var_size_field_len (guint8 field_type);
+
+GstAsfFileInfo *gst_asf_file_info_new ();
+void gst_asf_file_info_reset (GstAsfFileInfo * info);
+void gst_asf_file_info_free (GstAsfFileInfo * info);
+
+guint32 gst_asf_payload_get_size (AsfPayload * payload);
+void gst_asf_payload_free (AsfPayload * payload);
+
+guint64 gst_asf_get_current_time ();
+
+gboolean gst_asf_match_guid (const guint8 * data, const Guid * g);
+
+void gst_asf_put_i32 (guint8 * buf, gint32 data);
+void gst_asf_put_time (guint8 * buf, guint64 time);
+void gst_asf_put_guid (guint8 * buf, Guid guid);
+void gst_asf_put_payload (guint8 * buf, AsfPayload * payload);
+guint16 gst_asf_put_subpayload (guint8 * buf, AsfPayload * payload,
+ guint16 size);
+
+gboolean gst_asf_parse_packet (GstBuffer * buffer, GstAsfPacketInfo * packet,
+ gboolean trust_delta_flag);
+guint64 gst_asf_match_and_peek_obj_size (const guint8 * data,
+ const Guid * guid);
+gboolean gst_asf_parse_headers (GstBuffer * buffer, GstAsfFileInfo * file_info);
+
+/* ASF tags
+ * found at http://msdn.microsoft.com/en-us/library/dd562330(VS.85).aspx
+ */
+
+#define ASF_TAG_TITLE "Title\0"
+#define ASF_TAG_TITLE_SORTNAME "TitleSortOrder\0"
+
+/* FIXME asf has no artist tag other than AlbumArtist, but it has Author
+ * What to use here? */
+#define ASF_TAG_ARTIST "WM/AlbumArtist\0"
+#define ASF_TAG_ARTIST_SORTNAME "AlbumArtistSortOrder\0"
+
+#define ASF_TAG_ALBUM_TITLE "WM/AlbumTitle\0"
+#define ASF_TAG_ALBUM_TITLE_SORTNAME "AlbumTitleSortOrder\0"
+
+#define ASF_TAG_GENRE "WM/Genre\0"
+#define ASF_TAG_COMMENT "Comment\0"
+#define ASF_TAG_TRACK_NUMBER "WM/TrackNumber\0"
+#define ASF_TAG_COPYRIGHT "Copyright\0"
+#define ASF_TAG_COMPOSER "WM/Composer\0"
+
+const gchar *gst_asf_get_asf_tag (const gchar * gsttag);
+guint gst_asf_get_tag_field_type (GValue * value);
+gboolean gst_asf_tag_present_in_content_description (const gchar * tag);
+
+/* ASF Objects Sizes */
+#define ASF_HEADER_OBJECT_SIZE 30
+#define ASF_FILE_PROPERTIES_OBJECT_SIZE 104
+#define ASF_STREAM_PROPERTIES_OBJECT_SIZE 78
+#define ASF_HEADER_EXTENSION_OBJECT_SIZE 46
+#define ASF_AUDIO_SPECIFIC_DATA_SIZE 18
+#define ASF_VIDEO_SPECIFIC_DATA_SIZE 51
+#define ASF_DATA_OBJECT_SIZE 50
+#define ASF_PAYLOAD_PARSING_INFO_SIZE 16
+#define ASF_SINGLE_PAYLOAD_HEADER_SIZE 15
+#define ASF_MULTIPLE_PAYLOAD_HEADER_SIZE 17
+#define ASF_EXTENDED_STREAM_PROPERTIES_OBJECT_SIZE 88
+#define ASF_CONTENT_DESCRIPTION_OBJECT_SIZE 34
+#define ASF_EXT_CONTENT_DESCRIPTION_OBJECT_SIZE 26
+#define ASF_SIMPLE_INDEX_OBJECT_SIZE 56
+#define ASF_SIMPLE_INDEX_ENTRY_SIZE 6
+#define ASF_METADATA_OBJECT_SIZE 26
+#define ASF_PADDING_OBJECT_SIZE 24
+
+/* Field types for data object payload description */
+#define ASF_FIELD_TYPE_NONE 0
+#define ASF_FIELD_TYPE_BYTE 1
+#define ASF_FIELD_TYPE_WORD 2
+#define ASF_FIELD_TYPE_DWORD 3
+
+/* tag types */
+#define ASF_TAG_TYPE_UNICODE_STR 0
+#define ASF_TAG_TYPE_BYTES 1
+#define ASF_TAG_TYPE_BOOL 2
+#define ASF_TAG_TYPE_DWORD 3
+#define ASF_TAG_TYPE_QWORD 4
+#define ASF_TAG_TYPE_WORD 5
+
+/* GUID objects */
+
+#define ASF_HEADER_OBJECT_INDEX 0
+#define ASF_FILE_PROPERTIES_OBJECT_INDEX 1
+#define ASF_STREAM_PROPERTIES_OBJECT_INDEX 2
+#define ASF_AUDIO_MEDIA_INDEX 3
+#define ASF_NO_ERROR_CORRECTION_INDEX 4
+#define ASF_AUDIO_SPREAD_INDEX 5
+#define ASF_HEADER_EXTENSION_OBJECT_INDEX 6
+#define ASF_RESERVED_1_INDEX 7
+#define ASF_DATA_OBJECT_INDEX 8
+#define ASF_EXTENDED_STREAM_PROPERTIES_OBJECT_INDEX 9
+#define ASF_VIDEO_MEDIA_INDEX 10
+#define ASF_SIMPLE_INDEX_OBJECT_INDEX 11
+#define ASF_CONTENT_DESCRIPTION_INDEX 12
+#define ASF_EXT_CONTENT_DESCRIPTION_INDEX 13
+#define ASF_METADATA_OBJECT_INDEX 14
+#define ASF_PADDING_OBJECT_INDEX 15
+
+extern const Guid guids[];
+
+#endif
diff --git a/gst/asfmux/gstasfparse.c b/gst/asfmux/gstasfparse.c
new file mode 100644
index 00000000..17db7293
--- /dev/null
+++ b/gst/asfmux/gstasfparse.c
@@ -0,0 +1,623 @@
+/* ASF parser plugin for GStreamer
+ * Copyright (C) 2009 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <string.h>
+#include "gstasfparse.h"
+
+/* FIXME add this include
+ * #include <gst/gst-i18n-plugin.h> */
+
+GST_DEBUG_CATEGORY_STATIC (asfparse_debug);
+#define GST_CAT_DEFAULT asfparse_debug
+
+enum
+{
+ PROP_0,
+};
+
+static const GstElementDetails gst_asf_parse_details =
+GST_ELEMENT_DETAILS ("ASF parser",
+ "Parser",
+ "Parses ASF",
+ "Thiago Santos <thiagoss@embedded.ufcg.edu.br>");
+
+static GstStaticPadTemplate src_factory = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-ms-asf, parsed = (boolean) true")
+ );
+
+static GstStaticPadTemplate sink_factory = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-ms-asf, parsed = (boolean) false")
+ );
+
+static void gst_asf_parse_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_asf_parse_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+static GstStateChangeReturn gst_asf_parse_change_state (GstElement * element,
+ GstStateChange transition);
+static void gst_asf_parse_loop (GstPad * pad);
+
+GST_BOILERPLATE (GstAsfParse, gst_asf_parse, GstElement, GST_TYPE_ELEMENT);
+
+static void
+gst_asf_parse_reset (GstAsfParse * asfparse)
+{
+ gst_adapter_clear (asfparse->adapter);
+ gst_asf_file_info_reset (asfparse->asfinfo);
+ asfparse->parse_state = ASF_PARSING_HEADERS;
+ asfparse->headers_size = 0;
+ asfparse->data_size = 0;
+ asfparse->parsed_packets = 0;
+ asfparse->offset = 0;
+}
+
+static gboolean
+gst_asf_parse_sink_activate (GstPad * pad)
+{
+ if (gst_pad_check_pull_range (pad)) {
+ return gst_pad_activate_pull (pad, TRUE);
+ } else {
+ return gst_pad_activate_push (pad, TRUE);
+ }
+}
+
+static gboolean
+gst_asf_parse_sink_activate_pull (GstPad * pad, gboolean active)
+{
+ if (active) {
+ return gst_pad_start_task (pad, (GstTaskFunction) gst_asf_parse_loop, pad);
+ } else {
+ return gst_pad_stop_task (pad);
+ }
+}
+
+static GstFlowReturn
+gst_asf_parse_push (GstAsfParse * asfparse, GstBuffer * buf)
+{
+ gst_buffer_set_caps (buf, asfparse->outcaps);
+ return gst_pad_push (asfparse->srcpad, buf);
+}
+
+static GstFlowReturn
+gst_asf_parse_parse_data_object (GstAsfParse * asfparse, GstBuffer * buffer)
+{
+ GstByteReader *reader;
+ GstFlowReturn ret = GST_FLOW_OK;
+ guint64 packet_count;
+
+ GST_DEBUG_OBJECT (asfparse, "Parsing data object");
+
+ reader = gst_byte_reader_new_from_buffer (buffer);
+ /* skip to packet count */
+ if (!gst_byte_reader_skip (reader, 40))
+ goto error;
+ if (!gst_byte_reader_get_uint64_le (reader, &packet_count))
+ goto error;
+
+ if (asfparse->asfinfo->packets_count != packet_count) {
+ GST_WARNING_OBJECT (asfparse, "File properties object and data object have "
+ "different packet counts, %" G_GUINT64_FORMAT " %" G_GUINT64_FORMAT,
+ asfparse->asfinfo->packets_count, packet_count);
+ } else {
+ GST_DEBUG_OBJECT (asfparse, "Total packets: %" G_GUINT64_FORMAT,
+ packet_count);
+ }
+
+ gst_byte_reader_free (reader);
+ return gst_asf_parse_push (asfparse, buffer);
+
+error:
+ ret = GST_FLOW_ERROR;
+ GST_ERROR_OBJECT (asfparse, "Error while parsing data object headers");
+ gst_byte_reader_free (reader);
+ return ret;
+}
+
+static GstFlowReturn
+gst_asf_parse_parse_packet (GstAsfParse * asfparse, GstBuffer * buffer)
+{
+ GstAsfPacketInfo *packetinfo = asfparse->packetinfo;
+
+ if (!gst_asf_parse_packet (buffer, packetinfo, FALSE))
+ goto error;
+
+ GST_DEBUG_OBJECT (asfparse, "Received packet of length %" G_GUINT32_FORMAT
+ ", padding %" G_GUINT32_FORMAT ", send time %" G_GUINT32_FORMAT
+ ", duration %" G_GUINT16_FORMAT " and %s keyframe(s)",
+ packetinfo->packet_size, packetinfo->padding,
+ packetinfo->send_time, packetinfo->duration,
+ (packetinfo->has_keyframe) ? "with" : "without");
+
+ /* set gstbuffer fields */
+ if (!packetinfo->has_keyframe) {
+ GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT);
+ }
+ GST_BUFFER_TIMESTAMP (buffer) = ((GstClockTime) packetinfo->send_time)
+ * GST_MSECOND;
+ GST_BUFFER_DURATION (buffer) = ((GstClockTime) packetinfo->duration)
+ * GST_MSECOND;
+
+ return gst_asf_parse_push (asfparse, buffer);
+
+error:
+ GST_ERROR_OBJECT (asfparse, "Error while parsing data packet");
+ return GST_FLOW_ERROR;
+}
+
+static GstFlowReturn
+gst_asf_parse_pull_headers (GstAsfParse * asfparse)
+{
+ GstBuffer *guid_and_size = NULL;
+ GstBuffer *headers = NULL;
+ guint64 size;
+ GstFlowReturn ret;
+
+ if ((ret = gst_pad_pull_range (asfparse->sinkpad, asfparse->offset,
+ ASF_GUID_OBJSIZE_SIZE, &guid_and_size)) != GST_FLOW_OK) {
+ GST_ERROR_OBJECT (asfparse, "Failed to pull data from headers");
+ goto leave;
+ }
+ asfparse->offset += ASF_GUID_OBJSIZE_SIZE;
+ size = gst_asf_match_and_peek_obj_size (GST_BUFFER_DATA (guid_and_size),
+ &(guids[ASF_HEADER_OBJECT_INDEX]));
+
+ if (size == 0) {
+ GST_ERROR_OBJECT (asfparse, "ASF starting identifier missing");
+ ret = GST_FLOW_ERROR;
+ goto leave;
+ }
+
+ if ((ret = gst_pad_pull_range (asfparse->sinkpad, asfparse->offset,
+ size - ASF_GUID_OBJSIZE_SIZE, &headers)) != GST_FLOW_OK) {
+ GST_ERROR_OBJECT (asfparse, "Failed to pull data from headers");
+ goto leave;
+ }
+ headers = gst_buffer_join (guid_and_size, headers);
+ guid_and_size = NULL;
+ asfparse->offset += size - ASF_GUID_OBJSIZE_SIZE;
+ if (!gst_asf_parse_headers (headers, asfparse->asfinfo)) {
+ GST_ERROR_OBJECT (asfparse, "Failed to parse headers");
+ ret = GST_FLOW_ERROR;
+ goto leave;
+ }
+ return gst_asf_parse_push (asfparse, headers);
+
+leave:
+ if (headers)
+ gst_buffer_unref (headers);
+ if (guid_and_size)
+ gst_buffer_unref (guid_and_size);
+ return ret;
+}
+
+static GstFlowReturn
+gst_asf_parse_pull_data_header (GstAsfParse * asfparse)
+{
+ GstBuffer *buf = NULL;
+ GstFlowReturn ret;
+
+ if ((ret = gst_pad_pull_range (asfparse->sinkpad, asfparse->offset,
+ ASF_DATA_OBJECT_SIZE, &buf)) != GST_FLOW_OK) {
+ GST_ERROR_OBJECT (asfparse, "Failed to pull data header");
+ return ret;
+ }
+ asfparse->offset += ASF_DATA_OBJECT_SIZE;
+ asfparse->data_size = gst_asf_match_and_peek_obj_size (GST_BUFFER_DATA (buf),
+ &(guids[ASF_DATA_OBJECT_INDEX]));
+ if (asfparse->data_size == 0) {
+ GST_ERROR_OBJECT (asfparse, "Unexpected object, was expecting data object");
+ gst_buffer_unref (buf);
+ return GST_FLOW_ERROR;
+ }
+
+ return gst_asf_parse_parse_data_object (asfparse, buf);
+}
+
+static GstFlowReturn
+gst_asf_parse_pull_packets (GstAsfParse * asfparse)
+{
+ GstFlowReturn ret;
+ while (asfparse->asfinfo->broadcast ||
+ asfparse->parsed_packets < asfparse->asfinfo->packets_count) {
+ GstBuffer *packet = NULL;
+
+ GST_DEBUG_OBJECT (asfparse, "Parsing packet %" G_GUINT64_FORMAT,
+ asfparse->parsed_packets);
+
+ /* get the packet */
+ ret = gst_pad_pull_range (asfparse->sinkpad, asfparse->offset,
+ asfparse->asfinfo->packet_size, &packet);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ asfparse->parsed_packets++;
+ asfparse->offset += asfparse->asfinfo->packet_size;
+
+ /* parse the packet */
+ ret = gst_asf_parse_parse_packet (asfparse, packet);
+ if (ret != GST_FLOW_OK)
+ return ret;
+ }
+ return GST_FLOW_OK;
+}
+
+static GstFlowReturn
+gst_asf_parse_pull_indexes (GstAsfParse * asfparse)
+{
+ GstBuffer *guid_and_size = NULL;
+ GstBuffer *buf = NULL;
+ guint64 obj_size;
+ GstFlowReturn ret = GST_FLOW_OK;
+ while (1) {
+ ret = gst_pad_pull_range (asfparse->sinkpad, asfparse->offset,
+ ASF_GUID_OBJSIZE_SIZE, &guid_and_size);
+ if (ret != GST_FLOW_OK)
+ break;
+ /* we can peek at the object size */
+ obj_size =
+ gst_asf_match_and_peek_obj_size (GST_BUFFER_DATA (guid_and_size), NULL);
+ if (obj_size == 0) {
+ GST_ERROR_OBJECT (asfparse, "Incomplete object found");
+ gst_buffer_unref (guid_and_size);
+ ret = GST_FLOW_ERROR;
+ break;
+ }
+ asfparse->offset += ASF_GUID_OBJSIZE_SIZE;
+
+ /* pull the rest of the object */
+ ret = gst_pad_pull_range (asfparse->sinkpad, asfparse->offset,
+ obj_size - ASF_GUID_OBJSIZE_SIZE, &buf);
+ if (ret != GST_FLOW_OK) {
+ gst_buffer_unref (guid_and_size);
+ break;
+ }
+ asfparse->offset += obj_size - ASF_GUID_OBJSIZE_SIZE;
+
+ buf = gst_buffer_join (guid_and_size, buf);
+ ret = gst_asf_parse_push (asfparse, buf);
+ if (ret != GST_FLOW_OK)
+ break;
+ }
+ return ret;
+}
+
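+/* Pull mode loop. An ASF stream is, roughly:
+ * - a Header Object (file/stream properties, codec data),
+ * - a Data Object (a fixed size data object header followed by the
+ * data packets),
+ * - optional Index Objects at the end.
+ * Each state below handles one of these sections and then falls through
+ * to the next one. */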
+static void
+gst_asf_parse_loop (GstPad * pad)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstAsfParse *asfparse = GST_ASF_PARSE_CAST (GST_OBJECT_PARENT (pad));
+
+ GST_LOG_OBJECT (asfparse, "Processing data in loop function");
+ switch (asfparse->parse_state) {
+ case ASF_PARSING_HEADERS:
+ GST_INFO_OBJECT (asfparse, "Starting to parse headers");
+ ret = gst_asf_parse_pull_headers (asfparse);
+ if (ret != GST_FLOW_OK)
+ goto pause;
+ asfparse->parse_state = ASF_PARSING_DATA;
+
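+ /* fall through */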
+ case ASF_PARSING_DATA:
+ GST_INFO_OBJECT (asfparse, "Parsing data object headers");
+ ret = gst_asf_parse_pull_data_header (asfparse);
+ if (ret != GST_FLOW_OK)
+ goto pause;
+ asfparse->parse_state = ASF_PARSING_PACKETS;
+
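+ /* fall through */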
+ case ASF_PARSING_PACKETS:
+ GST_INFO_OBJECT (asfparse, "Starting packet parsing");
+ GST_INFO_OBJECT (asfparse, "Broadcast mode %s",
+ asfparse->asfinfo->broadcast ? "on" : "off");
+ ret = gst_asf_parse_pull_packets (asfparse);
+ if (ret != GST_FLOW_OK)
+ goto pause;
+
+ /* test if all packets have been processed */
+ if (!asfparse->asfinfo->broadcast &&
+ asfparse->parsed_packets == asfparse->asfinfo->packets_count) {
+ GST_INFO_OBJECT (asfparse,
+ "All %" G_GUINT64_FORMAT " packets processed",
+ asfparse->parsed_packets);
+ asfparse->parse_state = ASF_PARSING_INDEXES;
+ }
+
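+ /* fall through */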
+ case ASF_PARSING_INDEXES:
+ /* we currently don't care about indexes, so just push them forward */
+ GST_INFO_OBJECT (asfparse, "Starting indexes parsing");
+ ret = gst_asf_parse_pull_indexes (asfparse);
+ if (ret != GST_FLOW_OK)
+ goto pause;
+ default:
+ break;
+ }
+
+pause:
+ {
+ const gchar *reason = gst_flow_get_name (ret);
+
+ GST_INFO_OBJECT (asfparse, "Pausing sinkpad task");
+ gst_pad_pause_task (pad);
+
+ if (GST_FLOW_IS_FATAL (ret) || ret == GST_FLOW_NOT_LINKED) {
+ if (ret == GST_FLOW_UNEXPECTED) {
+ /* normal end of stream, just send EOS downstream below */
+ } else {
+ GST_ELEMENT_ERROR (asfparse, STREAM, FAILED,
+ (NULL), ("streaming task paused, reason %s (%d)", reason, ret));
+ }
+ gst_pad_push_event (asfparse->srcpad, gst_event_new_eos ());
+ }
+ }
+}
+
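+/* Push mode entry point: buffers are accumulated in a GstAdapter and run
+ * through the same headers -> data object -> packets -> indexes state
+ * machine used in pull mode. */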
+static GstFlowReturn
+gst_asf_parse_chain (GstPad * pad, GstBuffer * buffer)
+{
+ GstAsfParse *asfparse;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ asfparse = GST_ASF_PARSE (GST_PAD_PARENT (pad));
+ gst_adapter_push (asfparse->adapter, buffer);
+
+ switch (asfparse->parse_state) {
+ case ASF_PARSING_HEADERS:
+ if (asfparse->headers_size == 0 &&
+ gst_adapter_available (asfparse->adapter) >= ASF_GUID_OBJSIZE_SIZE) {
+
+ /* we can peek at the object size */
+ asfparse->headers_size =
+ gst_asf_match_and_peek_obj_size (gst_adapter_peek
+ (asfparse->adapter, ASF_GUID_OBJSIZE_SIZE),
+ &(guids[ASF_HEADER_OBJECT_INDEX]));
+
+ if (asfparse->headers_size == 0) {
+ /* something is wrong, this probably isn't an ASF stream */
+ GST_ERROR_OBJECT (asfparse, "ASF starting identifier missing");
+ ret = GST_FLOW_ERROR;
+ goto end;
+ }
+ }
+ if (gst_adapter_available (asfparse->adapter) >= asfparse->headers_size) {
+ GstBuffer *headers = gst_adapter_take_buffer (asfparse->adapter,
+ asfparse->headers_size);
+ if (gst_asf_parse_headers (headers, asfparse->asfinfo)) {
+ ret = gst_asf_parse_push (asfparse, headers);
+ asfparse->parse_state = ASF_PARSING_DATA;
+ } else {
+ ret = GST_FLOW_ERROR;
+ GST_ERROR_OBJECT (asfparse, "Failed to parse headers");
+ }
+ }
+ break;
+ case ASF_PARSING_DATA:
+ if (asfparse->data_size == 0 &&
+ gst_adapter_available (asfparse->adapter) >= ASF_GUID_OBJSIZE_SIZE) {
+
+ /* we can peek at the object size */
+ asfparse->data_size =
+ gst_asf_match_and_peek_obj_size (gst_adapter_peek
+ (asfparse->adapter, ASF_GUID_OBJSIZE_SIZE),
+ &(guids[ASF_DATA_OBJECT_INDEX]));
+
+ if (asfparse->data_size == 0) {
+ /* something is wrong */
+ GST_ERROR_OBJECT (asfparse, "Unexpected object after headers, was "
+ "expecting a data object");
+ ret = GST_FLOW_ERROR;
+ goto end;
+ }
+ }
+ /* if we have received the full data object headers */
+ if (gst_adapter_available (asfparse->adapter) >= ASF_DATA_OBJECT_SIZE) {
+ ret = gst_asf_parse_parse_data_object (asfparse,
+ gst_adapter_take_buffer (asfparse->adapter, ASF_DATA_OBJECT_SIZE));
+ if (ret != GST_FLOW_OK) {
+ goto end;
+ }
+ asfparse->parse_state = ASF_PARSING_PACKETS;
+ }
+ break;
+ case ASF_PARSING_PACKETS:
+ g_assert (asfparse->asfinfo->packet_size);
+ while ((asfparse->asfinfo->broadcast ||
+ asfparse->parsed_packets < asfparse->asfinfo->packets_count) &&
+ gst_adapter_available (asfparse->adapter) >=
+ asfparse->asfinfo->packet_size) {
+ GstBuffer *packet = gst_adapter_take_buffer (asfparse->adapter,
+ asfparse->asfinfo->packet_size);
+ asfparse->parsed_packets++;
+ ret = gst_asf_parse_parse_packet (asfparse, packet);
+ if (ret != GST_FLOW_OK)
+ goto end;
+ }
+ if (!asfparse->asfinfo->broadcast &&
+ asfparse->parsed_packets >= asfparse->asfinfo->packets_count) {
+ GST_INFO_OBJECT (asfparse, "Finished parsing packets");
+ asfparse->parse_state = ASF_PARSING_INDEXES;
+ }
+ break;
+ case ASF_PARSING_INDEXES:
+ /* we currently don't care about any of those objects */
+ if (gst_adapter_available (asfparse->adapter) >= ASF_GUID_OBJSIZE_SIZE) {
+ guint64 obj_size;
+ /* we can peek at the object size */
+ obj_size = gst_asf_match_and_peek_obj_size (gst_adapter_peek
+ (asfparse->adapter, ASF_GUID_OBJSIZE_SIZE), NULL);
+ if (gst_adapter_available (asfparse->adapter) >= obj_size) {
+ GST_DEBUG_OBJECT (asfparse, "Skiping object");
+ ret = gst_asf_parse_push (asfparse,
+ gst_adapter_take_buffer (asfparse->adapter, obj_size));
+ if (ret != GST_FLOW_OK) {
+ goto end;
+ }
+ }
+ }
+ break;
+ default:
+ break;
+ }
+
+end:
+ return ret;
+}
+
+static void
+gst_asf_parse_base_init (gpointer g_class)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_factory));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_factory));
+
+ gst_element_class_set_details (element_class, &gst_asf_parse_details);
+
+ GST_DEBUG_CATEGORY_INIT (asfparse_debug, "asfparse", 0,
+ "Parser for ASF streams");
+}
+
+static void
+gst_asf_parse_finalize (GObject * object)
+{
+ GstAsfParse *asfparse = GST_ASF_PARSE (object);
+ gst_adapter_clear (asfparse->adapter);
+ g_object_unref (G_OBJECT (asfparse->adapter));
+ gst_caps_unref (asfparse->outcaps);
+ gst_asf_file_info_free (asfparse->asfinfo);
+ g_free (asfparse->packetinfo);
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_asf_parse_class_init (GstAsfParseClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ parent_class = g_type_class_peek_parent (klass);
+
+ gobject_class->get_property = gst_asf_parse_get_property;
+ gobject_class->set_property = gst_asf_parse_set_property;
+ gobject_class->finalize = gst_asf_parse_finalize;
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_asf_parse_change_state);
+}
+
+static void
+gst_asf_parse_init (GstAsfParse * asfparse, GstAsfParseClass * klass)
+{
+ asfparse->sinkpad = gst_pad_new_from_static_template (&sink_factory, "sink");
+ gst_pad_set_chain_function (asfparse->sinkpad, gst_asf_parse_chain);
+ gst_pad_set_activate_function (asfparse->sinkpad,
+ gst_asf_parse_sink_activate);
+ gst_pad_set_activatepull_function (asfparse->sinkpad,
+ gst_asf_parse_sink_activate_pull);
+ gst_element_add_pad (GST_ELEMENT (asfparse), asfparse->sinkpad);
+
+ asfparse->srcpad = gst_pad_new_from_static_template (&src_factory, "src");
+ gst_pad_use_fixed_caps (asfparse->srcpad);
+ gst_element_add_pad (GST_ELEMENT (asfparse), asfparse->srcpad);
+
+ asfparse->adapter = gst_adapter_new ();
+ asfparse->outcaps = gst_caps_new_simple ("video/x-ms-asf", NULL);
+ asfparse->asfinfo = gst_asf_file_info_new ();
+ asfparse->packetinfo = g_new0 (GstAsfPacketInfo, 1);
+ gst_asf_parse_reset (asfparse);
+}
+
+static void
+gst_asf_parse_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec)
+{
+ GstAsfParse *asfparse;
+
+ asfparse = GST_ASF_PARSE (object);
+ switch (prop_id) {
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_asf_parse_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec)
+{
+ GstAsfParse *asfparse;
+
+ asfparse = GST_ASF_PARSE (object);
+ switch (prop_id) {
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static GstStateChangeReturn
+gst_asf_parse_change_state (GstElement * element, GstStateChange transition)
+{
+ GstAsfParse *asfparse;
+ GstStateChangeReturn ret;
+
+ asfparse = GST_ASF_PARSE (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ gst_asf_parse_reset (asfparse);
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ goto done;
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ break;
+ default:
+ break;
+ }
+
+done:
+ return ret;
+}
+
+gboolean
+gst_asf_parse_plugin_init (GstPlugin * plugin)
+{
+ return gst_element_register (plugin, "asfparse",
+ GST_RANK_NONE, GST_TYPE_ASF_PARSE);
+}
diff --git a/gst/asfmux/gstasfparse.h b/gst/asfmux/gstasfparse.h
new file mode 100644
index 00000000..760e81f4
--- /dev/null
+++ b/gst/asfmux/gstasfparse.h
@@ -0,0 +1,88 @@
+/* ASF Parser plugin for GStreamer
+ * Copyright (C) 2009 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+#ifndef __GST_ASF_PARSE_H__
+#define __GST_ASF_PARSE_H__
+
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+#include <gst/base/gstbytereader.h>
+
+#include "gstasfobjects.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_ASF_PARSE \
+ (gst_asf_parse_get_type())
+#define GST_ASF_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_ASF_PARSE,GstAsfParse))
+#define GST_ASF_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_ASF_PARSE,GstAsfParseClass))
+#define GST_IS_ASF_PARSE(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_ASF_PARSE))
+#define GST_IS_ASF_PARSE_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_ASF_PARSE))
+
+#define GST_ASF_PARSE_CAST(obj) ((GstAsfParse*)(obj))
+
+enum GstAsfParsingState {
+ ASF_PARSING_HEADERS,
+ ASF_PARSING_DATA,
+ ASF_PARSING_PACKETS,
+ ASF_PARSING_INDEXES
+};
+
+typedef struct _GstAsfParse GstAsfParse;
+typedef struct _GstAsfParseClass GstAsfParseClass;
+
+struct _GstAsfParse {
+ GstElement element;
+
+ enum GstAsfParsingState parse_state;
+
+ GstAdapter *adapter;
+
+ GstPad *srcpad;
+ GstPad *sinkpad;
+ GstCaps *outcaps;
+
+ guint64 parsed_packets;
+
+ guint64 offset; /* used in pull mode */
+
+ /* parsed info */
+ GstAsfFileInfo *asfinfo;
+ GstAsfPacketInfo *packetinfo; /* we keep it here to avoid allocs */
+ guint64 headers_size;
+ guint64 data_size;
+};
+
+struct _GstAsfParseClass {
+ GstElementClass parent_class;
+};
+
+GType gst_asf_parse_get_type(void);
+gboolean gst_asf_parse_plugin_init (GstPlugin * plugin);
+
+G_END_DECLS
+
+
+#endif /* __GST_ASF_PARSE_H__ */
diff --git a/gst/asfmux/gstrtpasfpay.c b/gst/asfmux/gstrtpasfpay.c
new file mode 100644
index 00000000..7c57984f
--- /dev/null
+++ b/gst/asfmux/gstrtpasfpay.c
@@ -0,0 +1,446 @@
+/* ASF RTP Payloader plugin for GStreamer
+ * Copyright (C) 2009 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/* FIXME
+ * - this element doesn't honor the (max/min) time properties;
+ * is it possible to do that with a container format?
+ */
+
+#ifdef HAVE_CONFIG_H
+# include "config.h"
+#endif
+
+#include <gst/rtp/gstrtpbuffer.h>
+#include <string.h>
+
+#include "gstrtpasfpay.h"
+
+GST_DEBUG_CATEGORY_STATIC (rtpasfpay_debug);
+#define GST_CAT_DEFAULT (rtpasfpay_debug)
+
+/* elementfactory information */
+static const GstElementDetails gst_rtp_asf_pay_details =
+GST_ELEMENT_DETAILS ("RTP ASF payloader",
+ "Codec/Payloader/Network",
+ "Payload-encodes ASF into RTP packets (MS_RTSP)",
+ "Thiago Santos <thiagoss@embedded.ufcg.edu.br>");
+
+static GstStaticPadTemplate gst_rtp_asf_pay_sink_template =
+GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-ms-asf, " "parsed = (boolean) true")
+ );
+
+static GstStaticPadTemplate gst_rtp_asf_pay_src_template =
+GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("application/x-rtp, "
+ "media = (string) {\"audio\", \"video\", \"application\"}, "
+ "clock-rate = (int) 1000, " "encoding-name = (string) \"X-ASF-PF\"")
+ );
+
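+/* Once the ASF headers are parsed, the output caps additionally carry
+ * "maxps" (the ASF packet size) and "config" (the base64 encoded ASF
+ * header objects); see gst_rtp_asf_pay_parse_headers () below. */
+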
+static GstFlowReturn
+gst_rtp_asf_pay_handle_buffer (GstBaseRTPPayload * rtppay, GstBuffer * buffer);
+static gboolean
+gst_rtp_asf_pay_set_caps (GstBaseRTPPayload * rtppay, GstCaps * caps);
+
+GST_BOILERPLATE (GstRtpAsfPay, gst_rtp_asf_pay, GstBaseRTPPayload,
+ GST_TYPE_BASE_RTP_PAYLOAD);
+
+static void
+gst_rtp_asf_pay_init (GstRtpAsfPay * rtpasfpay, GstRtpAsfPayClass * klass)
+{
+ rtpasfpay->first_ts = 0;
+ rtpasfpay->config = NULL;
+ rtpasfpay->packets_count = 0;
+ rtpasfpay->state = ASF_NOT_STARTED;
+ rtpasfpay->headers = NULL;
+ rtpasfpay->current = NULL;
+}
+
+static void
+gst_rtp_asf_pay_finalize (GObject * object)
+{
+ GstRtpAsfPay *rtpasfpay;
+ rtpasfpay = GST_RTP_ASF_PAY (object);
+ g_free (rtpasfpay->config);
+ if (rtpasfpay->headers)
+ gst_buffer_unref (rtpasfpay->headers);
+ G_OBJECT_CLASS (parent_class)->finalize (object);
+}
+
+static void
+gst_rtp_asf_pay_base_init (gpointer klass)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_rtp_asf_pay_sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&gst_rtp_asf_pay_src_template));
+ gst_element_class_set_details (element_class, &gst_rtp_asf_pay_details);
+}
+
+static void
+gst_rtp_asf_pay_class_init (GstRtpAsfPayClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstBaseRTPPayloadClass *gstbasertppayload_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstbasertppayload_class = (GstBaseRTPPayloadClass *) klass;
+
+ gobject_class->finalize = gst_rtp_asf_pay_finalize;
+
+ gstbasertppayload_class->handle_buffer = gst_rtp_asf_pay_handle_buffer;
+ gstbasertppayload_class->set_caps = gst_rtp_asf_pay_set_caps;
+
+ GST_DEBUG_CATEGORY_INIT (rtpasfpay_debug, "rtpasfpay", 0,
+ "ASF RTP Payloader");
+}
+
+static gboolean
+gst_rtp_asf_pay_set_caps (GstBaseRTPPayload * rtppay, GstCaps * caps)
+{
+ /* FIXME change application for the actual content */
+ gst_basertppayload_set_options (rtppay, "application", TRUE, "X-ASF-PF",
+ 1000);
+ return TRUE;
+}
+
+static GstFlowReturn
+gst_rtp_asf_pay_handle_packet (GstRtpAsfPay * rtpasfpay, GstBuffer * buffer)
+{
+ GstBaseRTPPayload *rtppay;
+ GstAsfPacketInfo *packetinfo;
+ guint8 flags;
+ guint8 *data;
+ guint32 packet_util_size;
+ guint32 packet_offset;
+ guint32 size_left;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ rtppay = GST_BASE_RTP_PAYLOAD (rtpasfpay);
+ packetinfo = &rtpasfpay->packetinfo;
+
+ if (!gst_asf_parse_packet (buffer, packetinfo, TRUE)) {
+ GST_ERROR_OBJECT (rtpasfpay, "Error while parsing asf packet");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_ERROR;
+ }
+
+ if (packetinfo->packet_size == 0)
+ packetinfo->packet_size = rtpasfpay->asfinfo.packet_size;
+
+ GST_LOG_OBJECT (rtpasfpay, "Packet size: %" G_GUINT32_FORMAT
+ ", padding: %" G_GUINT32_FORMAT, packetinfo->packet_size,
+ packetinfo->padding);
+
+ /* FIXME - should update the padding field to 0 */
+
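+ /* Layout of the small header written in front of each ASF data chunk
+ * below: 1 flag byte (0x80 keyframe, 0x40 the 24 bit field is the length
+ * of a complete packet, 0x20 relative timestamp present), a 24 bit
+ * big-endian length or fragment offset, and a 32 bit big-endian send time
+ * in milliseconds, relative to the first ASF packet in this RTP packet. */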
+ packet_util_size = packetinfo->packet_size - packetinfo->padding;
+ packet_offset = 0;
+ while (packet_util_size > 0) {
+ /* Even if we haven't completely filled an output buffer, we
+ * push it as soon as we add a fragment, because it is not
+ * possible to determine where an ASF packet fragment ends
+ * inside an RTP packet payload.
+ * This flag tells us to push the packet.
+ */
+ gboolean force_push = FALSE;
+
+ /* we have no output buffer pending, create one */
+ if (rtpasfpay->current == NULL) {
+ GST_LOG_OBJECT (rtpasfpay, "Creating new output buffer");
+ rtpasfpay->current =
+ gst_rtp_buffer_new_allocate_len (GST_BASE_RTP_PAYLOAD_MTU (rtpasfpay),
+ 0, 0);
+ rtpasfpay->cur_off = gst_rtp_buffer_get_header_len (rtpasfpay->current);
+ rtpasfpay->has_ts = FALSE;
+ rtpasfpay->marker = FALSE;
+ }
+ data = GST_BUFFER_DATA (rtpasfpay->current) + rtpasfpay->cur_off;
+ size_left = GST_BUFFER_SIZE (rtpasfpay->current) - rtpasfpay->cur_off;
+
+ GST_DEBUG_OBJECT (rtpasfpay, "Input buffer bytes consumed: %"
+ G_GUINT32_FORMAT "/%" G_GUINT32_FORMAT, packet_offset,
+ GST_BUFFER_SIZE (buffer));
+
+ GST_DEBUG_OBJECT (rtpasfpay, "Output rtpbuffer status");
+ GST_DEBUG_OBJECT (rtpasfpay, "Current offset: %" G_GUINT32_FORMAT,
+ rtpasfpay->cur_off);
+ GST_DEBUG_OBJECT (rtpasfpay, "Size left: %" G_GUINT32_FORMAT, size_left);
+ GST_DEBUG_OBJECT (rtpasfpay, "Has ts: %s",
+ rtpasfpay->has_ts ? "yes" : "no");
+ if (rtpasfpay->has_ts) {
+ GST_DEBUG_OBJECT (rtpasfpay, "Ts: %" G_GUINT32_FORMAT, rtpasfpay->ts);
+ }
+
+ flags = 0;
+ if (packetinfo->has_keyframe) {
+ flags = flags | 0x80;
+ }
+ flags = flags | 0x20; /* Relative timestamp is present */
+
+ if (!rtpasfpay->has_ts) {
+ /* this is the first ASF packet; its send time becomes the
+ * RTP packet timestamp */
+ rtpasfpay->has_ts = TRUE;
+ rtpasfpay->ts = packetinfo->send_time;
+ }
+
+ if (GST_BUFFER_SIZE (rtpasfpay->current) - rtpasfpay->cur_off >=
+ packet_util_size + 8) {
+ /* enough space for the rest of the packet */
+ if (packet_offset == 0) {
+ flags = flags | 0x40;
+ GST_WRITE_UINT24_BE (data + 1, packet_util_size);
+ } else {
+ GST_WRITE_UINT24_BE (data + 1, packet_offset);
+ force_push = TRUE;
+ }
+ data[0] = flags;
+ GST_WRITE_UINT32_BE (data + 4,
+ (gint32) (packetinfo->send_time) - (gint32) rtpasfpay->ts);
+ memcpy (data + 8, GST_BUFFER_DATA (buffer) + packet_offset,
+ packet_util_size);
+
+ /* updating status variables */
+ rtpasfpay->cur_off += 8 + packet_util_size;
+ size_left -= packet_util_size + 8;
+ packet_offset += packet_util_size;
+ packet_util_size = 0;
+ rtpasfpay->marker = TRUE;
+ } else {
+ /* fragment packet */
+ data[0] = flags;
+ GST_WRITE_UINT24_BE (data + 1, packet_offset);
+ GST_WRITE_UINT32_BE (data + 4,
+ (gint32) (packetinfo->send_time) - (gint32) rtpasfpay->ts);
+ memcpy (data + 8, GST_BUFFER_DATA (buffer) + packet_offset,
+ size_left - 8);
+
+ /* updating status variables */
+ rtpasfpay->cur_off += size_left;
+ packet_offset += size_left - 8;
+ packet_util_size -= size_left - 8;
+ size_left = 0;
+ force_push = TRUE;
+ }
+
+ /* there is not enough room for any more buffers */
+ if (force_push || size_left <= 8) {
+
+ if (size_left != 0) {
+ /* trim remaining bytes not used */
+ GstBuffer *aux = gst_buffer_create_sub (rtpasfpay->current, 0,
+ GST_BUFFER_SIZE (rtpasfpay->current) - size_left);
+ gst_buffer_unref (rtpasfpay->current);
+ rtpasfpay->current = aux;
+ }
+ gst_rtp_buffer_set_ssrc (rtpasfpay->current, rtppay->current_ssrc);
+ gst_rtp_buffer_set_marker (rtpasfpay->current, rtpasfpay->marker);
+ gst_rtp_buffer_set_payload_type (rtpasfpay->current,
+ GST_BASE_RTP_PAYLOAD_PT (rtppay));
+ gst_rtp_buffer_set_seq (rtpasfpay->current, rtppay->seqnum + 1);
+ gst_rtp_buffer_set_timestamp (rtpasfpay->current, packetinfo->send_time);
+
+ GST_BUFFER_TIMESTAMP (rtpasfpay->current) = GST_BUFFER_TIMESTAMP (buffer);
+
+ gst_buffer_set_caps (rtpasfpay->current,
+ GST_PAD_CAPS (GST_BASE_RTP_PAYLOAD_SRCPAD (rtppay)));
+
+ rtppay->seqnum++;
+ rtppay->timestamp = packetinfo->send_time;
+
+ GST_DEBUG_OBJECT (rtpasfpay, "Pushing rtp buffer");
+ ret =
+ gst_pad_push (GST_BASE_RTP_PAYLOAD_SRCPAD (rtppay),
+ rtpasfpay->current);
+ rtpasfpay->current = NULL;
+ if (ret != GST_FLOW_OK) {
+ gst_buffer_unref (buffer);
+ return ret;
+ }
+ }
+ }
+ gst_buffer_unref (buffer);
+ return ret;
+}
+
+static GstFlowReturn
+gst_rtp_asf_pay_parse_headers (GstRtpAsfPay * rtpasfpay)
+{
+ gchar *maxps;
+ g_return_val_if_fail (rtpasfpay->headers, GST_FLOW_ERROR);
+
+ if (!gst_asf_parse_headers (rtpasfpay->headers, &rtpasfpay->asfinfo))
+ goto error;
+
+ GST_DEBUG_OBJECT (rtpasfpay, "Packets number: %" G_GUINT64_FORMAT,
+ rtpasfpay->asfinfo.packets_count);
+ GST_DEBUG_OBJECT (rtpasfpay, "Packets size: %" G_GUINT32_FORMAT,
+ rtpasfpay->asfinfo.packet_size);
+ GST_DEBUG_OBJECT (rtpasfpay, "Broadcast mode: %s",
+ rtpasfpay->asfinfo.broadcast ? "true" : "false");
+
+ /* get the config for caps */
+ g_free (rtpasfpay->config);
+ rtpasfpay->config = g_base64_encode (GST_BUFFER_DATA (rtpasfpay->headers),
+ GST_BUFFER_SIZE (rtpasfpay->headers));
+ GST_DEBUG_OBJECT (rtpasfpay, "Serialized headers to base64 string %s",
+ rtpasfpay->config);
+
+ g_assert (rtpasfpay->config != NULL);
+ GST_DEBUG_OBJECT (rtpasfpay, "Setting optional caps values: maxps=%"
+ G_GUINT32_FORMAT " and config=%s", rtpasfpay->asfinfo.packet_size,
+ rtpasfpay->config);
+ maxps =
+ g_strdup_printf ("%" G_GUINT32_FORMAT, rtpasfpay->asfinfo.packet_size);
+ gst_basertppayload_set_outcaps (GST_BASE_RTP_PAYLOAD (rtpasfpay), "maxps",
+ G_TYPE_STRING, maxps, "config", G_TYPE_STRING, rtpasfpay->config, NULL);
+ g_free (maxps);
+
+ return GST_FLOW_OK;
+
+error:
+ GST_ERROR_OBJECT (rtpasfpay, "Error while parsing headers");
+ return GST_FLOW_ERROR;
+}
+
+static GstFlowReturn
+gst_rtp_asf_pay_handle_buffer (GstBaseRTPPayload * rtppay, GstBuffer * buffer)
+{
+ GstRtpAsfPay *rtpasfpay = GST_RTP_ASF_PAY_CAST (rtppay);
+
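+ /* State machine: ASF_NOT_STARTED (waiting for the header object) ->
+ * ASF_DATA_OBJECT (waiting for the data object header) -> ASF_PACKETS
+ * (payloading data packets) -> ASF_END (all packets pushed). */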
+ if (G_UNLIKELY (rtpasfpay->state == ASF_END)) {
+ GST_LOG_OBJECT (rtpasfpay,
+ "Dropping buffer as we already pushed all packets");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_UNEXPECTED; /* we already finished our job */
+ }
+
+ /* receive headers
+ * we only accept if they are in a single buffer */
+ if (G_UNLIKELY (rtpasfpay->state == ASF_NOT_STARTED)) {
+ guint64 header_size;
+
+ if (GST_BUFFER_SIZE (buffer) < 24) { /* 16 byte GUID + 8 byte object size */
+ GST_ERROR_OBJECT (rtpasfpay,
+ "Buffer too small, smaller than a Guid and object size");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_ERROR;
+ }
+
+ header_size = gst_asf_match_and_peek_obj_size (GST_BUFFER_DATA (buffer),
+ &(guids[ASF_HEADER_OBJECT_INDEX]));
+ if (header_size > 0) {
+ GST_DEBUG_OBJECT (rtpasfpay, "ASF header guid received, size %"
+ G_GUINT64_FORMAT, header_size);
+
+ if (GST_BUFFER_SIZE (buffer) < header_size) {
+ GST_ERROR_OBJECT (rtpasfpay, "Headers should be contained in a single"
+ " buffer");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_ERROR;
+ } else {
+ rtpasfpay->state = ASF_DATA_OBJECT;
+
+ /* clear previous headers, if any */
+ if (rtpasfpay->headers) {
+ gst_buffer_unref (rtpasfpay->headers);
+ }
+
+ GST_DEBUG_OBJECT (rtpasfpay, "Storing headers");
+ if (GST_BUFFER_SIZE (buffer) == header_size) {
+ rtpasfpay->headers = buffer;
+ return GST_FLOW_OK;
+ } else {
+ /* the headers are a subbuffer of this buffer */
+ GstBuffer *aux = gst_buffer_create_sub (buffer, header_size,
+ GST_BUFFER_SIZE (buffer) - header_size);
+ rtpasfpay->headers = gst_buffer_create_sub (buffer, 0, header_size);
+ gst_buffer_replace (&buffer, aux);
+ }
+ }
+ } else {
+ GST_ERROR_OBJECT (rtpasfpay, "Missing ASF header start");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ if (G_UNLIKELY (rtpasfpay->state == ASF_DATA_OBJECT)) {
+ if (GST_BUFFER_SIZE (buffer) != ASF_DATA_OBJECT_SIZE) {
+ GST_ERROR_OBJECT (rtpasfpay, "Received buffer of different size of "
+ "the data object header");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_ERROR;
+ }
+
+ if (gst_asf_match_guid (GST_BUFFER_DATA (buffer),
+ &(guids[ASF_DATA_OBJECT_INDEX]))) {
+ GST_DEBUG_OBJECT (rtpasfpay, "Received data object header");
+ rtpasfpay->headers = gst_buffer_join (rtpasfpay->headers, buffer);
+ rtpasfpay->state = ASF_PACKETS;
+
+ return gst_rtp_asf_pay_parse_headers (rtpasfpay);
+ } else {
+ GST_ERROR_OBJECT (rtpasfpay, "Unexpected object received (was expecting "
+ "data object)");
+ gst_buffer_unref (buffer);
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ if (G_LIKELY (rtpasfpay->state == ASF_PACKETS)) {
+ /* in broadcast mode we can't trust the packet count information
+ * from the headers. We assume that broadcast mode means a live
+ * stream and that we will keep receiving packets indefinitely.
+ */
+ if (rtpasfpay->asfinfo.broadcast ||
+ rtpasfpay->packets_count < rtpasfpay->asfinfo.packets_count) {
+ GST_DEBUG_OBJECT (rtpasfpay, "Received packet %"
+ G_GUINT64_FORMAT "/%" G_GUINT64_FORMAT,
+ rtpasfpay->packets_count, rtpasfpay->asfinfo.packets_count);
+ rtpasfpay->packets_count++;
+ return gst_rtp_asf_pay_handle_packet (rtpasfpay, buffer);
+ } else {
+ GST_INFO_OBJECT (rtpasfpay, "Packets ended");
+ rtpasfpay->state = ASF_END;
+ gst_buffer_unref (buffer);
+ return GST_FLOW_UNEXPECTED;
+ }
+ }
+
+ gst_buffer_unref (buffer);
+ return GST_FLOW_OK;
+}
+
+gboolean
+gst_rtp_asf_pay_plugin_init (GstPlugin * plugin)
+{
+ return gst_element_register (plugin, "rtpasfpay",
+ GST_RANK_NONE, GST_TYPE_RTP_ASF_PAY);
+}
diff --git a/gst/asfmux/gstrtpasfpay.h b/gst/asfmux/gstrtpasfpay.h
new file mode 100644
index 00000000..8c7505a5
--- /dev/null
+++ b/gst/asfmux/gstrtpasfpay.h
@@ -0,0 +1,87 @@
+/* ASF RTP Payloader plugin for GStreamer
+ * Copyright (C) 2009 Thiago Santos <thiagoss@embedded.ufcg.edu.br>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+#ifndef __GST_RTP_ASF_PAY_H__
+#define __GST_RTP_ASF_PAY_H__
+
+#include <gst/gst.h>
+#include <gst/rtp/gstbasertppayload.h>
+#include <gst/rtp/gstrtpbuffer.h>
+#include <gst/base/gstadapter.h>
+
+#include "gstasfobjects.h"
+
+G_BEGIN_DECLS
+#define GST_TYPE_RTP_ASF_PAY \
+ (gst_rtp_asf_pay_get_type())
+#define GST_RTP_ASF_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_RTP_ASF_PAY,GstRtpAsfPay))
+#define GST_RTP_ASF_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_RTP_ASF_PAY,GstRtpAsfPayClass))
+#define GST_IS_RTP_ASF_PAY(obj) \
+ (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_RTP_ASF_PAY))
+#define GST_IS_RTP_ASF_PAY_CLASS(klass) \
+ (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_RTP_ASF_PAY))
+#define GST_RTP_ASF_PAY_CAST(obj) ((GstRtpAsfPay*)(obj))
+
+enum GstRtpAsfPayState
+{
+ ASF_NOT_STARTED,
+ ASF_DATA_OBJECT,
+ ASF_PACKETS,
+ ASF_END
+};
+
+typedef struct _GstRtpAsfPay GstRtpAsfPay;
+typedef struct _GstRtpAsfPayClass GstRtpAsfPayClass;
+
+struct _GstRtpAsfPay
+{
+ GstBaseRTPPayload rtppay;
+
+ enum GstRtpAsfPayState state;
+
+ guint32 first_ts;
+ gchar *config;
+ guint64 packets_count;
+ GstAsfFileInfo asfinfo;
+
+ /* current output buffer */
+ GstBuffer *current;
+ guint32 cur_off;
+ guint32 ts;
+ gboolean has_ts;
+ gboolean marker;
+
+ /* keeping it here to avoid allocs/frees */
+ GstAsfPacketInfo packetinfo;
+
+ GstBuffer *headers;
+};
+
+struct _GstRtpAsfPayClass
+{
+ GstBaseRTPPayloadClass parent_class;
+};
+
+GType gst_rtp_asf_pay_get_type (void);
+gboolean gst_rtp_asf_pay_plugin_init (GstPlugin * plugin);
+
+G_END_DECLS
+#endif /* __GST_RTP_ASF_PAY_H__ */