Diffstat (limited to 'sys/dshowdecwrapper')
-rw-r--r--  sys/dshowdecwrapper/gstdshowaudiodec.c   | 1168
-rw-r--r--  sys/dshowdecwrapper/gstdshowaudiodec.h   |  111
-rw-r--r--  sys/dshowdecwrapper/gstdshowdecwrapper.c |   79
-rw-r--r--  sys/dshowdecwrapper/gstdshowdecwrapper.h |   73
-rw-r--r--  sys/dshowdecwrapper/gstdshowvideodec.c   | 1122
-rw-r--r--  sys/dshowdecwrapper/gstdshowvideodec.h   |  105
6 files changed, 2658 insertions(+), 0 deletions(-)
diff --git a/sys/dshowdecwrapper/gstdshowaudiodec.c b/sys/dshowdecwrapper/gstdshowaudiodec.c
new file mode 100644
index 00000000..c231ac4b
--- /dev/null
+++ b/sys/dshowdecwrapper/gstdshowaudiodec.c
@@ -0,0 +1,1168 @@
+/*
+ * GStreamer DirectShow codecs wrapper
+ * Copyright <2006, 2007, 2008> Fluendo <gstreamer@fluendo.com>
+ * Copyright <2006, 2007, 2008> Pioneers of the Inevitable <songbird@songbirdnest.com>
+ * Copyright <2007,2008> Sebastien Moutte <sebastien@moutte.net>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ *
+ * Alternatively, the contents of this file may be used under the
+ * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
+ * which case the following provisions apply instead of the ones
+ * mentioned above:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstdshowaudiodec.h"
+#include <mmreg.h>
+
+GST_DEBUG_CATEGORY_STATIC (dshowaudiodec_debug);
+#define GST_CAT_DEFAULT dshowaudiodec_debug
+
+GST_BOILERPLATE (GstDshowAudioDec, gst_dshowaudiodec, GstElement,
+ GST_TYPE_ELEMENT);
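+/* codec entry of the element type currently being registered; it is read by
+ * _base_init() because g_type_register_static() provides no user data */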
+static const CodecEntry *tmp;
+
+static void gst_dshowaudiodec_dispose (GObject * object);
+static GstStateChangeReturn gst_dshowaudiodec_change_state
+ (GstElement * element, GstStateChange transition);
+
+/* sink pad overrides */
+static gboolean gst_dshowaudiodec_sink_setcaps (GstPad * pad, GstCaps * caps);
+static GstFlowReturn gst_dshowaudiodec_chain (GstPad * pad, GstBuffer * buffer);
+static gboolean gst_dshowaudiodec_sink_event (GstPad * pad, GstEvent * event);
+
+/* callback used by directshow to push buffers */
+static gboolean gst_dshowaudiodec_push_buffer (byte * buffer, long size,
+ byte * src_object, UINT64 start, UINT64 stop);
+
+/* utils */
+static gboolean gst_dshowaudiodec_create_graph_and_filters (GstDshowAudioDec *
+ adec);
+static gboolean gst_dshowaudiodec_destroy_graph_and_filters (GstDshowAudioDec *
+ adec);
+static gboolean gst_dshowaudiodec_flush (GstDshowAudioDec * adec);
+static gboolean gst_dshowaudiodec_get_filter_settings (GstDshowAudioDec * adec);
+static gboolean gst_dshowaudiodec_setup_graph (GstDshowAudioDec * adec);
+
+/* global variable: MPEG audio bitrate table in bits per second,
+ * indexed as [version id][layer - 1][bitrate index] */
+const long bitrates[2][3][16] = {
+ /* version 0 */
+ {
+ /* one list per layer 1-3 */
+ {0, 32000, 48000, 56000, 64000, 80000, 96000, 112000, 128000, 144000,
+ 160000, 176000, 192000, 224000, 256000, 0},
+ {0, 8000, 16000, 24000, 32000, 40000, 48000, 56000, 64000, 80000, 96000,
+ 112000, 128000, 144000, 160000, 0},
+ {0, 8000, 16000, 24000, 32000, 40000, 48000, 56000, 64000, 80000, 96000,
+ 112000, 128000, 144000, 160000, 0},
+ },
+ /* version 1 */
+ {
+ /* one list per layer 1-3 */
+ {0, 32000, 64000, 96000, 128000, 160000, 192000, 224000, 256000,
+ 288000, 320000, 352000, 384000, 416000, 448000, 0},
+ {0, 32000, 48000, 56000, 64000, 80000, 96000, 112000, 128000,
+ 160000, 192000, 224000, 256000, 320000, 384000, 0},
+ {0, 32000, 40000, 48000, 56000, 64000, 80000, 96000, 112000,
+ 128000, 160000, 192000, 224000, 256000, 320000, 0},
+ }
+};
+
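+/* the media subtype GUIDs below follow the usual DirectShow convention: the
+ * first DWORD carries the wave format tag and the remainder is the fixed
+ * base GUID xxxxxxxx-0000-0010-8000-00AA00389B71 */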
+#define GUID_MEDIATYPE_AUDIO {0x73647561, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_PCM {0x00000001, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_WMAV1 {0x00000160, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_WMAV2 {0x00000161, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_WMAV3 {0x00000162, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_WMAV4 {0x00000163, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_WMS {0x0000000a, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_MP3 {0x00000055, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_MPEG1AudioPayload {0x00000050, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xAA, 0x00, 0x38, 0x9b, 0x71 }}
+
+static const CodecEntry audio_dec_codecs[] = {
+ {"dshowadec_wma1",
+ "Windows Media Audio 7",
+ "DMO",
+ 0x00000160,
+ GUID_MEDIATYPE_AUDIO, GUID_MEDIASUBTYPE_WMAV1,
+ "audio/x-wma, wmaversion = (int) 1",
+ GUID_MEDIATYPE_AUDIO, GUID_MEDIASUBTYPE_PCM,
+ "audio/x-raw-int, "
+ "width = (int) { 1, 8, 16 }, depth = (int) { 1, 8, 16 }, "
+ "signed = (boolean) true, endianness = (int) "
+ G_STRINGIFY (G_LITTLE_ENDIAN)
+ },
+ {"dshowadec_wma2",
+ "Windows Media Audio 8",
+ "DMO",
+ 0x00000161,
+ GUID_MEDIATYPE_AUDIO, GUID_MEDIASUBTYPE_WMAV2,
+ "audio/x-wma, wmaversion = (int) 2",
+ GUID_MEDIATYPE_AUDIO, GUID_MEDIASUBTYPE_PCM,
+ "audio/x-raw-int, "
+ "width = (int) { 1, 8, 16 }, depth = (int) { 1, 8, 16 }, "
+ "signed = (boolean) true, endianness = (int) "
+ G_STRINGIFY (G_LITTLE_ENDIAN)
+ },
+ {"dshowadec_wma3",
+ "Windows Media Audio 9 Professional",
+ "DMO",
+ 0x00000162,
+ GUID_MEDIATYPE_AUDIO, GUID_MEDIASUBTYPE_WMAV3,
+ "audio/x-wma, wmaversion = (int) 3",
+ GUID_MEDIATYPE_AUDIO, GUID_MEDIASUBTYPE_PCM,
+ "audio/x-raw-int, "
+ "width = (int) { 1, 8, 16 }, depth = (int) { 1, 8, 16 }, "
+ "signed = (boolean) true, endianness = (int) "
+ G_STRINGIFY (G_LITTLE_ENDIAN)
+ },
+ {"dshowadec_wma4",
+ "Windows Media Audio 9 Lossless",
+ "DMO",
+ 0x00000163,
+ GUID_MEDIATYPE_AUDIO, GUID_MEDIASUBTYPE_WMAV4,
+ "audio/x-wma, wmaversion = (int) 4",
+ GUID_MEDIATYPE_AUDIO, GUID_MEDIASUBTYPE_PCM,
+ "audio/x-raw-int, "
+ "width = (int) { 1, 8, 16 }, depth = (int) { 1, 8, 16 }, "
+ "signed = (boolean) true, endianness = (int) "
+ G_STRINGIFY (G_LITTLE_ENDIAN)
+ },
+ {"dshowadec_wms",
+ "Windows Media Audio Voice v9",
+ "DMO",
+ 0x0000000a,
+ GUID_MEDIATYPE_AUDIO, GUID_MEDIASUBTYPE_WMS,
+ "audio/x-wms",
+ GUID_MEDIATYPE_AUDIO, GUID_MEDIASUBTYPE_PCM,
+ "audio/x-raw-int, "
+ "width = (int) { 1, 8, 16 }, depth = (int) { 1, 8, 16 }, "
+ "signed = (boolean) true, endianness = (int) "
+ G_STRINGIFY (G_LITTLE_ENDIAN)
+ },
+ {"dshowadec_mpeg1",
+ "MPEG-1 Layer 1,2,3 Audio",
+ "MPEG Layer-3 Decoder",
+ 0x00000055,
+ GUID_MEDIATYPE_AUDIO, GUID_MEDIASUBTYPE_MP3,
+ "audio/mpeg, "
+ "mpegversion = (int) 1, "
+ "layer = (int) { 1 , 2, 3 }, "
+ "rate = (int) [ 8000, 48000 ], "
+ "channels = (int) [ 1, 2 ], " "parsed= (boolean) true",
+ GUID_MEDIATYPE_AUDIO, GUID_MEDIASUBTYPE_PCM,
+ "audio/x-raw-int, "
+ "width = (int) { 1, 8, 16 }, depth = (int) { 1, 8, 16 }, "
+ "signed = (boolean) true, endianness = (int) "
+ G_STRINGIFY (G_LITTLE_ENDIAN)
+ }
+};
+
+/* Private map used when dshowadec_mpeg1 is loaded with layer=1 or 2.
+ * GStreamer does not take caps fields such as "layer" into account when
+ * connecting pads, so the public codecs map only contains one element for
+ * MPEG audio. When that element is actually fed layer 1 or 2, the MP3
+ * decoder is released and replaced by the decoder described in this
+ * private map.
+ */
+static const CodecEntry audio_mpeg_1_2[] = { {"dshowadec_mpeg_1_2",
+ "MPEG-1 Layer 1,2 Audio",
+ "MPEG Audio Decoder",
+ 0x00000050,
+ GUID_MEDIATYPE_AUDIO, GUID_MEDIASUBTYPE_MPEG1AudioPayload,
+ "audio/mpeg, "
+ "mpegversion = (int) 1, "
+ "layer = (int) [ 1, 2 ], "
+ "rate = (int) [ 8000, 48000 ], "
+ "channels = (int) [ 1, 2 ], " "parsed= (boolean) true",
+ GUID_MEDIATYPE_AUDIO, GUID_MEDIASUBTYPE_PCM,
+ "audio/x-raw-int, "
+ "width = (int) { 1, 8, 16 }, depth = (int) { 1, 8, 16 }, "
+ "signed = (boolean) true, endianness = (int) "
+ G_STRINGIFY (G_LITTLE_ENDIAN)
+  }
+};
+
+static void
+gst_dshowaudiodec_base_init (GstDshowAudioDecClass * klass)
+{
+ GstPadTemplate *src, *sink;
+ GstCaps *srccaps, *sinkcaps;
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementDetails details;
+
+ klass->entry = tmp;
+ details.longname = g_strdup_printf ("DirectShow %s Decoder Wrapper",
+ tmp->element_longname);
+ details.klass = g_strdup ("Codec/Decoder/Audio");
+ details.description = g_strdup_printf ("DirectShow %s Decoder Wrapper",
+ tmp->element_longname);
+ details.author = "Sebastien Moutte <sebastien@moutte.net>";
+ gst_element_class_set_details (element_class, &details);
+ g_free (details.longname);
+ g_free (details.klass);
+ g_free (details.description);
+
+ sinkcaps = gst_caps_from_string (tmp->sinkcaps);
+ gst_caps_set_simple (sinkcaps,
+ "block_align", GST_TYPE_INT_RANGE, 0, G_MAXINT,
+ "bitrate", GST_TYPE_INT_RANGE, 0, G_MAXINT, NULL);
+
+ srccaps = gst_caps_from_string (tmp->srccaps);
+
+ sink = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, sinkcaps);
+ src = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, srccaps);
+
+ /* register */
+ gst_element_class_add_pad_template (element_class, src);
+ gst_element_class_add_pad_template (element_class, sink);
+}
+
+static void
+gst_dshowaudiodec_class_init (GstDshowAudioDecClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_dshowaudiodec_dispose);
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_dshowaudiodec_change_state);
+
+ if (!parent_class)
+ parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
+
+ if (!dshowaudiodec_debug) {
+ GST_DEBUG_CATEGORY_INIT (dshowaudiodec_debug, "dshowaudiodec", 0,
+ "Directshow filter audio decoder");
+ }
+}
+
+static void
+gst_dshowaudiodec_init (GstDshowAudioDec * adec,
+ GstDshowAudioDecClass * adec_class)
+{
+ GstElementClass *element_class = GST_ELEMENT_GET_CLASS (adec);
+
+ /* setup pads */
+ adec->sinkpad =
+ gst_pad_new_from_template (gst_element_class_get_pad_template
+ (element_class, "sink"), "sink");
+
+ gst_pad_set_setcaps_function (adec->sinkpad, gst_dshowaudiodec_sink_setcaps);
+ gst_pad_set_event_function (adec->sinkpad, gst_dshowaudiodec_sink_event);
+ gst_pad_set_chain_function (adec->sinkpad, gst_dshowaudiodec_chain);
+ gst_element_add_pad (GST_ELEMENT (adec), adec->sinkpad);
+
+ adec->srcpad =
+ gst_pad_new_from_template (gst_element_class_get_pad_template
+ (element_class, "src"), "src");
+ gst_element_add_pad (GST_ELEMENT (adec), adec->srcpad);
+
+ adec->srcfilter = NULL;
+ adec->gstdshowsrcfilter = NULL;
+ adec->decfilter = NULL;
+ adec->sinkfilter = NULL;
+ adec->filtergraph = NULL;
+ adec->mediafilter = NULL;
+ adec->timestamp = GST_CLOCK_TIME_NONE;
+ adec->segment = gst_segment_new ();
+ adec->setup = FALSE;
+ adec->depth = 0;
+ adec->bitrate = 0;
+ adec->block_align = 0;
+ adec->channels = 0;
+ adec->rate = 0;
+ adec->layer = 0;
+ adec->codec_data = NULL;
+
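+  /* COM has to be initialized on this thread before any DirectShow object is
+   * created; the matching CoUninitialize() happens in _dispose() */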
+ CoInitializeEx (NULL, COINIT_MULTITHREADED);
+}
+
+static void
+gst_dshowaudiodec_dispose (GObject * object)
+{
+ GstDshowAudioDec *adec = (GstDshowAudioDec *) (object);
+
+ if (adec->segment) {
+ gst_segment_free (adec->segment);
+ adec->segment = NULL;
+ }
+
+ if (adec->codec_data) {
+ gst_buffer_unref (adec->codec_data);
+ adec->codec_data = NULL;
+ }
+
+ CoUninitialize ();
+}
+
+
+static GstStateChangeReturn
+gst_dshowaudiodec_change_state (GstElement * element, GstStateChange transition)
+{
+ GstDshowAudioDec *adec = (GstDshowAudioDec *) (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ if (!gst_dshowaudiodec_create_graph_and_filters (adec))
+ return GST_STATE_CHANGE_FAILURE;
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ break;
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ adec->depth = 0;
+ adec->bitrate = 0;
+ adec->block_align = 0;
+ adec->channels = 0;
+ adec->rate = 0;
+ adec->layer = 0;
+ if (adec->codec_data) {
+ gst_buffer_unref (adec->codec_data);
+ adec->codec_data = NULL;
+ }
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ if (!gst_dshowaudiodec_destroy_graph_and_filters (adec))
+ return GST_STATE_CHANGE_FAILURE;
+ break;
+ default:
+ break;
+ }
+
+ return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+}
+
+static gboolean
+gst_dshowaudiodec_sink_setcaps (GstPad * pad, GstCaps * caps)
+{
+ gboolean ret = FALSE;
+ GstDshowAudioDec *adec = (GstDshowAudioDec *) gst_pad_get_parent (pad);
+ GstStructure *s = gst_caps_get_structure (caps, 0);
+ const GValue *v = NULL;
+
+ adec->timestamp = GST_CLOCK_TIME_NONE;
+
+ /* read data, only rate and channels are needed */
+ if (!gst_structure_get_int (s, "rate", &adec->rate) ||
+ !gst_structure_get_int (s, "channels", &adec->channels)) {
+ GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
+ ("error getting audio specs from caps"), (NULL));
+ goto end;
+ }
+
+ gst_structure_get_int (s, "depth", &adec->depth);
+ gst_structure_get_int (s, "bitrate", &adec->bitrate);
+ gst_structure_get_int (s, "block_align", &adec->block_align);
+ gst_structure_get_int (s, "layer", &adec->layer);
+
+ if (adec->codec_data) {
+ gst_buffer_unref (adec->codec_data);
+ adec->codec_data = NULL;
+ }
+
+ if ((v = gst_structure_get_value (s, "codec_data")))
+ adec->codec_data = gst_buffer_ref (gst_value_get_buffer (v));
+
+  if (adec->layer != 1 && adec->layer != 2) {
+    /* set up the dshow graph now for every format except MPEG-1 layer 1
+     * and 2, for which negotiation has to happen in the _chain function
+     * once the first frame header is available.
+     */
+    if (!gst_dshowaudiodec_setup_graph (adec))
+      goto end;
+  }
+
+  ret = TRUE;
+end:
+ gst_object_unref (adec);
+
+ return ret;
+}
+
+static GstFlowReturn
+gst_dshowaudiodec_chain (GstPad * pad, GstBuffer * buffer)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstDshowAudioDec *adec = (GstDshowAudioDec *) gst_pad_get_parent (pad);
+  gboolean discont = FALSE;
+
+ if (!adec->setup) {
+ if (adec->layer != 0) {
+ if (adec->codec_data) {
+ gst_buffer_unref (adec->codec_data);
+ adec->codec_data = NULL;
+ }
+      /* keep the 3 header bytes that follow the first (0xFF) byte of the
+       * MPEG-1 audio frame header; they are parsed in _setup_graph */
+      adec->codec_data = gst_buffer_create_sub (buffer, 1, 3);
+ }
+
+ /* setup dshow graph */
+ if (!gst_dshowaudiodec_setup_graph (adec)) {
+ return GST_FLOW_ERROR;
+ }
+ }
+
+ if (!adec->gstdshowsrcfilter) {
+ /* we are not setup */
+ ret = GST_FLOW_WRONG_STATE;
+ goto beach;
+ }
+
+ GST_CAT_DEBUG_OBJECT (dshowaudiodec_debug, adec, "chain (size %d)=> pts %"
+ GST_TIME_FORMAT " stop %" GST_TIME_FORMAT,
+ GST_BUFFER_SIZE (buffer), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer) +
+ GST_BUFFER_DURATION (buffer)));
+
+ /* if the incoming buffer has discont flag set => flush decoder data */
+ if (buffer && GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
+ GST_CAT_DEBUG_OBJECT (dshowaudiodec_debug, adec,
+ "this buffer has a DISCONT flag (%" GST_TIME_FORMAT "), flushing",
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
+ gst_dshowaudiodec_flush (adec);
+    discont = TRUE;
+ }
+
+ /* push the buffer to the directshow decoder */
+ IGstDshowInterface_gst_push_buffer (adec->gstdshowsrcfilter,
+ GST_BUFFER_DATA (buffer), GST_BUFFER_TIMESTAMP (buffer),
+ GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer),
+      GST_BUFFER_SIZE (buffer), discont);
+
+beach:
+ gst_buffer_unref (buffer);
+ gst_object_unref (adec);
+ return ret;
+}
+
+static gboolean
+gst_dshowaudiodec_push_buffer (byte * buffer, long size, byte * src_object,
+ UINT64 dshow_start, UINT64 dshow_stop)
+{
+ GstDshowAudioDec *adec = (GstDshowAudioDec *) src_object;
+ GstBuffer *out_buf = NULL;
+ gboolean in_seg = FALSE;
+ gint64 buf_start, buf_stop;
+ gint64 clip_start = 0, clip_stop = 0;
+ size_t start_offset = 0, stop_offset = size;
+
+ if (!GST_CLOCK_TIME_IS_VALID (adec->timestamp)) {
+ adec->timestamp = dshow_start;
+ }
+
+ buf_start = adec->timestamp;
+ buf_stop = adec->timestamp + (dshow_stop - dshow_start);
+
+ /* save stop position to start next buffer with it */
+ adec->timestamp = buf_stop;
+
+ /* check if this buffer is in our current segment */
+ in_seg = gst_segment_clip (adec->segment, GST_FORMAT_TIME,
+ buf_start, buf_stop, &clip_start, &clip_stop);
+
+ /* if the buffer is out of segment do not push it downstream */
+ if (!in_seg) {
+ GST_CAT_DEBUG_OBJECT (dshowaudiodec_debug, adec,
+ "buffer is out of segment, start %" GST_TIME_FORMAT " stop %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (buf_start), GST_TIME_ARGS (buf_stop));
+ return FALSE;
+ }
+
+  /* buffer is in our segment: allocate a new output buffer and clip it if needed */
+
+ /* allocate a new buffer for raw audio */
+ gst_pad_alloc_buffer (adec->srcpad, GST_BUFFER_OFFSET_NONE,
+ size, GST_PAD_CAPS (adec->srcpad), &out_buf);
+ if (!out_buf) {
+ GST_CAT_ERROR_OBJECT (dshowaudiodec_debug, adec,
+        "can't allocate a new GstBuffer");
+ return FALSE;
+ }
+
+ /* set buffer properties */
+ GST_BUFFER_SIZE (out_buf) = size;
+ GST_BUFFER_TIMESTAMP (out_buf) = buf_start;
+ GST_BUFFER_DURATION (out_buf) = buf_stop - buf_start;
+ memcpy (GST_BUFFER_DATA (out_buf), buffer, size);
+
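+  /* convert the clipped times into byte offsets:
+   * samples = time * rate / GST_SECOND, bytes = samples * channels * depth / 8 */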
+ /* we have to remove some heading samples */
+ if (clip_start > buf_start) {
+ start_offset = (size_t) gst_util_uint64_scale_int (clip_start - buf_start,
+ adec->rate, GST_SECOND) * adec->depth / 8 * adec->channels;
+ }
+ /* we have to remove some trailing samples */
+ if (clip_stop < buf_stop) {
+ stop_offset = (size_t) gst_util_uint64_scale_int (buf_stop - clip_stop,
+ adec->rate, GST_SECOND) * adec->depth / 8 * adec->channels;
+ }
+
+ /* truncating */
+ if ((start_offset != 0) || (stop_offset != (size_t) size)) {
+ GstBuffer *subbuf = gst_buffer_create_sub (out_buf, start_offset,
+ stop_offset - start_offset);
+
+ if (subbuf) {
+ gst_buffer_set_caps (subbuf, GST_PAD_CAPS (adec->srcpad));
+ gst_buffer_unref (out_buf);
+ out_buf = subbuf;
+ }
+ }
+
+ GST_BUFFER_TIMESTAMP (out_buf) = clip_start;
+ GST_BUFFER_DURATION (out_buf) = clip_stop - clip_start;
+
+ /* replace the saved stop position by the clipped one */
+ adec->timestamp = clip_stop;
+
+ GST_CAT_DEBUG_OBJECT (dshowaudiodec_debug, adec,
+ "push_buffer (size %d)=> pts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT
+ " duration %" GST_TIME_FORMAT, size,
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out_buf)),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (out_buf) +
+ GST_BUFFER_DURATION (out_buf)),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (out_buf)));
+
+ gst_pad_push (adec->srcpad, out_buf);
+
+ return TRUE;
+}
+
+static gboolean
+gst_dshowaudiodec_sink_event (GstPad * pad, GstEvent * event)
+{
+ gboolean ret = TRUE;
+ GstDshowAudioDec *adec = (GstDshowAudioDec *) gst_pad_get_parent (pad);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_STOP:{
+ gst_dshowaudiodec_flush (adec);
+ ret = gst_pad_event_default (pad, event);
+ break;
+ }
+ case GST_EVENT_NEWSEGMENT:
+ {
+ GstFormat format;
+ gdouble rate;
+ gint64 start, stop, time;
+ gboolean update;
+
+ gst_event_parse_new_segment (event, &update, &rate, &format, &start,
+ &stop, &time);
+
+ GST_CAT_DEBUG_OBJECT (dshowaudiodec_debug, adec,
+ "received new segment from %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
+
+ if (update) {
+ GST_CAT_DEBUG_OBJECT (dshowaudiodec_debug, adec,
+          "closing current segment, flushing..");
+ gst_dshowaudiodec_flush (adec);
+ }
+
+ /* save the new segment in our local current segment */
+ gst_segment_set_newsegment (adec->segment, update, rate, format, start,
+ stop, time);
+
+ ret = gst_pad_event_default (pad, event);
+ break;
+ }
+ default:
+ ret = gst_pad_event_default (pad, event);
+ break;
+ }
+ return ret;
+}
+
+static gboolean
+gst_dshowaudiodec_flush (GstDshowAudioDec * adec)
+{
+ if (!adec->gstdshowsrcfilter)
+ return FALSE;
+
+ /* flush dshow decoder and reset timestamp */
+ IGstDshowInterface_gst_flush (adec->gstdshowsrcfilter);
+ adec->timestamp = GST_CLOCK_TIME_NONE;
+
+ return TRUE;
+}
+
+
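+/* configure and connect the graph built by _create_graph_and_filters():
+ * fakesrc -> decoder -> fakesink. The input media type is derived from the
+ * sink caps (plus codec_data or the MPEG-1 frame header), the output media
+ * type from the PCM format reported by the decoder, and the fakesink is
+ * given gst_dshowaudiodec_push_buffer() as its buffer callback. */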
+static gboolean
+gst_dshowaudiodec_setup_graph (GstDshowAudioDec * adec)
+{
+ gboolean ret = FALSE;
+ GstDshowAudioDecClass *klass =
+ (GstDshowAudioDecClass *) G_OBJECT_GET_CLASS (adec);
+ HRESULT hres;
+ gint size = 0;
+ GstCaps *out;
+ AM_MEDIA_TYPE output_mediatype, input_mediatype;
+ WAVEFORMATEX *input_format = NULL, output_format;
+ IPin *output_pin = NULL, *input_pin = NULL;
+ IGstDshowInterface *gstdshowinterface = NULL;
+ CodecEntry *codec_entry = klass->entry;
+
+ if (adec->layer != 0) {
+ if (adec->layer == 1 || adec->layer == 2) {
+ /* for MPEG-1 layer 1 or 2 we have to release the current
+ * MP3 decoder and create an instance of MPEG Audio Decoder
+ */
+ IBaseFilter_Release (adec->decfilter);
+ adec->decfilter = NULL;
+ codec_entry = audio_mpeg_1_2;
+ gst_dshow_find_filter (codec_entry->input_majortype,
+ codec_entry->input_subtype,
+ codec_entry->output_majortype,
+ codec_entry->output_subtype,
+ codec_entry->prefered_filter_substring, &adec->decfilter);
+ IFilterGraph_AddFilter (adec->filtergraph, adec->decfilter, L"decoder");
+ } else {
+      /* mp3 doesn't need to negotiate with MPEG1WAVEFORMAT */
+ adec->layer = 0;
+ }
+ }
+
+ /* set mediatype on fakesrc filter output pin */
+ memset (&input_mediatype, 0, sizeof (AM_MEDIA_TYPE));
+ input_mediatype.majortype = codec_entry->input_majortype;
+ input_mediatype.subtype = codec_entry->input_subtype;
+ input_mediatype.bFixedSizeSamples = TRUE;
+ input_mediatype.bTemporalCompression = FALSE;
+ if (adec->block_align)
+ input_mediatype.lSampleSize = adec->block_align;
+ else
+ input_mediatype.lSampleSize = 8192; /* need to evaluate it dynamically */
+ input_mediatype.formattype = FORMAT_WaveFormatEx;
+
+ if (adec->layer != 0) {
+ MPEG1WAVEFORMAT *mpeg1_format;
+ BYTE b1, b2, b3;
+ gint samples, version, layer;
+
+ size = sizeof (MPEG1WAVEFORMAT);
+ input_format = g_malloc0 (size);
+ input_format->cbSize = sizeof (MPEG1WAVEFORMAT) - sizeof (WAVEFORMATEX);
+ mpeg1_format = (MPEG1WAVEFORMAT *) input_format;
+
+ /* initialize header bytes */
+ b1 = *GST_BUFFER_DATA (adec->codec_data);
+ b2 = *(GST_BUFFER_DATA (adec->codec_data) + 1);
+ b3 = *(GST_BUFFER_DATA (adec->codec_data) + 2);
+
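+    /* b1..b3 are the 2nd to 4th bytes of the MPEG-1 frame header:
+     * b1: last 4 sync bits | version id | layer (2 bits) | protection bit
+     * b2: bitrate index (4 bits) | sampling rate (2 bits) | padding | private bit
+     * b3: channel mode (2 bits) | mode ext. (2 bits) | copyright | original | emphasis (2 bits) */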
+ /* fill MPEG1WAVEFORMAT using header */
+ input_format->wFormatTag = WAVE_FORMAT_MPEG;
+ mpeg1_format->wfx.nChannels = 2;
+ switch (b3 >> 6) {
+ case 0x00:
+ mpeg1_format->fwHeadMode = ACM_MPEG_STEREO;
+ break;
+ case 0x01:
+ mpeg1_format->fwHeadMode = ACM_MPEG_JOINTSTEREO;
+ break;
+ case 0x02:
+ mpeg1_format->fwHeadMode = ACM_MPEG_DUALCHANNEL;
+ break;
+ case 0x03:
+ mpeg1_format->fwHeadMode = ACM_MPEG_SINGLECHANNEL;
+ mpeg1_format->wfx.nChannels = 1;
+ break;
+ }
+
+ mpeg1_format->fwHeadModeExt = (WORD) (1 << (b3 >> 4));
+ mpeg1_format->wHeadEmphasis = (WORD) ((b3 & 0x03) + 1);
+ mpeg1_format->fwHeadFlags = (WORD) (((b2 & 1) ? ACM_MPEG_PRIVATEBIT : 0) +
+ ((b3 & 8) ? ACM_MPEG_COPYRIGHT : 0) +
+ ((b3 & 4) ? ACM_MPEG_ORIGINALHOME : 0) +
+ ((b1 & 1) ? ACM_MPEG_PROTECTIONBIT : 0) + ACM_MPEG_ID_MPEG1);
+
+ layer = (b1 >> 1) & 3;
+ switch (layer) {
+ case 1:
+ mpeg1_format->fwHeadLayer = ACM_MPEG_LAYER3;
+ layer = 3;
+ break;
+ case 2:
+ mpeg1_format->fwHeadLayer = ACM_MPEG_LAYER2;
+ break;
+ case 3:
+ mpeg1_format->fwHeadLayer = ACM_MPEG_LAYER1;
+ layer = 1;
+ break;
+ };
+
+ version = (b1 >> 3) & 1;
+ if (layer == 1) {
+ samples = 384;
+ } else {
+ if (version == 0) {
+ samples = 576;
+ } else {
+ samples = 1152;
+ }
+ }
+ mpeg1_format->wfx.nBlockAlign = (WORD) samples;
+ mpeg1_format->wfx.nSamplesPerSec = adec->rate;
+ mpeg1_format->dwHeadBitrate = bitrates[version][layer - 1][b2 >> 4];
+ mpeg1_format->wfx.nAvgBytesPerSec = mpeg1_format->dwHeadBitrate / 8;
+ } else {
+ size = sizeof (WAVEFORMATEX) +
+ (adec->codec_data ? GST_BUFFER_SIZE (adec->codec_data) : 0);
+ input_format = g_malloc0 (size);
+ if (adec->codec_data) { /* Codec data is appended after our header */
+ memcpy (((guchar *) input_format) + sizeof (WAVEFORMATEX),
+ GST_BUFFER_DATA (adec->codec_data),
+ GST_BUFFER_SIZE (adec->codec_data));
+ input_format->cbSize = GST_BUFFER_SIZE (adec->codec_data);
+ }
+
+ input_format->wFormatTag = codec_entry->format;
+ input_format->nChannels = adec->channels;
+ input_format->nSamplesPerSec = adec->rate;
+ input_format->nAvgBytesPerSec = adec->bitrate / 8;
+ input_format->nBlockAlign = adec->block_align;
+ input_format->wBitsPerSample = adec->depth;
+ }
+
+ input_mediatype.cbFormat = size;
+ input_mediatype.pbFormat = (BYTE *) input_format;
+
+ hres = IBaseFilter_QueryInterface (adec->srcfilter, &IID_IGstDshowInterface,
+ (void **) &gstdshowinterface);
+ if (hres != S_OK || !gstdshowinterface) {
+ GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
+ ("Can't get IGstDshowInterface interface from dshow fakesrc filter (error=%d)",
+ hres), (NULL));
+ goto end;
+ }
+
+  /* save a reference to IGstDshowInterface to use its processing functions */
+ if (!adec->gstdshowsrcfilter) {
+ adec->gstdshowsrcfilter = gstdshowinterface;
+ IBaseFilter_AddRef (adec->gstdshowsrcfilter);
+ }
+
+ IGstDshowInterface_gst_set_media_type (gstdshowinterface, &input_mediatype);
+ IGstDshowInterface_Release (gstdshowinterface);
+ gstdshowinterface = NULL;
+
+ /* connect our fake source to decoder */
+ gst_dshow_get_pin_from_filter (adec->srcfilter, PINDIR_OUTPUT, &output_pin);
+ if (!output_pin) {
+ GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
+ ("Can't get output pin from our directshow fakesrc filter"), (NULL));
+ goto end;
+ }
+ gst_dshow_get_pin_from_filter (adec->decfilter, PINDIR_INPUT, &input_pin);
+ if (!input_pin) {
+ GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
+ ("Can't get input pin from decoder filter"), (NULL));
+ goto end;
+ }
+
+ hres =
+ IFilterGraph_ConnectDirect (adec->filtergraph, output_pin, input_pin,
+ NULL);
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
+ ("Can't connect fakesrc with decoder (error=%d)", hres), (NULL));
+ goto end;
+ }
+
+ IPin_Release (input_pin);
+ IPin_Release (output_pin);
+ input_pin = NULL;
+ output_pin = NULL;
+
+ if (!gst_dshowaudiodec_get_filter_settings (adec)) {
+ GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
+ ("Can't get audio depth from decoder"), (NULL));
+ goto end;
+ }
+
+ /* set mediatype on fake sink input pin */
+ memset (&output_format, 0, sizeof (WAVEFORMATEX));
+ output_format.wFormatTag = WAVE_FORMAT_PCM;
+ output_format.wBitsPerSample = adec->depth;
+ output_format.nChannels = adec->channels;
+ output_format.nBlockAlign = adec->channels * (adec->depth / 8);
+ output_format.nSamplesPerSec = adec->rate;
+ output_format.nAvgBytesPerSec = output_format.nBlockAlign * adec->rate;
+
+ memset (&output_mediatype, 0, sizeof (AM_MEDIA_TYPE));
+ output_mediatype.majortype = codec_entry->output_majortype;
+ output_mediatype.subtype = codec_entry->output_subtype;
+ output_mediatype.bFixedSizeSamples = TRUE;
+ output_mediatype.bTemporalCompression = FALSE;
+ output_mediatype.lSampleSize = output_format.nBlockAlign;
+ output_mediatype.formattype = FORMAT_WaveFormatEx;
+ output_mediatype.cbFormat = sizeof (WAVEFORMATEX);
+  output_mediatype.pbFormat = (BYTE *) &output_format;
+
+ hres = IBaseFilter_QueryInterface (adec->sinkfilter, &IID_IGstDshowInterface,
+ (void **) &gstdshowinterface);
+ if (hres != S_OK || !gstdshowinterface) {
+ GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
+ ("Can't get IGstDshowInterface interface from dshow fakesink filter (error=%d)",
+ hres), (NULL));
+ goto end;
+ }
+
+ IGstDshowInterface_gst_set_media_type (gstdshowinterface, &output_mediatype);
+ IGstDshowInterface_gst_set_buffer_callback (gstdshowinterface,
+ gst_dshowaudiodec_push_buffer, (byte *) adec);
+ IGstDshowInterface_Release (gstdshowinterface);
+ gstdshowinterface = NULL;
+
+ /* negotiate output */
+ out = gst_caps_from_string (codec_entry->srccaps);
+ gst_caps_set_simple (out,
+ "width", G_TYPE_INT, adec->depth,
+ "depth", G_TYPE_INT, adec->depth,
+ "rate", G_TYPE_INT, adec->rate,
+ "channels", G_TYPE_INT, adec->channels, NULL);
+ if (!gst_pad_set_caps (adec->srcpad, out)) {
+ gst_caps_unref (out);
+ GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
+ ("Failed to negotiate output"), (NULL));
+ goto end;
+ }
+ gst_caps_unref (out);
+
+ /* connect the decoder to our fake sink */
+ gst_dshow_get_pin_from_filter (adec->decfilter, PINDIR_OUTPUT, &output_pin);
+ if (!output_pin) {
+ GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
+ ("Can't get output pin from our decoder filter"), (NULL));
+ goto end;
+ }
+ gst_dshow_get_pin_from_filter (adec->sinkfilter, PINDIR_INPUT, &input_pin);
+ if (!input_pin) {
+ GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
+ ("Can't get input pin from our directshow fakesink filter"), (NULL));
+ goto end;
+ }
+
+ hres =
+ IFilterGraph_ConnectDirect (adec->filtergraph, output_pin, input_pin,
+ NULL);
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
+ ("Can't connect decoder with fakesink (error=%d)", hres), (NULL));
+ goto end;
+ }
+
+ hres = IMediaFilter_Run (adec->mediafilter, -1);
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
+ ("Can't run the directshow graph (error=%d)", hres), (NULL));
+ goto end;
+ }
+
+ ret = TRUE;
+ adec->setup = TRUE;
+end:
+ gst_object_unref (adec);
+ if (input_format)
+ g_free (input_format);
+ if (gstdshowinterface)
+ IGstDshowInterface_Release (gstdshowinterface);
+ if (input_pin)
+ IPin_Release (input_pin);
+ if (output_pin)
+ IPin_Release (output_pin);
+
+ return ret;
+}
+
+static gboolean
+gst_dshowaudiodec_get_filter_settings (GstDshowAudioDec * adec)
+{
+ IPin *output_pin = NULL;
+ IEnumMediaTypes *enum_mediatypes = NULL;
+ HRESULT hres;
+ ULONG fetched;
+ BOOL ret = FALSE;
+
+ if (!adec->decfilter)
+ return FALSE;
+
+ if (!gst_dshow_get_pin_from_filter (adec->decfilter, PINDIR_OUTPUT,
+ &output_pin)) {
+ GST_ELEMENT_ERROR (adec, CORE, NEGOTIATION,
+        ("failed getting output pin from the decoder"), (NULL));
+ return FALSE;
+ }
+
+ hres = IPin_EnumMediaTypes (output_pin, &enum_mediatypes);
+ if (hres == S_OK && enum_mediatypes) {
+ AM_MEDIA_TYPE *mediatype = NULL;
+
+ IEnumMediaTypes_Reset (enum_mediatypes);
+    while (hres =
+        IEnumMediaTypes_Next (enum_mediatypes, 1, &mediatype, &fetched),
+        hres == S_OK) {
+ RPC_STATUS rpcstatus;
+
+ if ((UuidCompare (&mediatype->subtype, &MEDIASUBTYPE_PCM, &rpcstatus) == 0
+ && rpcstatus == RPC_S_OK) &&
+ (UuidCompare (&mediatype->formattype, &FORMAT_WaveFormatEx,
+ &rpcstatus) == 0 && rpcstatus == RPC_S_OK)) {
+ WAVEFORMATEX *audio_info = (WAVEFORMATEX *) mediatype->pbFormat;
+
+ adec->channels = audio_info->nChannels;
+ adec->depth = audio_info->wBitsPerSample;
+ adec->rate = audio_info->nSamplesPerSec;
+ ret = TRUE;
+ }
+ gst_dshow_free_mediatype (mediatype);
+ if (ret)
+ break;
+ }
+ IEnumMediaTypes_Release (enum_mediatypes);
+ }
+ if (output_pin) {
+ IPin_Release (output_pin);
+ }
+
+ return ret;
+}
+
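+/* build the DirectShow graph skeleton: the graph manager, our fake source
+ * filter, the decoder filter matching this codec entry and our fake sink
+ * filter. The pins are only connected later, in _setup_graph(), once the
+ * input format is known. */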
+static gboolean
+gst_dshowaudiodec_create_graph_and_filters (GstDshowAudioDec * adec)
+{
+ BOOL ret = FALSE;
+ HRESULT hres = S_FALSE;
+ GstDshowAudioDecClass *klass =
+ (GstDshowAudioDecClass *) G_OBJECT_GET_CLASS (adec);
+
+ /* create the filter graph manager object */
+ hres = CoCreateInstance (&CLSID_FilterGraph, NULL, CLSCTX_INPROC,
+ &IID_IFilterGraph, (LPVOID *) & adec->filtergraph);
+ if (hres != S_OK || !adec->filtergraph) {
+ GST_ELEMENT_ERROR (adec, STREAM, FAILED,
+ ("Can't create an instance of the directshow graph manager (error=%d)",
+ hres), (NULL));
+ goto error;
+ }
+
+ hres = IFilterGraph_QueryInterface (adec->filtergraph, &IID_IMediaFilter,
+ (void **) &adec->mediafilter);
+ if (hres != S_OK || !adec->mediafilter) {
+ GST_ELEMENT_ERROR (adec, STREAM, FAILED,
+        ("Can't get IMediaFilter interface from the graph manager (error=%d)",
+ hres), (NULL));
+ goto error;
+ }
+
+ /* create fake src filter */
+ hres = CoCreateInstance (&CLSID_DshowFakeSrc, NULL, CLSCTX_INPROC,
+ &IID_IBaseFilter, (LPVOID *) & adec->srcfilter);
+ if (hres != S_OK || !adec->srcfilter) {
+ GST_ELEMENT_ERROR (adec, STREAM, FAILED,
+ ("Can't create an instance of the directshow fakesrc (error=%d)", hres),
+ (NULL));
+ goto error;
+ }
+
+ /* create decoder filter */
+ if (!gst_dshow_find_filter (klass->entry->input_majortype,
+ klass->entry->input_subtype,
+ klass->entry->output_majortype,
+ klass->entry->output_subtype,
+ klass->entry->prefered_filter_substring, &adec->decfilter)) {
+ GST_ELEMENT_ERROR (adec, STREAM, FAILED,
+ ("Can't create an instance of the decoder filter"), (NULL));
+ goto error;
+ }
+
+ /* create fake sink filter */
+ hres = CoCreateInstance (&CLSID_DshowFakeSink, NULL, CLSCTX_INPROC,
+ &IID_IBaseFilter, (LPVOID *) & adec->sinkfilter);
+ if (hres != S_OK || !adec->sinkfilter) {
+ GST_ELEMENT_ERROR (adec, STREAM, FAILED,
+ ("Can't create an instance of the directshow fakesink (error=%d)",
+ hres), (NULL));
+ goto error;
+ }
+
+ /* add filters to the graph */
+ hres = IFilterGraph_AddFilter (adec->filtergraph, adec->srcfilter, L"src");
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (adec, STREAM, FAILED,
+ ("Can't add fakesrc filter to the graph (error=%d)", hres), (NULL));
+ goto error;
+ }
+
+ hres =
+ IFilterGraph_AddFilter (adec->filtergraph, adec->decfilter, L"decoder");
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (adec, STREAM, FAILED,
+ ("Can't add decoder filter to the graph (error=%d)", hres), (NULL));
+ goto error;
+ }
+
+ hres = IFilterGraph_AddFilter (adec->filtergraph, adec->sinkfilter, L"sink");
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (adec, STREAM, FAILED,
+ ("Can't add fakesink filter to the graph (error=%d)", hres), (NULL));
+ goto error;
+ }
+
+ return TRUE;
+
+error:
+ if (adec->srcfilter) {
+ IBaseFilter_Release (adec->srcfilter);
+ adec->srcfilter = NULL;
+ }
+ if (adec->decfilter) {
+ IBaseFilter_Release (adec->decfilter);
+ adec->decfilter = NULL;
+ }
+ if (adec->sinkfilter) {
+ IBaseFilter_Release (adec->sinkfilter);
+ adec->sinkfilter = NULL;
+ }
+ if (adec->mediafilter) {
+ IMediaFilter_Release (adec->mediafilter);
+ adec->mediafilter = NULL;
+ }
+ if (adec->filtergraph) {
+ IFilterGraph_Release (adec->filtergraph);
+ adec->filtergraph = NULL;
+ }
+
+ return FALSE;
+}
+
+static gboolean
+gst_dshowaudiodec_destroy_graph_and_filters (GstDshowAudioDec * adec)
+{
+ if (adec->mediafilter) {
+ IMediaFilter_Stop (adec->mediafilter);
+ }
+
+ if (adec->gstdshowsrcfilter) {
+ IGstDshowInterface_Release (adec->gstdshowsrcfilter);
+ adec->gstdshowsrcfilter = NULL;
+ }
+ if (adec->srcfilter) {
+ if (adec->filtergraph)
+ IFilterGraph_RemoveFilter (adec->filtergraph, adec->srcfilter);
+ IBaseFilter_Release (adec->srcfilter);
+ adec->srcfilter = NULL;
+ }
+ if (adec->decfilter) {
+ if (adec->filtergraph)
+ IFilterGraph_RemoveFilter (adec->filtergraph, adec->decfilter);
+ IBaseFilter_Release (adec->decfilter);
+ adec->decfilter = NULL;
+ }
+ if (adec->sinkfilter) {
+ if (adec->filtergraph)
+ IFilterGraph_RemoveFilter (adec->filtergraph, adec->sinkfilter);
+ IBaseFilter_Release (adec->sinkfilter);
+ adec->sinkfilter = NULL;
+ }
+ if (adec->mediafilter) {
+ IMediaFilter_Release (adec->mediafilter);
+ adec->mediafilter = NULL;
+ }
+ if (adec->filtergraph) {
+ IFilterGraph_Release (adec->filtergraph);
+ adec->filtergraph = NULL;
+ }
+
+ adec->setup = FALSE;
+
+ return TRUE;
+}
+
+gboolean
+dshow_adec_register (GstPlugin * plugin)
+{
+ GTypeInfo info = {
+ sizeof (GstDshowAudioDecClass),
+ (GBaseInitFunc) gst_dshowaudiodec_base_init,
+ NULL,
+ (GClassInitFunc) gst_dshowaudiodec_class_init,
+ NULL,
+ NULL,
+ sizeof (GstDshowAudioDec),
+ 0,
+ (GInstanceInitFunc) gst_dshowaudiodec_init,
+ };
+ gint i;
+
+ GST_DEBUG_CATEGORY_INIT (dshowaudiodec_debug, "dshowaudiodec", 0,
+ "Directshow filter audio decoder");
+
+ CoInitializeEx (NULL, COINIT_MULTITHREADED);
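+  /* probe the system for a DirectShow decoder matching each codec entry and
+   * register a GStreamer element only for those that are found */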
+ for (i = 0; i < sizeof (audio_dec_codecs) / sizeof (CodecEntry); i++) {
+ GType type;
+
+ if (gst_dshow_find_filter (audio_dec_codecs[i].input_majortype,
+ audio_dec_codecs[i].input_subtype,
+ audio_dec_codecs[i].output_majortype,
+ audio_dec_codecs[i].output_subtype,
+ audio_dec_codecs[i].prefered_filter_substring, NULL)) {
+
+ GST_CAT_DEBUG (dshowaudiodec_debug, "Registering %s",
+ audio_dec_codecs[i].element_name);
+
+ tmp = &audio_dec_codecs[i];
+ type =
+ g_type_register_static (GST_TYPE_ELEMENT,
+ audio_dec_codecs[i].element_name, &info, 0);
+ if (!gst_element_register (plugin, audio_dec_codecs[i].element_name,
+ GST_RANK_PRIMARY, type)) {
+ return FALSE;
+ }
+ GST_CAT_DEBUG (dshowaudiodec_debug, "Registered %s",
+ audio_dec_codecs[i].element_name);
+ } else {
+ GST_CAT_DEBUG (dshowaudiodec_debug,
+ "Element %s not registered (the format is not supported by the system)",
+ audio_dec_codecs[i].element_name);
+ }
+ }
+
+ CoUninitialize ();
+ return TRUE;
+}
diff --git a/sys/dshowdecwrapper/gstdshowaudiodec.h b/sys/dshowdecwrapper/gstdshowaudiodec.h
new file mode 100644
index 00000000..d29ad7c1
--- /dev/null
+++ b/sys/dshowdecwrapper/gstdshowaudiodec.h
@@ -0,0 +1,111 @@
+/*
+ * GStreamer DirectShow codecs wrapper
+ * Copyright <2006, 2007, 2008> Fluendo <gstreamer@fluendo.com>
+ * Copyright <2006, 2007, 2008> Pioneers of the Inevitable <songbird@songbirdnest.com>
+ * Copyright <2007,2008> Sebastien Moutte <sebastien@moutte.net>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ *
+ * Alternatively, the contents of this file may be used under the
+ * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
+ * which case the following provisions apply instead of the ones
+ * mentioned above:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+
+#ifndef __GST_DSHOWAUDIODEC_H__
+#define __GST_DSHOWAUDIODEC_H__
+
+#include <gst/gst.h>
+#include "gstdshowdecwrapper.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_DSHOWAUDIODEC (gst_dshowaudiodec_get_type())
+#define GST_DSHOWAUDIODEC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_DSHOWAUDIODEC,GstDshowAudioDec))
+#define GST_DSHOWAUDIODEC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_DSHOWAUDIODEC,GstDshowAudioDecClass))
+#define GST_IS_DSHOWAUDIODEC(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_DSHOWAUDIODEC))
+#define GST_IS_DSHOWAUDIODEC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_DSHOWAUDIODEC))
+
+typedef struct _GstDshowAudioDec GstDshowAudioDec;
+typedef struct _GstDshowAudioDecClass GstDshowAudioDecClass;
+
+struct _GstDshowAudioDec
+{
+ GstElement element;
+
+ /* element pads */
+ GstPad *sinkpad;
+ GstPad *srcpad;
+
+ /* filters interfaces*/
+ IBaseFilter *srcfilter;
+ IGstDshowInterface *gstdshowsrcfilter;
+ IBaseFilter *decfilter;
+ IBaseFilter *sinkfilter;
+
+ /* graph manager interfaces */
+ IMediaFilter *mediafilter;
+ IFilterGraph *filtergraph;
+
+ /* true when dshow graph is setup */
+ gboolean setup;
+
+ /* audio settings */
+ gint bitrate;
+ gint block_align;
+ gint depth;
+ gint channels;
+ gint rate;
+ gint layer;
+ GstBuffer *codec_data;
+
+ /* current segment */
+ GstSegment * segment;
+
+ /* timestamp of the next buffer */
+ GstClockTime timestamp;
+};
+
+struct _GstDshowAudioDecClass
+{
+ GstElementClass parent_class;
+ const CodecEntry *entry;
+};
+
+gboolean dshow_adec_register (GstPlugin * plugin);
+
+G_END_DECLS
+
+#endif /* __GST_DSHOWAUDIODEC_H__ */
diff --git a/sys/dshowdecwrapper/gstdshowdecwrapper.c b/sys/dshowdecwrapper/gstdshowdecwrapper.c
new file mode 100644
index 00000000..363fdbc8
--- /dev/null
+++ b/sys/dshowdecwrapper/gstdshowdecwrapper.c
@@ -0,0 +1,79 @@
+/*
+ * GStreamer DirectShow codecs wrapper
+ * Copyright <2006, 2007, 2008> Fluendo <gstreamer@fluendo.com>
+ * Copyright <2006, 2007, 2008> Pioneers of the Inevitable <songbird@songbirdnest.com>
+ * Copyright <2007,2008> Sebastien Moutte <sebastien@moutte.net>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ *
+ * Alternatively, the contents of this file may be used under the
+ * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
+ * which case the following provisions apply instead of the ones
+ * mentioned above:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstdshowaudiodec.h"
+#include "gstdshowvideodec.h"
+
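+/* GUIDs identifying the GStreamer fake source filter (CLSID) and its control
+ * interface (IID); the matching extern declarations are expected to come
+ * from dshow/gstdshowinterface.h */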
+const GUID CLSID_GstreamerSrcFilter
+ =
+ { 0x6a780808, 0x9725, 0x4d0b, {0x86, 0x95, 0xa4, 0xdd, 0x8d, 0x21, 0x7,
+ 0x73} };
+
+const GUID IID_IGstSrcInterface =
+ { 0x542c0a24, 0x8bd1, 0x46cb, {0xaa, 0x57, 0x3e, 0x46, 0xd0, 0x6, 0xd2,
+ 0xf3} };
+
+
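+/* Once the plugin is loaded, the wrapper elements behave like regular
+ * decoders. An illustrative (untested) pipeline, assuming asfdemux and the
+ * WMA2 DMO are available on the system:
+ *
+ *   gst-launch-0.10 filesrc location=sample.wma ! asfdemux ! dshowadec_wma2 !
+ *       audioconvert ! audioresample ! autoaudiosink
+ */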
+static gboolean
+plugin_init (GstPlugin * plugin)
+{
+ /* register fake filters */
+ gst_dshow_register_fakefilters ();
+
+ if (!dshow_adec_register (plugin) || !dshow_vdec_register (plugin))
+ return FALSE;
+
+ return TRUE;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ "dshowdecwrapper",
+ "Directshow decoder wrapper plugin",
+ plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)
diff --git a/sys/dshowdecwrapper/gstdshowdecwrapper.h b/sys/dshowdecwrapper/gstdshowdecwrapper.h
new file mode 100644
index 00000000..f912e18e
--- /dev/null
+++ b/sys/dshowdecwrapper/gstdshowdecwrapper.h
@@ -0,0 +1,73 @@
+/*
+ * GStreamer DirectShow codecs wrapper
+ * Copyright <2006, 2007, 2008> Fluendo <gstreamer@fluendo.com>
+ * Copyright <2006, 2007, 2008> Pioneers of the Inevitable <songbird@songbirdnest.com>
+ * Copyright <2007,2008> Sebastien Moutte <sebastien@moutte.net>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ *
+ * Alternatively, the contents of this file may be used under the
+ * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
+ * which case the following provisions apply instead of the ones
+ * mentioned above:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_DSHOW_H__
+#define __GST_DSHOW_H__
+
+#include <windows.h>
+#include <objbase.h>
+#include <dshow.h>
+#include <Rpc.h>
+
+#include <dshow/gstdshowinterface.h>
+
+#pragma warning( disable : 4090 4024)
+
+typedef struct _CodecEntry {
+  gchar *element_name;              /* GStreamer element (type) name */
+  gchar *element_longname;          /* human-readable codec name used in the element details */
+  gchar *prefered_filter_substring; /* substring used to select the preferred DirectShow filter */
+  gint32 format;                    /* wFormatTag (audio) or FOURCC (video) of the compressed format */
+  GUID input_majortype;             /* DirectShow major type of the compressed input */
+  GUID input_subtype;               /* DirectShow subtype of the compressed input */
+  gchar *sinkcaps;                  /* GStreamer caps accepted on the sink pad */
+  GUID output_majortype;            /* DirectShow major type of the decoded output */
+  GUID output_subtype;              /* DirectShow subtype of the decoded output */
+  gchar *srccaps;                   /* GStreamer caps produced on the src pad */
+} CodecEntry;
+
+#define GUID_TYPE_ANY {0x00000000, 0x0000, 0x0000, { 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 }}
+
+#endif /* __GST_DSHOW_H__ */
diff --git a/sys/dshowdecwrapper/gstdshowvideodec.c b/sys/dshowdecwrapper/gstdshowvideodec.c
new file mode 100644
index 00000000..ea286d56
--- /dev/null
+++ b/sys/dshowdecwrapper/gstdshowvideodec.c
@@ -0,0 +1,1122 @@
+/*
+ * GStreamer DirectShow codecs wrapper
+ * Copyright <2006, 2007, 2008> Fluendo <gstreamer@fluendo.com>
+ * Copyright <2006, 2007, 2008> Pioneers of the Inevitable <songbird@songbirdnest.com>
+ * Copyright <2007,2008> Sebastien Moutte <sebastien@moutte.net>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ *
+ * Alternatively, the contents of this file may be used under the
+ * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
+ * which case the following provisions apply instead of the ones
+ * mentioned above:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstdshowvideodec.h"
+
+GST_DEBUG_CATEGORY_STATIC (dshowvideodec_debug);
+#define GST_CAT_DEFAULT dshowvideodec_debug
+
+GST_BOILERPLATE (GstDshowVideoDec, gst_dshowvideodec, GstElement,
+ GST_TYPE_ELEMENT);
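+/* codec entry of the element type currently being registered; it is read by
+ * _base_init() because g_type_register_static() provides no user data */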
+static const CodecEntry *tmp;
+
+static void gst_dshowvideodec_dispose (GObject * object);
+static GstStateChangeReturn gst_dshowvideodec_change_state
+ (GstElement * element, GstStateChange transition);
+
+/* sink pad overrides */
+static gboolean gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps);
+static gboolean gst_dshowvideodec_sink_event (GstPad * pad, GstEvent * event);
+static GstFlowReturn gst_dshowvideodec_chain (GstPad * pad, GstBuffer * buffer);
+
+/* src pad overrides */
+static GstCaps *gst_dshowvideodec_src_getcaps (GstPad * pad);
+static gboolean gst_dshowvideodec_src_setcaps (GstPad * pad, GstCaps * caps);
+
+/* callback called by our directshow fake sink when it receives a buffer */
+static gboolean gst_dshowvideodec_push_buffer (byte * buffer, long size,
+ byte * src_object, UINT64 start, UINT64 stop);
+
+/* utils */
+static gboolean gst_dshowvideodec_create_graph_and_filters (GstDshowVideoDec *
+ vdec);
+static gboolean gst_dshowvideodec_destroy_graph_and_filters (GstDshowVideoDec *
+ vdec);
+static gboolean gst_dshowvideodec_flush (GstDshowVideoDec * adec);
+static gboolean gst_dshowvideodec_get_filter_output_format (GstDshowVideoDec *
+ vdec, GUID * subtype, VIDEOINFOHEADER ** format, guint * size);
+
+
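+/* as on the audio side, most of these subtype GUIDs embed the FOURCC of the
+ * compressed format in their first DWORD followed by the standard DirectShow
+ * base GUID; the MPEG-1 payload and RGB subtypes use predefined GUIDs instead */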
+#define GUID_MEDIATYPE_VIDEO {0x73646976, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_WMVV1 {0x31564d57, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_WMVV2 {0x32564d57, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_WMVV3 {0x33564d57, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_WMVP {0x50564d57, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_WMVA {0x41564d57, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_CVID {0x64697663, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_MP4S {0x5334504d, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_MP42 {0x3234504d, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_MP43 {0x3334504d, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_M4S2 {0x3253344d, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_XVID {0x44495658, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_DX50 {0x30355844, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_DIVX {0x58564944, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_DIV3 {0x33564944, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+
+#define GUID_MEDIASUBTYPE_MPG4 {0x3447504d, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_MPEG1Payload {0xe436eb81, 0x524f, 0x11ce, {0x9f, 0x53, 0x00, 0x20, 0xaf, 0x0b, 0xa7, 0x70}}
+
+
+/* output types */
+#define GUID_MEDIASUBTYPE_YUY2 {0x32595559, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_YV12 {0x32315659, 0x0000, 0x0010, { 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71 }}
+#define GUID_MEDIASUBTYPE_RGB32 {0xe436eb7e, 0x524f, 0x11ce, { 0x9f, 0x53, 0x00, 0x20, 0xaf, 0x0b, 0xa7, 0x70 }}
+#define GUID_MEDIASUBTYPE_RGB565 {0xe436eb7b, 0x524f, 0x11ce, { 0x9f, 0x53, 0x00, 0x20, 0xaf, 0x0b, 0xa7, 0x70 }}
+
+/* video codecs array */
+static const CodecEntry video_dec_codecs[] = {
+ {"dshowvdec_wmv1",
+ "Windows Media Video 7",
+ "DMO",
+ GST_MAKE_FOURCC ('W', 'M', 'V', '1'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WMVV1,
+ "video/x-wmv, wmvversion = (int) 1",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ "video/x-raw-yuv, format=(fourcc)YUY2"},
+ {"dshowvdec_wmv2",
+ "Windows Media Video 8",
+ "DMO",
+ GST_MAKE_FOURCC ('W', 'M', 'V', '2'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WMVV2,
+ "video/x-wmv, wmvversion = (int) 2",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ "video/x-raw-yuv, format=(fourcc)YUY2"},
+ {"dshowvdec_wmv3",
+ "Windows Media Video 9",
+ "DMO",
+ GST_MAKE_FOURCC ('W', 'M', 'V', '3'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WMVV3,
+ "video/x-wmv, wmvversion = (int) 3, " "format = (fourcc) WMV3",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ "video/x-raw-yuv, format=(fourcc)YUY2"},
+ {"dshowvdec_wmvp",
+ "Windows Media Video 9 Image",
+ "DMO",
+ GST_MAKE_FOURCC ('W', 'M', 'V', 'P'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WMVP,
+ "video/x-wmv, wmvversion = (int) 3, " "format = (fourcc) WMVP",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ "video/x-raw-yuv, format=(fourcc)YUY2"},
+ {"dshowvdec_wmva",
+ "Windows Media Video 9 Advanced",
+ "DMO",
+ GST_MAKE_FOURCC ('W', 'M', 'V', 'A'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_WMVA,
+ "video/x-wmv, wmvversion = (int) 3, " "format = (fourcc) WMVA",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ "video/x-raw-yuv, format=(fourcc)YUY2"},
+ {"dshowvdec_cinepak",
+ "Cinepack",
+ "AVI Decompressor",
+ 0x64697663,
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_CVID,
+ "video/x-cinepak",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_RGB32,
+ "video/x-raw-rgb, bpp=(int)32, depth=(int)24, "
+ "endianness=(int)4321, red_mask=(int)65280, "
+ "green_mask=(int)16711680, blue_mask=(int)-16777216"},
+ {"dshowvdec_msmpeg41",
+ "Microsoft ISO MPEG-4 version 1",
+ "DMO",
+ GST_MAKE_FOURCC ('M', 'P', '4', 'S'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MP4S,
+ "video/x-msmpeg, msmpegversion=(int)41",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ "video/x-raw-yuv, format=(fourcc)YUY2"},
+ {"dshowvdec_msmpeg42",
+ "Microsoft ISO MPEG-4 version 2",
+ "DMO",
+ GST_MAKE_FOURCC ('M', 'P', '4', '2'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MP42,
+ "video/x-msmpeg, msmpegversion=(int)42",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ "video/x-raw-yuv, format=(fourcc)YUY2"},
+ {"dshowvdec_msmpeg43",
+ "Microsoft ISO MPEG-4 version 3",
+ "DMO",
+ GST_MAKE_FOURCC ('M', 'P', '4', '3'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MP43,
+ "video/x-msmpeg, msmpegversion=(int)43",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ "video/x-raw-yuv, format=(fourcc)YUY2"},
+ {"dshowvdec_msmpeg4",
+ "Microsoft ISO MPEG-4 version 1.1",
+ "DMO",
+ GST_MAKE_FOURCC ('M', '4', 'S', '2'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_M4S2,
+ "video/x-msmpeg, msmpegversion=(int)4",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ "video/x-raw-yuv, format=(fourcc)YUY2"},
+ {"dshowvdec_mpeg1",
+ "MPEG-1 Video",
+ "MPEG Video Decoder",
+ GST_MAKE_FOURCC ('M', 'P', 'E', 'G'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MPEG1Payload,
+ "video/mpeg, mpegversion= (int) 1, "
+ "parsed= (boolean) true, " "systemstream= (boolean) false",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ "video/x-raw-yuv, format=(fourcc)YUY2"},
+ {"dshowvdec_xvid",
+ "XVID Video",
+ "ffdshow",
+ GST_MAKE_FOURCC ('X', 'V', 'I', 'D'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_XVID,
+ "video/x-xvid",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ "video/x-raw-yuv, format=(fourcc)YUY2"},
+ {"dshowvdec_divx5",
+ "DIVX 5.0 Video",
+ "ffdshow",
+ GST_MAKE_FOURCC ('D', 'X', '5', '0'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_DX50,
+ "video/x-divx, divxversion=(int)5",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ "video/x-raw-yuv, format=(fourcc)YUY2"},
+ {"dshowvdec_divx4",
+ "DIVX 4.0 Video",
+ "ffdshow",
+ GST_MAKE_FOURCC ('D', 'I', 'V', 'X'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_DIVX,
+ "video/x-divx, divxversion=(int)4",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ "video/x-raw-yuv, format=(fourcc)YUY2"},
+ {"dshowvdec_divx3",
+ "DIVX 3.0 Video",
+ "ffdshow",
+ GST_MAKE_FOURCC ('D', 'I', 'V', '3'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_DIV3,
+ "video/x-divx, divxversion=(int)3",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ "video/x-raw-yuv, format=(fourcc)YUY2"}
+ /*,
+ { "dshowvdec_mpeg4",
+ "DMO",
+ GST_MAKE_FOURCC ('M', 'P', 'G', '4'),
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_MPG4,
+ "video/mpeg, msmpegversion=(int)4",
+ GUID_MEDIATYPE_VIDEO, GUID_MEDIASUBTYPE_YUY2,
+ "video/x-raw-yuv, format=(fourcc)YUY2"
+ } */
+};
+
+static void
+gst_dshowvideodec_base_init (GstDshowVideoDecClass * klass)
+{
+ GstPadTemplate *src, *sink;
+ GstCaps *srccaps, *sinkcaps;
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ GstElementDetails details;
+
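+  /* "tmp" holds the codec entry selected for this type in
+   * dshow_vdec_register () */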
+ klass->entry = tmp;
+
+ details.longname = g_strdup_printf ("DirectShow %s Decoder Wrapper",
+ tmp->element_longname);
+ details.klass = g_strdup ("Codec/Decoder/Video");
+ details.description = g_strdup_printf ("DirectShow %s Decoder Wrapper",
+ tmp->element_longname);
+ details.author = "Sebastien Moutte <sebastien@moutte.net>";
+ gst_element_class_set_details (element_class, &details);
+ g_free (details.longname);
+ g_free (details.klass);
+ g_free (details.description);
+
+ sinkcaps = gst_caps_from_string (tmp->sinkcaps);
+ gst_caps_set_simple (sinkcaps,
+ "width", GST_TYPE_INT_RANGE, 16, 4096,
+ "height", GST_TYPE_INT_RANGE, 16, 4096,
+ "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
+
+ srccaps = gst_caps_from_string (tmp->srccaps);
+
+ sink = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, sinkcaps);
+ src = gst_pad_template_new ("src", GST_PAD_SRC, GST_PAD_ALWAYS, srccaps);
+
+ gst_element_class_add_pad_template (element_class, src);
+ gst_element_class_add_pad_template (element_class, sink);
+}
+
+static void
+gst_dshowvideodec_class_init (GstDshowVideoDecClass * klass)
+{
+ GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
+ GstElementClass *gstelement_class = GST_ELEMENT_CLASS (klass);
+
+ gobject_class->dispose = GST_DEBUG_FUNCPTR (gst_dshowvideodec_dispose);
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_dshowvideodec_change_state);
+
+ if (!parent_class)
+ parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
+
+ if (!dshowvideodec_debug) {
+ GST_DEBUG_CATEGORY_INIT (dshowvideodec_debug, "dshowvideodec", 0,
+ "Directshow filter video decoder");
+ }
+}
+
+static void
+gst_dshowvideodec_init (GstDshowVideoDec * vdec,
+ GstDshowVideoDecClass * vdec_class)
+{
+ GstElementClass *element_class = GST_ELEMENT_GET_CLASS (vdec);
+
+ /* setup pads */
+ vdec->sinkpad =
+ gst_pad_new_from_template (gst_element_class_get_pad_template
+ (element_class, "sink"), "sink");
+
+ gst_pad_set_setcaps_function (vdec->sinkpad, gst_dshowvideodec_sink_setcaps);
+ gst_pad_set_event_function (vdec->sinkpad, gst_dshowvideodec_sink_event);
+ gst_pad_set_chain_function (vdec->sinkpad, gst_dshowvideodec_chain);
+ gst_element_add_pad (GST_ELEMENT (vdec), vdec->sinkpad);
+
+ vdec->srcpad =
+ gst_pad_new_from_template (gst_element_class_get_pad_template
+ (element_class, "src"), "src");
+/* needed to implement caps negotiation on our src pad */
+/* gst_pad_set_getcaps_function (vdec->srcpad, gst_dshowvideodec_src_getcaps);
+ gst_pad_set_setcaps_function (vdec->srcpad, gst_dshowvideodec_src_setcaps);*/
+ gst_element_add_pad (GST_ELEMENT (vdec), vdec->srcpad);
+
+ vdec->srcfilter = NULL;
+ vdec->decfilter = NULL;
+ vdec->sinkfilter = NULL;
+
+ vdec->filtergraph = NULL;
+ vdec->mediafilter = NULL;
+ vdec->gstdshowsrcfilter = NULL;
+ vdec->srccaps = NULL;
+ vdec->segment = gst_segment_new ();
+
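+  /* initialize COM for this thread; balanced by CoUninitialize () in
+   * gst_dshowvideodec_dispose () */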
+ CoInitializeEx (NULL, COINIT_MULTITHREADED);
+}
+
+static void
+gst_dshowvideodec_dispose (GObject * object)
+{
+ GstDshowVideoDec *vdec = (GstDshowVideoDec *) (object);
+
+ if (vdec->segment) {
+ gst_segment_free (vdec->segment);
+ vdec->segment = NULL;
+ }
+
+ CoUninitialize ();
+}
+
+static GstStateChangeReturn
+gst_dshowvideodec_change_state (GstElement * element, GstStateChange transition)
+{
+ GstDshowVideoDec *vdec = (GstDshowVideoDec *) (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ if (!gst_dshowvideodec_create_graph_and_filters (vdec))
+ return GST_STATE_CHANGE_FAILURE;
+ break;
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
+ break;
+ case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
+ break;
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ if (!gst_dshowvideodec_destroy_graph_and_filters (vdec))
+ return GST_STATE_CHANGE_FAILURE;
+ break;
+ default:
+ break;
+ }
+
+ return GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+}
+
+static gboolean
+gst_dshowvideodec_sink_setcaps (GstPad * pad, GstCaps * caps)
+{
+ gboolean ret = FALSE;
+ HRESULT hres;
+ GstStructure *s = gst_caps_get_structure (caps, 0);
+ GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
+ GstDshowVideoDecClass *klass =
+ (GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (vdec);
+ GstBuffer *extradata = NULL;
+ const GValue *v = NULL;
+ gint size = 0;
+ GstCaps *caps_out;
+ AM_MEDIA_TYPE output_mediatype, input_mediatype;
+ VIDEOINFOHEADER *input_vheader = NULL, *output_vheader = NULL;
+ IPin *output_pin = NULL, *input_pin = NULL;
+ IGstDshowInterface *gstdshowinterface = NULL;
+ const GValue *fps;
+
+ /* read data */
+ if (!gst_structure_get_int (s, "width", &vdec->width) ||
+ !gst_structure_get_int (s, "height", &vdec->height)) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("error getting video width or height from caps"), (NULL));
+ goto end;
+ }
+ fps = gst_structure_get_value (s, "framerate");
+ if (!fps) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("error getting video framerate from caps"), (NULL));
+ goto end;
+ }
+ vdec->fps_n = gst_value_get_fraction_numerator (fps);
+ vdec->fps_d = gst_value_get_fraction_denominator (fps);
+
+ if ((v = gst_structure_get_value (s, "codec_data")))
+ extradata = gst_value_get_buffer (v);
+
+ /* define the input type format */
+ memset (&input_mediatype, 0, sizeof (AM_MEDIA_TYPE));
+ input_mediatype.majortype = klass->entry->input_majortype;
+ input_mediatype.subtype = klass->entry->input_subtype;
+ input_mediatype.bFixedSizeSamples = FALSE;
+ input_mediatype.bTemporalCompression = TRUE;
+
+ if (strstr (klass->entry->sinkcaps, "video/mpeg, mpegversion= (int) 1")) {
+ size =
+ sizeof (MPEG1VIDEOINFO) + (extradata ? GST_BUFFER_SIZE (extradata) -
+ 1 : 0);
+ input_vheader = g_malloc0 (size);
+
+ input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
+ if (extradata) {
+ MPEG1VIDEOINFO *mpeg_info = (MPEG1VIDEOINFO *) input_vheader;
+
+ memcpy (mpeg_info->bSequenceHeader,
+ GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata));
+ mpeg_info->cbSequenceHeader = GST_BUFFER_SIZE (extradata);
+ }
+ input_mediatype.formattype = FORMAT_MPEGVideo;
+ } else {
+ size =
+ sizeof (VIDEOINFOHEADER) +
+ (extradata ? GST_BUFFER_SIZE (extradata) : 0);
+ input_vheader = g_malloc0 (size);
+
+ input_vheader->bmiHeader.biSize = sizeof (BITMAPINFOHEADER);
+ if (extradata) { /* Codec data is appended after our header */
+ memcpy (((guchar *) input_vheader) + sizeof (VIDEOINFOHEADER),
+ GST_BUFFER_DATA (extradata), GST_BUFFER_SIZE (extradata));
+ input_vheader->bmiHeader.biSize += GST_BUFFER_SIZE (extradata);
+ }
+ input_mediatype.formattype = FORMAT_VideoInfo;
+ }
+ input_vheader->rcSource.top = input_vheader->rcSource.left = 0;
+ input_vheader->rcSource.right = vdec->width;
+ input_vheader->rcSource.bottom = vdec->height;
+ input_vheader->rcTarget = input_vheader->rcSource;
+ input_vheader->bmiHeader.biWidth = vdec->width;
+ input_vheader->bmiHeader.biHeight = vdec->height;
+ input_vheader->bmiHeader.biPlanes = 1;
+ input_vheader->bmiHeader.biBitCount = 16;
+ input_vheader->bmiHeader.biCompression = klass->entry->format;
+ input_vheader->bmiHeader.biSizeImage =
+ (vdec->width * vdec->height) * (input_vheader->bmiHeader.biBitCount / 8);
+
+ input_mediatype.cbFormat = size;
+ input_mediatype.pbFormat = (BYTE *) input_vheader;
+ input_mediatype.lSampleSize = input_vheader->bmiHeader.biSizeImage;
+
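+  /* hand the input format over to the fakesrc filter through its
+   * IGstDshowInterface */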
+ hres = IBaseFilter_QueryInterface (vdec->srcfilter, &IID_IGstDshowInterface,
+ (void **) &gstdshowinterface);
+ if (hres != S_OK || !gstdshowinterface) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't get IGstDshowInterface interface from dshow fakesrc filter (error=%d)",
+ hres), (NULL));
+ goto end;
+ }
+
+  /* save a reference to IGstDshowInterface to use its processing functions */
+ if (!vdec->gstdshowsrcfilter) {
+ vdec->gstdshowsrcfilter = gstdshowinterface;
+ IBaseFilter_AddRef (vdec->gstdshowsrcfilter);
+ }
+
+ IGstDshowInterface_gst_set_media_type (gstdshowinterface, &input_mediatype);
+ IGstDshowInterface_Release (gstdshowinterface);
+ gstdshowinterface = NULL;
+
+ /* set the sample size for fakesrc filter to the output buffer size */
+ IGstDshowInterface_gst_set_sample_size (vdec->gstdshowsrcfilter,
+ input_mediatype.lSampleSize);
+
+ /* connect our fake src to decoder */
+ gst_dshow_get_pin_from_filter (vdec->srcfilter, PINDIR_OUTPUT, &output_pin);
+ if (!output_pin) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't get output pin from our directshow fakesrc filter"), (NULL));
+ goto end;
+ }
+ gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_INPUT, &input_pin);
+ if (!input_pin) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't get input pin from decoder filter"), (NULL));
+ goto end;
+ }
+
+ hres =
+ IFilterGraph_ConnectDirect (vdec->filtergraph, output_pin, input_pin,
+ NULL);
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't connect fakesrc with decoder (error=%d)", hres), (NULL));
+ goto end;
+ }
+
+ IPin_Release (input_pin);
+ IPin_Release (output_pin);
+ input_pin = NULL;
+ output_pin = NULL;
+
+ /* get decoder output video format */
+ if (!gst_dshowvideodec_get_filter_output_format (vdec,
+ &klass->entry->output_subtype, &output_vheader, &size)) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't get decoder output video format"), (NULL));
+ goto end;
+ }
+
+ memset (&output_mediatype, 0, sizeof (AM_MEDIA_TYPE));
+ output_mediatype.majortype = klass->entry->output_majortype;
+ output_mediatype.subtype = klass->entry->output_subtype;
+ output_mediatype.bFixedSizeSamples = TRUE;
+ output_mediatype.bTemporalCompression = FALSE;
+ output_mediatype.lSampleSize = output_vheader->bmiHeader.biSizeImage;
+ output_mediatype.formattype = FORMAT_VideoInfo;
+ output_mediatype.cbFormat = size;
+  output_mediatype.pbFormat = (BYTE *) output_vheader;
+
+ hres = IBaseFilter_QueryInterface (vdec->sinkfilter, &IID_IGstDshowInterface,
+ (void **) &gstdshowinterface);
+ if (hres != S_OK || !gstdshowinterface) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't get IGstDshowInterface interface from dshow fakesink filter (error=%d)",
+ hres), (NULL));
+ goto end;
+ }
+
+ IGstDshowInterface_gst_set_media_type (gstdshowinterface, &output_mediatype);
+ IGstDshowInterface_gst_set_buffer_callback (gstdshowinterface,
+ gst_dshowvideodec_push_buffer, (byte *) vdec);
+ IGstDshowInterface_Release (gstdshowinterface);
+ gstdshowinterface = NULL;
+
+ /* connect decoder to our fake sink */
+ gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT, &output_pin);
+ if (!output_pin) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't get output pin from our decoder filter"), (NULL));
+ goto end;
+ }
+
+ gst_dshow_get_pin_from_filter (vdec->sinkfilter, PINDIR_INPUT, &input_pin);
+ if (!input_pin) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't get input pin from our directshow fakesink filter"), (NULL));
+ goto end;
+ }
+
+ hres =
+ IFilterGraph_ConnectDirect (vdec->filtergraph, output_pin, input_pin,
+ &output_mediatype);
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't connect decoder with fakesink (error=%d)", hres), (NULL));
+ goto end;
+ }
+
+ /* negotiate output */
+ caps_out = gst_caps_from_string (klass->entry->srccaps);
+ gst_caps_set_simple (caps_out,
+ "width", G_TYPE_INT, vdec->width,
+ "height", G_TYPE_INT, vdec->height,
+ "framerate", GST_TYPE_FRACTION, vdec->fps_n, vdec->fps_d, NULL);
+ if (!gst_pad_set_caps (vdec->srcpad, caps_out)) {
+ gst_caps_unref (caps_out);
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Failed to negotiate output"), (NULL));
+ goto end;
+ }
+ gst_caps_unref (caps_out);
+
+ hres = IMediaFilter_Run (vdec->mediafilter, -1);
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("Can't run the directshow graph (error=%d)", hres), (NULL));
+ goto end;
+ }
+
+ ret = TRUE;
+end:
+ gst_object_unref (vdec);
+ if (input_vheader)
+ g_free (input_vheader);
+ if (gstdshowinterface)
+ IGstDshowInterface_Release (gstdshowinterface);
+ if (input_pin)
+ IPin_Release (input_pin);
+ if (output_pin)
+ IPin_Release (output_pin);
+
+ return ret;
+}
+
+static gboolean
+gst_dshowvideodec_sink_event (GstPad * pad, GstEvent * event)
+{
+ gboolean ret = TRUE;
+ GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_STOP:
+ gst_dshowvideodec_flush (vdec);
+ ret = gst_pad_event_default (pad, event);
+ break;
+ case GST_EVENT_NEWSEGMENT:
+ {
+ GstFormat format;
+ gdouble rate;
+ gint64 start, stop, time;
+ gboolean update;
+
+ gst_event_parse_new_segment (event, &update, &rate, &format, &start,
+ &stop, &time);
+
+ /* save the new segment in our local current segment */
+ gst_segment_set_newsegment (vdec->segment, update, rate, format, start,
+ stop, time);
+
+ GST_CAT_DEBUG_OBJECT (dshowvideodec_debug, vdec,
+ "new segment received => start=%" GST_TIME_FORMAT " stop=%"
+ GST_TIME_FORMAT, GST_TIME_ARGS (vdec->segment->start),
+ GST_TIME_ARGS (vdec->segment->stop));
+
+ if (update) {
+ GST_CAT_DEBUG_OBJECT (dshowvideodec_debug, vdec,
+ "closing current segment flushing..");
+ gst_dshowvideodec_flush (vdec);
+ }
+
+ ret = gst_pad_event_default (pad, event);
+ break;
+ }
+ default:
+ ret = gst_pad_event_default (pad, event);
+ break;
+ }
+ return ret;
+}
+
+static GstFlowReturn
+gst_dshowvideodec_chain (GstPad * pad, GstBuffer * buffer)
+{
+ GstFlowReturn ret = GST_FLOW_OK;
+ GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
+  gboolean discont = FALSE;
+ GstClockTime stop;
+
+ if (!vdec->gstdshowsrcfilter) {
+    /* we are not set up yet */
+ ret = GST_FLOW_WRONG_STATE;
+ goto beach;
+ }
+
+  /* use the duration only when it is valid, because dshow does not decode
+   * frames having a stop time smaller than the start time */
+ if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
+ stop = GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer);
+ } else {
+ stop = GST_BUFFER_TIMESTAMP (buffer);
+ }
+
+ GST_CAT_LOG_OBJECT (dshowvideodec_debug, vdec,
+ "chain (size %d)=> pts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT,
+ GST_BUFFER_SIZE (buffer), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
+ GST_TIME_ARGS (stop));
+
+ /* if the incoming buffer has discont flag set => flush decoder data */
+ if (buffer && GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT)) {
+ GST_CAT_DEBUG_OBJECT (dshowvideodec_debug, vdec,
+ "this buffer has a DISCONT flag (%" GST_TIME_FORMAT "), flushing",
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)));
+ gst_dshowvideodec_flush (vdec);
+    discont = TRUE;
+ }
+
+ /* push the buffer to the directshow decoder */
+ IGstDshowInterface_gst_push_buffer (vdec->gstdshowsrcfilter,
+ GST_BUFFER_DATA (buffer), GST_BUFFER_TIMESTAMP (buffer), stop,
+      GST_BUFFER_SIZE (buffer), discont);
+
+beach:
+ gst_buffer_unref (buffer);
+ gst_object_unref (vdec);
+
+ return ret;
+}
+
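+/* buffer callback registered on the fakesink filter in
+ * gst_dshowvideodec_sink_setcaps (); called for each decoded frame */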
+static gboolean
+gst_dshowvideodec_push_buffer (byte * buffer, long size, byte * src_object,
+ UINT64 start, UINT64 stop)
+{
+ GstDshowVideoDec *vdec = (GstDshowVideoDec *) src_object;
+ GstDshowVideoDecClass *klass =
+ (GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (vdec);
+ GstBuffer *buf = NULL;
+ gboolean in_seg = FALSE;
+ gint64 clip_start = 0, clip_stop = 0;
+
+ /* check if this buffer is in our current segment */
+ in_seg = gst_segment_clip (vdec->segment, GST_FORMAT_TIME,
+ start, stop, &clip_start, &clip_stop);
+
+ /* if the buffer is out of segment do not push it downstream */
+ if (!in_seg) {
+ GST_CAT_DEBUG_OBJECT (dshowvideodec_debug, vdec,
+ "buffer is out of segment, start %" GST_TIME_FORMAT " stop %"
+ GST_TIME_FORMAT, GST_TIME_ARGS (start), GST_TIME_ARGS (stop));
+ return FALSE;
+ }
+
+  /* buffer is in our segment; allocate a new output buffer and clip its timestamps */
+ gst_pad_alloc_buffer (vdec->srcpad, GST_BUFFER_OFFSET_NONE,
+ size, GST_PAD_CAPS (vdec->srcpad), &buf);
+ if (!buf) {
+ GST_CAT_WARNING_OBJECT (dshowvideodec_debug, vdec,
+ "can't not allocate a new GstBuffer");
+ return FALSE;
+ }
+
+ /* set buffer properties */
+ GST_BUFFER_SIZE (buf) = size;
+ GST_BUFFER_TIMESTAMP (buf) = clip_start;
+ GST_BUFFER_DURATION (buf) = clip_stop - clip_start;
+
+ if (strstr (klass->entry->srccaps, "rgb")) {
+    /* for RGB the DirectShow decoder returns a bottom-up bitmap, so copy
+     * the lines in reverse order; there is probably a way to get top-down
+     * frames directly from the decoder...
+     */
+ gint line = 0;
+ guint stride = vdec->width * 4;
+
+ for (; line < vdec->height; line++) {
+ memcpy (GST_BUFFER_DATA (buf) + (line * stride),
+ buffer + (size - ((line + 1) * (stride))), stride);
+ }
+ } else {
+ memcpy (GST_BUFFER_DATA (buf), buffer, size);
+ }
+
+ GST_CAT_LOG_OBJECT (dshowvideodec_debug, vdec,
+ "push_buffer (size %d)=> pts %" GST_TIME_FORMAT " stop %" GST_TIME_FORMAT
+ " duration %" GST_TIME_FORMAT, size,
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf) + GST_BUFFER_DURATION (buf)),
+ GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
+
+ /* push the buffer downstream */
+ gst_pad_push (vdec->srcpad, buf);
+
+ return TRUE;
+}
+
+
+static GstCaps *
+gst_dshowvideodec_src_getcaps (GstPad * pad)
+{
+ GstDshowVideoDec *vdec = (GstDshowVideoDec *) gst_pad_get_parent (pad);
+
+ if (!vdec->srccaps)
+ vdec->srccaps = gst_caps_new_empty ();
+
+ if (vdec->decfilter) {
+ IPin *output_pin = NULL;
+ IEnumMediaTypes *enum_mediatypes = NULL;
+ HRESULT hres;
+ ULONG fetched;
+
+ if (!gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT,
+ &output_pin)) {
+ GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
+ ("failed getting ouput pin from the decoder"), (NULL));
+ return NULL;
+ }
+
+ hres = IPin_EnumMediaTypes (output_pin, &enum_mediatypes);
+ if (hres == S_OK && enum_mediatypes) {
+ AM_MEDIA_TYPE *mediatype = NULL;
+
+ IEnumMediaTypes_Reset (enum_mediatypes);
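+      /* walk all media types exposed by the decoder output pin */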
+ while (hres =
+          IEnumMediaTypes_Next (enum_mediatypes, 1, &mediatype, &fetched),
+ hres == S_OK) {
+ RPC_STATUS rpcstatus;
+ VIDEOINFOHEADER *video_info;
+ GstCaps *mediacaps = NULL;
+
+ /* RGB24 */
+ if ((UuidCompare (&mediatype->subtype, &MEDIASUBTYPE_RGB24,
+ &rpcstatus) == 0 && rpcstatus == RPC_S_OK)
+ && (UuidCompare (&mediatype->formattype, &FORMAT_VideoInfo,
+ &rpcstatus) == 0 && rpcstatus == RPC_S_OK)) {
+ video_info = (VIDEOINFOHEADER *) mediatype->pbFormat;
+
+ /* ffmpegcolorspace handles RGB24 in BIG_ENDIAN */
+ mediacaps = gst_caps_new_simple ("video/x-raw-rgb",
+ "bpp", G_TYPE_INT, 24,
+ "depth", G_TYPE_INT, 24,
+ "width", G_TYPE_INT, video_info->bmiHeader.biWidth,
+ "height", G_TYPE_INT, video_info->bmiHeader.biHeight,
+ "framerate", GST_TYPE_FRACTION,
+ (int) (10000000 / video_info->AvgTimePerFrame), 1, "endianness",
+ G_TYPE_INT, G_BIG_ENDIAN, "red_mask", G_TYPE_INT, 255,
+ "green_mask", G_TYPE_INT, 65280, "blue_mask", G_TYPE_INT,
+ 16711680, NULL);
+
+ if (mediacaps) {
+ vdec->mediatypes = g_list_append (vdec->mediatypes, mediatype);
+ gst_caps_append (vdec->srccaps, mediacaps);
+ } else {
+ gst_dshow_free_mediatype (mediatype);
+ }
+ } else {
+ gst_dshow_free_mediatype (mediatype);
+ }
+
+ }
+ IEnumMediaTypes_Release (enum_mediatypes);
+ }
+ if (output_pin) {
+ IPin_Release (output_pin);
+ }
+ }
+
+ if (vdec->srccaps)
+ return gst_caps_ref (vdec->srccaps);
+
+ return NULL;
+}
+
+static gboolean
+gst_dshowvideodec_src_setcaps (GstPad * pad, GstCaps * caps)
+{
+ gboolean ret = FALSE;
+
+ return ret;
+}
+
+static gboolean
+gst_dshowvideodec_flush (GstDshowVideoDec * vdec)
+{
+ if (!vdec->gstdshowsrcfilter)
+ return FALSE;
+
+ /* flush dshow decoder and reset timestamp */
+ IGstDshowInterface_gst_flush (vdec->gstdshowsrcfilter);
+
+ return TRUE;
+}
+
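+/* enumerate the decoder output pin media types and return a copy of the
+ * VIDEOINFOHEADER of the first one matching the requested subtype */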
+static gboolean
+gst_dshowvideodec_get_filter_output_format (GstDshowVideoDec * vdec,
+ GUID * subtype, VIDEOINFOHEADER ** format, guint * size)
+{
+ IPin *output_pin = NULL;
+ IEnumMediaTypes *enum_mediatypes = NULL;
+ HRESULT hres;
+ ULONG fetched;
+  gboolean ret = FALSE;
+
+ if (!vdec->decfilter)
+ return FALSE;
+
+ if (!gst_dshow_get_pin_from_filter (vdec->decfilter, PINDIR_OUTPUT,
+ &output_pin)) {
+ GST_ELEMENT_ERROR (vdec, CORE, NEGOTIATION,
+ ("failed getting ouput pin from the decoder"), (NULL));
+ return FALSE;
+ }
+
+ hres = IPin_EnumMediaTypes (output_pin, &enum_mediatypes);
+ if (hres == S_OK && enum_mediatypes) {
+ AM_MEDIA_TYPE *mediatype = NULL;
+
+ IEnumMediaTypes_Reset (enum_mediatypes);
+ while (hres =
+        IEnumMediaTypes_Next (enum_mediatypes, 1, &mediatype, &fetched),
+ hres == S_OK) {
+ RPC_STATUS rpcstatus;
+
+ if ((UuidCompare (&mediatype->subtype, subtype, &rpcstatus) == 0
+ && rpcstatus == RPC_S_OK) &&
+ (UuidCompare (&mediatype->formattype, &FORMAT_VideoInfo,
+ &rpcstatus) == 0 && rpcstatus == RPC_S_OK)) {
+ *size = mediatype->cbFormat;
+ *format = g_malloc0 (*size);
+ memcpy (*format, mediatype->pbFormat, *size);
+ ret = TRUE;
+ }
+ gst_dshow_free_mediatype (mediatype);
+ if (ret)
+ break;
+ }
+ IEnumMediaTypes_Release (enum_mediatypes);
+ }
+ if (output_pin) {
+ IPin_Release (output_pin);
+ }
+
+ return ret;
+}
+
+static gboolean
+gst_dshowvideodec_create_graph_and_filters (GstDshowVideoDec * vdec)
+{
+ HRESULT hres = S_FALSE;
+ GstDshowVideoDecClass *klass =
+ (GstDshowVideoDecClass *) G_OBJECT_GET_CLASS (vdec);
+
+ /* create the filter graph manager object */
+ hres = CoCreateInstance (&CLSID_FilterGraph, NULL, CLSCTX_INPROC,
+ &IID_IFilterGraph, (LPVOID *) & vdec->filtergraph);
+ if (hres != S_OK || !vdec->filtergraph) {
+ GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't create an instance "
+ "of the directshow graph manager (error=%d)", hres), (NULL));
+ goto error;
+ }
+
+ hres = IFilterGraph_QueryInterface (vdec->filtergraph, &IID_IMediaFilter,
+ (void **) &vdec->mediafilter);
+ if (hres != S_OK || !vdec->mediafilter) {
+ GST_ELEMENT_ERROR (vdec, STREAM, FAILED,
+ ("Can't get IMediacontrol interface "
+ "from the graph manager (error=%d)", hres), (NULL));
+ goto error;
+ }
+
+ /* create fake src filter */
+ hres = CoCreateInstance (&CLSID_DshowFakeSrc, NULL, CLSCTX_INPROC,
+ &IID_IBaseFilter, (LPVOID *) & vdec->srcfilter);
+ if (hres != S_OK || !vdec->srcfilter) {
+ GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't create an instance "
+ "of the directshow fakesrc (error=%d)", hres), (NULL));
+ goto error;
+ }
+
+ /* search a decoder filter and create it */
+ if (!gst_dshow_find_filter (klass->entry->input_majortype,
+ klass->entry->input_subtype,
+ klass->entry->output_majortype,
+ klass->entry->output_subtype,
+ klass->entry->prefered_filter_substring, &vdec->decfilter)) {
+ GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't create an instance "
+ "of the decoder filter"), (NULL));
+ goto error;
+ }
+
+ /* create fake sink filter */
+ hres = CoCreateInstance (&CLSID_DshowFakeSink, NULL, CLSCTX_INPROC,
+ &IID_IBaseFilter, (LPVOID *) & vdec->sinkfilter);
+ if (hres != S_OK || !vdec->sinkfilter) {
+ GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't create an instance "
+ "of the directshow fakesink (error=%d)", hres), (NULL));
+ goto error;
+ }
+
+ /* add filters to the graph */
+ hres = IFilterGraph_AddFilter (vdec->filtergraph, vdec->srcfilter, L"src");
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't add fakesrc filter "
+ "to the graph (error=%d)", hres), (NULL));
+ goto error;
+ }
+
+ hres =
+ IFilterGraph_AddFilter (vdec->filtergraph, vdec->decfilter, L"decoder");
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't add decoder filter "
+ "to the graph (error=%d)", hres), (NULL));
+ goto error;
+ }
+
+ hres = IFilterGraph_AddFilter (vdec->filtergraph, vdec->sinkfilter, L"sink");
+ if (hres != S_OK) {
+ GST_ELEMENT_ERROR (vdec, STREAM, FAILED, ("Can't add fakesink filter "
+ "to the graph (error=%d)", hres), (NULL));
+ goto error;
+ }
+
+ return TRUE;
+
+error:
+ if (vdec->srcfilter) {
+ IBaseFilter_Release (vdec->srcfilter);
+ vdec->srcfilter = NULL;
+ }
+ if (vdec->decfilter) {
+ IBaseFilter_Release (vdec->decfilter);
+ vdec->decfilter = NULL;
+ }
+ if (vdec->sinkfilter) {
+ IBaseFilter_Release (vdec->sinkfilter);
+ vdec->sinkfilter = NULL;
+ }
+ if (vdec->mediafilter) {
+ IMediaFilter_Release (vdec->mediafilter);
+ vdec->mediafilter = NULL;
+ }
+ if (vdec->filtergraph) {
+ IFilterGraph_Release (vdec->filtergraph);
+ vdec->filtergraph = NULL;
+ }
+
+ return FALSE;
+}
+
+static gboolean
+gst_dshowvideodec_destroy_graph_and_filters (GstDshowVideoDec * vdec)
+{
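+  /* stop the running graph, then remove and release every filter and
+   * graph interface we hold */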
+ if (vdec->mediafilter) {
+ IMediaFilter_Stop (vdec->mediafilter);
+ }
+
+ if (vdec->gstdshowsrcfilter) {
+ IGstDshowInterface_Release (vdec->gstdshowsrcfilter);
+ vdec->gstdshowsrcfilter = NULL;
+ }
+ if (vdec->srcfilter) {
+ if (vdec->filtergraph)
+ IFilterGraph_RemoveFilter (vdec->filtergraph, vdec->srcfilter);
+ IBaseFilter_Release (vdec->srcfilter);
+ vdec->srcfilter = NULL;
+ }
+ if (vdec->decfilter) {
+ if (vdec->filtergraph)
+ IFilterGraph_RemoveFilter (vdec->filtergraph, vdec->decfilter);
+ IBaseFilter_Release (vdec->decfilter);
+ vdec->decfilter = NULL;
+ }
+ if (vdec->sinkfilter) {
+ if (vdec->filtergraph)
+ IFilterGraph_RemoveFilter (vdec->filtergraph, vdec->sinkfilter);
+ IBaseFilter_Release (vdec->sinkfilter);
+ vdec->sinkfilter = NULL;
+ }
+ if (vdec->mediafilter) {
+ IMediaFilter_Release (vdec->mediafilter);
+ vdec->mediafilter = NULL;
+ }
+ if (vdec->filtergraph) {
+ IFilterGraph_Release (vdec->filtergraph);
+ vdec->filtergraph = NULL;
+ }
+
+ return TRUE;
+}
+
+gboolean
+dshow_vdec_register (GstPlugin * plugin)
+{
+ GTypeInfo info = {
+ sizeof (GstDshowVideoDecClass),
+ (GBaseInitFunc) gst_dshowvideodec_base_init,
+ NULL,
+ (GClassInitFunc) gst_dshowvideodec_class_init,
+ NULL,
+ NULL,
+ sizeof (GstDshowVideoDec),
+ 0,
+ (GInstanceInitFunc) gst_dshowvideodec_init,
+ };
+ gint i;
+
+ GST_DEBUG_CATEGORY_INIT (dshowvideodec_debug, "dshowvideodec", 0,
+ "Directshow filter video decoder");
+
+ CoInitializeEx (NULL, COINIT_MULTITHREADED);
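+  /* register one element per codec for which a matching DirectShow decoder
+   * filter is available on the system */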
+ for (i = 0; i < sizeof (video_dec_codecs) / sizeof (CodecEntry); i++) {
+ GType type;
+
+ if (gst_dshow_find_filter (video_dec_codecs[i].input_majortype,
+ video_dec_codecs[i].input_subtype,
+ video_dec_codecs[i].output_majortype,
+ video_dec_codecs[i].output_subtype,
+ video_dec_codecs[i].prefered_filter_substring, NULL)) {
+
+ GST_CAT_DEBUG (dshowvideodec_debug, "Registering %s",
+ video_dec_codecs[i].element_name);
+
+ tmp = &video_dec_codecs[i];
+ type =
+ g_type_register_static (GST_TYPE_ELEMENT,
+ video_dec_codecs[i].element_name, &info, 0);
+ if (!gst_element_register (plugin, video_dec_codecs[i].element_name,
+ GST_RANK_PRIMARY, type)) {
+ return FALSE;
+ }
+ GST_CAT_DEBUG (dshowvideodec_debug, "Registered %s",
+ video_dec_codecs[i].element_name);
+ } else {
+ GST_CAT_DEBUG (dshowvideodec_debug,
+ "Element %s not registered (the format is not supported by the system)",
+ video_dec_codecs[i].element_name);
+ }
+ }
+
+ CoUninitialize ();
+ return TRUE;
+}
diff --git a/sys/dshowdecwrapper/gstdshowvideodec.h b/sys/dshowdecwrapper/gstdshowvideodec.h
new file mode 100644
index 00000000..12f3a28e
--- /dev/null
+++ b/sys/dshowdecwrapper/gstdshowvideodec.h
@@ -0,0 +1,105 @@
+/*
+ * GStreamer DirectShow codecs wrapper
+ * Copyright <2006, 2007, 2008> Fluendo <gstreamer@fluendo.com>
+ * Copyright <2006, 2007, 2008> Pioneers of the Inevitable <songbird@songbirdnest.com>
+ * Copyright <2007,2008> Sebastien Moutte <sebastien@moutte.net>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a
+ * copy of this software and associated documentation files (the "Software"),
+ * to deal in the Software without restriction, including without limitation
+ * the rights to use, copy, modify, merge, publish, distribute, sublicense,
+ * and/or sell copies of the Software, and to permit persons to whom the
+ * Software is furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in
+ * all copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+ * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+ * DEALINGS IN THE SOFTWARE.
+ *
+ * Alternatively, the contents of this file may be used under the
+ * GNU Lesser General Public License Version 2.1 (the "LGPL"), in
+ * which case the following provisions apply instead of the ones
+ * mentioned above:
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_DSHOWVIDEODEC_H__
+#define __GST_DSHOWVIDEODEC_H__
+
+#include <gst/gst.h>
+#include "gstdshowdecwrapper.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_DSHOWVIDEODEC (gst_dshowvideodec_get_type())
+#define GST_DSHOWVIDEODEC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_DSHOWVIDEODEC,GstDshowVideoDec))
+#define GST_DSHOWVIDEODEC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_DSHOWVIDEODEC,GstDshowVideoDecClass))
+#define GST_IS_DSHOWVIDEODEC(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_DSHOWVIDEODEC))
+#define GST_IS_DSHOWVIDEODEC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_DSHOWVIDEODEC))
+
+typedef struct _GstDshowVideoDec GstDshowVideoDec;
+typedef struct _GstDshowVideoDecClass GstDshowVideoDecClass;
+
+struct _GstDshowVideoDec
+{
+ GstElement element;
+
+ /* element pads */
+ GstPad *sinkpad;
+ GstPad *srcpad;
+
+ /* caps of our src pad */
+ GstCaps *srccaps;
+
+  /* list of dshow mediatypes corresponding to the caps list */
+ GList *mediatypes;
+
+ /* filters interfaces */
+ IBaseFilter *srcfilter;
+ IGstDshowInterface *gstdshowsrcfilter;
+ IBaseFilter *decfilter;
+ IBaseFilter *sinkfilter;
+
+ /* graph manager interfaces */
+ IMediaFilter *mediafilter;
+ IFilterGraph *filtergraph;
+
+ /* settings */
+ gint width, height;
+ gint fps_n, fps_d;
+
+ /* current segment */
+ GstSegment *segment;
+};
+
+struct _GstDshowVideoDecClass
+{
+ GstElementClass parent_class;
+ const CodecEntry *entry;
+};
+
+gboolean dshow_vdec_register (GstPlugin * plugin);
+
+G_END_DECLS
+
+#endif /* __GST_DSHOWVIDEODEC_H__ */