summaryrefslogtreecommitdiffstats
path: root/sys
diff options
context:
space:
mode:
Diffstat (limited to 'sys')
-rw-r--r--sys/Makefile.am10
-rw-r--r--sys/vdpau/Makefile.am27
-rw-r--r--sys/vdpau/gstvdp.c29
-rw-r--r--sys/vdpau/gstvdpdevice.c266
-rw-r--r--sys/vdpau/gstvdpdevice.h132
-rw-r--r--sys/vdpau/gstvdpmpegdec.c1156
-rw-r--r--sys/vdpau/gstvdpmpegdec.h105
-rw-r--r--sys/vdpau/gstvdpvideobuffer.c138
-rw-r--r--sys/vdpau/gstvdpvideobuffer.h59
-rw-r--r--sys/vdpau/gstvdpvideoyuv.c462
-rw-r--r--sys/vdpau/gstvdpvideoyuv.h60
-rw-r--r--sys/vdpau/gstvdpyuvvideo.c476
-rw-r--r--sys/vdpau/gstvdpyuvvideo.h62
-rw-r--r--sys/vdpau/mpegutil.c430
-rw-r--r--sys/vdpau/mpegutil.h150
15 files changed, 3560 insertions, 2 deletions
diff --git a/sys/Makefile.am b/sys/Makefile.am
index 161f3339..15c89e17 100644
--- a/sys/Makefile.am
+++ b/sys/Makefile.am
@@ -70,8 +70,14 @@ else
ACM_DIR=
endif
-SUBDIRS = $(ACM_DIR) $(DIRECTDRAW_DIR) $(DVB_DIR) $(FBDEV_DIR) $(OSS4_DIR) $(OSX_VIDEO_DIR) $(QT_DIR) $(VCD_DIR) $(WININET_DIR)
+if USE_VDPAU
+VDPAU_DIR=vdpau
+else
+VDPAU_DIR=
+endif
+
+SUBDIRS = $(ACM_DIR) $(DIRECTDRAW_DIR) $(DVB_DIR) $(FBDEV_DIR) $(OSS4_DIR) $(OSX_VIDEO_DIR) $(QT_DIR) $(VCD_DIR) $(VDPAU_DIR) $(WININET_DIR)
DIST_SUBDIRS = acmenc acmmp3dec directdraw dvb fbdev dshowdecwrapper dshowsrcwrapper dshowvideosink \
- oss4 osxvideo qtwrapper vcd wasapi wininet winks winscreencap
+ oss4 osxvideo qtwrapper vcd vdpau wasapi wininet winks winscreencap
diff --git a/sys/vdpau/Makefile.am b/sys/vdpau/Makefile.am
new file mode 100644
index 00000000..93a7513a
--- /dev/null
+++ b/sys/vdpau/Makefile.am
@@ -0,0 +1,27 @@
+plugin_LTLIBRARIES = libgstvdpau.la
+
+libgstvdpau_la_SOURCES = \
+ gstvdpdevice.c \
+ gstvdpmpegdec.c \
+ mpegutil.c \
+ gstvdpvideoyuv.c \
+ gstvdpvideobuffer.c \
+ gstvdp.c \
+ gstvdpyuvvideo.c
+
+libgstvdpau_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) $(X11_CFLAGS) $(VDPAU_CFLAGS)
+libgstvdpau_la_LIBADD = $(GST_LIBS) $(GST_BASE_LIBS) \
+ $(GST_PLUGINS_BASE_LIBS) $(X11_LIBS) -lgstvideo-$(GST_MAJORMINOR) \
+ $(VDPAU_LIBS)
+libgstvdpau_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
+libgstvdpau_la_LIBTOOLFLAGS = --tag=disable-static
+
+noinst_HEADERS = \
+ gstvdpdevice.h \
+ gstvdpmpegdec.h \
+ mpegutil.h \
+ gstvdpvideoyuv.h \
+ gstvdpvideobuffer.h \
+ gstvdpyuvvideo.h
+
+
diff --git a/sys/vdpau/gstvdp.c b/sys/vdpau/gstvdp.c
new file mode 100644
index 00000000..5c524968
--- /dev/null
+++ b/sys/vdpau/gstvdp.c
@@ -0,0 +1,29 @@
+#ifdef HAVE_CONFIG_H
+# include <config.h>
+#endif
+
+
+#include <gst/gst.h>
+
+#include "gstvdpmpegdec.h"
+#include "gstvdpvideoyuv.h"
+#include "gstvdpyuvvideo.h"
+
+static gboolean
+vdpau_init (GstPlugin * vdpau_plugin)
+{
+ gst_element_register (vdpau_plugin, "vdpaumpegdec",
+ GST_RANK_PRIMARY - 1, GST_TYPE_VDP_MPEG_DEC);
+ gst_element_register (vdpau_plugin, "vdpauvideoyuv",
+ GST_RANK_PRIMARY, GST_TYPE_VDP_VIDEO_YUV);
+ gst_element_register (vdpau_plugin, "vdpauyuvvideo",
+ GST_RANK_PRIMARY, GST_TYPE_VDP_YUV_VIDEO);
+
+ return TRUE;
+}
+
+GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
+ GST_VERSION_MINOR,
+ "vdpau",
+ "Various elements utilizing VDPAU",
+ vdpau_init, VERSION, "LGPL", "GStreamer", "http://gstreamer.net/")
diff --git a/sys/vdpau/gstvdpdevice.c b/sys/vdpau/gstvdpdevice.c
new file mode 100644
index 00000000..4ed1b177
--- /dev/null
+++ b/sys/vdpau/gstvdpdevice.c
@@ -0,0 +1,266 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include <vdpau/vdpau_x11.h>
+#include <gst/gst.h>
+
+#include "gstvdpdevice.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_vdp_device_debug);
+#define GST_CAT_DEFAULT gst_vdp_device_debug
+
+enum
+{
+ PROP_0,
+ PROP_DISPLAY
+};
+
+
+
+G_DEFINE_TYPE (GstVdpDevice, gst_vdp_device, G_TYPE_OBJECT);
+
+static void
+gst_vdp_device_init (GstVdpDevice * device)
+{
+ device->display_name = NULL;
+ device->display = NULL;
+ device->device = VDP_INVALID_HANDLE;
+}
+
+static void
+gst_vdp_device_finalize (GObject * object)
+{
+ GstVdpDevice *device = (GstVdpDevice *) object;
+
+ device->vdp_device_destroy (device->device);
+ XCloseDisplay (device->display);
+ g_free (device->display_name);
+
+ G_OBJECT_CLASS (gst_vdp_device_parent_class)->finalize (object);
+}
+
+static void
+gst_vdp_device_constructed (GObject * object)
+{
+ GstVdpDevice *device = (GstVdpDevice *) object;
+ gint screen;
+ VdpStatus status;
+ gint i;
+
+ typedef struct
+ {
+ gint id;
+ void *func;
+ } VdpFunction;
+
+ VdpFunction vdp_function[] = {
+ {VDP_FUNC_ID_DEVICE_DESTROY, &device->vdp_device_destroy},
+ {VDP_FUNC_ID_VIDEO_SURFACE_CREATE,
+ &device->vdp_video_surface_create},
+ {VDP_FUNC_ID_VIDEO_SURFACE_DESTROY,
+ &device->vdp_video_surface_destroy},
+ {VDP_FUNC_ID_VIDEO_SURFACE_QUERY_CAPABILITIES,
+ &device->vdp_video_surface_query_capabilities},
+ {VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
+ &device->vdp_video_surface_query_ycbcr_capabilities},
+ {VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR,
+ &device->vdp_video_surface_get_bits_ycbcr},
+ {VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR,
+ &device->vdp_video_surface_put_bits_ycbcr},
+ {VDP_FUNC_ID_VIDEO_SURFACE_GET_PARAMETERS,
+ &device->vdp_video_surface_get_parameters},
+ {VDP_FUNC_ID_DECODER_CREATE, &device->vdp_decoder_create},
+ {VDP_FUNC_ID_DECODER_RENDER, &device->vdp_decoder_render},
+ {VDP_FUNC_ID_DECODER_DESTROY, &device->vdp_decoder_destroy},
+ {VDP_FUNC_ID_DECODER_QUERY_CAPABILITIES,
+ &device->vdp_decoder_query_capabilities},
+ {VDP_FUNC_ID_DECODER_GET_PARAMETERS,
+ &device->vdp_decoder_get_parameters},
+ {0, NULL}
+ };
+
+ device->display = XOpenDisplay (device->display_name);
+ if (!device->display) {
+ GST_ERROR_OBJECT (device, "Could not open X display with name: %s",
+ device->display_name);
+ return;
+ }
+
+ screen = DefaultScreen (device->display);
+ status =
+ vdp_device_create_x11 (device->display, screen, &device->device,
+ &device->vdp_get_proc_address);
+ if (status != VDP_STATUS_OK) {
+ GST_ERROR_OBJECT (device, "Could not create VDPAU device");
+ XCloseDisplay (device->display);
+ device->display = NULL;
+
+ return;
+ }
+
+ status = device->vdp_get_proc_address (device->device,
+ VDP_FUNC_ID_GET_ERROR_STRING, (void **) &device->vdp_get_error_string);
+ if (status != VDP_STATUS_OK) {
+ GST_ERROR_OBJECT (device,
+ "Could not get vdp_get_error_string function pointer from VDPAU");
+ goto error;
+ }
+
+ for (i = 0; vdp_function[i].func != NULL; i++) {
+ status = device->vdp_get_proc_address (device->device,
+ vdp_function[i].id, vdp_function[i].func);
+
+ if (status != VDP_STATUS_OK) {
+ GST_ERROR_OBJECT (device, "Could not get function pointer from VDPAU,"
+ " error returned was: %s", device->vdp_get_error_string (status));
+ goto error;
+ }
+ }
+
+ return;
+
+error:
+ XCloseDisplay (device->display);
+ device->display = NULL;
+
+ if (device->device != VDP_INVALID_HANDLE) {
+ device->vdp_device_destroy (device->device);
+ device->device = VDP_INVALID_HANDLE;
+ }
+}
+
+static void
+gst_vdp_device_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstVdpDevice *device;
+
+ g_return_if_fail (GST_IS_VDP_DEVICE (object));
+
+ device = (GstVdpDevice *) object;
+
+ switch (prop_id) {
+ case PROP_DISPLAY:
+ device->display_name = g_value_dup_string (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_vdp_device_get_property (GObject * object, guint prop_id, GValue * value,
+ GParamSpec * pspec)
+{
+ GstVdpDevice *device;
+
+ g_return_if_fail (GST_IS_VDP_DEVICE (object));
+
+ device = (GstVdpDevice *) object;
+
+ switch (prop_id) {
+ case PROP_DISPLAY:
+ g_value_set_string (value, device->display_name);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_vdp_device_class_init (GstVdpDeviceClass * klass)
+{
+ GObjectClass *object_class = G_OBJECT_CLASS (klass);
+
+ object_class->constructed = gst_vdp_device_constructed;
+ object_class->finalize = gst_vdp_device_finalize;
+ object_class->get_property = gst_vdp_device_get_property;
+ object_class->set_property = gst_vdp_device_set_property;
+
+ g_object_class_install_property (object_class,
+ PROP_DISPLAY,
+ g_param_spec_string ("display",
+ "Display",
+ "X Display Name",
+ "", G_PARAM_READABLE | G_PARAM_WRITABLE | G_PARAM_CONSTRUCT_ONLY));
+}
+
+GstVdpDevice *
+gst_vdp_device_new (const gchar * display_name)
+{
+ GstVdpDevice *device;
+
+ device = g_object_new (GST_TYPE_VDP_DEVICE, "display", display_name, NULL);
+
+ return device;
+}
+
+static void
+device_destroyed_cb (gpointer data, GObject * object)
+{
+ GHashTable *devices_hash = data;
+ GHashTableIter iter;
+ gpointer device;
+
+ GST_DEBUG ("Removing object from hash table");
+
+ g_hash_table_iter_init (&iter, devices_hash);
+ while (g_hash_table_iter_next (&iter, NULL, &device)) {
+ if (device == object) {
+ g_hash_table_iter_remove (&iter);
+ break;
+ }
+ }
+}
+
+GstVdpDevice *
+gst_vdp_get_device (const gchar * display_name)
+{
+ static gsize once = 0;
+ static GHashTable *devices_hash;
+ GstVdpDevice *device;
+
+ if (g_once_init_enter (&once)) {
+ GST_DEBUG_CATEGORY_INIT (gst_vdp_device_debug, "vdpaudevice",
+ 0, "vdpaudevice");
+ devices_hash =
+ g_hash_table_new_full (g_str_hash, g_str_equal, g_free, NULL);
+ g_once_init_leave (&once, 1);
+ }
+
+ if (display_name)
+ device = g_hash_table_lookup (devices_hash, display_name);
+ else
+ device = g_hash_table_lookup (devices_hash, "");
+
+ if (!device) {
+ device = gst_vdp_device_new (display_name);
+ g_object_weak_ref (G_OBJECT (device), device_destroyed_cb, devices_hash);
+ if (display_name)
+ g_hash_table_insert (devices_hash, g_strdup (display_name), device);
+ else
+ g_hash_table_insert (devices_hash, g_strdup (""), device);
+ } else
+ g_object_ref (device);
+
+ return device;
+}
diff --git a/sys/vdpau/gstvdpdevice.h b/sys/vdpau/gstvdpdevice.h
new file mode 100644
index 00000000..ac036d73
--- /dev/null
+++ b/sys/vdpau/gstvdpdevice.h
@@ -0,0 +1,132 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef _GST_VDP_DEVICE_H_
+#define _GST_VDP_DEVICE_H_
+
+#include <X11/Xlib.h>
+#include <vdpau/vdpau.h>
+
+#include <glib-object.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_VDP_DEVICE (gst_vdp_device_get_type ())
+#define GST_VDP_DEVICE(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_DEVICE, GstVdpDevice))
+#define GST_VDP_DEVICE_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST ((klass), GST_TYPE_VDP_DEVICE, GstVdpDeviceClass))
+#define GST_IS_VDP_DEVICE(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_DEVICE))
+#define GST_IS_VDP_DEVICE_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE ((klass), GST_TYPE_VDP_DEVICE))
+#define GST_VDP_DEVICE_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VDP_DEVICE, GstVdpDeviceClass))
+
+typedef struct _GstVdpDeviceClass GstVdpDeviceClass;
+typedef struct _GstVdpDevice GstVdpDevice;
+
+struct _GstVdpDeviceClass
+{
+ GObjectClass parent_class;
+};
+
+struct _GstVdpDevice
+{
+ GObject object;
+
+ gchar *display_name;
+ Display *display;
+ VdpDevice device;
+
+ VdpDeviceDestroy *vdp_device_destroy;
+ VdpGetProcAddress *vdp_get_proc_address;
+ VdpGetErrorString *vdp_get_error_string;
+
+ VdpVideoSurfaceCreate *vdp_video_surface_create;
+ VdpVideoSurfaceDestroy *vdp_video_surface_destroy;
+ VdpVideoSurfaceQueryCapabilities *vdp_video_surface_query_capabilities;
+ VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *vdp_video_surface_query_ycbcr_capabilities;
+ VdpVideoSurfaceGetParameters *vdp_video_surface_get_parameters;
+ VdpVideoSurfaceGetBitsYCbCr *vdp_video_surface_get_bits_ycbcr;
+ VdpVideoSurfacePutBitsYCbCr *vdp_video_surface_put_bits_ycbcr;
+
+ VdpDecoderCreate *vdp_decoder_create;
+ VdpDecoderDestroy *vdp_decoder_destroy;
+ VdpDecoderRender *vdp_decoder_render;
+ VdpDecoderQueryCapabilities *vdp_decoder_query_capabilities;
+ VdpDecoderGetParameters *vdp_decoder_get_parameters;
+};
+
+typedef struct
+{
+ VdpChromaType chroma_type;
+ VdpYCbCrFormat format;
+ guint32 fourcc;
+} VdpauFormats;
+
+#define N_CHROMA_TYPES 3
+#define N_FORMATS 7
+
+static const VdpChromaType chroma_types[N_CHROMA_TYPES] =
+ { VDP_CHROMA_TYPE_420, VDP_CHROMA_TYPE_422, VDP_CHROMA_TYPE_444 };
+
+static const VdpauFormats formats[N_FORMATS] = {
+ {
+ VDP_CHROMA_TYPE_420,
+ VDP_YCBCR_FORMAT_NV12,
+ GST_MAKE_FOURCC ('N', 'V', '1', '2')
+ },
+ {
+ VDP_CHROMA_TYPE_422,
+ VDP_YCBCR_FORMAT_UYVY,
+ GST_MAKE_FOURCC ('U', 'Y', 'V', 'Y')
+ },
+ {
+ VDP_CHROMA_TYPE_444,
+ VDP_YCBCR_FORMAT_V8U8Y8A8,
+ GST_MAKE_FOURCC ('A', 'Y', 'U', 'V')
+ },
+ {
+ VDP_CHROMA_TYPE_444,
+ VDP_YCBCR_FORMAT_Y8U8V8A8,
+ GST_MAKE_FOURCC ('A', 'V', 'U', 'Y')
+ },
+ {
+ VDP_CHROMA_TYPE_422,
+ VDP_YCBCR_FORMAT_YUYV,
+ GST_MAKE_FOURCC ('Y', 'U', 'Y', 'V')
+ },
+ {
+ VDP_CHROMA_TYPE_420,
+ VDP_YCBCR_FORMAT_YV12,
+ GST_MAKE_FOURCC ('Y', 'V', '1', '2')
+ },
+ {
+ VDP_CHROMA_TYPE_420,
+ VDP_YCBCR_FORMAT_YV12,
+ GST_MAKE_FOURCC ('I', '4', '2', '0')
+ }
+};
+
+GType gst_vdp_device_get_type (void) G_GNUC_CONST;
+
+GstVdpDevice *gst_vdp_device_new (const gchar *display_name);
+
+GstVdpDevice *gst_vdp_get_device (const gchar *display_name);
+
+G_END_DECLS
+
+#endif /* _GST_VDP_DEVICE_H_ */
diff --git a/sys/vdpau/gstvdpmpegdec.c b/sys/vdpau/gstvdpmpegdec.c
new file mode 100644
index 00000000..674146c4
--- /dev/null
+++ b/sys/vdpau/gstvdpmpegdec.c
@@ -0,0 +1,1156 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+/**
+ * SECTION:element-vdpaumpegdec
+ *
+ * FIXME:Describe vdpaumpegdec here.
+ *
+ * <refsect2>
+ * <title>Example launch line</title>
+ * |[
+ * gst-launch -v -m fakesrc ! vdpaumpegdec ! fakesink silent=TRUE
+ * ]|
+ * </refsect2>
+ */
+
+#ifdef HAVE_CONFIG_H
+# include <config.h>
+#endif
+
+#include <gst/gst.h>
+#include <gst/base/gstbytereader.h>
+#include <gst/base/gstbitreader.h>
+#include <string.h>
+
+#include "mpegutil.h"
+#include "gstvdpmpegdec.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_vdp_mpeg_dec_debug);
+#define GST_CAT_DEFAULT gst_vdp_mpeg_dec_debug
+
+/* Filter signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+ PROP_DISPLAY
+};
+
+/* the capabilities of the inputs and outputs.
+ *
+ * describe the real formats here.
+ */
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/mpeg, mpegversion = (int) [ 1, 2 ], "
+ "systemstream = (boolean) false, parsed = (boolean) true")
+ );
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VDP_VIDEO_CAPS));
+
+#define DEBUG_INIT(bla) \
+GST_DEBUG_CATEGORY_INIT (gst_vdp_mpeg_dec_debug, "vdpaumpegdec", 0, "VDPAU powered mpeg decoder");
+
+GST_BOILERPLATE_FULL (GstVdpMpegDec, gst_vdp_mpeg_dec,
+ GstElement, GST_TYPE_ELEMENT, DEBUG_INIT);
+
+static void gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info);
+static void gst_vdp_mpeg_dec_finalize (GObject * object);
+static void gst_vdp_mpeg_dec_set_property (GObject * object,
+ guint prop_id, const GValue * value, GParamSpec * pspec);
+static void gst_vdp_mpeg_dec_get_property (GObject * object,
+ guint prop_id, GValue * value, GParamSpec * pspec);
+
+guint8 *
+mpeg_util_find_start_code (guint32 * sync_word, guint8 * cur, guint8 * end)
+{
+ guint32 code;
+
+ if (G_UNLIKELY (cur == NULL))
+ return NULL;
+
+ code = *sync_word;
+
+ while (cur < end) {
+ code <<= 8;
+
+ if (code == 0x00000100) {
+ /* Reset the sync word accumulator */
+ *sync_word = 0xffffffff;
+ return cur;
+ }
+
+ /* Add the next available byte to the collected sync word */
+ code |= *cur++;
+ }
+
+ *sync_word = code;
+ return NULL;
+}
+
+typedef struct
+{
+ GstBuffer *buffer;
+ guint8 *cur;
+ guint8 *end;
+} GstVdpMpegPacketizer;
+
+static GstBuffer *
+gst_vdp_mpeg_packetizer_get_next_packet (GstVdpMpegPacketizer * packetizer)
+{
+ guint32 sync_word = 0xffffff;
+ guint8 *packet_start;
+ guint8 *packet_end;
+
+ if (!packetizer->cur)
+ return NULL;
+
+ packet_start = packetizer->cur - 3;
+ packetizer->cur = packet_end = mpeg_util_find_start_code (&sync_word,
+ packetizer->cur, packetizer->end);
+
+ if (packet_end)
+ packet_end -= 3;
+ else
+ packet_end = packetizer->end;
+
+ return gst_buffer_create_sub (packetizer->buffer,
+ packet_start - GST_BUFFER_DATA (packetizer->buffer),
+ packet_end - packet_start);
+}
+
+static void
+gst_vdp_mpeg_packetizer_init (GstVdpMpegPacketizer * packetizer,
+ GstBuffer * buffer)
+{
+ guint32 sync_word = 0xffffffff;
+
+ packetizer->buffer = buffer;
+ packetizer->end = GST_BUFFER_DATA (buffer) + GST_BUFFER_SIZE (buffer);
+ packetizer->cur = mpeg_util_find_start_code (&sync_word,
+ GST_BUFFER_DATA (buffer), packetizer->end);
+}
+
+static gboolean
+gst_vdp_mpeg_dec_set_caps (GstPad * pad, GstCaps * caps)
+{
+ GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad));
+ GstStructure *structure;
+
+ gint width, height;
+ gint fps_n, fps_d;
+ gint par_n, par_d;
+ gboolean interlaced = FALSE;
+
+ GstCaps *src_caps;
+ gboolean res;
+
+ const GValue *value;
+ VdpDecoderProfile profile;
+ GstVdpDevice *device;
+ VdpStatus status;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ /* create src_pad caps */
+ gst_structure_get_int (structure, "width", &width);
+ gst_structure_get_int (structure, "height", &height);
+ gst_structure_get_fraction (structure, "framerate", &fps_n, &fps_d);
+ gst_structure_get_fraction (structure, "pixel-aspect-ratio", &par_n, &par_d);
+ gst_structure_get_boolean (structure, "interlaced", &interlaced);
+
+ src_caps = gst_caps_new_simple ("video/x-vdpau-video",
+ "device", G_TYPE_OBJECT, mpeg_dec->device,
+ "chroma-type", G_TYPE_INT, VDP_CHROMA_TYPE_420,
+ "width", G_TYPE_INT, width,
+ "height", G_TYPE_INT, height,
+ "framerate", GST_TYPE_FRACTION, fps_n, fps_d,
+ "pixel-aspect-ratio", GST_TYPE_FRACTION, par_n, par_d,
+ "interlaced", G_TYPE_BOOLEAN, interlaced, NULL);
+
+ GST_DEBUG_OBJECT (mpeg_dec, "Setting source caps to %" GST_PTR_FORMAT,
+ src_caps);
+
+ res = gst_pad_set_caps (mpeg_dec->src, src_caps);
+ gst_caps_unref (src_caps);
+ if (!res)
+ goto done;
+
+ mpeg_dec->width = width;
+ mpeg_dec->height = height;
+ mpeg_dec->fps_n = fps_n;
+ mpeg_dec->fps_d = fps_d;
+ mpeg_dec->interlaced = interlaced;
+
+ /* parse caps to setup decoder */
+ gst_structure_get_int (structure, "mpegversion", &mpeg_dec->version);
+ if (mpeg_dec->version == 1)
+ profile = VDP_DECODER_PROFILE_MPEG1;
+
+ value = gst_structure_get_value (structure, "codec_data");
+ if (value) {
+ GstBuffer *codec_data, *buf;
+ GstVdpMpegPacketizer packetizer;
+
+ codec_data = gst_value_get_buffer (value);
+ gst_vdp_mpeg_packetizer_init (&packetizer, codec_data);
+ if ((buf = gst_vdp_mpeg_packetizer_get_next_packet (&packetizer))) {
+ MPEGSeqHdr hdr;
+ guint32 bitrate;
+
+ mpeg_util_parse_sequence_hdr (&hdr, buf);
+
+ memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix,
+ &hdr.intra_quantizer_matrix, 64);
+ memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix,
+ &hdr.non_intra_quantizer_matrix, 64);
+
+ bitrate = hdr.bitrate;
+ gst_buffer_unref (buf);
+
+ if ((buf = gst_vdp_mpeg_packetizer_get_next_packet (&packetizer))) {
+ MPEGSeqExtHdr ext;
+
+ mpeg_util_parse_sequence_extension (&ext, buf);
+ if (mpeg_dec->version != 1) {
+ switch (ext.profile) {
+ case 5:
+ profile = VDP_DECODER_PROFILE_MPEG2_SIMPLE;
+ break;
+ default:
+ profile = VDP_DECODER_PROFILE_MPEG2_MAIN;
+ break;
+ }
+ }
+
+ bitrate += (ext.bitrate_ext << 18);
+ gst_buffer_unref (buf);
+ }
+
+ mpeg_dec->duration =
+ gst_util_uint64_scale (1, GST_SECOND * mpeg_dec->fps_d,
+ mpeg_dec->fps_n);
+
+ mpeg_dec->byterate = bitrate * 50;
+ GST_DEBUG ("byterate: %" G_GINT64_FORMAT, mpeg_dec->byterate);
+ }
+ }
+
+ device = mpeg_dec->device;
+
+ if (mpeg_dec->decoder != VDP_INVALID_HANDLE) {
+ device->vdp_decoder_destroy (mpeg_dec->decoder);
+ mpeg_dec->decoder = VDP_INVALID_HANDLE;
+ }
+
+ status = device->vdp_decoder_create (device->device, profile, mpeg_dec->width,
+ mpeg_dec->height, 2, &mpeg_dec->decoder);
+ if (status != VDP_STATUS_OK) {
+ GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
+ ("Could not create vdpau decoder"),
+ ("Error returned from vdpau was: %s",
+ device->vdp_get_error_string (status)));
+ res = FALSE;
+ goto done;
+ }
+ res = TRUE;
+
+done:
+ gst_object_unref (mpeg_dec);
+
+ return res;
+}
+
+GstFlowReturn
+gst_vdp_mpeg_dec_push_video_buffer (GstVdpMpegDec * mpeg_dec,
+ GstVdpVideoBuffer * buffer)
+{
+ gint64 byterate;
+
+ if (GST_BUFFER_TIMESTAMP (buffer) == GST_CLOCK_TIME_NONE
+ && GST_CLOCK_TIME_IS_VALID (mpeg_dec->next_timestamp)) {
+ GST_BUFFER_TIMESTAMP (buffer) = mpeg_dec->next_timestamp;
+ } else if (GST_BUFFER_TIMESTAMP (buffer) == GST_CLOCK_TIME_NONE) {
+ GST_BUFFER_TIMESTAMP (buffer) = gst_util_uint64_scale (mpeg_dec->frame_nr,
+ GST_SECOND * mpeg_dec->fps_d, mpeg_dec->fps_n);
+ }
+
+ if (mpeg_dec->seeking) {
+ GstEvent *event;
+
+ event = gst_event_new_new_segment (FALSE,
+ mpeg_dec->segment.rate, GST_FORMAT_TIME, GST_BUFFER_TIMESTAMP (buffer),
+ mpeg_dec->segment.stop, GST_BUFFER_TIMESTAMP (buffer));
+
+ gst_pad_push_event (mpeg_dec->src, event);
+
+ mpeg_dec->seeking = FALSE;
+ }
+
+ mpeg_dec->next_timestamp = GST_BUFFER_TIMESTAMP (buffer) +
+ GST_BUFFER_DURATION (buffer);
+
+ gst_segment_set_last_stop (&mpeg_dec->segment, GST_FORMAT_TIME,
+ GST_BUFFER_TIMESTAMP (buffer));
+
+ mpeg_dec->accumulated_duration += GST_BUFFER_DURATION (buffer);
+ mpeg_dec->accumulated_size += GST_BUFFER_SIZE (buffer);
+ byterate = gst_util_uint64_scale (mpeg_dec->accumulated_size, GST_SECOND,
+ mpeg_dec->accumulated_duration);
+ GST_DEBUG ("byterate: %" G_GINT64_FORMAT, mpeg_dec->byterate);
+
+ mpeg_dec->byterate = (mpeg_dec->byterate + byterate) / 2;
+
+ gst_buffer_set_caps (GST_BUFFER (buffer), GST_PAD_CAPS (mpeg_dec->src));
+
+ GST_DEBUG_OBJECT (mpeg_dec,
+ "Pushing buffer with timestamp: %" GST_TIME_FORMAT
+ " frame_nr: %" G_GINT64_FORMAT,
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buffer)),
+ GST_BUFFER_OFFSET (buffer));
+
+ return gst_pad_push (mpeg_dec->src, GST_BUFFER (buffer));
+}
+
+static GstFlowReturn
+gst_vdp_mpeg_dec_decode (GstVdpMpegDec * mpeg_dec,
+ GstClockTime timestamp, gint64 size)
+{
+ VdpPictureInfoMPEG1Or2 *info;
+ GstBuffer *buffer;
+ GstVdpVideoBuffer *outbuf;
+ VdpVideoSurface surface;
+ GstVdpDevice *device;
+ VdpBitstreamBuffer vbit[1];
+ VdpStatus status;
+
+ info = &mpeg_dec->vdp_info;
+
+ buffer = gst_adapter_take_buffer (mpeg_dec->adapter,
+ gst_adapter_available (mpeg_dec->adapter));
+
+ outbuf = gst_vdp_video_buffer_new (mpeg_dec->device, VDP_CHROMA_TYPE_420,
+ mpeg_dec->width, mpeg_dec->height);
+ GST_BUFFER_TIMESTAMP (outbuf) = timestamp;
+ GST_BUFFER_DURATION (outbuf) = mpeg_dec->duration;
+ GST_BUFFER_OFFSET (outbuf) = mpeg_dec->frame_nr;
+ GST_BUFFER_SIZE (outbuf) = size;
+
+ if (info->picture_coding_type == I_FRAME)
+ GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+ else
+ GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
+
+ if (info->top_field_first)
+ GST_BUFFER_FLAG_SET (outbuf, GST_VIDEO_BUFFER_TFF);
+ else
+ GST_BUFFER_FLAG_UNSET (outbuf, GST_VIDEO_BUFFER_TFF);
+
+
+ if (info->picture_coding_type != B_FRAME) {
+ if (info->backward_reference != VDP_INVALID_HANDLE) {
+ gst_buffer_ref (mpeg_dec->b_buffer);
+ gst_vdp_mpeg_dec_push_video_buffer (mpeg_dec,
+ GST_VDP_VIDEO_BUFFER (mpeg_dec->b_buffer));
+ }
+
+ if (info->forward_reference != VDP_INVALID_HANDLE) {
+ gst_buffer_unref (mpeg_dec->f_buffer);
+ info->forward_reference = VDP_INVALID_HANDLE;
+ }
+
+ info->forward_reference = info->backward_reference;
+ mpeg_dec->f_buffer = mpeg_dec->b_buffer;
+
+ info->backward_reference = VDP_INVALID_HANDLE;
+ }
+
+ if (info->forward_reference != VDP_INVALID_HANDLE &&
+ info->picture_coding_type != I_FRAME)
+ gst_vdp_video_buffer_add_reference (outbuf,
+ GST_VDP_VIDEO_BUFFER (mpeg_dec->f_buffer));
+
+ if (info->backward_reference != VDP_INVALID_HANDLE
+ && info->picture_coding_type == B_FRAME)
+ gst_vdp_video_buffer_add_reference (outbuf,
+ GST_VDP_VIDEO_BUFFER (mpeg_dec->b_buffer));
+
+ surface = outbuf->surface;
+
+ device = mpeg_dec->device;
+
+ vbit[0].struct_version = VDP_BITSTREAM_BUFFER_VERSION;
+ vbit[0].bitstream = GST_BUFFER_DATA (buffer);
+ vbit[0].bitstream_bytes = GST_BUFFER_SIZE (buffer);
+
+ status = device->vdp_decoder_render (mpeg_dec->decoder, surface,
+ (VdpPictureInfo *) info, 1, vbit);
+ gst_buffer_unref (buffer);
+ info->slice_count = 0;
+
+ if (status != VDP_STATUS_OK) {
+ GST_ELEMENT_ERROR (mpeg_dec, RESOURCE, READ,
+ ("Could not decode"),
+ ("Error returned from vdpau was: %s",
+ device->vdp_get_error_string (status)));
+
+ gst_buffer_unref (GST_BUFFER (outbuf));
+
+ return GST_FLOW_ERROR;
+ }
+
+ if (info->picture_coding_type == B_FRAME) {
+ gst_vdp_mpeg_dec_push_video_buffer (mpeg_dec,
+ GST_VDP_VIDEO_BUFFER (outbuf));
+ } else {
+ info->backward_reference = surface;
+ mpeg_dec->b_buffer = GST_BUFFER (outbuf);
+ }
+
+ return GST_FLOW_OK;
+}
+
+static gboolean
+gst_vdp_mpeg_dec_parse_picture_coding (GstVdpMpegDec * mpeg_dec,
+ GstBuffer * buffer)
+{
+ MPEGPictureExt pic_ext;
+ VdpPictureInfoMPEG1Or2 *info;
+ gint fields;
+
+ info = &mpeg_dec->vdp_info;
+
+ if (!mpeg_util_parse_picture_coding_extension (&pic_ext, buffer))
+ return FALSE;
+
+ memcpy (&mpeg_dec->vdp_info.f_code, &pic_ext.f_code, 4);
+
+ info->intra_dc_precision = pic_ext.intra_dc_precision;
+ info->picture_structure = pic_ext.picture_structure;
+ info->top_field_first = pic_ext.top_field_first;
+ info->frame_pred_frame_dct = pic_ext.frame_pred_frame_dct;
+ info->concealment_motion_vectors = pic_ext.concealment_motion_vectors;
+ info->q_scale_type = pic_ext.q_scale_type;
+ info->intra_vlc_format = pic_ext.intra_vlc_format;
+ info->alternate_scan = pic_ext.alternate_scan;
+
+ fields = 2;
+ if (pic_ext.picture_structure == 3) {
+ if (mpeg_dec->interlaced) {
+ if (pic_ext.progressive_frame == 0)
+ fields = 2;
+ if (pic_ext.progressive_frame == 0 && pic_ext.repeat_first_field == 0)
+ fields = 2;
+ if (pic_ext.progressive_frame == 1 && pic_ext.repeat_first_field == 1)
+ fields = 3;
+ } else {
+ if (pic_ext.repeat_first_field == 0)
+ fields = 2;
+ if (pic_ext.repeat_first_field == 1 && pic_ext.top_field_first == 0)
+ fields = 4;
+ if (pic_ext.repeat_first_field == 1 && pic_ext.top_field_first == 1)
+ fields = 6;
+ }
+ } else
+ fields = 1;
+
+ GST_DEBUG ("fields: %d", fields);
+
+ mpeg_dec->duration = gst_util_uint64_scale (fields,
+ GST_SECOND * mpeg_dec->fps_d, 2 * mpeg_dec->fps_n);
+
+ return TRUE;
+}
+
+static gboolean
+gst_vdp_mpeg_dec_parse_sequence (GstVdpMpegDec * mpeg_dec, GstBuffer * buffer)
+{
+ MPEGSeqHdr hdr;
+
+ if (!mpeg_util_parse_sequence_hdr (&hdr, buffer))
+ return FALSE;
+
+ memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix,
+ &hdr.intra_quantizer_matrix, 64);
+ memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix,
+ &hdr.non_intra_quantizer_matrix, 64);
+
+ if (mpeg_dec->state == GST_VDP_MPEG_DEC_NEED_SEQUENCE)
+ mpeg_dec->state = GST_VDP_MPEG_DEC_NEED_DATA;
+
+ return TRUE;
+}
+
+static gboolean
+gst_vdp_mpeg_dec_parse_picture (GstVdpMpegDec * mpeg_dec, GstBuffer * buffer)
+{
+ MPEGPictureHdr pic_hdr;
+
+ if (!mpeg_util_parse_picture_hdr (&pic_hdr, buffer))
+ return FALSE;
+
+ if (pic_hdr.pic_type != I_FRAME
+ && mpeg_dec->vdp_info.backward_reference == VDP_INVALID_HANDLE) {
+ GST_DEBUG_OBJECT (mpeg_dec,
+ "Drop frame since we haven't got an I_FRAME yet");
+ return FALSE;
+ }
+ if (pic_hdr.pic_type == B_FRAME
+ && mpeg_dec->vdp_info.forward_reference == VDP_INVALID_HANDLE) {
+ GST_DEBUG_OBJECT (mpeg_dec,
+ "Drop frame since we haven't got two non B_FRAMES yet");
+ return FALSE;
+ }
+
+ mpeg_dec->vdp_info.picture_coding_type = pic_hdr.pic_type;
+
+ if (mpeg_dec->version == 1) {
+ mpeg_dec->vdp_info.full_pel_forward_vector =
+ pic_hdr.full_pel_forward_vector;
+ mpeg_dec->vdp_info.full_pel_backward_vector =
+ pic_hdr.full_pel_backward_vector;
+ memcpy (&mpeg_dec->vdp_info.f_code, &pic_hdr.f_code, 4);
+ }
+
+ mpeg_dec->frame_nr = mpeg_dec->gop_frame + pic_hdr.tsn;
+
+ return TRUE;
+}
+
+static gboolean
+gst_vdp_mpeg_dec_parse_gop (GstVdpMpegDec * mpeg_dec, GstBuffer * buffer)
+{
+ MPEGGop gop;
+ GstClockTime time;
+
+ if (!mpeg_util_parse_gop (&gop, buffer))
+ return FALSE;
+
+ time = GST_SECOND * (gop.hour * 3600 + gop.minute * 60 + gop.second);
+
+ GST_DEBUG ("gop timestamp: %" GST_TIME_FORMAT, GST_TIME_ARGS (time));
+
+ mpeg_dec->gop_frame =
+ gst_util_uint64_scale (time, mpeg_dec->fps_n,
+ mpeg_dec->fps_d * GST_SECOND) + gop.frame;
+
+ if (mpeg_dec->state == GST_VDP_MPEG_DEC_NEED_GOP)
+ mpeg_dec->state = GST_VDP_MPEG_DEC_NEED_DATA;
+
+ return TRUE;
+}
+
+static gboolean
+gst_vdp_mpeg_dec_parse_quant_matrix (GstVdpMpegDec * mpeg_dec,
+ GstBuffer * buffer)
+{
+ MPEGQuantMatrix qm;
+
+ if (!mpeg_util_parse_quant_matrix (&qm, buffer))
+ return FALSE;
+
+ memcpy (&mpeg_dec->vdp_info.intra_quantizer_matrix,
+ &qm.intra_quantizer_matrix, 64);
+ memcpy (&mpeg_dec->vdp_info.non_intra_quantizer_matrix,
+ &qm.non_intra_quantizer_matrix, 64);
+ return TRUE;
+}
+
+static void
+gst_vdp_mpeg_dec_flush (GstVdpMpegDec * mpeg_dec)
+{
+ if (mpeg_dec->vdp_info.forward_reference != VDP_INVALID_HANDLE)
+ gst_buffer_unref (mpeg_dec->f_buffer);
+ if (mpeg_dec->vdp_info.backward_reference != VDP_INVALID_HANDLE)
+ gst_buffer_unref (mpeg_dec->b_buffer);
+
+ gst_vdp_mpeg_dec_init_info (&mpeg_dec->vdp_info);
+
+ gst_adapter_clear (mpeg_dec->adapter);
+
+ mpeg_dec->next_timestamp = GST_CLOCK_TIME_NONE;
+}
+
+static void
+gst_vdp_mpeg_dec_reset (GstVdpMpegDec * mpeg_dec)
+{
+ gst_vdp_mpeg_dec_flush (mpeg_dec);
+
+ if (mpeg_dec->decoder != VDP_INVALID_HANDLE)
+ mpeg_dec->device->vdp_decoder_destroy (mpeg_dec->decoder);
+ mpeg_dec->decoder = VDP_INVALID_HANDLE;
+ if (mpeg_dec->device)
+ g_object_unref (mpeg_dec->device);
+ mpeg_dec->device = NULL;
+
+ mpeg_dec->state = GST_VDP_MPEG_DEC_NEED_SEQUENCE;
+
+ gst_segment_init (&mpeg_dec->segment, GST_FORMAT_TIME);
+ mpeg_dec->seeking = FALSE;
+
+ mpeg_dec->accumulated_size = 0;
+ mpeg_dec->accumulated_duration = 0;
+}
+
+static GstFlowReturn
+gst_vdp_mpeg_dec_chain (GstPad * pad, GstBuffer * buffer)
+{
+ GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad));
+ GstVdpMpegPacketizer packetizer;
+ GstBuffer *buf;
+ GstFlowReturn ret = GST_FLOW_OK;
+
+ if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buffer, GST_BUFFER_FLAG_DISCONT))) {
+ GST_DEBUG_OBJECT (mpeg_dec, "Received discont buffer");
+ gst_vdp_mpeg_dec_flush (mpeg_dec);
+ }
+
+
+ gst_vdp_mpeg_packetizer_init (&packetizer, buffer);
+ while ((buf = gst_vdp_mpeg_packetizer_get_next_packet (&packetizer))) {
+ GstBitReader b_reader = GST_BIT_READER_INIT_FROM_BUFFER (buf);
+ guint32 sync_code;
+ guint8 start_code;
+
+ /* skip sync_code */
+ gst_bit_reader_get_bits_uint32 (&b_reader, &sync_code, 8 * 3);
+
+ /* start_code */
+ gst_bit_reader_get_bits_uint8 (&b_reader, &start_code, 8);
+
+ if (start_code >= MPEG_PACKET_SLICE_MIN
+ && start_code <= MPEG_PACKET_SLICE_MAX) {
+ GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SLICE");
+
+ gst_buffer_ref (buf);
+ gst_adapter_push (mpeg_dec->adapter, buf);
+ mpeg_dec->vdp_info.slice_count++;
+ }
+
+ switch (start_code) {
+ case MPEG_PACKET_PICTURE:
+ GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_PICTURE");
+
+ if (!gst_vdp_mpeg_dec_parse_picture (mpeg_dec, buf))
+ goto done;
+
+ break;
+ case MPEG_PACKET_SEQUENCE:
+ GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_SEQUENCE");
+ gst_vdp_mpeg_dec_parse_sequence (mpeg_dec, buf);
+ break;
+ case MPEG_PACKET_EXTENSION:
+ {
+ guint8 ext_code;
+
+ GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXTENSION");
+
+ /* ext_code */
+ gst_bit_reader_get_bits_uint8 (&b_reader, &ext_code, 4);
+ switch (ext_code) {
+ case MPEG_PACKET_EXT_PICTURE_CODING:
+ GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_PICTURE_CODING");
+ gst_vdp_mpeg_dec_parse_picture_coding (mpeg_dec, buf);
+ break;
+ case MPEG_PACKET_EXT_QUANT_MATRIX:
+ GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_EXT_QUANT_MATRIX");
+ gst_vdp_mpeg_dec_parse_quant_matrix (mpeg_dec, buf);
+ break;
+ default:
+ break;
+ }
+ break;
+ }
+ case MPEG_PACKET_GOP:
+ GST_DEBUG_OBJECT (mpeg_dec, "MPEG_PACKET_GOP");
+ gst_vdp_mpeg_dec_parse_gop (mpeg_dec, buf);
+ break;
+ default:
+ break;
+ }
+
+ gst_buffer_unref (buf);
+ }
+
+ if (mpeg_dec->state == GST_VDP_MPEG_DEC_NEED_SEQUENCE ||
+ mpeg_dec->state == GST_VDP_MPEG_DEC_NEED_GOP) {
+ gst_adapter_clear (mpeg_dec->adapter);
+ goto done;
+ }
+
+ if (mpeg_dec->vdp_info.slice_count > 0)
+ ret = gst_vdp_mpeg_dec_decode (mpeg_dec, GST_BUFFER_TIMESTAMP (buffer),
+ GST_BUFFER_SIZE (buffer));
+
+done:
+ gst_object_unref (mpeg_dec);
+
+ return ret;
+}
+
+static gboolean
+gst_vdp_mpeg_dec_convert (GstVdpMpegDec * mpeg_dec,
+ GstFormat src_format, gint64 src_value,
+ GstFormat dest_format, gint64 * dest_value)
+{
+
+ if (src_format == dest_format) {
+ *dest_value = src_value;
+ return TRUE;
+ }
+
+ if (mpeg_dec->byterate == -1)
+ return FALSE;
+
+ if (src_format == GST_FORMAT_BYTES && dest_format == GST_FORMAT_TIME) {
+ *dest_value = gst_util_uint64_scale (GST_SECOND, src_value,
+ mpeg_dec->byterate);
+ return TRUE;
+ }
+
+ if (src_format == GST_FORMAT_TIME && dest_format == GST_FORMAT_BYTES) {
+ *dest_value =
+ gst_util_uint64_scale_int (src_value, mpeg_dec->byterate, GST_SECOND);
+ return TRUE;
+ }
+
+ return FALSE;
+}
+
+static const GstQueryType *
+gst_mpeg_dec_get_querytypes (GstPad * pad)
+{
+ static const GstQueryType list[] = {
+ GST_QUERY_POSITION,
+ GST_QUERY_DURATION,
+ 0
+ };
+
+ return list;
+}
+
+static gboolean
+gst_vdp_mpeg_dec_src_query (GstPad * pad, GstQuery * query)
+{
+ GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad));
+ gboolean res;
+
+ switch (GST_QUERY_TYPE (query)) {
+ case GST_QUERY_POSITION:
+ {
+ GstFormat format;
+
+ if ((res = gst_pad_query_default (pad, query)))
+ goto done;
+
+ gst_query_parse_position (query, &format, NULL);
+ if (format == GST_FORMAT_TIME &&
+ GST_CLOCK_TIME_IS_VALID (mpeg_dec->next_timestamp)) {
+ gst_query_set_position (query, GST_FORMAT_TIME,
+ mpeg_dec->next_timestamp);
+ res = TRUE;
+ }
+ break;
+ }
+
+ case GST_QUERY_DURATION:
+ {
+ GstFormat format;
+
+ if ((res = gst_pad_query_default (pad, query)))
+ goto done;
+
+ gst_query_parse_duration (query, &format, NULL);
+ if (format == GST_FORMAT_TIME) {
+ gint64 bytes;
+
+ format = GST_FORMAT_BYTES;
+ if (gst_pad_query_duration (pad, &format, &bytes)
+ && format == GST_FORMAT_BYTES) {
+ gint64 duration;
+
+ if (gst_vdp_mpeg_dec_convert (mpeg_dec, GST_FORMAT_BYTES,
+ bytes, GST_FORMAT_TIME, &duration)) {
+ GST_DEBUG ("duration: %" GST_TIME_FORMAT, GST_TIME_ARGS (duration));
+ gst_query_set_duration (query, GST_FORMAT_TIME, duration);
+ res = TRUE;
+ }
+ }
+ }
+ break;
+ }
+
+ default:
+ res = gst_pad_query_default (pad, query);
+ }
+
+done:
+ gst_object_unref (mpeg_dec);
+
+ return res;
+}
+
+static gboolean
+normal_seek (GstVdpMpegDec * mpeg_dec, GstEvent * event)
+{
+ gdouble rate;
+ GstFormat format;
+ GstSeekFlags flags;
+ GstSeekType cur_type, stop_type;
+ gint64 time_cur, bytes_cur;
+ gint64 time_stop, bytes_stop;
+ gboolean res;
+ gboolean update;
+ GstEvent *peer_event;
+
+ GST_DEBUG ("normal seek");
+
+ gst_event_parse_seek (event, &rate, &format, &flags,
+ &cur_type, &time_cur, &stop_type, &time_stop);
+
+ if (format != GST_FORMAT_TIME)
+ return FALSE;
+
+ gst_segment_set_seek (&mpeg_dec->segment, rate, GST_FORMAT_TIME, flags,
+ cur_type, time_cur, stop_type, time_stop, &update);
+
+ if (update) {
+ /* seek on bytes */
+ if (!gst_vdp_mpeg_dec_convert (mpeg_dec, GST_FORMAT_TIME, time_cur,
+ GST_FORMAT_BYTES, &bytes_cur))
+ goto convert_failed;
+ if (!gst_vdp_mpeg_dec_convert (mpeg_dec, GST_FORMAT_TIME, time_stop,
+ GST_FORMAT_BYTES, &bytes_stop))
+ goto convert_failed;
+
+ /* conversion succeeded, create the seek */
+ peer_event =
+ gst_event_new_seek (rate, GST_FORMAT_BYTES, flags,
+ cur_type, bytes_cur, stop_type, bytes_stop);
+
+ g_mutex_lock (mpeg_dec->mutex);
+
+ /* do the seek */
+ res = gst_pad_push_event (mpeg_dec->sink, peer_event);
+
+ if (res) {
+ mpeg_dec->state = GST_VDP_MPEG_DEC_NEED_GOP;
+ mpeg_dec->seeking = TRUE;
+ }
+
+ g_mutex_unlock (mpeg_dec->mutex);
+
+ } else {
+ GstEvent *event;
+
+ /* send segment with new rate */
+ event = gst_event_new_new_segment (TRUE,
+ mpeg_dec->segment.rate, GST_FORMAT_TIME, mpeg_dec->segment.start,
+ mpeg_dec->segment.stop, mpeg_dec->segment.time);
+
+ gst_pad_push_event (mpeg_dec->src, event);
+ res = TRUE;
+ }
+
+ return res;
+
+ /* ERRORS */
+convert_failed:
+ {
+ /* probably unsupported seek format */
+ GST_DEBUG_OBJECT (mpeg_dec,
+ "failed to convert format %u into GST_FORMAT_TIME", format);
+ return FALSE;
+ }
+}
+
+static gboolean
+gst_vdp_mpeg_dec_src_event (GstPad * pad, GstEvent * event)
+{
+ GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad));
+ gboolean res;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_SEEK:
+ {
+ if ((res = gst_pad_event_default (pad, event)))
+ goto done;
+
+ res = normal_seek (mpeg_dec, event);
+
+ break;
+ }
+ default:
+ res = gst_pad_event_default (pad, event);
+ }
+
+done:
+ gst_object_unref (mpeg_dec);
+
+ return res;
+}
+
+static gboolean
+gst_vdp_mpeg_dec_sink_event (GstPad * pad, GstEvent * event)
+{
+ GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (gst_pad_get_parent (pad));
+ gboolean res;
+
+ switch (GST_EVENT_TYPE (event)) {
+ case GST_EVENT_FLUSH_STOP:
+ {
+ GST_DEBUG_OBJECT (mpeg_dec, "flush stop");
+
+ gst_vdp_mpeg_dec_flush (mpeg_dec);
+ res = gst_pad_push_event (mpeg_dec->src, event);
+
+ break;
+ }
+ case GST_EVENT_NEWSEGMENT:
+ {
+ gboolean update;
+ gdouble rate;
+ GstFormat format;
+ gint64 start;
+ gint64 stop;
+ gint64 position;
+
+ gst_event_parse_new_segment (event, &update, &rate, &format,
+ &start, &stop, &position);
+
+ if (format != GST_FORMAT_TIME) {
+ if (!gst_vdp_mpeg_dec_convert (mpeg_dec, format, start,
+ GST_FORMAT_TIME, &start))
+ goto convert_error;
+ if (!gst_vdp_mpeg_dec_convert (mpeg_dec, format, stop,
+ GST_FORMAT_TIME, &stop))
+ goto convert_error;
+ if (!gst_vdp_mpeg_dec_convert (mpeg_dec, format, position,
+ GST_FORMAT_TIME, &position))
+ goto convert_error;
+
+ gst_event_unref (event);
+ event = gst_event_new_new_segment (update, rate, GST_FORMAT_TIME, start,
+ stop, position);
+ }
+
+ g_mutex_lock (mpeg_dec->mutex);
+ /* if we seek ourselves we don't push out a newsegment now since we
+ * use the calculated timestamp of the first frame for this */
+ if (mpeg_dec->seeking) {
+ gst_event_unref (event);
+ res = TRUE;
+ g_mutex_unlock (mpeg_dec->mutex);
+ goto done;
+ }
+ g_mutex_unlock (mpeg_dec->mutex);
+
+ GST_DEBUG_OBJECT (mpeg_dec,
+ "Pushing new segment update %d format %d start %"
+ GST_TIME_FORMAT " stop %" GST_TIME_FORMAT " position %"
+ GST_TIME_FORMAT, update, format, GST_TIME_ARGS (start),
+ GST_TIME_ARGS (stop), GST_TIME_ARGS (position));
+ convert_error:
+ res = gst_pad_push_event (mpeg_dec->src, event);
+
+ break;
+ }
+ default:
+ res = gst_pad_event_default (pad, event);
+ }
+
+done:
+ gst_object_unref (mpeg_dec);
+
+ return res;
+}
+
+static GstStateChangeReturn
+gst_vdp_mpeg_dec_change_state (GstElement * element, GstStateChange transition)
+{
+ GstVdpMpegDec *mpeg_dec;
+ GstStateChangeReturn ret;
+
+ mpeg_dec = GST_VDP_MPEG_DEC (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_READY_TO_PAUSED:
+ mpeg_dec->device = gst_vdp_get_device (mpeg_dec->display_name);
+ break;
+ default:
+ break;
+ }
+
+ ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_PAUSED_TO_READY:
+ gst_vdp_mpeg_dec_reset (mpeg_dec);
+ break;
+ default:
+ break;
+ }
+
+ return ret;
+}
+
+/* GObject vmethod implementations */
+
+static void
+gst_vdp_mpeg_dec_base_init (gpointer gclass)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
+
+ gst_element_class_set_details_simple (element_class,
+ "VDPAU Mpeg Decoder",
+ "Decoder",
+ "decode mpeg stream with vdpau",
+ "Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
+}
+
+/* initialize the vdpaumpegdecoder's class */
+static void
+gst_vdp_mpeg_dec_class_init (GstVdpMpegDecClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->finalize = gst_vdp_mpeg_dec_finalize;
+ gobject_class->set_property = gst_vdp_mpeg_dec_set_property;
+ gobject_class->get_property = gst_vdp_mpeg_dec_get_property;
+
+ gstelement_class->change_state =
+ GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_change_state);
+
+ g_object_class_install_property (gobject_class, PROP_DISPLAY,
+ g_param_spec_string ("display", "Display", "X Display name",
+ NULL, G_PARAM_READWRITE | G_PARAM_CONSTRUCT));
+}
+
+static void
+gst_vdp_mpeg_dec_init_info (VdpPictureInfoMPEG1Or2 * vdp_info)
+{
+ vdp_info->forward_reference = VDP_INVALID_HANDLE;
+ vdp_info->backward_reference = VDP_INVALID_HANDLE;
+ vdp_info->slice_count = 0;
+ vdp_info->picture_structure = 3;
+ vdp_info->picture_coding_type = 0;
+ vdp_info->intra_dc_precision = 0;
+ vdp_info->frame_pred_frame_dct = 1;
+ vdp_info->concealment_motion_vectors = 0;
+ vdp_info->intra_vlc_format = 0;
+ vdp_info->alternate_scan = 0;
+ vdp_info->q_scale_type = 0;
+ vdp_info->top_field_first = 1;
+}
+
+static void
+gst_vdp_mpeg_dec_init (GstVdpMpegDec * mpeg_dec, GstVdpMpegDecClass * gclass)
+{
+ mpeg_dec->src = gst_pad_new_from_static_template (&src_template, "src");
+ gst_pad_use_fixed_caps (mpeg_dec->src);
+ gst_pad_set_event_function (mpeg_dec->src,
+ GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_src_event));
+ gst_pad_set_query_function (mpeg_dec->src,
+ GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_src_query));
+ gst_pad_set_query_type_function (mpeg_dec->src,
+ GST_DEBUG_FUNCPTR (gst_mpeg_dec_get_querytypes));
+ gst_element_add_pad (GST_ELEMENT (mpeg_dec), mpeg_dec->src);
+
+ mpeg_dec->sink = gst_pad_new_from_static_template (&sink_template, "sink");
+ gst_pad_set_setcaps_function (mpeg_dec->sink,
+ GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_set_caps));
+ gst_pad_set_chain_function (mpeg_dec->sink,
+ GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_chain));
+ gst_pad_set_event_function (mpeg_dec->sink,
+ GST_DEBUG_FUNCPTR (gst_vdp_mpeg_dec_sink_event));
+ gst_element_add_pad (GST_ELEMENT (mpeg_dec), mpeg_dec->sink);
+
+ mpeg_dec->display_name = NULL;
+ mpeg_dec->adapter = gst_adapter_new ();
+
+ mpeg_dec->device = NULL;
+ mpeg_dec->decoder = VDP_INVALID_HANDLE;
+ mpeg_dec->vdp_info.forward_reference = VDP_INVALID_HANDLE;
+ mpeg_dec->vdp_info.backward_reference = VDP_INVALID_HANDLE;
+
+ gst_vdp_mpeg_dec_reset (mpeg_dec);
+
+ mpeg_dec->mutex = g_mutex_new ();
+}
+
+static void
+gst_vdp_mpeg_dec_finalize (GObject * object)
+{
+ GstVdpMpegDec *mpeg_dec = (GstVdpMpegDec *) object;
+
+ g_object_unref (mpeg_dec->adapter);
+ g_mutex_free (mpeg_dec->mutex);
+}
+
+static void
+gst_vdp_mpeg_dec_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (object);
+
+ switch (prop_id) {
+ case PROP_DISPLAY:
+ g_free (mpeg_dec->display_name);
+ mpeg_dec->display_name = g_value_dup_string (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_vdp_mpeg_dec_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstVdpMpegDec *mpeg_dec = GST_VDP_MPEG_DEC (object);
+
+ switch (prop_id) {
+ case PROP_DISPLAY:
+ g_value_set_string (value, mpeg_dec->display_name);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/sys/vdpau/gstvdpmpegdec.h b/sys/vdpau/gstvdpmpegdec.h
new file mode 100644
index 00000000..fc36df5b
--- /dev/null
+++ b/sys/vdpau/gstvdpmpegdec.h
@@ -0,0 +1,105 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_VDP_MPEG_DEC_H__
+#define __GST_VDP_MPEG_DEC_H__
+
+#include <gst/gst.h>
+#include <gst/base/gstadapter.h>
+
+#include "gstvdpdevice.h"
+#include "gstvdpvideobuffer.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_VDP_MPEG_DEC (gst_vdp_mpeg_dec_get_type())
+#define GST_VDP_MPEG_DEC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VDP_MPEG_DEC,GstVdpMpegDec))
+#define GST_VDP_MPEG_DEC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VDP_MPEG_DEC,GstVdpMpegDecClass))
+#define GST_IS_VDP_MPEG_DEC(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VDP_MPEG_DEC))
+#define GST_IS_VDP_MPEG_DEC_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VDP_MPEG_DEC))
+
+typedef enum {
+ GST_VDP_MPEG_DEC_NEED_SEQUENCE,
+ GST_VDP_MPEG_DEC_NEED_GOP,
+ GST_VDP_MPEG_DEC_NEED_DATA
+} GstVdpMpegDecState;
+
+typedef struct _GstVdpMpegDec GstVdpMpegDec;
+typedef struct _GstVdpMpegDecClass GstVdpMpegDecClass;
+
+struct _GstVdpMpegDec
+{
+ GstElement element;
+
+ /* pads */
+ GstPad *src;
+ GstPad *sink;
+
+ gchar *display_name;
+ GstVdpDevice *device;
+ VdpDecoder decoder;
+
+ /* stream info */
+ gint width, height;
+ gint fps_n, fps_d;
+ gboolean interlaced;
+ gint version;
+
+ /* decoder state */
+ GstVdpMpegDecState state;
+
+ /* currently decoded frame info */
+ GstAdapter *adapter;
+ VdpPictureInfoMPEG1Or2 vdp_info;
+ guint64 frame_nr;
+ GstClockTime duration;
+
+ /* frame_nr from GOP */
+ guint64 gop_frame;
+
+ /* forward and backward reference */
+ GstBuffer *f_buffer;
+ GstBuffer *b_buffer;
+
+ /* calculated timestamp, size and duration */
+ GstClockTime next_timestamp;
+ guint64 accumulated_size;
+ guint64 accumulated_duration;
+
+ /* seek data */
+ GstSegment segment;
+ gboolean seeking;
+ gint64 byterate;
+
+ /* mutex */
+ GMutex *mutex;
+
+};
+
+struct _GstVdpMpegDecClass
+{
+ GstElementClass element_class;
+};
+
+GType gst_vdp_mpeg_dec_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_VDP_MPEG_DEC_H__ */
diff --git a/sys/vdpau/gstvdpvideobuffer.c b/sys/vdpau/gstvdpvideobuffer.c
new file mode 100644
index 00000000..8ae14e98
--- /dev/null
+++ b/sys/vdpau/gstvdpvideobuffer.c
@@ -0,0 +1,138 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include "gstvdpvideobuffer.h"
+
+
+void
+gst_vdp_video_buffer_add_reference (GstVdpVideoBuffer * buffer,
+ GstVdpVideoBuffer * buf)
+{
+ g_assert (GST_IS_VDP_VIDEO_BUFFER (buffer));
+ g_assert (GST_IS_VDP_VIDEO_BUFFER (buf));
+
+ gst_buffer_ref (GST_BUFFER (buf));
+ buffer->refs = g_slist_prepend (buffer->refs, buf);
+}
+
+GstVdpVideoBuffer *
+gst_vdp_video_buffer_new (GstVdpDevice * device, VdpChromaType chroma_type,
+ gint width, gint height)
+{
+ GstVdpVideoBuffer *buffer;
+ VdpStatus status;
+ VdpVideoSurface surface;
+
+ status = device->vdp_video_surface_create (device->device, chroma_type, width,
+ height, &surface);
+ if (status != VDP_STATUS_OK) {
+ GST_ERROR ("Couldn't create a VdpVideoSurface, error returned was: %s",
+ device->vdp_get_error_string (status));
+ return NULL;
+ }
+
+ buffer =
+ (GstVdpVideoBuffer *) gst_mini_object_new (GST_TYPE_VDP_VIDEO_BUFFER);
+
+ buffer->device = g_object_ref (device);
+ buffer->surface = surface;
+
+ return buffer;
+}
+
+static GObjectClass *gst_vdp_video_buffer_parent_class;
+
+static void
+gst_vdp_video_buffer_finalize (GstVdpVideoBuffer * buffer)
+{
+ GSList *iter;
+ GstVdpDevice *device;
+ VdpStatus status;
+
+ device = buffer->device;
+
+ status = device->vdp_video_surface_destroy (buffer->surface);
+ if (status != VDP_STATUS_OK)
+ GST_ERROR
+ ("Couldn't destroy the buffers VdpVideoSurface, error returned was: %s",
+ device->vdp_get_error_string (status));
+
+ g_object_unref (buffer->device);
+
+ for (iter = buffer->refs; iter; iter = g_slist_next (iter)) {
+ GstBuffer *buf;
+
+ buf = (GstBuffer *) (iter->data);
+ gst_buffer_unref (buf);
+ }
+ g_slist_free (buffer->refs);
+
+ GST_MINI_OBJECT_CLASS (gst_vdp_video_buffer_parent_class)->finalize
+ (GST_MINI_OBJECT (buffer));
+}
+
+static void
+gst_vdp_video_buffer_init (GstVdpVideoBuffer * buffer, gpointer g_class)
+{
+ buffer->device = NULL;
+ buffer->surface = VDP_INVALID_HANDLE;
+
+ buffer->refs = NULL;
+}
+
+static void
+gst_vdp_video_buffer_class_init (gpointer g_class, gpointer class_data)
+{
+ GstMiniObjectClass *mini_object_class = GST_MINI_OBJECT_CLASS (g_class);
+
+ gst_vdp_video_buffer_parent_class = g_type_class_peek_parent (g_class);
+
+ mini_object_class->finalize = (GstMiniObjectFinalizeFunction)
+ gst_vdp_video_buffer_finalize;
+}
+
+
+GType
+gst_vdp_video_buffer_get_type (void)
+{
+ static GType _gst_vdp_video_buffer_type;
+
+ if (G_UNLIKELY (_gst_vdp_video_buffer_type == 0)) {
+ static const GTypeInfo info = {
+ sizeof (GstBufferClass),
+ NULL,
+ NULL,
+ gst_vdp_video_buffer_class_init,
+ NULL,
+ NULL,
+ sizeof (GstVdpVideoBuffer),
+ 0,
+ (GInstanceInitFunc) gst_vdp_video_buffer_init,
+ NULL
+ };
+ _gst_vdp_video_buffer_type = g_type_register_static (GST_TYPE_BUFFER,
+ "GstVdpVideoBuffer", &info, 0);
+ }
+ return _gst_vdp_video_buffer_type;
+}
diff --git a/sys/vdpau/gstvdpvideobuffer.h b/sys/vdpau/gstvdpvideobuffer.h
new file mode 100644
index 00000000..36eddcbd
--- /dev/null
+++ b/sys/vdpau/gstvdpvideobuffer.h
@@ -0,0 +1,59 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef _GST_VDP_VIDEO_BUFFER_H_
+#define _GST_VDP_VIDEO_BUFFER_H_
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+#include "gstvdpdevice.h"
+
+#include "gstvdpvideobuffer.h"
+
+typedef struct _GstVdpVideoBuffer GstVdpVideoBuffer;
+
+#define GST_TYPE_VDP_VIDEO_BUFFER (gst_vdp_video_buffer_get_type())
+
+#define GST_IS_VDP_VIDEO_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_TYPE ((obj), GST_TYPE_VDP_VIDEO_BUFFER))
+#define GST_VDP_VIDEO_BUFFER(obj) (G_TYPE_CHECK_INSTANCE_CAST ((obj), GST_TYPE_VDP_VIDEO_BUFFER, GstVdpVideoBuffer))
+
+struct _GstVdpVideoBuffer {
+ GstBuffer buffer;
+
+ GstVdpDevice *device;
+ VdpVideoSurface surface;
+
+ GSList *refs;
+};
+
+GType gst_vdp_video_buffer_get_type (void);
+
+GstVdpVideoBuffer* gst_vdp_video_buffer_new (GstVdpDevice * device, VdpChromaType chroma_type, gint width, gint height);
+
+void gst_vdp_video_buffer_add_reference (GstVdpVideoBuffer *buffer, GstVdpVideoBuffer *buf);
+
+#define GST_VDP_VIDEO_CAPS \
+ "video/x-vdpau-video, " \
+ "chroma-type = (int)[0,2], " \
+ "width = (int)[1,4096], " \
+ "height = (int)[1,4096]"
+
+#endif
diff --git a/sys/vdpau/gstvdpvideoyuv.c b/sys/vdpau/gstvdpvideoyuv.c
new file mode 100644
index 00000000..2318cd40
--- /dev/null
+++ b/sys/vdpau/gstvdpvideoyuv.c
@@ -0,0 +1,462 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+#include "gstvdpvideobuffer.h"
+#include "gstvdpvideoyuv.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_vdp_video_yuv_debug);
+#define GST_CAT_DEFAULT gst_vdp_video_yuv_debug
+
+/* Filter signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0
+};
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VDP_VIDEO_CAPS));
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-raw-yuv, "
+ "framerate = (fraction) [ 0, MAX ], "
+ "width = (int) [ 1, MAX ], " "height = (int) [ 1, MAX ]"));
+
+#define DEBUG_INIT(bla) \
+ GST_DEBUG_CATEGORY_INIT (gst_vdp_video_yuv_debug, "vdpauvideoyuv", 0, "VDPAU VdpSurface to YUV");
+
+GST_BOILERPLATE_FULL (GstVdpVideoYUV, gst_vdp_video_yuv, GstElement,
+ GST_TYPE_ELEMENT, DEBUG_INIT);
+
+static void gst_vdp_video_yuv_finalize (GObject * object);
+static void gst_vdp_video_yuv_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_vdp_video_yuv_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+GstFlowReturn
+gst_vdp_video_yuv_chain (GstPad * pad, GstBuffer * buffer)
+{
+ GstVdpVideoYUV *video_yuv;
+ GstVdpDevice *device;
+ VdpVideoSurface surface;
+ GstBuffer *outbuf = NULL;
+ GstFlowReturn result = GST_FLOW_ERROR;
+
+ video_yuv = GST_VDP_VIDEO_YUV (GST_OBJECT_PARENT (pad));
+ device = GST_VDP_VIDEO_BUFFER (buffer)->device;
+ surface = GST_VDP_VIDEO_BUFFER (buffer)->surface;
+
+ GST_LOG_OBJECT (video_yuv, "Received buffer format %" GST_FOURCC_FORMAT,
+ GST_FOURCC_ARGS (video_yuv->format));
+
+ switch (video_yuv->format) {
+ case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
+ {
+ gint size;
+ VdpStatus status;
+ guint8 *data[3];
+ guint32 stride[3];
+
+ size =
+ gst_video_format_get_size (GST_VIDEO_FORMAT_YV12, video_yuv->width,
+ video_yuv->height);
+ result =
+ gst_pad_alloc_buffer_and_set_caps (video_yuv->src,
+ GST_BUFFER_OFFSET_NONE, size, GST_PAD_CAPS (video_yuv->src), &outbuf);
+ if (G_UNLIKELY (result != GST_FLOW_OK)) {
+ GST_DEBUG_OBJECT (video_yuv, "Pad alloc_buffer returned %d", result);
+ goto done;
+ }
+
+ data[0] = GST_BUFFER_DATA (outbuf) +
+ gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
+ 0, video_yuv->width, video_yuv->height);
+ data[1] = GST_BUFFER_DATA (outbuf) +
+ gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
+ 2, video_yuv->width, video_yuv->height);
+ data[2] = GST_BUFFER_DATA (outbuf) +
+ gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
+ 1, video_yuv->width, video_yuv->height);
+
+ stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
+ 0, video_yuv->width);
+ stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
+ 2, video_yuv->width);
+ stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
+ 1, video_yuv->width);
+
+ status =
+ device->vdp_video_surface_get_bits_ycbcr (surface,
+ VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
+ GST_LOG_OBJECT (video_yuv,
+ "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
+ if (G_UNLIKELY (status != VDP_STATUS_OK)) {
+ GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
+ ("Couldn't get data from vdpau"),
+ ("Error returned from vdpau was: %s",
+ device->vdp_get_error_string (status)));
+ goto done;
+ }
+ break;
+ }
+ case GST_MAKE_FOURCC ('I', '4', '2', '0'):
+ {
+ gint size;
+ VdpStatus status;
+ guint8 *data[3];
+ guint32 stride[3];
+
+ size =
+ gst_video_format_get_size (GST_VIDEO_FORMAT_YV12, video_yuv->width,
+ video_yuv->height);
+ result =
+ gst_pad_alloc_buffer_and_set_caps (video_yuv->src,
+ GST_BUFFER_OFFSET_NONE, size, GST_PAD_CAPS (video_yuv->src), &outbuf);
+ if (G_UNLIKELY (result != GST_FLOW_OK)) {
+ GST_DEBUG_OBJECT (video_yuv, "Pad alloc_buffer returned %d", result);
+ goto done;
+ }
+
+ data[0] = GST_BUFFER_DATA (outbuf) +
+ gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
+ 0, video_yuv->width, video_yuv->height);
+ data[1] = GST_BUFFER_DATA (outbuf) +
+ gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
+ 2, video_yuv->width, video_yuv->height);
+ data[2] = GST_BUFFER_DATA (outbuf) +
+ gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
+ 1, video_yuv->width, video_yuv->height);
+
+ stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
+ 0, video_yuv->width);
+ stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
+ 2, video_yuv->width);
+ stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
+ 1, video_yuv->width);
+
+ status =
+ device->vdp_video_surface_get_bits_ycbcr (surface,
+ VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
+ GST_LOG_OBJECT (video_yuv,
+ "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
+ if (G_UNLIKELY (status != VDP_STATUS_OK)) {
+ GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
+ ("Couldn't get data from vdpau"),
+ ("Error returned from vdpau was: %s",
+ device->vdp_get_error_string (status)));
+ goto done;
+ }
+ break;
+ }
+ case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
+ {
+ gint size;
+ VdpStatus status;
+ guint8 *data[2];
+ guint32 stride[2];
+
+ size =
+ video_yuv->width * video_yuv->height +
+ video_yuv->width * video_yuv->height / 2;
+ GST_LOG_OBJECT (video_yuv, "Entering buffer_alloc");
+ result =
+ gst_pad_alloc_buffer_and_set_caps (video_yuv->src,
+ GST_BUFFER_OFFSET_NONE, size, GST_PAD_CAPS (video_yuv->src), &outbuf);
+ if (G_UNLIKELY (result != GST_FLOW_OK)) {
+ GST_DEBUG_OBJECT (video_yuv, "Pad alloc_buffer returned %d", result);
+ goto done;
+ }
+
+ data[0] = GST_BUFFER_DATA (outbuf);
+ data[1] = GST_BUFFER_DATA (outbuf) + video_yuv->width * video_yuv->height;
+
+ stride[0] = video_yuv->width;
+ stride[1] = video_yuv->width;
+
+ GST_LOG_OBJECT (video_yuv, "Entering vdp_video_surface_get_bits_ycbcr");
+ status =
+ device->vdp_video_surface_get_bits_ycbcr (surface,
+ VDP_YCBCR_FORMAT_NV12, (void *) data, stride);
+ GST_LOG_OBJECT (video_yuv,
+ "Got status %d from vdp_video_surface_get_bits_ycbcr", status);
+ if (G_UNLIKELY (status != VDP_STATUS_OK)) {
+ GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
+ ("Couldn't get data from vdpau"),
+ ("Error returned from vdpau was: %s",
+ device->vdp_get_error_string (status)));
+ goto done;
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ gst_buffer_unref (buffer);
+
+ gst_buffer_copy_metadata (outbuf, buffer, GST_BUFFER_COPY_TIMESTAMPS);
+ GST_LOG_OBJECT (video_yuv, "Pushing buffer with ts %" GST_TIME_FORMAT,
+ GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (outbuf)));
+ return gst_pad_push (video_yuv->src, outbuf);
+
+done:
+ if (outbuf)
+ gst_buffer_unref (outbuf);
+ gst_buffer_unref (buffer);
+ return result;
+}
+
+static GstCaps *
+gst_vdp_video_yuv_get_caps (GstVdpVideoYUV * video_yuv,
+ GstVdpDevice * device, gint chroma_type, gint width, gint height,
+ gint framerate_numerator, gint framerate_denominator, gint par_numerator,
+ gint par_denominator)
+{
+ GstCaps *caps;
+ gint i;
+
+ caps = gst_caps_new_empty ();
+
+ for (i = 0; i < N_FORMATS; i++) {
+ VdpStatus status;
+ VdpBool is_supported;
+
+ if (formats[i].chroma_type != chroma_type)
+ continue;
+
+ status =
+ device->vdp_video_surface_query_ycbcr_capabilities (device->device,
+ chroma_type, formats[i].format, &is_supported);
+ if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_Y_CB_CR_FORMAT) {
+ GST_ELEMENT_ERROR (video_yuv, RESOURCE, READ,
+ ("Could not query VDPAU YCbCr capabilites"),
+ ("Error returned from vdpau was: %s",
+ device->vdp_get_error_string (status)));
+
+ return NULL;
+ }
+ if (is_supported) {
+ GstCaps *format_caps;
+
+ format_caps = gst_caps_new_simple ("video/x-raw-yuv",
+ "format", GST_TYPE_FOURCC, formats[i].fourcc,
+ "width", G_TYPE_INT, width,
+ "height", G_TYPE_INT, height,
+ "framerate", GST_TYPE_FRACTION, framerate_numerator,
+ framerate_denominator, "pixel-aspect-ratio", GST_TYPE_FRACTION,
+ par_numerator, par_denominator, NULL);
+ gst_caps_append (caps, format_caps);
+ }
+ }
+
+ if (gst_caps_is_empty (caps)) {
+ gst_caps_unref (caps);
+ return NULL;
+ }
+
+ return caps;
+}
+
+static gboolean
+gst_vdp_video_yuv_sink_set_caps (GstPad * pad, GstCaps * caps)
+{
+ GstVdpVideoYUV *video_yuv = GST_VDP_VIDEO_YUV (GST_OBJECT_PARENT (pad));
+
+ GstCaps *src_caps, *new_caps;
+ GstStructure *structure;
+ const GValue *value;
+ GstVdpDevice *device;
+ gint chroma_type;
+ gint width, height;
+ gint framerate_numerator, framerate_denominator;
+ gint par_numerator, par_denominator;
+ guint32 fourcc_format;
+ gboolean res;
+
+ structure = gst_caps_get_structure (caps, 0);
+ value = gst_structure_get_value (structure, "device");
+ device = g_value_get_object (value);
+
+ gst_structure_get_int (structure, "chroma-type", &chroma_type);
+ gst_structure_get_int (structure, "width", &width);
+ gst_structure_get_int (structure, "height", &height);
+ gst_structure_get_fraction (structure, "framerate",
+ &framerate_numerator, &framerate_denominator);
+ gst_structure_get_fraction (structure, "pixel-aspect-ratio",
+ &par_numerator, &par_denominator);
+
+ src_caps =
+ gst_vdp_video_yuv_get_caps (video_yuv, device, chroma_type, width,
+ height, framerate_numerator, framerate_denominator, par_numerator,
+ par_denominator);
+ if (G_UNLIKELY (!src_caps))
+ return FALSE;
+
+ video_yuv->src_caps = src_caps;
+
+ src_caps = gst_pad_get_allowed_caps (video_yuv->src);
+ if (G_UNLIKELY (!src_caps || !gst_caps_get_size (src_caps)))
+ return FALSE;
+
+ new_caps = gst_caps_copy_nth (src_caps, 0);
+ gst_caps_unref (src_caps);
+ if (G_UNLIKELY (!new_caps))
+ return FALSE;
+
+ structure = gst_caps_get_structure (new_caps, 0);
+ gst_structure_get_fourcc (structure, "format", &fourcc_format);
+
+ gst_pad_fixate_caps (video_yuv->src, new_caps);
+ res = gst_pad_set_caps (video_yuv->src, new_caps);
+
+ gst_caps_unref (new_caps);
+
+ if (G_UNLIKELY (!res))
+ return FALSE;
+
+ video_yuv->width = width;
+ video_yuv->height = height;
+ video_yuv->framerate_numerator = framerate_numerator;
+ video_yuv->framerate_denominator = framerate_denominator;
+ video_yuv->format = fourcc_format;
+
+ return TRUE;
+}
+
+static GstCaps *
+gst_vdp_video_yuv_src_getcaps (GstPad * pad)
+{
+ GstVdpVideoYUV *video_yuv;
+
+ video_yuv = GST_VDP_VIDEO_YUV (GST_OBJECT_PARENT (pad));
+
+ if (video_yuv->src_caps)
+ return gst_caps_copy (video_yuv->src_caps);
+
+ if (GST_PAD_CAPS (video_yuv->src))
+ return gst_caps_copy (GST_PAD_CAPS (video_yuv->src));
+
+ return gst_caps_copy (gst_pad_get_pad_template_caps (video_yuv->src));
+}
+
+/* GObject vmethod implementations */
+
+static void
+gst_vdp_video_yuv_base_init (gpointer klass)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ gst_element_class_set_details_simple (element_class,
+ "VdpauVideoYUV",
+ "Covideo_yuv/Decoder/Video",
+ "VDPAU video surface to YUV",
+ "Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
+}
+
+static void
+gst_vdp_video_yuv_class_init (GstVdpVideoYUVClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->finalize = gst_vdp_video_yuv_finalize;
+ gobject_class->set_property = gst_vdp_video_yuv_set_property;
+ gobject_class->get_property = gst_vdp_video_yuv_get_property;
+}
+
+static void
+gst_vdp_video_yuv_init (GstVdpVideoYUV * video_yuv, GstVdpVideoYUVClass * klass)
+{
+ video_yuv->src_caps = NULL;
+
+ video_yuv->height = 0;
+ video_yuv->width = 0;
+ video_yuv->framerate_numerator = 0;
+ video_yuv->framerate_denominator = 0;
+ video_yuv->par_numerator = 1;
+ video_yuv->par_denominator = 1;
+
+ video_yuv->src = gst_pad_new_from_static_template (&src_template, "src");
+ gst_pad_set_getcaps_function (video_yuv->src, gst_vdp_video_yuv_src_getcaps);
+ gst_element_add_pad (GST_ELEMENT (video_yuv), video_yuv->src);
+
+ video_yuv->sink = gst_pad_new_from_static_template (&sink_template, "sink");
+ gst_pad_set_setcaps_function (video_yuv->sink,
+ gst_vdp_video_yuv_sink_set_caps);
+ gst_pad_set_chain_function (video_yuv->sink, gst_vdp_video_yuv_chain);
+ gst_element_add_pad (GST_ELEMENT (video_yuv), video_yuv->sink);
+ gst_pad_set_active (video_yuv->sink, TRUE);
+}
+
+static void
+gst_vdp_video_yuv_finalize (GObject * object)
+{
+ GstVdpVideoYUV *video_yuv = (GstVdpVideoYUV *) object;
+
+ if (video_yuv->src_caps)
+ gst_caps_unref (video_yuv->src_caps);
+}
+
+static void
+gst_vdp_video_yuv_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ switch (prop_id) {
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_vdp_video_yuv_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ switch (prop_id) {
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/sys/vdpau/gstvdpvideoyuv.h b/sys/vdpau/gstvdpvideoyuv.h
new file mode 100644
index 00000000..935fe700
--- /dev/null
+++ b/sys/vdpau/gstvdpvideoyuv.h
@@ -0,0 +1,60 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_VDP_VIDEO_YUV_H__
+#define __GST_VDP_VIDEO_YUV_H__
+
+#include <gst/gst.h>
+
+#include "gstvdpdevice.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_VDP_VIDEO_YUV (gst_vdp_video_yuv_get_type())
+#define GST_VDP_VIDEO_YUV(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VDP_VIDEO_YUV,GstVdpVideoYUV))
+#define GST_VDP_VIDEO_YUV_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VDP_VIDEO_YUV,GstVdpVideoYUVClass))
+#define GST_VDP_VIDEO_YUV_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VDP_VIDEO_YUV, GstVdpVideoYUVClass))
+#define GST_IS_VDP_VIDEO_YUV(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VDP_VIDEO_YUV))
+#define GST_IS_VDP_VIDEO_YUV_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VDP_VIDEO_YUV))
+
+typedef struct _GstVdpVideoYUV GstVdpVideoYUV;
+typedef struct _GstVdpVideoYUVClass GstVdpVideoYUVClass;
+
+struct _GstVdpVideoYUV {
+ GstElement element;
+
+ GstPad *src, *sink;
+ GstCaps *src_caps;
+
+ gint width, height;
+ gint framerate_numerator, framerate_denominator;
+ gint par_numerator, par_denominator;
+ guint format;
+};
+
+struct _GstVdpVideoYUVClass {
+ GstElementClass parent_class;
+};
+
+GType gst_vdp_video_yuv_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_VDP_VIDEO_YUV_H__ */
diff --git a/sys/vdpau/gstvdpyuvvideo.c b/sys/vdpau/gstvdpyuvvideo.c
new file mode 100644
index 00000000..72c053e6
--- /dev/null
+++ b/sys/vdpau/gstvdpyuvvideo.c
@@ -0,0 +1,476 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifdef HAVE_CONFIG_H
+#include "config.h"
+#endif
+
+#include <gst/gst.h>
+#include <gst/video/video.h>
+
+#include "gstvdpvideobuffer.h"
+#include "gstvdpyuvvideo.h"
+
+GST_DEBUG_CATEGORY_STATIC (gst_vdp_yuv_video_debug);
+#define GST_CAT_DEFAULT gst_vdp_yuv_video_debug
+
+/* Filter signals and args */
+enum
+{
+ /* FILL ME */
+ LAST_SIGNAL
+};
+
+enum
+{
+ PROP_0,
+ PROP_DISPLAY
+};
+
+static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-raw-yuv, "
+ "framerate = (fraction) [ 0, MAX ], "
+ "width = (int) [ 1, MAX ], " "height = (int) [ 1, MAX ]"));
+
+static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
+ GST_PAD_SRC,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS (GST_VDP_VIDEO_CAPS));
+
+#define DEBUG_INIT(bla) \
+ GST_DEBUG_CATEGORY_INIT (gst_vdp_yuv_video_debug, "vdpauvideoyuv", 0, "YUV to VDPAU video surface");
+
+GST_BOILERPLATE_FULL (GstVdpYUVVideo, gst_vdp_yuv_video, GstElement,
+ GST_TYPE_ELEMENT, DEBUG_INIT);
+
+static void gst_vdp_yuv_video_finalize (GObject * object);
+static void gst_vdp_yuv_video_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec);
+static void gst_vdp_yuv_video_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec);
+
+GstFlowReturn
+gst_vdp_yuv_video_chain (GstPad * pad, GstBuffer * buffer)
+{
+ GstVdpYUVVideo *yuv_video;
+ GstVdpDevice *device;
+ VdpVideoSurface surface;
+ GstBuffer *outbuf = NULL;
+
+ yuv_video = GST_VDP_YUV_VIDEO (GST_OBJECT_PARENT (pad));
+ device = yuv_video->device;
+
+ outbuf =
+ GST_BUFFER (gst_vdp_video_buffer_new (device, yuv_video->chroma_type,
+ yuv_video->width, yuv_video->height));
+ surface = GST_VDP_VIDEO_BUFFER (outbuf)->surface;
+
+ switch (yuv_video->format) {
+ case GST_MAKE_FOURCC ('Y', 'V', '1', '2'):
+ {
+ VdpStatus status;
+ guint8 *data[3];
+ guint32 stride[3];
+
+ data[0] = GST_BUFFER_DATA (buffer) +
+ gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
+ 0, yuv_video->width, yuv_video->height);
+ data[1] = GST_BUFFER_DATA (buffer) +
+ gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
+ 2, yuv_video->width, yuv_video->height);
+ data[2] = GST_BUFFER_DATA (buffer) +
+ gst_video_format_get_component_offset (GST_VIDEO_FORMAT_YV12,
+ 1, yuv_video->width, yuv_video->height);
+
+ stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
+ 0, yuv_video->width);
+ stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
+ 2, yuv_video->width);
+ stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_YV12,
+ 1, yuv_video->width);
+
+ status =
+ device->vdp_video_surface_put_bits_ycbcr (surface,
+ VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
+ if (G_UNLIKELY (status != VDP_STATUS_OK)) {
+ GST_ELEMENT_ERROR (yuv_video, RESOURCE, READ,
+ ("Couldn't push YV12 data to VDPAU"),
+ ("Error returned from vdpau was: %s",
+ device->vdp_get_error_string (status)));
+ goto error;
+ }
+ break;
+ }
+ case GST_MAKE_FOURCC ('I', '4', '2', '0'):
+ {
+ VdpStatus status;
+ guint8 *data[3];
+ guint32 stride[3];
+
+ data[0] = GST_BUFFER_DATA (buffer) +
+ gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
+ 0, yuv_video->width, yuv_video->height);
+ data[1] = GST_BUFFER_DATA (buffer) +
+ gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
+ 2, yuv_video->width, yuv_video->height);
+ data[2] = GST_BUFFER_DATA (buffer) +
+ gst_video_format_get_component_offset (GST_VIDEO_FORMAT_I420,
+ 1, yuv_video->width, yuv_video->height);
+
+ stride[0] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
+ 0, yuv_video->width);
+ stride[1] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
+ 2, yuv_video->width);
+ stride[2] = gst_video_format_get_row_stride (GST_VIDEO_FORMAT_I420,
+ 1, yuv_video->width);
+
+ status =
+ device->vdp_video_surface_put_bits_ycbcr (surface,
+ VDP_YCBCR_FORMAT_YV12, (void *) data, stride);
+ if (G_UNLIKELY (status != VDP_STATUS_OK)) {
+ GST_ELEMENT_ERROR (yuv_video, RESOURCE, READ,
+ ("Couldn't push YV12 data to VDPAU"),
+ ("Error returned from vdpau was: %s",
+ device->vdp_get_error_string (status)));
+ goto error;
+ }
+ break;
+ }
+ case GST_MAKE_FOURCC ('N', 'V', '1', '2'):
+ {
+ VdpStatus status;
+ guint8 *data[2];
+ guint32 stride[2];
+
+ data[0] = GST_BUFFER_DATA (buffer);
+ data[1] = GST_BUFFER_DATA (buffer) + yuv_video->width * yuv_video->height;
+
+ stride[0] = yuv_video->width;
+ stride[1] = yuv_video->width;
+
+ status =
+ device->vdp_video_surface_put_bits_ycbcr (surface,
+ VDP_YCBCR_FORMAT_NV12, (void *) data, stride);
+ if (G_UNLIKELY (status != VDP_STATUS_OK)) {
+ GST_ELEMENT_ERROR (yuv_video, RESOURCE, READ,
+ ("Couldn't get data from vdpau"),
+ ("Error returned from vdpau was: %s",
+ device->vdp_get_error_string (status)));
+ goto error;
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ gst_buffer_unref (buffer);
+
+ gst_buffer_copy_metadata (outbuf, buffer, GST_BUFFER_COPY_TIMESTAMPS);
+ gst_buffer_set_caps (outbuf, GST_PAD_CAPS (yuv_video->src));
+
+ return gst_pad_push (yuv_video->src, outbuf);
+
+error:
+ gst_buffer_unref (outbuf);
+ return GST_FLOW_ERROR;
+}
+
+static GstCaps *
+gst_vdp_yuv_video_get_caps (GstVdpYUVVideo * yuv_video)
+{
+ GstVdpDevice *device;
+ GstCaps *caps;
+ gint i;
+
+ device = yuv_video->device;
+
+ caps = gst_caps_new_empty ();
+
+ for (i = 0; i < N_CHROMA_TYPES; i++) {
+ VdpStatus status;
+ VdpBool is_supported;
+ guint32 max_w, max_h;
+
+ status =
+ device->vdp_video_surface_query_capabilities (device->device,
+ chroma_types[i], &is_supported, &max_w, &max_h);
+
+ if (status != VDP_STATUS_OK && status != VDP_STATUS_INVALID_CHROMA_TYPE) {
+ GST_ELEMENT_ERROR (yuv_video, RESOURCE, READ,
+ ("Could not get query VDPAU video surface capabilites"),
+ ("Error returned from vdpau was: %s",
+ device->vdp_get_error_string (status)));
+
+ goto error;
+ }
+ if (is_supported) {
+ gint j;
+
+ for (j = 0; j < N_FORMATS; j++) {
+ if (formats[j].chroma_type != chroma_types[i])
+ continue;
+
+ status =
+ device->vdp_video_surface_query_ycbcr_capabilities (device->device,
+ formats[j].chroma_type, formats[j].format, &is_supported);
+ if (status != VDP_STATUS_OK
+ && status != VDP_STATUS_INVALID_Y_CB_CR_FORMAT) {
+ GST_ELEMENT_ERROR (yuv_video, RESOURCE, READ,
+ ("Could not query VDPAU YCbCr capabilites"),
+ ("Error returned from vdpau was: %s",
+ device->vdp_get_error_string (status)));
+
+ goto error;
+ }
+ if (is_supported) {
+ GstCaps *format_caps;
+
+ format_caps = gst_caps_new_simple ("video/x-raw-yuv",
+ "format", GST_TYPE_FOURCC, formats[j].fourcc,
+ "width", GST_TYPE_INT_RANGE, 1, max_w,
+ "height", GST_TYPE_INT_RANGE, 1, max_h,
+ "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
+ gst_caps_append (caps, format_caps);
+ }
+ }
+ }
+ }
+error:
+ if (gst_caps_is_empty (caps)) {
+ gst_caps_unref (caps);
+ return NULL;
+ }
+
+ return caps;
+}
+
+static gboolean
+gst_vdp_yuv_video_sink_setcaps (GstPad * pad, GstCaps * caps)
+{
+ GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (GST_OBJECT_PARENT (pad));
+
+ GstStructure *structure;
+ guint32 fourcc;
+ gint chroma_type = 0;
+ gint width, height;
+ gint framerate_numerator, framerate_denominator;
+ gint par_numerator, par_denominator;
+ gint i;
+ GstCaps *src_caps, *new_caps;
+ gboolean res;
+
+ structure = gst_caps_get_structure (caps, 0);
+
+ gst_structure_get_fourcc (structure, "format", &fourcc);
+ gst_structure_get_int (structure, "width", &width);
+ gst_structure_get_int (structure, "height", &height);
+ gst_structure_get_fraction (structure, "framerate",
+ &framerate_numerator, &framerate_denominator);
+ gst_structure_get_fraction (structure, "pixel-aspect-ratio",
+ &par_numerator, &par_denominator);
+
+ for (i = 0; i < N_FORMATS; i++) {
+ if (formats[i].fourcc == fourcc) {
+ chroma_type = formats[i].chroma_type;
+ break;
+ }
+ }
+
+ src_caps = gst_pad_get_allowed_caps (yuv_video->src);
+ if (G_UNLIKELY (!src_caps || !gst_caps_get_size (src_caps)))
+ return FALSE;
+
+ new_caps = gst_caps_copy_nth (src_caps, 0);
+ gst_caps_unref (src_caps);
+ if (G_UNLIKELY (!new_caps))
+ return FALSE;
+
+ structure = gst_caps_get_structure (new_caps, 0);
+
+ gst_structure_set (structure,
+ "device", G_TYPE_OBJECT, yuv_video->device,
+ "chroma-type", G_TYPE_INT, chroma_type,
+ "width", G_TYPE_INT, width,
+ "height", G_TYPE_INT, height,
+ "framerate", GST_TYPE_FRACTION, framerate_numerator,
+ framerate_denominator, "pixel-aspect-ratio", GST_TYPE_FRACTION,
+ par_numerator, par_denominator, NULL);
+
+ gst_pad_fixate_caps (yuv_video->src, new_caps);
+ res = gst_pad_set_caps (yuv_video->src, new_caps);
+
+ gst_caps_unref (new_caps);
+
+ if (G_UNLIKELY (!res))
+ return FALSE;
+
+ yuv_video->width = width;
+ yuv_video->height = height;
+ yuv_video->format = fourcc;
+ yuv_video->chroma_type = chroma_type;
+
+ return TRUE;
+}
+
+static GstCaps *
+gst_vdp_yuv_video_sink_getcaps (GstPad * pad)
+{
+ GstVdpYUVVideo *yuv_video;
+
+ yuv_video = GST_VDP_YUV_VIDEO (GST_OBJECT_PARENT (pad));
+
+ if (yuv_video->sink_caps)
+ return gst_caps_copy (yuv_video->sink_caps);
+
+ return gst_caps_copy (gst_pad_get_pad_template_caps (yuv_video->sink));
+}
+
+static GstStateChangeReturn
+gst_vdp_yuv_video_change_state (GstElement * element, GstStateChange transition)
+{
+ GstVdpYUVVideo *yuv_video;
+
+ yuv_video = GST_VDP_YUV_VIDEO (element);
+
+ switch (transition) {
+ case GST_STATE_CHANGE_NULL_TO_READY:
+ yuv_video->device = gst_vdp_get_device (yuv_video->display);
+ if (!yuv_video->sink_caps)
+ yuv_video->sink_caps = gst_vdp_yuv_video_get_caps (yuv_video);
+ break;
+ case GST_STATE_CHANGE_READY_TO_NULL:
+ g_object_unref (yuv_video->device);
+ yuv_video->device = NULL;
+ break;
+ default:
+ break;
+ }
+
+ return GST_STATE_CHANGE_SUCCESS;
+}
+
+/* GObject vmethod implementations */
+
+static void
+gst_vdp_yuv_video_base_init (gpointer klass)
+{
+ GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+
+ gst_element_class_set_details_simple (element_class,
+ "VdpauYUVVideo",
+ "Coyuv_video/Decoder/Video",
+ "VDPAU video surface to YUV",
+ "Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>");
+
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&sink_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&src_template));
+}
+
+static void
+gst_vdp_yuv_video_class_init (GstVdpYUVVideoClass * klass)
+{
+ GObjectClass *gobject_class;
+ GstElementClass *gstelement_class;
+
+ gobject_class = (GObjectClass *) klass;
+ gstelement_class = (GstElementClass *) klass;
+
+ gobject_class->finalize = gst_vdp_yuv_video_finalize;
+ gobject_class->set_property = gst_vdp_yuv_video_set_property;
+ gobject_class->get_property = gst_vdp_yuv_video_get_property;
+
+ g_object_class_install_property (gobject_class, PROP_DISPLAY,
+ g_param_spec_string ("display", "Display", "X Display name",
+ NULL, G_PARAM_READWRITE | G_PARAM_CONSTRUCT));
+
+ gstelement_class->change_state = gst_vdp_yuv_video_change_state;
+}
+
+static void
+gst_vdp_yuv_video_init (GstVdpYUVVideo * yuv_video, GstVdpYUVVideoClass * klass)
+{
+ yuv_video->sink_caps = NULL;
+
+ yuv_video->display = NULL;
+ yuv_video->device = NULL;
+
+ yuv_video->height = 0;
+ yuv_video->width = 0;
+ yuv_video->format = 0;
+ yuv_video->chroma_type = 0;
+
+ yuv_video->src = gst_pad_new_from_static_template (&src_template, "src");
+ gst_element_add_pad (GST_ELEMENT (yuv_video), yuv_video->src);
+
+ yuv_video->sink = gst_pad_new_from_static_template (&sink_template, "sink");
+ gst_pad_set_getcaps_function (yuv_video->sink,
+ gst_vdp_yuv_video_sink_getcaps);
+ gst_pad_set_setcaps_function (yuv_video->sink,
+ gst_vdp_yuv_video_sink_setcaps);
+ gst_pad_set_chain_function (yuv_video->sink, gst_vdp_yuv_video_chain);
+ gst_element_add_pad (GST_ELEMENT (yuv_video), yuv_video->sink);
+ gst_pad_set_active (yuv_video->sink, TRUE);
+}
+
+static void
+gst_vdp_yuv_video_finalize (GObject * object)
+{
+ GstVdpYUVVideo *yuv_video = (GstVdpYUVVideo *) object;
+
+ g_free (yuv_video->display);
+}
+
+static void
+gst_vdp_yuv_video_set_property (GObject * object, guint prop_id,
+ const GValue * value, GParamSpec * pspec)
+{
+ GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (object);
+
+ switch (prop_id) {
+ case PROP_DISPLAY:
+ g_free (yuv_video->display);
+ yuv_video->display = g_value_dup_string (value);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
+
+static void
+gst_vdp_yuv_video_get_property (GObject * object, guint prop_id,
+ GValue * value, GParamSpec * pspec)
+{
+ GstVdpYUVVideo *yuv_video = GST_VDP_YUV_VIDEO (object);
+
+ switch (prop_id) {
+ case PROP_DISPLAY:
+ g_value_set_string (value, yuv_video->display);
+ break;
+ default:
+ G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
+ break;
+ }
+}
diff --git a/sys/vdpau/gstvdpyuvvideo.h b/sys/vdpau/gstvdpyuvvideo.h
new file mode 100644
index 00000000..2349e1ba
--- /dev/null
+++ b/sys/vdpau/gstvdpyuvvideo.h
@@ -0,0 +1,62 @@
+/*
+ * GStreamer
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __GST_VDP_YUV_VIDEO_H__
+#define __GST_VDP_YUV_VIDEO_H__
+
+#include <gst/gst.h>
+
+#include "gstvdpdevice.h"
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_VDP_YUV_VIDEO (gst_vdp_yuv_video_get_type())
+#define GST_VDP_YUV_VIDEO(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_VDP_YUV_VIDEO,GstVdpYUVVideo))
+#define GST_VDP_YUV_VIDEO_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_VDP_YUV_VIDEO,GstVdpYUVVideoClass))
+#define GST_VDP_YUV_VIDEO_GET_CLASS(obj) (G_TYPE_INSTANCE_GET_CLASS ((obj), GST_TYPE_VDP_YUV_VIDEO, GstVdpYUVVideoClass))
+#define GST_IS_VDP_YUV_VIDEO(obj) (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_VDP_YUV_VIDEO))
+#define GST_IS_VDP_YUV_VIDEO_CLASS(klass) (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_VDP_YUV_VIDEO))
+
+typedef struct _GstVdpYUVVideo GstVdpYUVVideo;
+typedef struct _GstVdpYUVVideoClass GstVdpYUVVideoClass;
+
+struct _GstVdpYUVVideo {
+ GstElement element;
+
+ GstPad *src, *sink;
+ GstCaps *sink_caps;
+
+ gchar *display;
+ GstVdpDevice *device;
+
+ guint32 format;
+ gint chroma_type;
+ gint width, height;
+};
+
+struct _GstVdpYUVVideoClass {
+ GstElementClass parent_class;
+};
+
+GType gst_vdp_yuv_video_get_type (void);
+
+G_END_DECLS
+
+#endif /* __GST_VDP_YUV_VIDEO_H__ */
diff --git a/sys/vdpau/mpegutil.c b/sys/vdpau/mpegutil.c
new file mode 100644
index 00000000..b52ab6f8
--- /dev/null
+++ b/sys/vdpau/mpegutil.c
@@ -0,0 +1,430 @@
+/* GStreamer
+ * Copyright (C) 2007 Jan Schmidt <thaytan@mad.scientist.com>
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#include <gst/base/gstbitreader.h>
+#include <string.h>
+
+#include "mpegutil.h"
+
+/* default intra quant matrix, in zig-zag order */
+static const guint8 default_intra_quantizer_matrix[64] = {
+ 8,
+ 16, 16,
+ 19, 16, 19,
+ 22, 22, 22, 22,
+ 22, 22, 26, 24, 26,
+ 27, 27, 27, 26, 26, 26,
+ 26, 27, 27, 27, 29, 29, 29,
+ 34, 34, 34, 29, 29, 29, 27, 27,
+ 29, 29, 32, 32, 34, 34, 37,
+ 38, 37, 35, 35, 34, 35,
+ 38, 38, 40, 40, 40,
+ 48, 48, 46, 46,
+ 56, 56, 58,
+ 69, 69,
+ 83
+};
+
+guint8 mpeg2_scan[64] = {
+ /* Zig-Zag scan pattern */
+ 0, 1, 8, 16, 9, 2, 3, 10,
+ 17, 24, 32, 25, 18, 11, 4, 5,
+ 12, 19, 26, 33, 40, 48, 41, 34,
+ 27, 20, 13, 6, 7, 14, 21, 28,
+ 35, 42, 49, 56, 57, 50, 43, 36,
+ 29, 22, 15, 23, 30, 37, 44, 51,
+ 58, 59, 52, 45, 38, 31, 39, 46,
+ 53, 60, 61, 54, 47, 55, 62, 63
+};
+
+static void
+set_fps_from_code (MPEGSeqHdr * hdr, guint8 fps_code)
+{
+ const gint framerates[][2] = {
+ {30, 1}, {24000, 1001}, {24, 1}, {25, 1},
+ {30000, 1001}, {30, 1}, {50, 1}, {60000, 1001},
+ {60, 1}, {30, 1}
+ };
+
+ if (fps_code < 10) {
+ hdr->fps_n = framerates[fps_code][0];
+ hdr->fps_d = framerates[fps_code][1];
+ } else {
+ /* Force a valid framerate */
+ hdr->fps_n = 30000;
+ hdr->fps_d = 1001;
+ }
+}
+
+/* Set the Pixel Aspect Ratio in our hdr from a DAR code in the data */
+static void
+set_par_from_dar (MPEGSeqHdr * hdr, guint8 asr_code)
+{
+ /* Pixel_width = DAR_width * display_vertical_size */
+ /* Pixel_height = DAR_height * display_horizontal_size */
+ switch (asr_code) {
+ case 0x02: /* 3:4 DAR = 4:3 pixels */
+ hdr->par_w = 4 * hdr->height;
+ hdr->par_h = 3 * hdr->width;
+ break;
+ case 0x03: /* 9:16 DAR */
+ hdr->par_w = 16 * hdr->height;
+ hdr->par_h = 9 * hdr->width;
+ break;
+ case 0x04: /* 1:2.21 DAR */
+ hdr->par_w = 221 * hdr->height;
+ hdr->par_h = 100 * hdr->width;
+ break;
+ case 0x01: /* Square pixels */
+ default:
+ hdr->par_w = hdr->par_h = 1;
+ break;
+ }
+}
+
+gboolean
+mpeg_util_parse_sequence_extension (MPEGSeqExtHdr * hdr, GstBuffer * buffer)
+{
+ GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);;
+
+ /* skip sync word */
+ if (!gst_bit_reader_skip (&reader, 8 * 4))
+ return FALSE;
+
+ /* skip extension code */
+ if (!gst_bit_reader_skip (&reader, 4))
+ return FALSE;
+
+ /* skip profile and level escape bit */
+ if (!gst_bit_reader_skip (&reader, 1))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->profile, 3))
+ return FALSE;
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->level, 4))
+ return FALSE;
+
+ /* progressive */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->progressive, 1))
+ return FALSE;
+
+ /* chroma format */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->chroma_format, 2))
+ return FALSE;
+
+ /* resolution extension */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->horiz_size_ext, 2))
+ return FALSE;
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->vert_size_ext, 2))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint16 (&reader, &hdr->bitrate_ext, 12))
+ return FALSE;
+
+ /* skip to framerate extension */
+ if (!gst_bit_reader_skip (&reader, 9))
+ return FALSE;
+
+ /* framerate extension */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->fps_n_ext, 2))
+ return FALSE;
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->fps_d_ext, 2))
+ return FALSE;
+
+ return TRUE;
+}
+
+gboolean
+mpeg_util_parse_sequence_hdr (MPEGSeqHdr * hdr, GstBuffer * buffer)
+{
+ GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);
+ guint8 dar_idx, par_idx;
+ guint8 load_intra_flag, load_non_intra_flag;
+
+ /* skip sync word */
+ if (!gst_bit_reader_skip (&reader, 8 * 4))
+ return FALSE;
+
+ /* resolution */
+ if (!gst_bit_reader_get_bits_uint16 (&reader, &hdr->width, 12))
+ return FALSE;
+ if (!gst_bit_reader_get_bits_uint16 (&reader, &hdr->height, 12))
+ return FALSE;
+
+ /* aspect ratio */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &dar_idx, 4))
+ return FALSE;
+ set_par_from_dar (hdr, dar_idx);
+
+ /* framerate */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &par_idx, 4))
+ return FALSE;
+ set_fps_from_code (hdr, par_idx);
+
+ /* bitrate */
+ if (!gst_bit_reader_get_bits_uint32 (&reader, &hdr->bitrate, 18))
+ return FALSE;
+
+ if (!gst_bit_reader_skip (&reader, 1))
+ return FALSE;
+
+ /* VBV buffer size */
+ if (!gst_bit_reader_get_bits_uint16 (&reader, &hdr->vbv_buffer, 10))
+ return FALSE;
+
+ /* constrained parameters flag */
+ if (!gst_bit_reader_get_bits_uint8 (&reader,
+ &hdr->constrained_parameters_flag, 1))
+ return FALSE;
+
+ /* intra quantizer matrix */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &load_intra_flag, 1))
+ return FALSE;
+ if (load_intra_flag) {
+ gint i;
+ for (i = 0; i < 64; i++) {
+ if (!gst_bit_reader_get_bits_uint8 (&reader,
+ &hdr->intra_quantizer_matrix[mpeg2_scan[i]], 8))
+ return FALSE;
+ }
+ } else
+ memcpy (hdr->intra_quantizer_matrix, default_intra_quantizer_matrix, 64);
+
+ /* non intra quantizer matrix */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &load_non_intra_flag, 1))
+ return FALSE;
+ if (load_non_intra_flag) {
+ gint i;
+ for (i = 0; i < 64; i++) {
+ if (!gst_bit_reader_get_bits_uint8 (&reader,
+ &hdr->non_intra_quantizer_matrix[mpeg2_scan[i]], 8))
+ return FALSE;
+ }
+ } else
+ memset (hdr->non_intra_quantizer_matrix, 16, 64);
+
+ return TRUE;
+}
+
+gboolean
+mpeg_util_parse_picture_hdr (MPEGPictureHdr * hdr, GstBuffer * buffer)
+{
+ GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);
+
+ /* skip sync word */
+ if (!gst_bit_reader_skip (&reader, 8 * 4))
+ return FALSE;
+
+ /* temperal sequence number */
+ if (!gst_bit_reader_get_bits_uint16 (&reader, &hdr->tsn, 10))
+ return FALSE;
+
+ /* frame type */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->pic_type, 3))
+ return FALSE;
+
+ if (hdr->pic_type == 0 || hdr->pic_type > 4)
+ return FALSE; /* Corrupted picture packet */
+
+ /* VBV delay */
+ if (!gst_bit_reader_get_bits_uint16 (&reader, &hdr->vbv_delay, 16))
+ return FALSE;
+
+ if (hdr->pic_type == P_FRAME || hdr->pic_type == B_FRAME) {
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->full_pel_forward_vector,
+ 1))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->f_code[0][0], 3))
+ return FALSE;
+ hdr->f_code[0][1] = hdr->f_code[0][0];
+ } else {
+ hdr->full_pel_forward_vector = 0;
+ hdr->f_code[0][0] = hdr->f_code[0][1] = 0;
+ }
+
+ if (hdr->pic_type == B_FRAME) {
+ if (!gst_bit_reader_get_bits_uint8 (&reader,
+ &hdr->full_pel_backward_vector, 1))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &hdr->f_code[1][0], 3))
+ return FALSE;
+ hdr->f_code[1][1] = hdr->f_code[1][0];
+ } else {
+ hdr->full_pel_backward_vector = 0;
+ hdr->f_code[1][0] = hdr->f_code[1][1] = 0;
+ }
+
+ return TRUE;
+}
+
+gboolean
+mpeg_util_parse_picture_coding_extension (MPEGPictureExt * ext,
+ GstBuffer * buffer)
+{
+ GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);
+
+ /* skip sync word */
+ if (!gst_bit_reader_skip (&reader, 8 * 4))
+ return FALSE;
+
+ /* skip extension code */
+ if (!gst_bit_reader_skip (&reader, 4))
+ return FALSE;
+
+ /* f_code */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->f_code[0][0], 4))
+ return FALSE;
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->f_code[0][1], 4))
+ return FALSE;
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->f_code[1][0], 4))
+ return FALSE;
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->f_code[1][1], 4))
+ return FALSE;
+
+ /* intra DC precision */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->intra_dc_precision, 2))
+ return FALSE;
+
+ /* picture structure */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->picture_structure, 2))
+ return FALSE;
+
+ /* top field first */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->top_field_first, 1))
+ return FALSE;
+
+ /* frame pred frame dct */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->frame_pred_frame_dct, 1))
+ return FALSE;
+
+ /* concealment motion vectors */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->concealment_motion_vectors,
+ 1))
+ return FALSE;
+
+ /* q scale type */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->q_scale_type, 1))
+ return FALSE;
+
+ /* intra vlc format */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->intra_vlc_format, 1))
+ return FALSE;
+
+ /* alternate scan */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->alternate_scan, 1))
+ return FALSE;
+
+ /* repeat first field */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->repeat_first_field, 1))
+ return FALSE;
+
+ /* chroma_420_type */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->chroma_420_type, 1))
+ return FALSE;
+
+ /* progressive_frame */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &ext->progressive_frame, 1))
+ return FALSE;
+
+ return TRUE;
+}
+
+gboolean
+mpeg_util_parse_gop (MPEGGop * gop, GstBuffer * buffer)
+{
+ GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);
+
+ /* skip sync word */
+ if (!gst_bit_reader_skip (&reader, 8 * 4))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &gop->drop_frame_flag, 1))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &gop->hour, 5))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &gop->minute, 6))
+ return FALSE;
+
+ /* skip unused bit */
+ if (!gst_bit_reader_skip (&reader, 1))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &gop->second, 6))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &gop->frame, 6))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &gop->closed_gop, 1))
+ return FALSE;
+
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &gop->broken_gop, 1))
+ return FALSE;
+
+ return TRUE;
+}
+
+gboolean
+mpeg_util_parse_quant_matrix (MPEGQuantMatrix * qm, GstBuffer * buffer)
+{
+ GstBitReader reader = GST_BIT_READER_INIT_FROM_BUFFER (buffer);
+ guint8 load_intra_flag, load_non_intra_flag;
+
+ /* skip sync word */
+ if (!gst_bit_reader_skip (&reader, 8 * 4))
+ return FALSE;
+
+ /* skip extension code */
+ if (!gst_bit_reader_skip (&reader, 4))
+ return FALSE;
+
+ /* intra quantizer matrix */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &load_intra_flag, 1))
+ return FALSE;
+ if (load_intra_flag) {
+ gint i;
+ for (i = 0; i < 64; i++) {
+ if (!gst_bit_reader_get_bits_uint8 (&reader,
+ &qm->intra_quantizer_matrix[mpeg2_scan[i]], 8))
+ return FALSE;
+ }
+ } else
+ memcpy (qm->intra_quantizer_matrix, default_intra_quantizer_matrix, 64);
+
+ /* non intra quantizer matrix */
+ if (!gst_bit_reader_get_bits_uint8 (&reader, &load_non_intra_flag, 1))
+ return FALSE;
+ if (load_non_intra_flag) {
+ gint i;
+ for (i = 0; i < 64; i++) {
+ if (!gst_bit_reader_get_bits_uint8 (&reader,
+ &qm->non_intra_quantizer_matrix[mpeg2_scan[i]], 8))
+ return FALSE;
+ }
+ } else
+ memset (qm->non_intra_quantizer_matrix, 16, 64);
+
+ return TRUE;
+}
diff --git a/sys/vdpau/mpegutil.h b/sys/vdpau/mpegutil.h
new file mode 100644
index 00000000..aaaa15f8
--- /dev/null
+++ b/sys/vdpau/mpegutil.h
@@ -0,0 +1,150 @@
+/* GStreamer
+ * Copyright (C) 2007 Jan Schmidt <thaytan@mad.scientist.com>
+ * Copyright (C) 2009 Carl-Anton Ingmarsson <ca.ingmarsson@gmail.com>
+ *
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Library General Public
+ * License as published by the Free Software Foundation; either
+ * version 2 of the License, or (at your option) any later version.
+ *
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ * Library General Public License for more details.
+ *
+ * You should have received a copy of the GNU Library General Public
+ * License along with this library; if not, write to the
+ * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
+ * Boston, MA 02111-1307, USA.
+ */
+
+#ifndef __MPEGUTIL_H__
+#define __MPEGUTIL_H__
+
+#include <gst/gst.h>
+
+typedef struct MPEGSeqHdr MPEGSeqHdr;
+typedef struct MPEGSeqExtHdr MPEGSeqExtHdr;
+typedef struct MPEGPictureHdr MPEGPictureHdr;
+typedef struct MPEGPictureExt MPEGPictureExt;
+typedef struct MPEGGop MPEGGop;
+typedef struct MPEGQuantMatrix MPEGQuantMatrix;
+
+/* Packet ID codes for different packet types we
+ * care about */
+#define MPEG_PACKET_PICTURE 0x00
+#define MPEG_PACKET_SLICE_MIN 0x01
+#define MPEG_PACKET_SLICE_MAX 0xaf
+#define MPEG_PACKET_SEQUENCE 0xb3
+#define MPEG_PACKET_EXTENSION 0xb5
+#define MPEG_PACKET_SEQUENCE_END 0xb7
+#define MPEG_PACKET_GOP 0xb8
+#define MPEG_PACKET_NONE 0xff
+
+/* Extension codes we care about */
+#define MPEG_PACKET_EXT_SEQUENCE 0x01
+#define MPEG_PACKET_EXT_SEQUENCE_DISPLAY 0x02
+#define MPEG_PACKET_EXT_QUANT_MATRIX 0x03
+#define MPEG_PACKET_EXT_PICTURE_CODING 0x08
+
+/* frame types */
+#define I_FRAME 1
+#define P_FRAME 2
+#define B_FRAME 3
+
+struct MPEGSeqHdr
+{
+ /* Pixel-Aspect Ratio from DAR code via set_par_from_dar */
+ guint par_w, par_h;
+ /* Width and Height of the video */
+ guint16 width, height;
+ /* Framerate */
+ guint fps_n, fps_d;
+
+ guint32 bitrate;
+ guint16 vbv_buffer;
+
+ guint8 constrained_parameters_flag;
+
+ guint8 intra_quantizer_matrix[64];
+ guint8 non_intra_quantizer_matrix[64];
+};
+
+struct MPEGSeqExtHdr
+{
+
+ /* mpeg2 decoder profile */
+ guint8 profile;
+ /* mpeg2 decoder level */
+ guint8 level;
+
+ guint8 progressive;
+ guint8 chroma_format;
+
+ guint8 horiz_size_ext, vert_size_ext;
+
+ guint16 bitrate_ext;
+ guint8 fps_n_ext, fps_d_ext;
+
+};
+
+struct MPEGPictureHdr
+{
+ guint16 tsn;
+ guint8 pic_type;
+ guint16 vbv_delay;
+
+ guint8 full_pel_forward_vector, full_pel_backward_vector;
+
+ guint8 f_code[2][2];
+};
+
+struct MPEGPictureExt
+{
+ guint8 f_code[2][2];
+
+ guint8 intra_dc_precision;
+ guint8 picture_structure;
+ guint8 top_field_first;
+ guint8 frame_pred_frame_dct;
+ guint8 concealment_motion_vectors;
+ guint8 q_scale_type;
+ guint8 intra_vlc_format;
+ guint8 alternate_scan;
+ guint8 repeat_first_field;
+ guint8 chroma_420_type;
+ guint8 progressive_frame;
+};
+
+struct MPEGGop
+{
+ guint8 drop_frame_flag;
+
+ guint8 hour, minute, second, frame;
+
+ guint8 closed_gop;
+ guint8 broken_gop;
+};
+
+struct MPEGQuantMatrix
+{
+ guint8 intra_quantizer_matrix[64];
+ guint8 non_intra_quantizer_matrix[64];
+};
+
+gboolean mpeg_util_parse_sequence_hdr (MPEGSeqHdr *hdr, GstBuffer *buffer);
+
+gboolean mpeg_util_parse_sequence_extension (MPEGSeqExtHdr *hdr,
+ GstBuffer *buffer);
+
+gboolean mpeg_util_parse_picture_hdr (MPEGPictureHdr * hdr, GstBuffer *buffer);
+
+gboolean mpeg_util_parse_picture_coding_extension (MPEGPictureExt *ext,
+ GstBuffer *buffer);
+
+gboolean mpeg_util_parse_gop (MPEGGop * gop, GstBuffer *buffer);
+
+gboolean mpeg_util_parse_quant_matrix (MPEGQuantMatrix * qm, GstBuffer *buffer);
+
+#endif
+