From cf2b2b017f0995ecab2822804537f193835e35e9 Mon Sep 17 00:00:00 2001 From: David Schleef Date: Wed, 1 Jul 2009 10:55:12 -0700 Subject: basevideo: move from -base Also move schroedinger plugin. This creates a new library, gstbasevideo-0.10, which will probably be merged back into gstvideo-0.10 when this is moved back to -base. --- gst-libs/gst/Makefile.am | 4 +- gst-libs/gst/video/Makefile.am | 24 + gst-libs/gst/video/gstbasevideocodec.c | 565 ++++++++++++++ gst-libs/gst/video/gstbasevideocodec.h | 145 ++++ gst-libs/gst/video/gstbasevideodecoder.c | 1173 ++++++++++++++++++++++++++++++ gst-libs/gst/video/gstbasevideodecoder.h | 161 ++++ gst-libs/gst/video/gstbasevideoencoder.c | 502 +++++++++++++ gst-libs/gst/video/gstbasevideoencoder.h | 118 +++ gst-libs/gst/video/gstbasevideoparse.c | 870 ++++++++++++++++++++++ gst-libs/gst/video/gstbasevideoparse.h | 136 ++++ gst-libs/gst/video/gstbasevideoutils.c | 162 +++++ gst-libs/gst/video/gstbasevideoutils.h | 90 +++ 12 files changed, 3948 insertions(+), 2 deletions(-) create mode 100644 gst-libs/gst/video/Makefile.am create mode 100644 gst-libs/gst/video/gstbasevideocodec.c create mode 100644 gst-libs/gst/video/gstbasevideocodec.h create mode 100644 gst-libs/gst/video/gstbasevideodecoder.c create mode 100644 gst-libs/gst/video/gstbasevideodecoder.h create mode 100644 gst-libs/gst/video/gstbasevideoencoder.c create mode 100644 gst-libs/gst/video/gstbasevideoencoder.h create mode 100644 gst-libs/gst/video/gstbasevideoparse.c create mode 100644 gst-libs/gst/video/gstbasevideoparse.h create mode 100644 gst-libs/gst/video/gstbasevideoutils.c create mode 100644 gst-libs/gst/video/gstbasevideoutils.h (limited to 'gst-libs/gst') diff --git a/gst-libs/gst/Makefile.am b/gst-libs/gst/Makefile.am index 3471a759..b123a4c6 100644 --- a/gst-libs/gst/Makefile.am +++ b/gst-libs/gst/Makefile.am @@ -1,5 +1,5 @@ -SUBDIRS = interfaces signalprocessor +SUBDIRS = interfaces signalprocessor video noinst_HEADERS = gst-i18n-plugin.h gettext.h 
-DIST_SUBDIRS = dshow interfaces signalprocessor +DIST_SUBDIRS = dshow interfaces signalprocessor video diff --git a/gst-libs/gst/video/Makefile.am b/gst-libs/gst/video/Makefile.am new file mode 100644 index 00000000..2b7263b2 --- /dev/null +++ b/gst-libs/gst/video/Makefile.am @@ -0,0 +1,24 @@ + +lib_LTLIBRARIES = libgstbasevideo-@GST_MAJORMINOR@.la + +CLEANFILES = $(BUILT_SOURCES) + +libgstbasevideo_@GST_MAJORMINOR@_la_SOURCES = \ + gstbasevideoutils.c \ + gstbasevideocodec.c \ + gstbasevideodecoder.c \ + gstbasevideoencoder.c \ + gstbasevideoparse.c + +libgstbasevideo_@GST_MAJORMINOR@includedir = $(includedir)/gstreamer-@GST_MAJORMINOR@/gst/video +libgstbasevideo_@GST_MAJORMINOR@include_HEADERS = \ + gstbasevideoutils.h \ + gstbasevideocodec.h \ + gstbasevideodecoder.h \ + gstbasevideoencoder.h \ + gstbasevideoparse.h + +libgstbasevideo_@GST_MAJORMINOR@_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) +libgstbasevideo_@GST_MAJORMINOR@_la_LIBADD = $(GST_BASE_LIBS) $(GST_LIBS) +libgstbasevideo_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIB_LDFLAGS) $(GST_ALL_LDFLAGS) $(GST_LT_LDFLAGS) + diff --git a/gst-libs/gst/video/gstbasevideocodec.c b/gst-libs/gst/video/gstbasevideocodec.c new file mode 100644 index 00000000..9cddc3bf --- /dev/null +++ b/gst-libs/gst/video/gstbasevideocodec.c @@ -0,0 +1,565 @@ +/* Schrodinger + * Copyright (C) 2006 David Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. 
+ * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstbasevideocodec.h" + +#include +#include + +GST_DEBUG_CATEGORY (basevideo_debug); +#define GST_CAT_DEFAULT basevideo_debug + +/* GstBaseVideoCodec signals and args */ +enum +{ + LAST_SIGNAL +}; + +enum +{ + ARG_0 +}; + +static void gst_base_video_codec_finalize (GObject * object); + +static GstStateChangeReturn gst_base_video_codec_change_state (GstElement * + element, GstStateChange transition); + + +GST_BOILERPLATE (GstBaseVideoCodec, gst_base_video_codec, GstElement, + GST_TYPE_ELEMENT); + +static void +gst_base_video_codec_base_init (gpointer g_class) +{ + GST_DEBUG_CATEGORY_INIT (basevideo_debug, "basevideo", 0, + "Base Video Classes"); + +} + +static void +gst_base_video_codec_class_init (GstBaseVideoCodecClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *element_class; + + gobject_class = G_OBJECT_CLASS (klass); + element_class = GST_ELEMENT_CLASS (klass); + + gobject_class->finalize = gst_base_video_codec_finalize; + + element_class->change_state = gst_base_video_codec_change_state; +} + +static void +gst_base_video_codec_init (GstBaseVideoCodec * base_video_codec, + GstBaseVideoCodecClass * klass) +{ + GstPadTemplate *pad_template; + + GST_DEBUG ("gst_base_video_codec_init"); + + pad_template = + gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "sink"); + g_return_if_fail (pad_template != NULL); + + base_video_codec->sinkpad = gst_pad_new_from_template (pad_template, "sink"); + gst_element_add_pad (GST_ELEMENT (base_video_codec), + base_video_codec->sinkpad); + + pad_template = + gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "src"); + g_return_if_fail (pad_template != NULL); + + base_video_codec->srcpad = 
gst_pad_new_from_template (pad_template, "src"); + gst_pad_use_fixed_caps (base_video_codec->srcpad); + gst_element_add_pad (GST_ELEMENT (base_video_codec), + base_video_codec->srcpad); + + base_video_codec->input_adapter = gst_adapter_new (); + base_video_codec->output_adapter = gst_adapter_new (); + +} + +static void +gst_base_video_codec_reset (GstBaseVideoCodec * base_video_codec) +{ + GST_DEBUG ("reset"); + + base_video_codec->system_frame_number = 0; + + gst_adapter_clear (base_video_codec->input_adapter); + gst_adapter_clear (base_video_codec->output_adapter); + +} + +static void +gst_base_video_codec_finalize (GObject * object) +{ + GstBaseVideoCodec *base_video_codec; + + g_return_if_fail (GST_IS_BASE_VIDEO_CODEC (object)); + base_video_codec = GST_BASE_VIDEO_CODEC (object); + + if (base_video_codec->input_adapter) { + g_object_unref (base_video_codec->input_adapter); + } + if (base_video_codec->output_adapter) { + g_object_unref (base_video_codec->output_adapter); + } + + G_OBJECT_CLASS (parent_class)->finalize (object); +} + +#ifdef unused +static const GstQueryType * +gst_base_video_codec_get_query_types (GstPad * pad) +{ + static const GstQueryType query_types[] = { + GST_QUERY_POSITION, + GST_QUERY_DURATION, + GST_QUERY_CONVERT, + 0 + }; + + return query_types; +} +#endif + +#if 0 +static gboolean +gst_base_video_codec_src_convert (GstPad * pad, + GstFormat src_format, gint64 src_value, + GstFormat * dest_format, gint64 * dest_value) +{ + gboolean res; + GstBaseVideoCodec *dec; + + if (src_format == *dest_format) { + *dest_value = src_value; + return TRUE; + } + + dec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad)); + + if (src_format == GST_FORMAT_DEFAULT && *dest_format == GST_FORMAT_TIME) { + if (dec->fps_d != 0) { + *dest_value = gst_util_uint64_scale (granulepos_to_frame (src_value), + dec->fps_d * GST_SECOND, dec->fps_n); + res = TRUE; + } else { + res = FALSE; + } + } else { + GST_WARNING ("unhandled conversion from %d to %d", src_format, + 
*dest_format); + res = FALSE; + } + + gst_object_unref (dec); + + return res; +} + +static gboolean +gst_base_video_codec_sink_convert (GstPad * pad, + GstFormat src_format, gint64 src_value, + GstFormat * dest_format, gint64 * dest_value) +{ + gboolean res = TRUE; + GstBaseVideoCodec *dec; + + if (src_format == *dest_format) { + *dest_value = src_value; + return TRUE; + } + + dec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad)); + + /* FIXME: check if we are in a decoding state */ + + switch (src_format) { + case GST_FORMAT_DEFAULT: + switch (*dest_format) { + case GST_FORMAT_TIME: + *dest_value = gst_util_uint64_scale (src_value, + dec->fps_d * GST_SECOND, dec->fps_n); + break; + default: + res = FALSE; + } + break; + case GST_FORMAT_TIME: + switch (*dest_format) { + case GST_FORMAT_DEFAULT: + { + *dest_value = gst_util_uint64_scale (src_value, + dec->fps_n, dec->fps_d * GST_SECOND); + break; + } + default: + res = FALSE; + break; + } + break; + default: + res = FALSE; + break; + } + + gst_object_unref (dec); + + return res; +} +#endif + +#ifdef unused +static gboolean +gst_base_video_codec_src_query (GstPad * pad, GstQuery * query) +{ + GstBaseVideoCodec *base_codec; + gboolean res = FALSE; + + base_codec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad)); + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_POSITION: + { + GstFormat format; + gint64 time; + gint64 value; + + gst_query_parse_position (query, &format, NULL); + + time = gst_util_uint64_scale (base_codec->system_frame_number, + base_codec->state.fps_n, base_codec->state.fps_d); + time += base_codec->state.segment.time; + GST_DEBUG ("query position %lld", time); + res = gst_base_video_encoded_video_convert (&base_codec->state, + GST_FORMAT_TIME, time, &format, &value); + if (!res) + goto error; + + gst_query_set_position (query, format, value); + break; + } + case GST_QUERY_DURATION: + res = gst_pad_query (GST_PAD_PEER (base_codec->sinkpad), query); + if (!res) + goto error; + break; + case 
GST_QUERY_CONVERT: + { + GstFormat src_fmt, dest_fmt; + gint64 src_val, dest_val; + + GST_DEBUG ("query convert"); + + gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val); + res = gst_base_video_encoded_video_convert (&base_codec->state, + src_fmt, src_val, &dest_fmt, &dest_val); + if (!res) + goto error; + gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val); + break; + } + default: + res = gst_pad_query_default (pad, query); + break; + } +done: + gst_object_unref (base_codec); + + return res; +error: + GST_DEBUG_OBJECT (base_codec, "query failed"); + goto done; +} +#endif + +#ifdef unused +static gboolean +gst_base_video_codec_sink_query (GstPad * pad, GstQuery * query) +{ + GstBaseVideoCodec *base_video_codec; + gboolean res = FALSE; + + base_video_codec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad)); + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_CONVERT: + { + GstFormat src_fmt, dest_fmt; + gint64 src_val, dest_val; + + gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val); + res = gst_base_video_encoded_video_convert (&base_video_codec->state, + src_fmt, src_val, &dest_fmt, &dest_val); + if (!res) + goto error; + gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val); + break; + } + default: + res = gst_pad_query_default (pad, query); + break; + } +done: + gst_object_unref (base_video_codec); + + return res; +error: + GST_DEBUG_OBJECT (base_video_codec, "query failed"); + goto done; +} +#endif + +#ifdef unused +static gboolean +gst_base_video_codec_src_event (GstPad * pad, GstEvent * event) +{ + GstBaseVideoCodec *base_video_codec; + gboolean res = FALSE; + + base_video_codec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad)); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_SEEK: + { + GstFormat format, tformat; + gdouble rate; + GstEvent *real_seek; + GstSeekFlags flags; + GstSeekType cur_type, stop_type; + gint64 cur, stop; + gint64 tcur, tstop; + + gst_event_parse_seek 
(event, &rate, &format, &flags, &cur_type, + &cur, &stop_type, &stop); + gst_event_unref (event); + + tformat = GST_FORMAT_TIME; + res = gst_base_video_encoded_video_convert (&base_video_codec->state, + format, cur, &tformat, &tcur); + if (!res) + goto convert_error; + res = gst_base_video_encoded_video_convert (&base_video_codec->state, + format, stop, &tformat, &tstop); + if (!res) + goto convert_error; + + real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME, + flags, cur_type, tcur, stop_type, tstop); + + res = gst_pad_push_event (base_video_codec->sinkpad, real_seek); + + break; + } +#if 0 + case GST_EVENT_QOS: + { + gdouble proportion; + GstClockTimeDiff diff; + GstClockTime timestamp; + + gst_event_parse_qos (event, &proportion, &diff, ×tamp); + + GST_OBJECT_LOCK (base_video_codec); + base_video_codec->proportion = proportion; + base_video_codec->earliest_time = timestamp + diff; + GST_OBJECT_UNLOCK (base_video_codec); + + GST_DEBUG_OBJECT (base_video_codec, + "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT, + GST_TIME_ARGS (timestamp), diff); + + res = gst_pad_push_event (base_video_codec->sinkpad, event); + break; + } +#endif + default: + res = gst_pad_push_event (base_video_codec->sinkpad, event); + break; + } +done: + gst_object_unref (base_video_codec); + return res; + +convert_error: + GST_DEBUG_OBJECT (base_video_codec, "could not convert format"); + goto done; +} +#endif + +#ifdef unused +static gboolean +gst_base_video_codec_sink_event (GstPad * pad, GstEvent * event) +{ + GstBaseVideoCodec *base_video_codec; + gboolean ret = FALSE; + + base_video_codec = GST_BASE_VIDEO_CODEC (gst_pad_get_parent (pad)); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_FLUSH_START: + ret = gst_pad_push_event (base_video_codec->srcpad, event); + break; + case GST_EVENT_FLUSH_STOP: + gst_base_video_codec_reset (base_video_codec); + ret = gst_pad_push_event (base_video_codec->srcpad, event); + break; + case GST_EVENT_EOS: + if (gst_base_video_codec_push_all 
(base_video_codec, + FALSE) == GST_FLOW_ERROR) { + gst_event_unref (event); + return FALSE; + } + + ret = gst_pad_push_event (base_video_codec->srcpad, event); + break; + case GST_EVENT_NEWSEGMENT: + { + gboolean update; + GstFormat format; + gdouble rate; + gint64 start, stop, time; + + gst_event_parse_new_segment (event, &update, &rate, &format, &start, + &stop, &time); + + if (format != GST_FORMAT_TIME) + goto newseg_wrong_format; + + if (rate <= 0.0) + goto newseg_wrong_rate; + + GST_DEBUG ("newsegment %lld %lld", start, time); + gst_segment_set_newsegment (&base_video_codec->state.segment, update, + rate, format, start, stop, time); + + ret = gst_pad_push_event (base_video_codec->srcpad, event); + break; + } + default: + ret = gst_pad_push_event (base_video_codec->srcpad, event); + break; + } +done: + gst_object_unref (base_video_codec); + return ret; + +newseg_wrong_format: + GST_DEBUG_OBJECT (base_video_codec, "received non TIME newsegment"); + gst_event_unref (event); + goto done; + +newseg_wrong_rate: + GST_DEBUG_OBJECT (base_video_codec, "negative rates not supported"); + gst_event_unref (event); + goto done; +} +#endif + + +static GstStateChangeReturn +gst_base_video_codec_change_state (GstElement * element, + GstStateChange transition) +{ + GstBaseVideoCodec *base_video_codec = GST_BASE_VIDEO_CODEC (element); + GstStateChangeReturn ret; + + switch (transition) { + case GST_STATE_CHANGE_NULL_TO_READY: + break; + case GST_STATE_CHANGE_READY_TO_PAUSED: + gst_base_video_codec_reset (base_video_codec); + break; + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: + break; + default: + break; + } + + ret = parent_class->change_state (element, transition); + + switch (transition) { + case GST_STATE_CHANGE_PLAYING_TO_PAUSED: + break; + case GST_STATE_CHANGE_PAUSED_TO_READY: + gst_base_video_codec_reset (base_video_codec); + break; + case GST_STATE_CHANGE_READY_TO_NULL: + break; + default: + break; + } + + return ret; +} + +#if 0 +guint64 
+gst_base_video_codec_get_timestamp (GstBaseVideoCodec * base_video_codec, + int picture_number) +{ + if (picture_number < 0) { + return base_video_codec->timestamp_offset - + (gint64) gst_util_uint64_scale (-picture_number, + base_video_codec->state.fps_d * GST_SECOND, + base_video_codec->state.fps_n); + } else { + return base_video_codec->timestamp_offset + + gst_util_uint64_scale (picture_number, + base_video_codec->state.fps_d * GST_SECOND, + base_video_codec->state.fps_n); + } +} +#endif + +GstVideoFrame * +gst_base_video_codec_new_frame (GstBaseVideoCodec * base_video_codec) +{ + GstVideoFrame *frame; + + frame = g_malloc0 (sizeof (GstVideoFrame)); + + frame->system_frame_number = base_video_codec->system_frame_number; + base_video_codec->system_frame_number++; + + return frame; +} + +void +gst_base_video_codec_free_frame (GstVideoFrame * frame) +{ + if (frame->sink_buffer) { + gst_buffer_unref (frame->sink_buffer); + } +#if 0 + if (frame->src_buffer) { + gst_buffer_unref (frame->src_buffer); + } +#endif + + g_free (frame); +} diff --git a/gst-libs/gst/video/gstbasevideocodec.h b/gst-libs/gst/video/gstbasevideocodec.h new file mode 100644 index 00000000..f4a1cf95 --- /dev/null +++ b/gst-libs/gst/video/gstbasevideocodec.h @@ -0,0 +1,145 @@ +/* GStreamer + * Copyright (C) 2008 David Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. 
+ * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef _GST_BASE_VIDEO_CODEC_H_ +#define _GST_BASE_VIDEO_CODEC_H_ + +#include +#include + +G_BEGIN_DECLS + +#define GST_TYPE_BASE_VIDEO_CODEC \ + (gst_base_video_codec_get_type()) +#define GST_BASE_VIDEO_CODEC(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BASE_VIDEO_CODEC,GstBaseVideoCodec)) +#define GST_BASE_VIDEO_CODEC_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BASE_VIDEO_CODEC,GstBaseVideoCodecClass)) +#define GST_BASE_VIDEO_CODEC_GET_CLASS(obj) \ + (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_BASE_VIDEO_CODEC,GstBaseVideoCodecClass)) +#define GST_IS_BASE_VIDEO_CODEC(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BASE_VIDEO_CODEC)) +#define GST_IS_BASE_VIDEO_CODEC_CLASS(obj) \ + (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BASE_VIDEO_CODEC)) + +/** + * GST_BASE_VIDEO_CODEC_SINK_NAME: + * + * The name of the templates for the sink pad. + */ +#define GST_BASE_VIDEO_CODEC_SINK_NAME "sink" +/** + * GST_BASE_VIDEO_CODEC_SRC_NAME: + * + * The name of the templates for the source pad. + */ +#define GST_BASE_VIDEO_CODEC_SRC_NAME "src" + +/** + * GST_BASE_VIDEO_CODEC_SRC_PAD: + * @obj: base video codec instance + * + * Gives the pointer to the source #GstPad object of the element. + */ +#define GST_BASE_VIDEO_CODEC_SRC_PAD(obj) (((GstBaseVideoCodec *) (obj))->srcpad) + +/** + * GST_BASE_VIDEO_CODEC_SINK_PAD: + * @obj: base video codec instance + * + * Gives the pointer to the sink #GstPad object of the element. 
+ */ +#define GST_BASE_VIDEO_CODEC_SINK_PAD(obj) (((GstBaseVideoCodec *) (obj))->sinkpad) + +/** + * GST_BASE_VIDEO_CODEC_FLOW_NEED_DATA: + * + */ +#define GST_BASE_VIDEO_CODEC_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS + +typedef struct _GstBaseVideoCodec GstBaseVideoCodec; +typedef struct _GstBaseVideoCodecClass GstBaseVideoCodecClass; + +struct _GstBaseVideoCodec +{ + GstElement element; + + /*< private >*/ + GstPad *sinkpad; + GstPad *srcpad; + GstAdapter *input_adapter; + GstAdapter *output_adapter; + +#if 0 + /* FIXME need to move from subclasses */ + GstVideoState state; +#endif + + //int reorder_depth; + + //gboolean have_sync; + //gboolean discont; + //gboolean started; + + //GstVideoFrame *current_frame; + //int distance_from_sync; + + //gboolean sink_clipping; + + //guint64 presentation_frame_number; + guint64 system_frame_number; + + //GstCaps *caps; + //gboolean set_output_caps; + + //GstClockTime buffer_timestamp; + + GstClockTime timestamp_offset; +}; + +struct _GstBaseVideoCodecClass +{ + GstElementClass element_class; + + gboolean (*start) (GstBaseVideoCodec *codec); + gboolean (*stop) (GstBaseVideoCodec *codec); + gboolean (*reset) (GstBaseVideoCodec *codec); + GstFlowReturn (*parse_data) (GstBaseVideoCodec *codec, gboolean at_eos); + int (*scan_for_sync) (GstAdapter *adapter, gboolean at_eos, + int offset, int n); + GstFlowReturn (*shape_output) (GstBaseVideoCodec *codec, GstVideoFrame *frame); + GstCaps *(*get_caps) (GstBaseVideoCodec *codec); + +}; + +GType gst_base_video_codec_get_type (void); + +#if 0 +guint64 gst_base_video_codec_get_timestamp (GstBaseVideoCodec *codec, + int picture_number); +#endif + +GstVideoFrame * gst_base_video_codec_new_frame (GstBaseVideoCodec *base_video_codec); +void gst_base_video_codec_free_frame (GstVideoFrame *frame); + + +G_END_DECLS + +#endif + diff --git a/gst-libs/gst/video/gstbasevideodecoder.c b/gst-libs/gst/video/gstbasevideodecoder.c new file mode 100644 index 00000000..9aa79eee --- /dev/null +++ 
b/gst-libs/gst/video/gstbasevideodecoder.c @@ -0,0 +1,1173 @@ +/* GStreamer + * Copyright (C) 2008 David Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstbasevideodecoder.h" + +#include + +GST_DEBUG_CATEGORY_EXTERN (basevideo_debug); +#define GST_CAT_DEFAULT basevideo_debug + +static void gst_base_video_decoder_finalize (GObject * object); + +static gboolean gst_base_video_decoder_sink_setcaps (GstPad * pad, + GstCaps * caps); +static gboolean gst_base_video_decoder_sink_event (GstPad * pad, + GstEvent * event); +static gboolean gst_base_video_decoder_src_event (GstPad * pad, + GstEvent * event); +static GstFlowReturn gst_base_video_decoder_chain (GstPad * pad, + GstBuffer * buf); +static gboolean gst_base_video_decoder_sink_query (GstPad * pad, + GstQuery * query); +static GstStateChangeReturn gst_base_video_decoder_change_state (GstElement * + element, GstStateChange transition); +static const GstQueryType *gst_base_video_decoder_get_query_types (GstPad * + pad); +static gboolean gst_base_video_decoder_src_query (GstPad * pad, + GstQuery * query); +static gboolean gst_base_video_decoder_src_convert (GstPad * pad, + GstFormat src_format, gint64 src_value, GstFormat * dest_format, + gint64 
* dest_value); +static void gst_base_video_decoder_reset (GstBaseVideoDecoder * + base_video_decoder); + +static guint64 +gst_base_video_decoder_get_timestamp (GstBaseVideoDecoder * base_video_decoder, + int picture_number); +static guint64 +gst_base_video_decoder_get_field_timestamp (GstBaseVideoDecoder * + base_video_decoder, int field_offset); +static GstVideoFrame *gst_base_video_decoder_new_frame (GstBaseVideoDecoder * + base_video_decoder); +static void gst_base_video_decoder_free_frame (GstVideoFrame * frame); + +GST_BOILERPLATE (GstBaseVideoDecoder, gst_base_video_decoder, + GstBaseVideoCodec, GST_TYPE_BASE_VIDEO_CODEC); + +static void +gst_base_video_decoder_base_init (gpointer g_class) +{ + +} + +static void +gst_base_video_decoder_class_init (GstBaseVideoDecoderClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *gstelement_class; + + gobject_class = G_OBJECT_CLASS (klass); + gstelement_class = GST_ELEMENT_CLASS (klass); + + gobject_class->finalize = gst_base_video_decoder_finalize; + + gstelement_class->change_state = gst_base_video_decoder_change_state; + + parent_class = g_type_class_peek_parent (klass); +} + +static void +gst_base_video_decoder_init (GstBaseVideoDecoder * base_video_decoder, + GstBaseVideoDecoderClass * klass) +{ + GstPad *pad; + + GST_DEBUG ("gst_base_video_decoder_init"); + + pad = GST_BASE_VIDEO_CODEC_SINK_PAD (base_video_decoder); + + gst_pad_set_chain_function (pad, gst_base_video_decoder_chain); + gst_pad_set_event_function (pad, gst_base_video_decoder_sink_event); + gst_pad_set_setcaps_function (pad, gst_base_video_decoder_sink_setcaps); + gst_pad_set_query_function (pad, gst_base_video_decoder_sink_query); + + pad = GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder); + + gst_pad_set_event_function (pad, gst_base_video_decoder_src_event); + gst_pad_set_query_type_function (pad, gst_base_video_decoder_get_query_types); + gst_pad_set_query_function (pad, gst_base_video_decoder_src_query); + + 
base_video_decoder->input_adapter = gst_adapter_new (); + base_video_decoder->output_adapter = gst_adapter_new (); + + gst_segment_init (&base_video_decoder->state.segment, GST_FORMAT_TIME); + gst_base_video_decoder_reset (base_video_decoder); + + base_video_decoder->current_frame = + gst_base_video_decoder_new_frame (base_video_decoder); + + base_video_decoder->sink_clipping = TRUE; +} + +static gboolean +gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps) +{ + GstBaseVideoDecoder *base_video_decoder; + GstBaseVideoDecoderClass *base_video_decoder_class; + GstStructure *structure; + const GValue *codec_data; + + base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + base_video_decoder_class = + GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + GST_DEBUG ("setcaps %" GST_PTR_FORMAT, caps); + + if (base_video_decoder->codec_data) { + gst_buffer_unref (base_video_decoder->codec_data); + base_video_decoder->codec_data = NULL; + } + + structure = gst_caps_get_structure (caps, 0); + + codec_data = gst_structure_get_value (structure, "codec_data"); + if (codec_data && G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) { + base_video_decoder->codec_data = gst_value_get_buffer (codec_data); + } + + if (base_video_decoder_class->start) { + base_video_decoder_class->start (base_video_decoder); + } + + g_object_unref (base_video_decoder); + + return TRUE; +} + +static void +gst_base_video_decoder_finalize (GObject * object) +{ + GstBaseVideoDecoder *base_video_decoder; + GstBaseVideoDecoderClass *base_video_decoder_class; + + g_return_if_fail (GST_IS_BASE_VIDEO_DECODER (object)); + base_video_decoder = GST_BASE_VIDEO_DECODER (object); + base_video_decoder_class = GST_BASE_VIDEO_DECODER_GET_CLASS (object); + + gst_base_video_decoder_reset (base_video_decoder); + + GST_DEBUG_OBJECT (object, "finalize"); + + G_OBJECT_CLASS (parent_class)->finalize (object); +} + +static gboolean +gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * 
event) +{ + GstBaseVideoDecoder *base_video_decoder; + GstBaseVideoDecoderClass *base_video_decoder_class; + gboolean ret = FALSE; + + base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + base_video_decoder_class = + GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_EOS: + { + GstVideoFrame *frame; + + frame = g_malloc0 (sizeof (GstVideoFrame)); + frame->presentation_frame_number = + base_video_decoder->presentation_frame_number; + frame->presentation_duration = 0; + base_video_decoder->presentation_frame_number++; + + base_video_decoder->frames = + g_list_append (base_video_decoder->frames, frame); + if (base_video_decoder_class->finish) { + base_video_decoder_class->finish (base_video_decoder, frame); + } + + ret = + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder), + event); + } + break; + case GST_EVENT_NEWSEGMENT: + { + gboolean update; + double rate; + double applied_rate; + GstFormat format; + gint64 start; + gint64 stop; + gint64 position; + + gst_event_parse_new_segment_full (event, &update, &rate, + &applied_rate, &format, &start, &stop, &position); + + if (format != GST_FORMAT_TIME) + goto newseg_wrong_format; + + GST_DEBUG ("new segment %lld %lld", start, position); + + gst_segment_set_newsegment_full (&base_video_decoder->state.segment, + update, rate, applied_rate, format, start, stop, position); + + ret = + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder), + event); + } + break; + default: + /* FIXME this changes the order of events */ + ret = + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder), + event); + break; + } + +done: + gst_object_unref (base_video_decoder); + return ret; + +newseg_wrong_format: + { + GST_DEBUG_OBJECT (base_video_decoder, "received non TIME newsegment"); + gst_event_unref (event); + goto done; + } +} + +static gboolean +gst_base_video_decoder_src_event (GstPad * pad, GstEvent * event) 
+{ + GstBaseVideoDecoder *base_video_decoder; + gboolean res = FALSE; + + base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_SEEK: + { + GstFormat format, tformat; + gdouble rate; + GstEvent *real_seek; + GstSeekFlags flags; + GstSeekType cur_type, stop_type; + gint64 cur, stop; + gint64 tcur, tstop; + + gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, + &cur, &stop_type, &stop); + gst_event_unref (event); + + tformat = GST_FORMAT_TIME; + res = + gst_base_video_decoder_src_convert (pad, format, cur, &tformat, + &tcur); + if (!res) + goto convert_error; + res = + gst_base_video_decoder_src_convert (pad, format, stop, &tformat, + &tstop); + if (!res) + goto convert_error; + + real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME, + flags, cur_type, tcur, stop_type, tstop); + + res = + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SINK_PAD + (base_video_decoder), real_seek); + + break; + } + case GST_EVENT_QOS: + { + gdouble proportion; + GstClockTimeDiff diff; + GstClockTime timestamp; + + gst_event_parse_qos (event, &proportion, &diff, ×tamp); + + GST_OBJECT_LOCK (base_video_decoder); + base_video_decoder->proportion = proportion; + base_video_decoder->earliest_time = timestamp + diff; + GST_OBJECT_UNLOCK (base_video_decoder); + + GST_DEBUG_OBJECT (base_video_decoder, + "got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT ", %g", + GST_TIME_ARGS (timestamp), diff, proportion); + + res = + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SINK_PAD + (base_video_decoder), event); + break; + } + default: + res = + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SINK_PAD + (base_video_decoder), event); + break; + } +done: + gst_object_unref (base_video_decoder); + return res; + +convert_error: + GST_DEBUG_OBJECT (base_video_decoder, "could not convert format"); + goto done; +} + + +#if 0 +static gboolean +gst_base_video_decoder_sink_convert (GstPad * pad, + GstFormat src_format, gint64 src_value, + 
GstFormat * dest_format, gint64 * dest_value) +{ + gboolean res = TRUE; + GstBaseVideoDecoder *enc; + + if (src_format == *dest_format) { + *dest_value = src_value; + return TRUE; + } + + enc = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + + /* FIXME: check if we are in a decoding state */ + + switch (src_format) { + case GST_FORMAT_BYTES: + switch (*dest_format) { +#if 0 + case GST_FORMAT_DEFAULT: + *dest_value = gst_util_uint64_scale_int (src_value, 1, + enc->bytes_per_picture); + break; +#endif + case GST_FORMAT_TIME: + /* seems like a rather silly conversion, implement me if you like */ + default: + res = FALSE; + } + break; + case GST_FORMAT_DEFAULT: + switch (*dest_format) { + case GST_FORMAT_TIME: + *dest_value = gst_util_uint64_scale (src_value, + GST_SECOND * enc->fps_d, enc->fps_n); + break; +#if 0 + case GST_FORMAT_BYTES: + *dest_value = gst_util_uint64_scale_int (src_value, + enc->bytes_per_picture, 1); + break; +#endif + default: + res = FALSE; + } + break; + default: + res = FALSE; + break; + } +} +#endif + +static gboolean +gst_base_video_decoder_src_convert (GstPad * pad, + GstFormat src_format, gint64 src_value, + GstFormat * dest_format, gint64 * dest_value) +{ + gboolean res = TRUE; + GstBaseVideoDecoder *enc; + + if (src_format == *dest_format) { + *dest_value = src_value; + return TRUE; + } + + enc = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + + /* FIXME: check if we are in a encoding state */ + + GST_DEBUG ("src convert"); + switch (src_format) { +#if 0 + case GST_FORMAT_DEFAULT: + switch (*dest_format) { + case GST_FORMAT_TIME: + *dest_value = gst_util_uint64_scale (granulepos_to_frame (src_value), + enc->fps_d * GST_SECOND, enc->fps_n); + break; + default: + res = FALSE; + } + break; + case GST_FORMAT_TIME: + switch (*dest_format) { + case GST_FORMAT_DEFAULT: + { + *dest_value = gst_util_uint64_scale (src_value, + enc->fps_n, enc->fps_d * GST_SECOND); + break; + } + default: + res = FALSE; + break; + } + break; +#endif + 
default: + res = FALSE; + break; + } + + gst_object_unref (enc); + + return res; +} + +static const GstQueryType * +gst_base_video_decoder_get_query_types (GstPad * pad) +{ + static const GstQueryType query_types[] = { + GST_QUERY_CONVERT, + 0 + }; + + return query_types; +} + +static gboolean +gst_base_video_decoder_src_query (GstPad * pad, GstQuery * query) +{ + GstBaseVideoDecoder *enc; + gboolean res; + + enc = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + + switch GST_QUERY_TYPE + (query) { + case GST_QUERY_CONVERT: + { + GstFormat src_fmt, dest_fmt; + gint64 src_val, dest_val; + + gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val); + res = + gst_base_video_decoder_src_convert (pad, src_fmt, src_val, &dest_fmt, + &dest_val); + if (!res) + goto error; + gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val); + break; + } + default: + res = gst_pad_query_default (pad, query); + } + gst_object_unref (enc); + return res; + +error: + GST_DEBUG_OBJECT (enc, "query failed"); + gst_object_unref (enc); + return res; +} + +static gboolean +gst_base_video_decoder_sink_query (GstPad * pad, GstQuery * query) +{ + GstBaseVideoDecoder *base_video_decoder; + gboolean res = FALSE; + + base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + + GST_DEBUG_OBJECT (base_video_decoder, "sink query fps=%d/%d", + base_video_decoder->state.fps_n, base_video_decoder->state.fps_d); + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_CONVERT: + { + GstFormat src_fmt, dest_fmt; + gint64 src_val, dest_val; + + gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val); + res = gst_base_video_rawvideo_convert (&base_video_decoder->state, + src_fmt, src_val, &dest_fmt, &dest_val); + if (!res) + goto error; + gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val); + break; + } + default: + res = gst_pad_query_default (pad, query); + break; + } +done: + gst_object_unref (base_video_decoder); + + return res; 
+error: + GST_DEBUG_OBJECT (base_video_decoder, "query failed"); + goto done; +} + + +#if 0 +static gboolean +gst_pad_is_negotiated (GstPad * pad) +{ + GstCaps *caps; + + g_return_val_if_fail (pad != NULL, FALSE); + + caps = gst_pad_get_negotiated_caps (pad); + if (caps) { + gst_caps_unref (caps); + return TRUE; + } + + return FALSE; +} +#endif + +static void +gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder) +{ + GstBaseVideoDecoderClass *base_video_decoder_class; + GList *g; + + base_video_decoder_class = + GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + GST_DEBUG ("reset"); + + base_video_decoder->started = FALSE; + + base_video_decoder->discont = TRUE; + base_video_decoder->have_sync = FALSE; + + base_video_decoder->timestamp_offset = GST_CLOCK_TIME_NONE; + base_video_decoder->system_frame_number = 0; + base_video_decoder->presentation_frame_number = 0; + base_video_decoder->last_sink_timestamp = GST_CLOCK_TIME_NONE; + base_video_decoder->last_sink_offset_end = GST_CLOCK_TIME_NONE; + base_video_decoder->base_picture_number = 0; + base_video_decoder->last_timestamp = GST_CLOCK_TIME_NONE; + + base_video_decoder->offset = 0; + + if (base_video_decoder->caps) { + gst_caps_unref (base_video_decoder->caps); + base_video_decoder->caps = NULL; + } + + if (base_video_decoder->current_frame) { + gst_base_video_decoder_free_frame (base_video_decoder->current_frame); + base_video_decoder->current_frame = NULL; + } + + base_video_decoder->have_src_caps = FALSE; + + for (g = g_list_first (base_video_decoder->frames); g; g = g_list_next (g)) { + GstVideoFrame *frame = g->data; + gst_base_video_decoder_free_frame (frame); + } + g_list_free (base_video_decoder->frames); + base_video_decoder->frames = NULL; + + if (base_video_decoder_class->reset) { + base_video_decoder_class->reset (base_video_decoder); + } +} + +static GstBuffer * +gst_adapter_get_buffer (GstAdapter * adapter) +{ + return gst_buffer_ref (GST_BUFFER (adapter->buflist->data)); + 
+} + +static GstFlowReturn +gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf) +{ + GstBaseVideoDecoder *base_video_decoder; + GstBaseVideoDecoderClass *klass; + GstBuffer *buffer; + GstFlowReturn ret; + + GST_DEBUG ("chain %lld", GST_BUFFER_TIMESTAMP (buf)); + +#if 0 + /* requiring the pad to be negotiated makes it impossible to use + * oggdemux or filesrc ! decoder */ + if (!gst_pad_is_negotiated (pad)) { + GST_DEBUG ("not negotiated"); + return GST_FLOW_NOT_NEGOTIATED; + } +#endif + + base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad)); + klass = GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + GST_DEBUG_OBJECT (base_video_decoder, "chain"); + + if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) { + GST_DEBUG_OBJECT (base_video_decoder, "received DISCONT buffer"); + if (base_video_decoder->started) { + gst_base_video_decoder_reset (base_video_decoder); + } + } + + if (!base_video_decoder->started) { + klass->start (base_video_decoder); + base_video_decoder->started = TRUE; + } + + if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) { + GST_DEBUG ("timestamp %lld offset %lld", GST_BUFFER_TIMESTAMP (buf), + base_video_decoder->offset); + base_video_decoder->last_sink_timestamp = GST_BUFFER_TIMESTAMP (buf); + } + if (GST_BUFFER_OFFSET_END (buf) != -1) { + GST_DEBUG ("gp %lld", GST_BUFFER_OFFSET_END (buf)); + base_video_decoder->last_sink_offset_end = GST_BUFFER_OFFSET_END (buf); + } + base_video_decoder->offset += GST_BUFFER_SIZE (buf); + +#if 0 + if (base_video_decoder->timestamp_offset == GST_CLOCK_TIME_NONE && + GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) { + GST_DEBUG ("got new offset %lld", GST_BUFFER_TIMESTAMP (buf)); + base_video_decoder->timestamp_offset = GST_BUFFER_TIMESTAMP (buf); + } +#endif + + if (base_video_decoder->current_frame == NULL) { + base_video_decoder->current_frame = + gst_base_video_decoder_new_frame (base_video_decoder); + } + + gst_adapter_push 
(base_video_decoder->input_adapter, buf); + + if (!base_video_decoder->have_sync) { + int n, m; + + GST_DEBUG ("no sync, scanning"); + + n = gst_adapter_available (base_video_decoder->input_adapter); + m = klass->scan_for_sync (base_video_decoder, FALSE, 0, n); + + if (m >= n) { + g_warning ("subclass scanned past end %d >= %d", m, n); + } + + gst_adapter_flush (base_video_decoder->input_adapter, m); + + if (m < n) { + GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n); + + /* this is only "maybe" sync */ + base_video_decoder->have_sync = TRUE; + } + + if (!base_video_decoder->have_sync) { + gst_object_unref (base_video_decoder); + return GST_FLOW_OK; + } + } + + /* FIXME: use gst_adapter_prev_timestamp() here instead? */ + buffer = gst_adapter_get_buffer (base_video_decoder->input_adapter); + + base_video_decoder->buffer_timestamp = GST_BUFFER_TIMESTAMP (buffer); + gst_buffer_unref (buffer); + + do { + ret = klass->parse_data (base_video_decoder, FALSE); + } while (ret == GST_FLOW_OK); + + if (ret == GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA) { + gst_object_unref (base_video_decoder); + return GST_FLOW_OK; + } + + gst_object_unref (base_video_decoder); + return ret; +} + +static GstStateChangeReturn +gst_base_video_decoder_change_state (GstElement * element, + GstStateChange transition) +{ + GstBaseVideoDecoder *base_video_decoder; + GstBaseVideoDecoderClass *base_video_decoder_class; + GstStateChangeReturn ret; + + base_video_decoder = GST_BASE_VIDEO_DECODER (element); + base_video_decoder_class = GST_BASE_VIDEO_DECODER_GET_CLASS (element); + + switch (transition) { + default: + break; + } + + ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); + + switch (transition) { + case GST_STATE_CHANGE_PAUSED_TO_READY: + if (base_video_decoder_class->stop) { + base_video_decoder_class->stop (base_video_decoder); + } + break; + default: + break; + } + + return ret; +} + +static void +gst_base_video_decoder_free_frame (GstVideoFrame * frame) 
+{ + g_return_if_fail (frame != NULL); + + if (frame->sink_buffer) { + gst_buffer_unref (frame->sink_buffer); + } +#if 0 + if (frame->src_buffer) { + gst_buffer_unref (frame->src_buffer); + } +#endif + + g_free (frame); +} + +static GstVideoFrame * +gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder) +{ + GstVideoFrame *frame; + + frame = g_malloc0 (sizeof (GstVideoFrame)); + + frame->system_frame_number = base_video_decoder->system_frame_number; + base_video_decoder->system_frame_number++; + + frame->decode_frame_number = frame->system_frame_number - + base_video_decoder->reorder_depth; + + frame->decode_timestamp = -1; + frame->presentation_timestamp = -1; + frame->presentation_duration = -1; + frame->n_fields = 2; + + return frame; +} + +GstFlowReturn +gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder, + GstVideoFrame * frame) +{ + GstBaseVideoDecoderClass *base_video_decoder_class; + GstBuffer *src_buffer; + + GST_DEBUG ("finish frame"); + + base_video_decoder_class = + GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + GST_DEBUG ("finish frame sync=%d pts=%lld", frame->is_sync_point, + frame->presentation_timestamp); + + if (frame->is_sync_point) { + if (GST_CLOCK_TIME_IS_VALID (frame->presentation_timestamp)) { + if (frame->presentation_timestamp != base_video_decoder->timestamp_offset) { + GST_DEBUG ("sync timestamp %lld diff %lld", + frame->presentation_timestamp, + frame->presentation_timestamp - + base_video_decoder->state.segment.start); + base_video_decoder->timestamp_offset = frame->presentation_timestamp; + base_video_decoder->field_index = 0; + } else { + /* This case is for one initial timestamp and no others, e.g., + * filesrc ! decoder ! 
xvimagesink */ + GST_WARNING ("sync timestamp didn't change, ignoring"); + frame->presentation_timestamp = GST_CLOCK_TIME_NONE; + } + } else { + GST_WARNING ("sync point doesn't have timestamp"); + if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->timestamp_offset)) { + GST_ERROR ("No base timestamp. Assuming frames start at 0"); + base_video_decoder->timestamp_offset = 0; + base_video_decoder->field_index = 0; + } + } + } + frame->field_index = base_video_decoder->field_index; + base_video_decoder->field_index += frame->n_fields; + + if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) { + frame->presentation_timestamp = + gst_base_video_decoder_get_field_timestamp (base_video_decoder, + frame->field_index); + frame->presentation_duration = GST_CLOCK_TIME_NONE; + frame->decode_timestamp = + gst_base_video_decoder_get_timestamp (base_video_decoder, + frame->decode_frame_number); + } + if (frame->presentation_duration == GST_CLOCK_TIME_NONE) { + frame->presentation_duration = + gst_base_video_decoder_get_field_timestamp (base_video_decoder, + frame->field_index + frame->n_fields) - frame->presentation_timestamp; + } + + if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_timestamp)) { + if (frame->presentation_timestamp < base_video_decoder->last_timestamp) { + GST_WARNING ("decreasing timestamp (%lld < %lld)", + frame->presentation_timestamp, base_video_decoder->last_timestamp); + } + } + base_video_decoder->last_timestamp = frame->presentation_timestamp; + + GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT); + if (base_video_decoder->state.interlaced) { +#ifndef GST_VIDEO_BUFFER_TFF +#define GST_VIDEO_BUFFER_TFF (GST_MINI_OBJECT_FLAG_LAST << 5) +#endif +#ifndef GST_VIDEO_BUFFER_RFF +#define GST_VIDEO_BUFFER_RFF (GST_MINI_OBJECT_FLAG_LAST << 6) +#endif +#ifndef GST_VIDEO_BUFFER_ONEFIELD +#define GST_VIDEO_BUFFER_ONEFIELD (GST_MINI_OBJECT_FLAG_LAST << 7) +#endif + int tff = base_video_decoder->state.top_field_first; + + if 
(frame->field_index & 1) { + tff ^= 1; + } + if (tff) { + GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_TFF); + } else { + GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_TFF); + } + GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_RFF); + GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_ONEFIELD); + if (frame->n_fields == 3) { + GST_BUFFER_FLAG_SET (frame->src_buffer, GST_VIDEO_BUFFER_RFF); + } else if (frame->n_fields == 1) { + GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_VIDEO_BUFFER_ONEFIELD); + } + } + + GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp; + GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration; + GST_BUFFER_OFFSET (frame->src_buffer) = -1; + GST_BUFFER_OFFSET_END (frame->src_buffer) = -1; + + GST_DEBUG ("pushing frame %lld", frame->presentation_timestamp); + + base_video_decoder->frames = + g_list_remove (base_video_decoder->frames, frame); + + gst_base_video_decoder_set_src_caps (base_video_decoder); + + src_buffer = frame->src_buffer; + frame->src_buffer = NULL; + + gst_base_video_decoder_free_frame (frame); + + if (base_video_decoder->sink_clipping) { + gint64 start = GST_BUFFER_TIMESTAMP (src_buffer); + gint64 stop = GST_BUFFER_TIMESTAMP (src_buffer) + + GST_BUFFER_DURATION (src_buffer); + + if (gst_segment_clip (&base_video_decoder->state.segment, GST_FORMAT_TIME, + start, stop, &start, &stop)) { + GST_BUFFER_TIMESTAMP (src_buffer) = start; + GST_BUFFER_DURATION (src_buffer) = stop - start; + } else { + GST_DEBUG ("dropping buffer outside segment"); + gst_buffer_unref (src_buffer); + return GST_FLOW_OK; + } + } + + return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder), + src_buffer); +} + +int +gst_base_video_decoder_get_height (GstBaseVideoDecoder * base_video_decoder) +{ + return base_video_decoder->state.height; +} + +int +gst_base_video_decoder_get_width (GstBaseVideoDecoder * base_video_decoder) +{ + return 
base_video_decoder->state.width; +} + +GstFlowReturn +gst_base_video_decoder_end_of_stream (GstBaseVideoDecoder * base_video_decoder, + GstBuffer * buffer) +{ + + if (base_video_decoder->frames) { + GST_DEBUG ("EOS with frames left over"); + } + + return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder), + buffer); +} + +void +gst_base_video_decoder_add_to_frame (GstBaseVideoDecoder * base_video_decoder, + int n_bytes) +{ + GstBuffer *buf; + + GST_DEBUG ("add to frame"); + +#if 0 + if (gst_adapter_available (base_video_decoder->output_adapter) == 0) { + GstBuffer *buffer; + + buffer = + gst_adapter_get_orig_buffer_at_offset + (base_video_decoder->input_adapter, 0); + if (buffer) { + base_video_decoder->current_frame->presentation_timestamp = + GST_BUFFER_TIMESTAMP (buffer); + gst_buffer_unref (buffer); + } + } +#endif + + if (n_bytes == 0) + return; + + buf = gst_adapter_take_buffer (base_video_decoder->input_adapter, n_bytes); + + gst_adapter_push (base_video_decoder->output_adapter, buf); +} + +static guint64 +gst_base_video_decoder_get_timestamp (GstBaseVideoDecoder * base_video_decoder, + int picture_number) +{ + if (base_video_decoder->state.fps_d == 0) { + return -1; + } + if (picture_number < base_video_decoder->base_picture_number) { + return base_video_decoder->timestamp_offset - + (gint64) gst_util_uint64_scale (base_video_decoder->base_picture_number + - picture_number, base_video_decoder->state.fps_d * GST_SECOND, + base_video_decoder->state.fps_n); + } else { + return base_video_decoder->timestamp_offset + + gst_util_uint64_scale (picture_number - + base_video_decoder->base_picture_number, + base_video_decoder->state.fps_d * GST_SECOND, + base_video_decoder->state.fps_n); + } +} + +static guint64 +gst_base_video_decoder_get_field_timestamp (GstBaseVideoDecoder * + base_video_decoder, int field_offset) +{ + if (base_video_decoder->state.fps_d == 0) { + return GST_CLOCK_TIME_NONE; + } + if (field_offset < 0) { + GST_WARNING ("field offset < 
0"); + return GST_CLOCK_TIME_NONE; + } + return base_video_decoder->timestamp_offset + + gst_util_uint64_scale (field_offset, + base_video_decoder->state.fps_d * GST_SECOND, + base_video_decoder->state.fps_n * 2); +} + + +GstFlowReturn +gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder) +{ + GstVideoFrame *frame = base_video_decoder->current_frame; + GstBuffer *buffer; + GstBaseVideoDecoderClass *base_video_decoder_class; + GstFlowReturn ret = GST_FLOW_OK; + int n_available; + + GST_DEBUG ("have_frame"); + + base_video_decoder_class = + GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder); + + n_available = gst_adapter_available (base_video_decoder->output_adapter); + if (n_available) { + buffer = gst_adapter_take_buffer (base_video_decoder->output_adapter, + n_available); + } else { + buffer = gst_buffer_new_and_alloc (0); + } + + frame->distance_from_sync = base_video_decoder->distance_from_sync; + base_video_decoder->distance_from_sync++; + +#if 0 + if (frame->presentation_timestamp == GST_CLOCK_TIME_NONE) { + frame->presentation_timestamp = + gst_base_video_decoder_get_timestamp (base_video_decoder, + frame->presentation_frame_number); + frame->presentation_duration = + gst_base_video_decoder_get_timestamp (base_video_decoder, + frame->presentation_frame_number + 1) - frame->presentation_timestamp; + frame->decode_timestamp = + gst_base_video_decoder_get_timestamp (base_video_decoder, + frame->decode_frame_number); + } +#endif + +#if 0 + GST_BUFFER_TIMESTAMP (buffer) = frame->presentation_timestamp; + GST_BUFFER_DURATION (buffer) = frame->presentation_duration; + if (frame->decode_frame_number < 0) { + GST_BUFFER_OFFSET (buffer) = 0; + } else { + GST_BUFFER_OFFSET (buffer) = frame->decode_timestamp; + } + GST_BUFFER_OFFSET_END (buffer) = GST_CLOCK_TIME_NONE; +#endif + + GST_DEBUG ("pts %" GST_TIME_FORMAT, + GST_TIME_ARGS (frame->presentation_timestamp)); + GST_DEBUG ("dts %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->decode_timestamp)); + 
GST_DEBUG ("dist %d", frame->distance_from_sync); + + if (frame->is_sync_point) { + GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT); + } else { + GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT); + } + if (base_video_decoder->discont) { + GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT); + base_video_decoder->discont = FALSE; + } + + frame->sink_buffer = buffer; + + base_video_decoder->frames = g_list_append (base_video_decoder->frames, + frame); + + /* do something with frame */ + ret = base_video_decoder_class->handle_frame (base_video_decoder, frame); + if (!GST_FLOW_IS_SUCCESS (ret)) { + GST_DEBUG ("flow error!"); + } + + /* create new frame */ + base_video_decoder->current_frame = + gst_base_video_decoder_new_frame (base_video_decoder); + + return ret; +} + +GstVideoState * +gst_base_video_decoder_get_state (GstBaseVideoDecoder * base_video_decoder) +{ + return &base_video_decoder->state; + +} + +void +gst_base_video_decoder_set_state (GstBaseVideoDecoder * base_video_decoder, + GstVideoState * state) +{ + memcpy (&base_video_decoder->state, state, sizeof (*state)); + +} + +void +gst_base_video_decoder_lost_sync (GstBaseVideoDecoder * base_video_decoder) +{ + g_return_if_fail (GST_IS_BASE_VIDEO_DECODER (base_video_decoder)); + + GST_DEBUG ("lost_sync"); + + if (gst_adapter_available (base_video_decoder->input_adapter) >= 1) { + gst_adapter_flush (base_video_decoder->input_adapter, 1); + } + + base_video_decoder->have_sync = FALSE; +} + +void +gst_base_video_decoder_set_sync_point (GstBaseVideoDecoder * base_video_decoder) +{ + GST_DEBUG ("set_sync_point"); + + base_video_decoder->current_frame->is_sync_point = TRUE; + base_video_decoder->distance_from_sync = 0; + + base_video_decoder->current_frame->presentation_timestamp = + base_video_decoder->last_sink_timestamp; + + +} + +GstVideoFrame * +gst_base_video_decoder_get_frame (GstBaseVideoDecoder * base_video_decoder, + int frame_number) +{ + GList *g; + + for (g = g_list_first 
(base_video_decoder->frames); g; g = g_list_next (g)) { + GstVideoFrame *frame = g->data; + + if (frame->system_frame_number == frame_number) { + return frame; + } + } + + return NULL; +} + +void +gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder) +{ + GstCaps *caps; + GstVideoState *state = &base_video_decoder->state; + + if (base_video_decoder->have_src_caps) + return; + + caps = gst_video_format_new_caps (state->format, + state->width, state->height, + state->fps_n, state->fps_d, state->par_n, state->par_d); + gst_caps_set_simple (caps, "interlaced", + G_TYPE_BOOLEAN, state->interlaced, NULL); + + GST_DEBUG ("setting caps %" GST_PTR_FORMAT, caps); + + gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder), caps); + + base_video_decoder->have_src_caps = TRUE; +} diff --git a/gst-libs/gst/video/gstbasevideodecoder.h b/gst-libs/gst/video/gstbasevideodecoder.h new file mode 100644 index 00000000..5e734a2a --- /dev/null +++ b/gst-libs/gst/video/gstbasevideodecoder.h @@ -0,0 +1,161 @@ +/* GStreamer + * Copyright (C) 2008 David Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */
+
+#ifndef _GST_BASE_VIDEO_DECODER_H_
+#define _GST_BASE_VIDEO_DECODER_H_
+
+#include <gst/video/gstbasevideocodec.h>
+
+G_BEGIN_DECLS
+
+#define GST_TYPE_BASE_VIDEO_DECODER \
+  (gst_base_video_decoder_get_type())
+#define GST_BASE_VIDEO_DECODER(obj) \
+  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoder))
+#define GST_BASE_VIDEO_DECODER_CLASS(klass) \
+  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoderClass))
+#define GST_BASE_VIDEO_DECODER_GET_CLASS(obj) \
+  (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoderClass))
+#define GST_IS_BASE_VIDEO_DECODER(obj) \
+  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BASE_VIDEO_DECODER))
+#define GST_IS_BASE_VIDEO_DECODER_CLASS(obj) \
+  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BASE_VIDEO_DECODER))
+
+/**
+ * GST_BASE_VIDEO_DECODER_SINK_NAME:
+ *
+ * The name of the templates for the sink pad.
+ */
+#define GST_BASE_VIDEO_DECODER_SINK_NAME    "sink"
+/**
+ * GST_BASE_VIDEO_DECODER_SRC_NAME:
+ *
+ * The name of the templates for the source pad.
+ */
+#define GST_BASE_VIDEO_DECODER_SRC_NAME     "src"
+
+/**
+ * GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA:
+ *
+ * Custom GstFlowReturn value indicating that more data is needed.
+ */ +#define GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS + + +typedef struct _GstBaseVideoDecoder GstBaseVideoDecoder; +typedef struct _GstBaseVideoDecoderClass GstBaseVideoDecoderClass; + +struct _GstBaseVideoDecoder +{ + GstBaseVideoCodec base_video_codec; + + /*< private >*/ + GstAdapter *input_adapter; + GstAdapter *output_adapter; + + GList *frames; + + gboolean have_sync; + gboolean discont; + gboolean started; + + GstVideoState state; + + gboolean sink_clipping; + + guint64 presentation_frame_number; + guint64 system_frame_number; + + GstCaps *caps; + gboolean have_src_caps; + + GstVideoFrame *current_frame; + + int distance_from_sync; + int reorder_depth; + + GstClockTime buffer_timestamp; + + GstClockTime timestamp_offset; + + gdouble proportion; + GstClockTime earliest_time; + + GstBuffer *codec_data; + + guint64 offset; + GstClockTime last_timestamp; + + GstClockTime last_sink_timestamp; + GstClockTime last_sink_offset_end; + guint64 base_picture_number; + + int field_index; +}; + +struct _GstBaseVideoDecoderClass +{ + GstBaseVideoCodecClass base_video_codec_class; + + gboolean (*set_format) (GstBaseVideoDecoder *coder, GstVideoFormat, + int width, int height, int fps_n, int fps_d, + int par_n, int par_d); + gboolean (*start) (GstBaseVideoDecoder *coder); + gboolean (*stop) (GstBaseVideoDecoder *coder); + gboolean (*reset) (GstBaseVideoDecoder *coder); + int (*scan_for_sync) (GstBaseVideoDecoder *decoder, gboolean at_eos, + int offset, int n); + GstFlowReturn (*parse_data) (GstBaseVideoDecoder *decoder, gboolean at_eos); + gboolean (*finish) (GstBaseVideoDecoder *coder, GstVideoFrame *frame); + GstFlowReturn (*handle_frame) (GstBaseVideoDecoder *coder, GstVideoFrame *frame); + GstFlowReturn (*shape_output) (GstBaseVideoDecoder *coder, GstVideoFrame *frame); + GstCaps *(*get_caps) (GstBaseVideoDecoder *coder); + +}; + +GType gst_base_video_decoder_get_type (void); + +int gst_base_video_decoder_get_width (GstBaseVideoDecoder *coder); 
+int gst_base_video_decoder_get_height (GstBaseVideoDecoder *coder); + +guint64 gst_base_video_decoder_get_timestamp_offset (GstBaseVideoDecoder *coder); + +GstVideoFrame *gst_base_video_decoder_get_frame (GstBaseVideoDecoder *coder, + int frame_number); +void gst_base_video_decoder_add_to_frame (GstBaseVideoDecoder *base_video_decoder, + int n_bytes); +GstFlowReturn gst_base_video_decoder_finish_frame (GstBaseVideoDecoder *base_video_decoder, + GstVideoFrame *frame); +GstFlowReturn gst_base_video_decoder_end_of_stream (GstBaseVideoDecoder *base_video_decoder, + GstBuffer *buffer); +GstFlowReturn +gst_base_video_decoder_have_frame (GstBaseVideoDecoder *base_video_decoder); +GstVideoState * gst_base_video_decoder_get_state (GstBaseVideoDecoder *base_video_decoder); +void gst_base_video_decoder_set_state (GstBaseVideoDecoder *base_video_decoder, + GstVideoState *state); +void gst_base_video_decoder_lost_sync (GstBaseVideoDecoder *base_video_decoder); +void gst_base_video_decoder_set_sync_point (GstBaseVideoDecoder *base_video_decoder); + +void gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder *base_video_decoder); + + +G_END_DECLS + +#endif + diff --git a/gst-libs/gst/video/gstbasevideoencoder.c b/gst-libs/gst/video/gstbasevideoencoder.c new file mode 100644 index 00000000..b45846c6 --- /dev/null +++ b/gst-libs/gst/video/gstbasevideoencoder.c @@ -0,0 +1,502 @@ +/* GStreamer + * Copyright (C) 2008 David Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. 
+ * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstbasevideoencoder.h" +#include "gstbasevideoutils.h" + +GST_DEBUG_CATEGORY_EXTERN (basevideo_debug); +#define GST_CAT_DEFAULT basevideo_debug + +static void gst_base_video_encoder_finalize (GObject * object); + +static gboolean gst_base_video_encoder_sink_setcaps (GstPad * pad, + GstCaps * caps); +static gboolean gst_base_video_encoder_sink_event (GstPad * pad, + GstEvent * event); +static GstFlowReturn gst_base_video_encoder_chain (GstPad * pad, + GstBuffer * buf); +static GstStateChangeReturn gst_base_video_encoder_change_state (GstElement * + element, GstStateChange transition); +static const GstQueryType *gst_base_video_encoder_get_query_types (GstPad * + pad); +static gboolean gst_base_video_encoder_src_query (GstPad * pad, + GstQuery * query); + +static void +_do_init (GType object_type) +{ + const GInterfaceInfo preset_interface_info = { + NULL, /* interface_init */ + NULL, /* interface_finalize */ + NULL /* interface_data */ + }; + + g_type_add_interface_static (object_type, GST_TYPE_PRESET, + &preset_interface_info); +} + +GST_BOILERPLATE_FULL (GstBaseVideoEncoder, gst_base_video_encoder, + GstBaseVideoCodec, GST_TYPE_BASE_VIDEO_CODEC, _do_init); + +static void +gst_base_video_encoder_base_init (gpointer g_class) +{ + +} + +static void +gst_base_video_encoder_class_init (GstBaseVideoEncoderClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *gstelement_class; + + gobject_class = G_OBJECT_CLASS (klass); + gstelement_class = GST_ELEMENT_CLASS (klass); + + gobject_class->finalize = gst_base_video_encoder_finalize; + + gstelement_class->change_state = gst_base_video_encoder_change_state; + + parent_class = g_type_class_peek_parent (klass); +} + 
+static void +gst_base_video_encoder_init (GstBaseVideoEncoder * base_video_encoder, + GstBaseVideoEncoderClass * klass) +{ + GstPad *pad; + + GST_DEBUG ("gst_base_video_encoder_init"); + + pad = GST_BASE_VIDEO_CODEC_SINK_PAD (base_video_encoder); + + gst_pad_set_chain_function (pad, gst_base_video_encoder_chain); + gst_pad_set_event_function (pad, gst_base_video_encoder_sink_event); + gst_pad_set_setcaps_function (pad, gst_base_video_encoder_sink_setcaps); + + pad = GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder); + + gst_pad_set_query_type_function (pad, gst_base_video_encoder_get_query_types); + gst_pad_set_query_function (pad, gst_base_video_encoder_src_query); +} + +static gboolean +gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps) +{ + GstBaseVideoEncoder *base_video_encoder; + GstBaseVideoEncoderClass *base_video_encoder_class; + + base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad)); + base_video_encoder_class = + GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder); + + GST_DEBUG ("setcaps"); + + gst_base_video_state_from_caps (&base_video_encoder->state, caps); + + base_video_encoder_class->set_format (base_video_encoder, + &base_video_encoder->state); + + base_video_encoder_class->start (base_video_encoder); + + g_object_unref (base_video_encoder); + + return TRUE; +} + +static void +gst_base_video_encoder_finalize (GObject * object) +{ + GstBaseVideoEncoder *base_video_encoder; + GstBaseVideoEncoderClass *base_video_encoder_class; + + g_return_if_fail (GST_IS_BASE_VIDEO_ENCODER (object)); + base_video_encoder = GST_BASE_VIDEO_ENCODER (object); + base_video_encoder_class = GST_BASE_VIDEO_ENCODER_GET_CLASS (object); + + GST_DEBUG ("finalize"); + + G_OBJECT_CLASS (parent_class)->finalize (object); +} + +static gboolean +gst_base_video_encoder_sink_event (GstPad * pad, GstEvent * event) +{ + GstBaseVideoEncoder *base_video_encoder; + GstBaseVideoEncoderClass *base_video_encoder_class; + gboolean ret = FALSE; + + 
base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad)); + base_video_encoder_class = + GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_EOS: + { + GstVideoFrame *frame; + + frame = g_malloc0 (sizeof (GstVideoFrame)); + frame->presentation_frame_number = + base_video_encoder->presentation_frame_number; + frame->presentation_duration = 0; + frame->is_eos = TRUE; + base_video_encoder->presentation_frame_number++; + + base_video_encoder->frames = + g_list_append (base_video_encoder->frames, frame); + base_video_encoder_class->finish (base_video_encoder, frame); + + ret = + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), + event); + } + break; + case GST_EVENT_NEWSEGMENT: + { + gboolean update; + double rate; + double applied_rate; + GstFormat format; + gint64 start; + gint64 stop; + gint64 position; + + gst_event_parse_new_segment_full (event, &update, &rate, + &applied_rate, &format, &start, &stop, &position); + + if (format != GST_FORMAT_TIME) + goto newseg_wrong_format; + + GST_DEBUG ("new segment %lld %lld", start, position); + + gst_segment_set_newsegment_full (&base_video_encoder->state.segment, + update, rate, applied_rate, format, start, stop, position); + + ret = + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), + event); + } + break; + default: + /* FIXME this changes the order of events */ + ret = + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), + event); + break; + } + +done: + gst_object_unref (base_video_encoder); + return ret; + +newseg_wrong_format: + { + GST_DEBUG_OBJECT (base_video_encoder, "received non TIME newsegment"); + gst_event_unref (event); + goto done; + } +} + +static const GstQueryType * +gst_base_video_encoder_get_query_types (GstPad * pad) +{ + static const GstQueryType query_types[] = { + GST_QUERY_CONVERT, + GST_QUERY_LATENCY, + 0 + }; + + return query_types; +} + +static gboolean 
+gst_base_video_encoder_src_query (GstPad * pad, GstQuery * query) +{ + GstBaseVideoEncoder *enc; + gboolean res; + GstPad *peerpad; + + enc = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad)); + peerpad = gst_pad_get_peer (GST_BASE_VIDEO_CODEC_SINK_PAD (enc)); + + switch GST_QUERY_TYPE + (query) { + case GST_QUERY_CONVERT: + { + GstFormat src_fmt, dest_fmt; + gint64 src_val, dest_val; + + gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val); + res = + gst_base_video_encoded_video_convert (&enc->state, src_fmt, src_val, + &dest_fmt, &dest_val); + if (!res) + goto error; + gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val); + break; + } + case GST_QUERY_LATENCY: + { + gboolean live; + GstClockTime min_latency, max_latency; + + res = gst_pad_query (peerpad, query); + if (res) { + gst_query_parse_latency (query, &live, &min_latency, &max_latency); + + min_latency += enc->min_latency; + if (max_latency != GST_CLOCK_TIME_NONE) { + max_latency += enc->max_latency; + } + + gst_query_set_latency (query, live, min_latency, max_latency); + } + } + break; + default: + res = gst_pad_query_default (pad, query); + } + gst_object_unref (peerpad); + gst_object_unref (enc); + return res; + +error: + GST_DEBUG_OBJECT (enc, "query failed"); + gst_object_unref (peerpad); + gst_object_unref (enc); + return res; +} + +static gboolean +gst_pad_is_negotiated (GstPad * pad) +{ + GstCaps *caps; + + g_return_val_if_fail (pad != NULL, FALSE); + + caps = gst_pad_get_negotiated_caps (pad); + if (caps) { + gst_caps_unref (caps); + return TRUE; + } + + return FALSE; +} + +static GstFlowReturn +gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf) +{ + GstBaseVideoEncoder *base_video_encoder; + GstBaseVideoEncoderClass *klass; + GstVideoFrame *frame; + + if (!gst_pad_is_negotiated (pad)) { + return GST_FLOW_NOT_NEGOTIATED; + } + + base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad)); + klass = GST_BASE_VIDEO_ENCODER_GET_CLASS 
(base_video_encoder); + + if (base_video_encoder->sink_clipping) { + gint64 start = GST_BUFFER_TIMESTAMP (buf); + gint64 stop = start + GST_BUFFER_DURATION (buf); + gint64 clip_start; + gint64 clip_stop; + + if (!gst_segment_clip (&base_video_encoder->state.segment, + GST_FORMAT_TIME, start, stop, &clip_start, &clip_stop)) { + GST_DEBUG ("clipping to segment dropped frame"); + goto done; + } + } + + frame = g_malloc0 (sizeof (GstVideoFrame)); + frame->sink_buffer = buf; + frame->presentation_timestamp = GST_BUFFER_TIMESTAMP (buf); + frame->presentation_duration = GST_BUFFER_DURATION (buf); + frame->presentation_frame_number = + base_video_encoder->presentation_frame_number; + base_video_encoder->presentation_frame_number++; + + base_video_encoder->frames = + g_list_append (base_video_encoder->frames, frame); + + klass->handle_frame (base_video_encoder, frame); + +done: + g_object_unref (base_video_encoder); + + return GST_FLOW_OK; +} + +static GstStateChangeReturn +gst_base_video_encoder_change_state (GstElement * element, + GstStateChange transition) +{ + GstBaseVideoEncoder *base_video_encoder; + GstBaseVideoEncoderClass *base_video_encoder_class; + GstStateChangeReturn ret; + + base_video_encoder = GST_BASE_VIDEO_ENCODER (element); + base_video_encoder_class = GST_BASE_VIDEO_ENCODER_GET_CLASS (element); + + switch (transition) { + default: + break; + } + + ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); + + switch (transition) { + case GST_STATE_CHANGE_PAUSED_TO_READY: + if (base_video_encoder_class->stop) { + base_video_encoder_class->stop (base_video_encoder); + } + break; + default: + break; + } + + return ret; +} + +GstFlowReturn +gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder, + GstVideoFrame * frame) +{ + GstFlowReturn ret; + GstBaseVideoEncoderClass *base_video_encoder_class; + + base_video_encoder_class = + GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder); + + frame->system_frame_number = 
base_video_encoder->system_frame_number; + base_video_encoder->system_frame_number++; + + if (frame->is_sync_point) { + base_video_encoder->distance_from_sync = 0; + GST_BUFFER_FLAG_UNSET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT); + } else { + GST_BUFFER_FLAG_SET (frame->src_buffer, GST_BUFFER_FLAG_DELTA_UNIT); + } + + frame->distance_from_sync = base_video_encoder->distance_from_sync; + base_video_encoder->distance_from_sync++; + + frame->decode_frame_number = frame->system_frame_number - 1; + if (frame->decode_frame_number < 0) { + frame->decode_timestamp = 0; + } else { + frame->decode_timestamp = gst_util_uint64_scale (frame->decode_frame_number, + GST_SECOND * base_video_encoder->state.fps_d, + base_video_encoder->state.fps_n); + } + + GST_BUFFER_TIMESTAMP (frame->src_buffer) = frame->presentation_timestamp; + GST_BUFFER_DURATION (frame->src_buffer) = frame->presentation_duration; + GST_BUFFER_OFFSET (frame->src_buffer) = frame->decode_timestamp; + + base_video_encoder->frames = + g_list_remove (base_video_encoder->frames, frame); + + if (!base_video_encoder->set_output_caps) { + GstCaps *caps; + + if (base_video_encoder_class->get_caps) { + caps = base_video_encoder_class->get_caps (base_video_encoder); + } else { + caps = gst_caps_new_simple ("video/unknown", NULL); + } + base_video_encoder->caps = gst_caps_ref (caps); + gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), caps); + base_video_encoder->set_output_caps = TRUE; + } + + if (base_video_encoder_class->shape_output) { + ret = base_video_encoder_class->shape_output (base_video_encoder, frame); + } else { + ret = + gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), + frame->src_buffer); + } + + g_free (frame); + + return ret; +} + +int +gst_base_video_encoder_get_height (GstBaseVideoEncoder * base_video_encoder) +{ + return base_video_encoder->state.height; +} + +int +gst_base_video_encoder_get_width (GstBaseVideoEncoder * base_video_encoder) +{ + return 
base_video_encoder->state.width; +} + +const GstVideoState * +gst_base_video_encoder_get_state (GstBaseVideoEncoder * base_video_encoder) +{ + return &base_video_encoder->state; +} + +GstFlowReturn +gst_base_video_encoder_end_of_stream (GstBaseVideoEncoder * base_video_encoder, + GstBuffer * buffer) +{ + + if (base_video_encoder->frames) { + GST_WARNING ("EOS with frames left over"); + } + + return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), + buffer); +} + +void +gst_base_video_encoder_set_latency (GstBaseVideoEncoder * base_video_encoder, + GstClockTime min_latency, GstClockTime max_latency) +{ + g_return_if_fail (min_latency >= 0); + g_return_if_fail (max_latency >= min_latency); + + base_video_encoder->min_latency = min_latency; + base_video_encoder->max_latency = max_latency; +} + +void +gst_base_video_encoder_set_latency_fields (GstBaseVideoEncoder * + base_video_encoder, int n_fields) +{ + gint64 latency; + + latency = gst_util_uint64_scale (n_fields, + base_video_encoder->state.fps_d * GST_SECOND, + 2 * base_video_encoder->state.fps_n); + + gst_base_video_encoder_set_latency (base_video_encoder, latency, latency); + +} diff --git a/gst-libs/gst/video/gstbasevideoencoder.h b/gst-libs/gst/video/gstbasevideoencoder.h new file mode 100644 index 00000000..a3b6e3d7 --- /dev/null +++ b/gst-libs/gst/video/gstbasevideoencoder.h @@ -0,0 +1,118 @@ +/* GStreamer + * Copyright (C) 2008 David Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. 
+ * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. + */ + +#ifndef _GST_BASE_VIDEO_ENCODER_H_ +#define _GST_BASE_VIDEO_ENCODER_H_ + +#include +#include + +G_BEGIN_DECLS + +#define GST_TYPE_BASE_VIDEO_ENCODER \ + (gst_base_video_encoder_get_type()) +#define GST_BASE_VIDEO_ENCODER(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BASE_VIDEO_ENCODER,GstBaseVideoEncoder)) +#define GST_BASE_VIDEO_ENCODER_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BASE_VIDEO_ENCODER,GstBaseVideoEncoderClass)) +#define GST_BASE_VIDEO_ENCODER_GET_CLASS(obj) \ + (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_BASE_VIDEO_ENCODER,GstBaseVideoEncoderClass)) +#define GST_IS_BASE_VIDEO_ENCODER(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BASE_VIDEO_ENCODER)) +#define GST_IS_BASE_VIDEO_ENCODER_CLASS(obj) \ + (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BASE_VIDEO_ENCODER)) + +/** + * GST_BASE_VIDEO_ENCODER_SINK_NAME: + * + * The name of the templates for the sink pad. + */ +#define GST_BASE_VIDEO_ENCODER_SINK_NAME "sink" +/** + * GST_BASE_VIDEO_ENCODER_SRC_NAME: + * + * The name of the templates for the source pad. 
+ */ +#define GST_BASE_VIDEO_ENCODER_SRC_NAME "src" + + +typedef struct _GstBaseVideoEncoder GstBaseVideoEncoder; +typedef struct _GstBaseVideoEncoderClass GstBaseVideoEncoderClass; + +struct _GstBaseVideoEncoder +{ + GstBaseVideoCodec base_video_codec; + + /*< private >*/ + GList *frames; + + GstVideoState state; + + gboolean sink_clipping; + + guint64 presentation_frame_number; + guint64 system_frame_number; + int distance_from_sync; + + GstCaps *caps; + gboolean set_output_caps; + + gint64 min_latency; + gint64 max_latency; +}; + +struct _GstBaseVideoEncoderClass +{ + GstBaseVideoCodecClass base_video_codec_class; + + gboolean (*set_format) (GstBaseVideoEncoder *coder, GstVideoState *state); + gboolean (*start) (GstBaseVideoEncoder *coder); + gboolean (*stop) (GstBaseVideoEncoder *coder); + gboolean (*finish) (GstBaseVideoEncoder *coder, GstVideoFrame *frame); + gboolean (*handle_frame) (GstBaseVideoEncoder *coder, GstVideoFrame *frame); + GstFlowReturn (*shape_output) (GstBaseVideoEncoder *coder, GstVideoFrame *frame); + GstCaps *(*get_caps) (GstBaseVideoEncoder *coder); + +}; + +GType gst_base_video_encoder_get_type (void); + +int gst_base_video_encoder_get_width (GstBaseVideoEncoder *coder); +int gst_base_video_encoder_get_height (GstBaseVideoEncoder *coder); +const GstVideoState *gst_base_video_encoder_get_state (GstBaseVideoEncoder *coder); + +guint64 gst_base_video_encoder_get_timestamp_offset (GstBaseVideoEncoder *coder); + +GstVideoFrame *gst_base_video_encoder_get_frame (GstBaseVideoEncoder *coder, + int frame_number); +GstFlowReturn gst_base_video_encoder_finish_frame (GstBaseVideoEncoder *base_video_encoder, + GstVideoFrame *frame); +GstFlowReturn gst_base_video_encoder_end_of_stream (GstBaseVideoEncoder *base_video_encoder, + GstBuffer *buffer); + +void gst_base_video_encoder_set_latency (GstBaseVideoEncoder *base_video_encoder, + GstClockTime min_latency, GstClockTime max_latency); +void gst_base_video_encoder_set_latency_fields (GstBaseVideoEncoder 
*base_video_encoder, + int n_fields); + + +G_END_DECLS + +#endif + diff --git a/gst-libs/gst/video/gstbasevideoparse.c b/gst-libs/gst/video/gstbasevideoparse.c new file mode 100644 index 00000000..66695c7b --- /dev/null +++ b/gst-libs/gst/video/gstbasevideoparse.c @@ -0,0 +1,870 @@ +/* Schrodinger + * Copyright (C) 2006 David Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstbasevideoparse.h" + +#include +#include + +GST_DEBUG_CATEGORY_EXTERN (basevideo_debug); +#define GST_CAT_DEFAULT basevideo_debug + + + +/* GstBaseVideoParse signals and args */ +enum +{ + LAST_SIGNAL +}; + +enum +{ + ARG_0 +}; + +static void gst_base_video_parse_finalize (GObject * object); + +static const GstQueryType *gst_base_video_parse_get_query_types (GstPad * pad); +static gboolean gst_base_video_parse_src_query (GstPad * pad, GstQuery * query); +static gboolean gst_base_video_parse_sink_query (GstPad * pad, + GstQuery * query); +static gboolean gst_base_video_parse_src_event (GstPad * pad, GstEvent * event); +static gboolean gst_base_video_parse_sink_event (GstPad * pad, + GstEvent * event); +static GstStateChangeReturn gst_base_video_parse_change_state (GstElement * + element, GstStateChange transition); +static GstFlowReturn gst_base_video_parse_push_all (GstBaseVideoParse * + base_video_parse, gboolean at_eos); +static GstFlowReturn gst_base_video_parse_chain (GstPad * pad, GstBuffer * buf); +static void gst_base_video_parse_free_frame (GstVideoFrame * frame); +static GstVideoFrame *gst_base_video_parse_new_frame (GstBaseVideoParse * + base_video_parse); + + +GST_BOILERPLATE (GstBaseVideoParse, gst_base_video_parse, + GstBaseVideoCodec, GST_TYPE_BASE_VIDEO_CODEC); + +static void +gst_base_video_parse_base_init (gpointer g_class) +{ + +} + +static void +gst_base_video_parse_class_init (GstBaseVideoParseClass * klass) +{ + GObjectClass *gobject_class; + GstElementClass *element_class; + + gobject_class = G_OBJECT_CLASS (klass); + element_class = GST_ELEMENT_CLASS (klass); + + gobject_class->finalize = gst_base_video_parse_finalize; + + element_class->change_state = gst_base_video_parse_change_state; +} + +static void +gst_base_video_parse_init (GstBaseVideoParse * base_video_parse, + GstBaseVideoParseClass * klass) +{ + GstPad *pad; + + GST_DEBUG ("gst_base_video_parse_init"); + + 
pad = GST_BASE_VIDEO_CODEC_SINK_PAD (base_video_parse); + + gst_pad_set_chain_function (pad, gst_base_video_parse_chain); + gst_pad_set_query_function (pad, gst_base_video_parse_sink_query); + gst_pad_set_event_function (pad, gst_base_video_parse_sink_event); + + pad = GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse); + + gst_pad_set_query_type_function (pad, gst_base_video_parse_get_query_types); + gst_pad_set_query_function (pad, gst_base_video_parse_src_query); + gst_pad_set_event_function (pad, gst_base_video_parse_src_event); + + base_video_parse->input_adapter = gst_adapter_new (); + base_video_parse->output_adapter = gst_adapter_new (); + + base_video_parse->reorder_depth = 1; + + base_video_parse->current_frame = + gst_base_video_parse_new_frame (base_video_parse); +} + +static void +gst_base_video_parse_reset (GstBaseVideoParse * base_video_parse) +{ + GST_DEBUG ("reset"); + + base_video_parse->discont = TRUE; + base_video_parse->have_sync = FALSE; + + base_video_parse->system_frame_number = 0; + base_video_parse->presentation_frame_number = 0; + + if (base_video_parse->caps) { + gst_caps_unref (base_video_parse->caps); + base_video_parse->caps = NULL; + } + + gst_segment_init (&base_video_parse->state.segment, GST_FORMAT_TIME); + gst_adapter_clear (base_video_parse->input_adapter); + gst_adapter_clear (base_video_parse->output_adapter); + + if (base_video_parse->current_frame) { + gst_base_video_parse_free_frame (base_video_parse->current_frame); + } + base_video_parse->current_frame = + gst_base_video_parse_new_frame (base_video_parse); + +} + +static void +gst_base_video_parse_finalize (GObject * object) +{ + GstBaseVideoParse *base_video_parse; + + g_return_if_fail (GST_IS_BASE_VIDEO_PARSE (object)); + base_video_parse = GST_BASE_VIDEO_PARSE (object); + + if (base_video_parse->input_adapter) { + g_object_unref (base_video_parse->input_adapter); + } + if (base_video_parse->output_adapter) { + g_object_unref (base_video_parse->output_adapter); + } + + 
G_OBJECT_CLASS (parent_class)->finalize (object); +} + +static const GstQueryType * +gst_base_video_parse_get_query_types (GstPad * pad) +{ + static const GstQueryType query_types[] = { + GST_QUERY_POSITION, + GST_QUERY_DURATION, + GST_QUERY_CONVERT, + 0 + }; + + return query_types; +} + +#if 0 +static gboolean +gst_base_video_parse_src_convert (GstPad * pad, + GstFormat src_format, gint64 src_value, + GstFormat * dest_format, gint64 * dest_value) +{ + gboolean res; + GstBaseVideoParse *dec; + + if (src_format == *dest_format) { + *dest_value = src_value; + return TRUE; + } + + dec = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad)); + + if (src_format == GST_FORMAT_DEFAULT && *dest_format == GST_FORMAT_TIME) { + if (dec->fps_d != 0) { + *dest_value = gst_util_uint64_scale (granulepos_to_frame (src_value), + dec->fps_d * GST_SECOND, dec->fps_n); + res = TRUE; + } else { + res = FALSE; + } + } else { + GST_WARNING ("unhandled conversion from %d to %d", src_format, + *dest_format); + res = FALSE; + } + + gst_object_unref (dec); + + return res; +} + +static gboolean +gst_base_video_parse_sink_convert (GstPad * pad, + GstFormat src_format, gint64 src_value, + GstFormat * dest_format, gint64 * dest_value) +{ + gboolean res = TRUE; + GstBaseVideoParse *dec; + + if (src_format == *dest_format) { + *dest_value = src_value; + return TRUE; + } + + dec = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad)); + + /* FIXME: check if we are in a decoding state */ + + switch (src_format) { + case GST_FORMAT_DEFAULT: + switch (*dest_format) { + case GST_FORMAT_TIME: + *dest_value = gst_util_uint64_scale (src_value, + dec->fps_d * GST_SECOND, dec->fps_n); + break; + default: + res = FALSE; + } + break; + case GST_FORMAT_TIME: + switch (*dest_format) { + case GST_FORMAT_DEFAULT: + { + *dest_value = gst_util_uint64_scale (src_value, + dec->fps_n, dec->fps_d * GST_SECOND); + break; + } + default: + res = FALSE; + break; + } + break; + default: + res = FALSE; + break; + } + + gst_object_unref 
(dec); + + return res; +} +#endif + +static gboolean +gst_base_video_parse_src_query (GstPad * pad, GstQuery * query) +{ + GstBaseVideoParse *base_parse; + gboolean res = FALSE; + + base_parse = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad)); + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_POSITION: + { + GstFormat format; + gint64 time; + gint64 value; + + gst_query_parse_position (query, &format, NULL); + + time = gst_util_uint64_scale (base_parse->presentation_frame_number, + base_parse->state.fps_n, base_parse->state.fps_d); + time += base_parse->state.segment.time; + GST_DEBUG ("query position %lld", time); + res = gst_base_video_encoded_video_convert (&base_parse->state, + GST_FORMAT_TIME, time, &format, &value); + if (!res) + goto error; + + gst_query_set_position (query, format, value); + break; + } + case GST_QUERY_DURATION: + res = + gst_pad_query (GST_PAD_PEER (GST_BASE_VIDEO_CODEC_SINK_PAD + (base_parse)), query); + if (!res) + goto error; + break; + case GST_QUERY_CONVERT: + { + GstFormat src_fmt, dest_fmt; + gint64 src_val, dest_val; + + GST_WARNING ("query convert"); + + gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val); + res = gst_base_video_encoded_video_convert (&base_parse->state, + src_fmt, src_val, &dest_fmt, &dest_val); + if (!res) + goto error; + gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val); + break; + } + default: + res = gst_pad_query_default (pad, query); + break; + } +done: + gst_object_unref (base_parse); + + return res; +error: + GST_DEBUG_OBJECT (base_parse, "query failed"); + goto done; +} + +static gboolean +gst_base_video_parse_sink_query (GstPad * pad, GstQuery * query) +{ + GstBaseVideoParse *base_video_parse; + gboolean res = FALSE; + + base_video_parse = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad)); + + switch (GST_QUERY_TYPE (query)) { + case GST_QUERY_CONVERT: + { + GstFormat src_fmt, dest_fmt; + gint64 src_val, dest_val; + + gst_query_parse_convert (query, &src_fmt, 
&src_val, &dest_fmt, &dest_val); + res = gst_base_video_encoded_video_convert (&base_video_parse->state, + src_fmt, src_val, &dest_fmt, &dest_val); + if (!res) + goto error; + gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val); + break; + } + default: + res = gst_pad_query_default (pad, query); + break; + } +done: + gst_object_unref (base_video_parse); + + return res; +error: + GST_DEBUG_OBJECT (base_video_parse, "query failed"); + goto done; +} + +static gboolean +gst_base_video_parse_src_event (GstPad * pad, GstEvent * event) +{ + GstBaseVideoParse *base_video_parse; + gboolean res = FALSE; + + base_video_parse = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad)); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_SEEK: + { + GstFormat format, tformat; + gdouble rate; + GstEvent *real_seek; + GstSeekFlags flags; + GstSeekType cur_type, stop_type; + gint64 cur, stop; + gint64 tcur, tstop; + + gst_event_parse_seek (event, &rate, &format, &flags, &cur_type, + &cur, &stop_type, &stop); + gst_event_unref (event); + + tformat = GST_FORMAT_TIME; + res = gst_base_video_encoded_video_convert (&base_video_parse->state, + format, cur, &tformat, &tcur); + if (!res) + goto convert_error; + res = gst_base_video_encoded_video_convert (&base_video_parse->state, + format, stop, &tformat, &tstop); + if (!res) + goto convert_error; + + real_seek = gst_event_new_seek (rate, GST_FORMAT_TIME, + flags, cur_type, tcur, stop_type, tstop); + + res = + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SINK_PAD (base_video_parse), + real_seek); + + break; + } +#if 0 + case GST_EVENT_QOS: + { + gdouble proportion; + GstClockTimeDiff diff; + GstClockTime timestamp; + + gst_event_parse_qos (event, &proportion, &diff, ×tamp); + + GST_OBJECT_LOCK (base_video_parse); + base_video_parse->proportion = proportion; + base_video_parse->earliest_time = timestamp + diff; + GST_OBJECT_UNLOCK (base_video_parse); + + GST_DEBUG_OBJECT (base_video_parse, + "got QoS %" GST_TIME_FORMAT ", %" 
G_GINT64_FORMAT, + GST_TIME_ARGS (timestamp), diff); + + res = gst_pad_push_event (base_video_parse->sinkpad, event); + break; + } +#endif + default: + res = + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SINK_PAD (base_video_parse), + event); + break; + } +done: + gst_object_unref (base_video_parse); + return res; + +convert_error: + GST_DEBUG_OBJECT (base_video_parse, "could not convert format"); + goto done; +} + +static gboolean +gst_base_video_parse_sink_event (GstPad * pad, GstEvent * event) +{ + GstBaseVideoParse *base_video_parse; + gboolean ret = FALSE; + + base_video_parse = GST_BASE_VIDEO_PARSE (gst_pad_get_parent (pad)); + + switch (GST_EVENT_TYPE (event)) { + case GST_EVENT_FLUSH_START: + ret = + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse), + event); + break; + case GST_EVENT_FLUSH_STOP: + gst_base_video_parse_reset (base_video_parse); + ret = + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse), + event); + break; + case GST_EVENT_EOS: + if (gst_base_video_parse_push_all (base_video_parse, + FALSE) == GST_FLOW_ERROR) { + gst_event_unref (event); + return FALSE; + } + + ret = + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse), + event); + break; + case GST_EVENT_NEWSEGMENT: + { + gboolean update; + GstFormat format; + gdouble rate; + gint64 start, stop, time; + + gst_event_parse_new_segment (event, &update, &rate, &format, &start, + &stop, &time); + + if (format != GST_FORMAT_TIME) + goto newseg_wrong_format; + + if (rate <= 0.0) + goto newseg_wrong_rate; + + GST_DEBUG ("newsegment %lld %lld", start, time); + gst_segment_set_newsegment (&base_video_parse->state.segment, update, + rate, format, start, stop, time); + + ret = + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse), + event); + break; + } + default: + ret = + gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse), + event); + break; + } +done: + gst_object_unref (base_video_parse); + return ret; + 
+newseg_wrong_format: + GST_DEBUG_OBJECT (base_video_parse, "received non TIME newsegment"); + gst_event_unref (event); + goto done; + +newseg_wrong_rate: + GST_DEBUG_OBJECT (base_video_parse, "negative rates not supported"); + gst_event_unref (event); + goto done; +} + + +static GstStateChangeReturn +gst_base_video_parse_change_state (GstElement * element, + GstStateChange transition) +{ + GstBaseVideoParse *base_parse = GST_BASE_VIDEO_PARSE (element); + GstStateChangeReturn ret; + + switch (transition) { + case GST_STATE_CHANGE_NULL_TO_READY: + break; + case GST_STATE_CHANGE_READY_TO_PAUSED: + gst_base_video_parse_reset (base_parse); + break; + case GST_STATE_CHANGE_PAUSED_TO_PLAYING: + break; + default: + break; + } + + ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition); + + switch (transition) { + case GST_STATE_CHANGE_PLAYING_TO_PAUSED: + break; + case GST_STATE_CHANGE_PAUSED_TO_READY: + gst_base_video_parse_reset (base_parse); + break; + case GST_STATE_CHANGE_READY_TO_NULL: + break; + default: + break; + } + + return ret; +} + +static guint64 +gst_base_video_parse_get_timestamp (GstBaseVideoParse * base_video_parse, + int picture_number) +{ + if (picture_number < 0) { + return base_video_parse->timestamp_offset - + (gint64) gst_util_uint64_scale (-picture_number, + base_video_parse->state.fps_d * GST_SECOND, + base_video_parse->state.fps_n); + } else { + return base_video_parse->timestamp_offset + + gst_util_uint64_scale (picture_number, + base_video_parse->state.fps_d * GST_SECOND, + base_video_parse->state.fps_n); + } +} + +static GstFlowReturn +gst_base_video_parse_push_all (GstBaseVideoParse * base_video_parse, + gboolean at_eos) +{ + GstFlowReturn ret = GST_FLOW_OK; + + /* FIXME do stuff */ + + return ret; +} + +static GstBuffer * +gst_adapter_get_buffer (GstAdapter * adapter) +{ + return gst_buffer_ref (GST_BUFFER (adapter->buflist->data)); + +} + +static GstFlowReturn +gst_base_video_parse_chain (GstPad * pad, GstBuffer * buf) 
+{ + GstBaseVideoParse *base_video_parse; + GstBaseVideoParseClass *klass; + GstBuffer *buffer; + GstFlowReturn ret; + + GST_DEBUG ("chain with %d bytes", GST_BUFFER_SIZE (buf)); + + base_video_parse = GST_BASE_VIDEO_PARSE (GST_PAD_PARENT (pad)); + klass = GST_BASE_VIDEO_PARSE_GET_CLASS (base_video_parse); + + if (!base_video_parse->started) { + klass->start (base_video_parse); + base_video_parse->started = TRUE; + } + + if (G_UNLIKELY (GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DISCONT))) { + GST_DEBUG_OBJECT (base_video_parse, "received DISCONT buffer"); + gst_base_video_parse_reset (base_video_parse); + base_video_parse->discont = TRUE; + base_video_parse->have_sync = FALSE; + } + + if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE) { + base_video_parse->last_timestamp = GST_BUFFER_TIMESTAMP (buf); + } + gst_adapter_push (base_video_parse->input_adapter, buf); + + if (!base_video_parse->have_sync) { + int n, m; + + GST_DEBUG ("no sync, scanning"); + + n = gst_adapter_available (base_video_parse->input_adapter); + m = klass->scan_for_sync (base_video_parse->input_adapter, FALSE, 0, n); + + gst_adapter_flush (base_video_parse->input_adapter, m); + + if (m < n) { + GST_DEBUG ("found possible sync after %d bytes (of %d)", m, n); + + /* this is only "maybe" sync */ + base_video_parse->have_sync = TRUE; + } + + if (!base_video_parse->have_sync) { + return GST_FLOW_OK; + } + } + + /* FIXME: use gst_adapter_prev_timestamp() here instead? 
*/ + buffer = gst_adapter_get_buffer (base_video_parse->input_adapter); + + gst_buffer_unref (buffer); + + /* FIXME check klass->parse_data */ + + do { + ret = klass->parse_data (base_video_parse, FALSE); + } while (ret == GST_FLOW_OK); + + if (ret == GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA) { + return GST_FLOW_OK; + } + return ret; +} + +GstVideoState * +gst_base_video_parse_get_state (GstBaseVideoParse * base_video_parse) +{ + return &base_video_parse->state; +} + +void +gst_base_video_parse_set_state (GstBaseVideoParse * base_video_parse, + GstVideoState * state) +{ + GST_DEBUG ("set_state"); + + memcpy (&base_video_parse->state, state, sizeof (GstVideoState)); + + /* FIXME set caps */ + +} + + +gboolean +gst_base_video_parse_set_src_caps (GstBaseVideoParse * base_video_parse, + GstCaps * caps) +{ + g_return_val_if_fail (GST_IS_BASE_VIDEO_PARSE (base_video_parse), FALSE); + + GST_DEBUG ("set_src_caps"); + + return gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse), + caps); +} + +void +gst_base_video_parse_lost_sync (GstBaseVideoParse * base_video_parse) +{ + g_return_if_fail (GST_IS_BASE_VIDEO_PARSE (base_video_parse)); + + GST_DEBUG ("lost_sync"); + + if (gst_adapter_available (base_video_parse->input_adapter) >= 1) { + gst_adapter_flush (base_video_parse->input_adapter, 1); + } + + base_video_parse->have_sync = FALSE; +} + +GstVideoFrame * +gst_base_video_parse_get_frame (GstBaseVideoParse * base_video_parse) +{ + g_return_val_if_fail (GST_IS_BASE_VIDEO_PARSE (base_video_parse), NULL); + + return base_video_parse->current_frame; +} + +void +gst_base_video_parse_add_to_frame (GstBaseVideoParse * base_video_parse, + int n_bytes) +{ + GstBuffer *buf; + + GST_DEBUG ("add_to_frame"); + + buf = gst_adapter_take_buffer (base_video_parse->input_adapter, n_bytes); + + gst_adapter_push (base_video_parse->output_adapter, buf); +} + +GstFlowReturn +gst_base_video_parse_finish_frame (GstBaseVideoParse * base_video_parse) +{ + GstVideoFrame *frame = 
base_video_parse->current_frame; + GstBuffer *buffer; + GstBaseVideoParseClass *base_video_parse_class; + GstFlowReturn ret; + + GST_DEBUG ("finish_frame"); + + base_video_parse_class = GST_BASE_VIDEO_PARSE_GET_CLASS (base_video_parse); + + buffer = gst_adapter_take_buffer (base_video_parse->output_adapter, + gst_adapter_available (base_video_parse->output_adapter)); + + if (frame->is_sync_point) { + base_video_parse->timestamp_offset = base_video_parse->last_timestamp - + gst_util_uint64_scale (frame->presentation_frame_number, + base_video_parse->state.fps_d * GST_SECOND, + base_video_parse->state.fps_n); + base_video_parse->distance_from_sync = 0; + } + + frame->distance_from_sync = base_video_parse->distance_from_sync; + base_video_parse->distance_from_sync++; + + frame->presentation_timestamp = + gst_base_video_parse_get_timestamp (base_video_parse, + frame->presentation_frame_number); + frame->presentation_duration = + gst_base_video_parse_get_timestamp (base_video_parse, + frame->presentation_frame_number + 1) - frame->presentation_timestamp; + frame->decode_timestamp = + gst_base_video_parse_get_timestamp (base_video_parse, + frame->decode_frame_number); + + GST_BUFFER_TIMESTAMP (buffer) = frame->presentation_timestamp; + GST_BUFFER_DURATION (buffer) = frame->presentation_duration; + if (frame->decode_frame_number < 0) { + GST_BUFFER_OFFSET (buffer) = 0; + } else { + GST_BUFFER_OFFSET (buffer) = frame->decode_timestamp; + } + GST_BUFFER_OFFSET_END (buffer) = GST_CLOCK_TIME_NONE; + + GST_DEBUG ("pts %" GST_TIME_FORMAT, + GST_TIME_ARGS (frame->presentation_timestamp)); + GST_DEBUG ("dts %" GST_TIME_FORMAT, GST_TIME_ARGS (frame->decode_timestamp)); + GST_DEBUG ("dist %d", frame->distance_from_sync); + + if (frame->is_sync_point) { + GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DELTA_UNIT); + } else { + GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DELTA_UNIT); + } + + frame->src_buffer = buffer; + ret = base_video_parse_class->shape_output (base_video_parse, 
frame); + + gst_base_video_parse_free_frame (base_video_parse->current_frame); + + /* create new frame */ + base_video_parse->current_frame = + gst_base_video_parse_new_frame (base_video_parse); + + return ret; +} + +static void +gst_base_video_parse_free_frame (GstVideoFrame * frame) +{ + if (frame->sink_buffer) { + gst_buffer_unref (frame->sink_buffer); + } +#if 0 + if (frame->src_buffer) { + gst_buffer_unref (frame->src_buffer); + } +#endif + + g_free (frame); +} + +static GstVideoFrame * +gst_base_video_parse_new_frame (GstBaseVideoParse * base_video_parse) +{ + GstVideoFrame *frame; + + frame = g_malloc0 (sizeof (GstVideoFrame)); + + frame->system_frame_number = base_video_parse->system_frame_number; + base_video_parse->system_frame_number++; + + frame->decode_frame_number = frame->system_frame_number - + base_video_parse->reorder_depth; + + return frame; +} + +void +gst_base_video_parse_set_sync_point (GstBaseVideoParse * base_video_parse) +{ + GST_DEBUG ("set_sync_point"); + + base_video_parse->current_frame->is_sync_point = TRUE; + + base_video_parse->distance_from_sync = 0; +} + +GstFlowReturn +gst_base_video_parse_push (GstBaseVideoParse * base_video_parse, + GstBuffer * buffer) +{ + GstBaseVideoParseClass *base_video_parse_class; + + base_video_parse_class = GST_BASE_VIDEO_PARSE_GET_CLASS (base_video_parse); + + if (base_video_parse->caps == NULL) { + gboolean ret; + + base_video_parse->caps = + base_video_parse_class->get_caps (base_video_parse); + + ret = gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse), + base_video_parse->caps); + + if (!ret) { + GST_WARNING ("pad didn't accept caps"); + return GST_FLOW_ERROR; + } + } + gst_buffer_set_caps (buffer, base_video_parse->caps); + + GST_DEBUG ("pushing ts=%lld dur=%lld off=%lld off_end=%lld", + GST_BUFFER_TIMESTAMP (buffer), + GST_BUFFER_DURATION (buffer), + GST_BUFFER_OFFSET (buffer), GST_BUFFER_OFFSET_END (buffer)); + + if (base_video_parse->discont) { + GST_BUFFER_FLAG_SET (buffer, 
GST_BUFFER_FLAG_DISCONT); + base_video_parse->discont = FALSE; + } else { + GST_BUFFER_FLAG_UNSET (buffer, GST_BUFFER_FLAG_DISCONT); + } + + return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_parse), buffer); +} diff --git a/gst-libs/gst/video/gstbasevideoparse.h b/gst-libs/gst/video/gstbasevideoparse.h new file mode 100644 index 00000000..ce69fe51 --- /dev/null +++ b/gst-libs/gst/video/gstbasevideoparse.h @@ -0,0 +1,136 @@ +/* GStreamer + * Copyright (C) 2008 David Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifndef _GST_BASE_VIDEO_PARSE_H_ +#define _GST_BASE_VIDEO_PARSE_H_ + +#include +#include + +G_BEGIN_DECLS + +#define GST_TYPE_BASE_VIDEO_PARSE \ + (gst_base_video_parse_get_type()) +#define GST_BASE_VIDEO_PARSE(obj) \ + (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BASE_VIDEO_PARSE,GstBaseVideoParse)) +#define GST_BASE_VIDEO_PARSE_CLASS(klass) \ + (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BASE_VIDEO_PARSE,GstBaseVideoParseClass)) +#define GST_BASE_VIDEO_PARSE_GET_CLASS(obj) \ + (G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_BASE_VIDEO_PARSE,GstBaseVideoParseClass)) +#define GST_IS_BASE_VIDEO_PARSE(obj) \ + (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BASE_VIDEO_PARSE)) +#define GST_IS_BASE_VIDEO_PARSE_CLASS(obj) \ + (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BASE_VIDEO_PARSE)) + +/** + * GST_BASE_VIDEO_PARSE_SINK_NAME: + * + * The name of the templates for the sink pad. + */ +#define GST_BASE_VIDEO_PARSE_SINK_NAME "sink" +/** + * GST_BASE_VIDEO_PARSE_SRC_NAME: + * + * The name of the templates for the source pad. 
+ */ +#define GST_BASE_VIDEO_PARSE_SRC_NAME "src" + +/** + * GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA: + * + */ +#define GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS + +typedef struct _GstBaseVideoParse GstBaseVideoParse; +typedef struct _GstBaseVideoParseClass GstBaseVideoParseClass; + +struct _GstBaseVideoParse +{ + GstBaseVideoCodec base_video_codec; + + /*< private >*/ + GstAdapter *input_adapter; + GstAdapter *output_adapter; + + int reorder_depth; + + gboolean have_sync; + gboolean discont; + gboolean started; + + GstVideoFrame *current_frame; + GstVideoState state; + int distance_from_sync; + + gboolean sink_clipping; + + guint64 presentation_frame_number; + guint64 system_frame_number; + + GstCaps *caps; + gboolean set_output_caps; + + GstClockTime last_timestamp; + + gint64 timestamp_offset; +}; + +struct _GstBaseVideoParseClass +{ + GstBaseVideoCodecClass base_video_codec_class; + + gboolean (*start) (GstBaseVideoParse *parse); + gboolean (*stop) (GstBaseVideoParse *parse); + gboolean (*reset) (GstBaseVideoParse *parse); + GstFlowReturn (*parse_data) (GstBaseVideoParse *parse, gboolean at_eos); + int (*scan_for_sync) (GstAdapter *adapter, gboolean at_eos, + int offset, int n); + GstFlowReturn (*shape_output) (GstBaseVideoParse *parse, GstVideoFrame *frame); + GstCaps *(*get_caps) (GstBaseVideoParse *parse); + +}; + +GType gst_base_video_parse_get_type (void); + +int gst_base_video_parse_get_width (GstBaseVideoParse *parse); +int gst_base_video_parse_get_height (GstBaseVideoParse *parse); +GstVideoState *gst_base_video_parse_get_state (GstBaseVideoParse *parse); +void gst_base_video_parse_set_state (GstBaseVideoParse *parse, + GstVideoState *state); + +guint64 gst_base_video_parse_get_timestamp_offset (GstBaseVideoParse *parse); + +gboolean gst_base_video_parse_set_src_caps (GstBaseVideoParse *base_video_parse, GstCaps *caps); + +GstFlowReturn gst_base_video_parse_end_of_stream (GstBaseVideoParse *base_video_parse, + GstBuffer *buffer); + +void 
gst_base_video_parse_lost_sync (GstBaseVideoParse *base_video_parse); + +GstVideoFrame * gst_base_video_parse_get_frame (GstBaseVideoParse *base_video_parse); +void gst_base_video_parse_add_to_frame (GstBaseVideoParse *base_video_parse, int n_bytes); +GstFlowReturn gst_base_video_parse_finish_frame (GstBaseVideoParse *base_video_parse); +void gst_base_video_parse_set_sync_point (GstBaseVideoParse *base_video_parse); +GstFlowReturn gst_base_video_parse_push (GstBaseVideoParse *base_video_parse, + GstBuffer *buffer); + +G_END_DECLS + +#endif + diff --git a/gst-libs/gst/video/gstbasevideoutils.c b/gst-libs/gst/video/gstbasevideoutils.c new file mode 100644 index 00000000..3de6038c --- /dev/null +++ b/gst-libs/gst/video/gstbasevideoutils.c @@ -0,0 +1,162 @@ +/* GStreamer + * Copyright (C) 2008 David Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifdef HAVE_CONFIG_H +#include "config.h" +#endif + +#include "gstbasevideoutils.h" + +#include + +GST_DEBUG_CATEGORY_EXTERN (basevideo_debug); +#define GST_CAT_DEFAULT basevideo_debug + + +guint64 +gst_base_video_convert_bytes_to_frames (GstVideoState * state, guint64 bytes) +{ + return gst_util_uint64_scale_int (bytes, 1, state->bytes_per_picture); +} + +guint64 +gst_base_video_convert_frames_to_bytes (GstVideoState * state, guint64 frames) +{ + return frames * state->bytes_per_picture; +} + + +gboolean +gst_base_video_rawvideo_convert (GstVideoState * state, + GstFormat src_format, gint64 src_value, + GstFormat * dest_format, gint64 * dest_value) +{ + gboolean res = FALSE; + + if (src_format == *dest_format) { + *dest_value = src_value; + return TRUE; + } + + if (src_format == GST_FORMAT_BYTES && + *dest_format == GST_FORMAT_DEFAULT && state->bytes_per_picture != 0) { + /* convert bytes to frames */ + *dest_value = gst_util_uint64_scale_int (src_value, 1, + state->bytes_per_picture); + res = TRUE; + } else if (src_format == GST_FORMAT_DEFAULT && + *dest_format == GST_FORMAT_BYTES && state->bytes_per_picture != 0) { + /* convert bytes to frames */ + *dest_value = src_value * state->bytes_per_picture; + res = TRUE; + } else if (src_format == GST_FORMAT_DEFAULT && + *dest_format == GST_FORMAT_TIME && state->fps_n != 0) { + /* convert frames to time */ + /* FIXME add segment time? */ + *dest_value = gst_util_uint64_scale (src_value, + GST_SECOND * state->fps_d, state->fps_n); + res = TRUE; + } else if (src_format == GST_FORMAT_TIME && + *dest_format == GST_FORMAT_DEFAULT && state->fps_d != 0) { + /* convert time to frames */ + /* FIXME subtract segment time? 
*/ + *dest_value = gst_util_uint64_scale (src_value, state->fps_n, + GST_SECOND * state->fps_d); + res = TRUE; + } + + /* FIXME add bytes <--> time */ + + return res; +} + +gboolean +gst_base_video_encoded_video_convert (GstVideoState * state, + GstFormat src_format, gint64 src_value, + GstFormat * dest_format, gint64 * dest_value) +{ + gboolean res = FALSE; + + if (src_format == *dest_format) { + *dest_value = src_value; + return TRUE; + } + + GST_DEBUG ("src convert"); + +#if 0 + if (src_format == GST_FORMAT_DEFAULT && *dest_format == GST_FORMAT_TIME) { + if (dec->fps_d != 0) { + *dest_value = gst_util_uint64_scale (granulepos_to_frame (src_value), + dec->fps_d * GST_SECOND, dec->fps_n); + res = TRUE; + } else { + res = FALSE; + } + } else { + GST_WARNING ("unhandled conversion from %d to %d", src_format, + *dest_format); + res = FALSE; + } +#endif + + return res; +} + +gboolean +gst_base_video_state_from_caps (GstVideoState * state, GstCaps * caps) +{ + + gst_video_format_parse_caps (caps, &state->format, + &state->width, &state->height); + + gst_video_parse_caps_framerate (caps, &state->fps_n, &state->fps_d); + + state->par_n = 1; + state->par_d = 1; + gst_video_parse_caps_pixel_aspect_ratio (caps, &state->par_n, &state->par_d); + + { + GstStructure *structure = gst_caps_get_structure (caps, 0); + state->interlaced = FALSE; + gst_structure_get_boolean (structure, "interlaced", &state->interlaced); + } + + state->clean_width = state->width; + state->clean_height = state->height; + state->clean_offset_left = 0; + state->clean_offset_top = 0; + + /* FIXME need better error handling */ + return TRUE; +} + +GstClockTime +gst_video_state_get_timestamp (const GstVideoState * state, int frame_number) +{ + if (frame_number < 0) { + return state->segment.start - + (gint64) gst_util_uint64_scale (-frame_number, + state->fps_d * GST_SECOND, state->fps_n); + } else { + return state->segment.start + + gst_util_uint64_scale (frame_number, + state->fps_d * GST_SECOND, 
state->fps_n); + } +} diff --git a/gst-libs/gst/video/gstbasevideoutils.h b/gst-libs/gst/video/gstbasevideoutils.h new file mode 100644 index 00000000..b259fc3e --- /dev/null +++ b/gst-libs/gst/video/gstbasevideoutils.h @@ -0,0 +1,90 @@ +/* GStreamer + * Copyright (C) 2008 David Schleef + * + * This library is free software; you can redistribute it and/or + * modify it under the terms of the GNU Library General Public + * License as published by the Free Software Foundation; either + * version 2 of the License, or (at your option) any later version. + * + * This library is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * Library General Public License for more details. + * + * You should have received a copy of the GNU Library General Public + * License along with this library; if not, write to the + * Free Software Foundation, Inc., 59 Temple Place - Suite 330, + * Boston, MA 02111-1307, USA. 
+ */ + +#ifndef _GST_BASE_VIDEO_UTILS_H_ +#define _GST_BASE_VIDEO_UTILS_H_ + +#include +#include +#include + +G_BEGIN_DECLS + +typedef struct _GstVideoState GstVideoState; +typedef struct _GstVideoFrame GstVideoFrame; + +struct _GstVideoState +{ + GstVideoFormat format; + int width, height; + int fps_n, fps_d; + int par_n, par_d; + gboolean interlaced; + gboolean top_field_first; + + int clean_width, clean_height; + int clean_offset_left, clean_offset_top; + + int bytes_per_picture; + + GstSegment segment; + + int picture_number; +}; + +struct _GstVideoFrame +{ + guint64 decode_timestamp; + guint64 presentation_timestamp; + guint64 presentation_duration; + + gint system_frame_number; + gint decode_frame_number; + gint presentation_frame_number; + + int distance_from_sync; + gboolean is_sync_point; + gboolean is_eos; + + GstBuffer *sink_buffer; + GstBuffer *src_buffer; + + int field_index; + int n_fields; + + void *coder_hook; +}; + +gboolean gst_base_video_rawvideo_convert (GstVideoState *state, + GstFormat src_format, gint64 src_value, + GstFormat * dest_format, gint64 *dest_value); +gboolean gst_base_video_encoded_video_convert (GstVideoState *state, + GstFormat src_format, gint64 src_value, + GstFormat * dest_format, gint64 *dest_value); + +gboolean gst_base_video_state_from_caps (GstVideoState *state, + GstCaps *caps); + +GstClockTime gst_video_state_get_timestamp (const GstVideoState *state, + int frame_number); + +G_END_DECLS + +#endif + -- cgit v1.2.1 From 6a763f6a471e9fbbd86358b0175498a6af78666e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Tim-Philipp=20M=C3=BCller?= Date: Fri, 3 Jul 2009 16:08:38 +0100 Subject: basevideo, schroedinger: guard unstable API with GST_USE_UNSTABLE_API Add some guards and fat warnings to the header files with still unstable API, so people who just look at the installed headers know that it actually is unstable API. 
--- gst-libs/gst/video/Makefile.am | 2 +- gst-libs/gst/video/gstbasevideocodec.h | 5 +++++ gst-libs/gst/video/gstbasevideodecoder.h | 5 +++++ gst-libs/gst/video/gstbasevideoencoder.h | 5 +++++ gst-libs/gst/video/gstbasevideoparse.h | 5 +++++ gst-libs/gst/video/gstbasevideoutils.h | 5 +++++ 6 files changed, 26 insertions(+), 1 deletion(-) (limited to 'gst-libs/gst') diff --git a/gst-libs/gst/video/Makefile.am b/gst-libs/gst/video/Makefile.am index 2b7263b2..6836a058 100644 --- a/gst-libs/gst/video/Makefile.am +++ b/gst-libs/gst/video/Makefile.am @@ -18,7 +18,7 @@ libgstbasevideo_@GST_MAJORMINOR@include_HEADERS = \ gstbasevideoencoder.h \ gstbasevideoparse.h -libgstbasevideo_@GST_MAJORMINOR@_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) +libgstbasevideo_@GST_MAJORMINOR@_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS) -DGST_USE_UNSTABLE_API libgstbasevideo_@GST_MAJORMINOR@_la_LIBADD = $(GST_BASE_LIBS) $(GST_LIBS) libgstbasevideo_@GST_MAJORMINOR@_la_LDFLAGS = $(GST_LIB_LDFLAGS) $(GST_ALL_LDFLAGS) $(GST_LT_LDFLAGS) diff --git a/gst-libs/gst/video/gstbasevideocodec.h b/gst-libs/gst/video/gstbasevideocodec.h index f4a1cf95..b6acf62e 100644 --- a/gst-libs/gst/video/gstbasevideocodec.h +++ b/gst-libs/gst/video/gstbasevideocodec.h @@ -20,6 +20,11 @@ #ifndef _GST_BASE_VIDEO_CODEC_H_ #define _GST_BASE_VIDEO_CODEC_H_ +#ifndef GST_USE_UNSTABLE_API +#warning "GstBaseVideoCodec is unstable API and may change in future." +#warning "You can define GST_USE_UNSTABLE_API to avoid this warning." +#endif + #include #include diff --git a/gst-libs/gst/video/gstbasevideodecoder.h b/gst-libs/gst/video/gstbasevideodecoder.h index 5e734a2a..1daf728c 100644 --- a/gst-libs/gst/video/gstbasevideodecoder.h +++ b/gst-libs/gst/video/gstbasevideodecoder.h @@ -20,6 +20,11 @@ #ifndef _GST_BASE_VIDEO_DECODER_H_ #define _GST_BASE_VIDEO_DECODER_H_ +#ifndef GST_USE_UNSTABLE_API +#warning "GstBaseVideoDecoder is unstable API and may change in future." 
+#warning "You can define GST_USE_UNSTABLE_API to avoid this warning." +#endif + #include G_BEGIN_DECLS diff --git a/gst-libs/gst/video/gstbasevideoencoder.h b/gst-libs/gst/video/gstbasevideoencoder.h index a3b6e3d7..7b9f23b4 100644 --- a/gst-libs/gst/video/gstbasevideoencoder.h +++ b/gst-libs/gst/video/gstbasevideoencoder.h @@ -20,6 +20,11 @@ #ifndef _GST_BASE_VIDEO_ENCODER_H_ #define _GST_BASE_VIDEO_ENCODER_H_ +#ifndef GST_USE_UNSTABLE_API +#warning "GstBaseVideoEncoder is unstable API and may change in future." +#warning "You can define GST_USE_UNSTABLE_API to avoid this warning." +#endif + #include #include diff --git a/gst-libs/gst/video/gstbasevideoparse.h b/gst-libs/gst/video/gstbasevideoparse.h index ce69fe51..cdbcd27a 100644 --- a/gst-libs/gst/video/gstbasevideoparse.h +++ b/gst-libs/gst/video/gstbasevideoparse.h @@ -20,6 +20,11 @@ #ifndef _GST_BASE_VIDEO_PARSE_H_ #define _GST_BASE_VIDEO_PARSE_H_ +#ifndef GST_USE_UNSTABLE_API +#warning "GstBaseVideoParse is unstable API and may change in future." +#warning "You can define GST_USE_UNSTABLE_API to avoid this warning." +#endif + #include #include diff --git a/gst-libs/gst/video/gstbasevideoutils.h b/gst-libs/gst/video/gstbasevideoutils.h index b259fc3e..e832fe76 100644 --- a/gst-libs/gst/video/gstbasevideoutils.h +++ b/gst-libs/gst/video/gstbasevideoutils.h @@ -20,6 +20,11 @@ #ifndef _GST_BASE_VIDEO_UTILS_H_ #define _GST_BASE_VIDEO_UTILS_H_ +#ifndef GST_USE_UNSTABLE_API +#warning "The base video utils API is unstable and may change in future." +#warning "You can define GST_USE_UNSTABLE_API to avoid this warning." +#endif + #include #include #include -- cgit v1.2.1