summaryrefslogtreecommitdiffstats
path: root/gst
diff options
context:
space:
mode:
Diffstat (limited to 'gst')
-rw-r--r--gst/mpeg2sub/gstmpeg2subt.c1000
-rw-r--r--gst/mpeg2sub/gstmpeg2subt.h47
-rw-r--r--gst/y4m/gsty4mencode.c22
3 files changed, 843 insertions, 226 deletions
diff --git a/gst/mpeg2sub/gstmpeg2subt.c b/gst/mpeg2sub/gstmpeg2subt.c
index dbfb5d5c..d54669db 100644
--- a/gst/mpeg2sub/gstmpeg2subt.c
+++ b/gst/mpeg2sub/gstmpeg2subt.c
@@ -23,30 +23,61 @@
#include "config.h"
#endif
#include <gstmpeg2subt.h>
+#include <string.h>
static void gst_mpeg2subt_class_init (GstMpeg2SubtClass * klass);
static void gst_mpeg2subt_base_init (GstMpeg2SubtClass * klass);
static void gst_mpeg2subt_init (GstMpeg2Subt * mpeg2subt);
+static void gst_mpeg2subt_loop (GstElement * element);
-static void gst_mpeg2subt_chain_video (GstPad * pad, GstData * _data);
-static void gst_mpeg2subt_chain_subtitle (GstPad * pad, GstData * _data);
+static GstCaps *gst_mpeg2subt_getcaps_video (GstPad * pad);
+static GstPadLinkReturn gst_mpeg2subt_link_video (GstPad * pad,
+ const GstCaps * caps);
+static void gst_mpeg2subt_handle_video (GstMpeg2Subt * mpeg2subt,
+ GstData * _data);
+static gboolean gst_mpeg2subt_src_event (GstPad * pad, GstEvent * event);
+static void gst_mpeg2subt_handle_subtitle (GstMpeg2Subt * mpeg2subt,
+ GstData * _data);
static void gst_mpeg2subt_merge_title (GstMpeg2Subt * mpeg2subt,
GstBuffer * buf);
-
+static void gst_mpeg2subt_handle_dvd_event (GstMpeg2Subt * mpeg2subt,
+ GstEvent * event, gboolean from_sub_pad);
+static void gst_mpeg2subt_finalize (GObject * gobject);
static void gst_mpeg2subt_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_mpeg2subt_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
+static void gst_setup_palette (GstMpeg2Subt * mpeg2subt, guchar * indexes,
+ guchar * alpha);
+static void gst_update_still_frame (GstMpeg2Subt * mpeg2subt);
/* elementfactory information */
static GstElementDetails mpeg2subt_details = {
"MPEG2 subtitle Decoder",
"Codec/Decoder/Video",
"Decodes and merges MPEG2 subtitles into a video frame",
- "Wim Taymans <wim.taymans@chello.be>"
+ "Wim Taymans <wim.taymans@chello.be>\n"
+ "Jan Schmidt <thaytan@mad.scientist.com>"
};
+static GstStaticPadTemplate video_template = GST_STATIC_PAD_TEMPLATE ("video",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-raw-yuv, " "format = (fourcc) { I420 }, " /* YV12 later */
+ "width = (int) [ 16, 4096 ], " "height = (int) [ 16, 4096 ]")
+ );
+
+static GstStaticPadTemplate subtitle_template =
+GST_STATIC_PAD_TEMPLATE ("subtitle",
+ GST_PAD_SINK,
+ GST_PAD_ALWAYS,
+ GST_STATIC_CAPS ("video/x-dvd-subpicture")
+ );
+
+GST_DEBUG_CATEGORY_STATIC (mpeg2subt_debug);
+#define GST_CAT_DEFAULT (mpeg2subt_debug)
+
/* GstMpeg2Subt signals and args */
enum
{
@@ -61,27 +92,35 @@ enum
/* FILL ME */
};
-static guchar yuv_color[16] = {
- 0x99,
- 0x00,
- 0xFF,
- 0x00,
- 0x40,
- 0x50,
- 0x60,
- 0x70,
- 0x80,
- 0x90,
- 0xA0,
- 0xB0,
- 0xC0,
- 0xD0,
- 0xE0,
- 0xF0
+enum
+{
+ SPU_FORCE_DISPLAY = 0x00,
+ SPU_SHOW = 0x01,
+ SPU_HIDE = 0x02,
+ SPU_SET_PALETTE = 0x03,
+ SPU_SET_ALPHA = 0x04,
+ SPU_SET_SIZE = 0x05,
+ SPU_SET_OFFSETS = 0x06,
+ SPU_WIPE = 0x07,
+ SPU_END = 0xff
};
+typedef struct RLE_state
+{
+ gint id;
+ gint aligned;
+ gint offset[2];
+ gint clip_left;
+ gint clip_right;
+ guchar *target_Y;
+ guchar *target_U;
+ guchar *target_V;
+ guchar *target_A;
+ guchar next;
+}
+RLE_state;
static GstElementClass *parent_class = NULL;
@@ -108,7 +147,11 @@ gst_mpeg2subt_get_type (void)
mpeg2subt_type =
g_type_register_static (GST_TYPE_ELEMENT, "GstMpeg2Subt",
&mpeg2subt_info, 0);
+
+ GST_DEBUG_CATEGORY_INIT (mpeg2subt_debug, "mpeg2subt", 0,
+ "MPEG2 subtitle overlay element");
}
+
return mpeg2subt_type;
}
@@ -117,6 +160,11 @@ gst_mpeg2subt_base_init (GstMpeg2SubtClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&video_template));
+ gst_element_class_add_pad_template (element_class,
+ gst_static_pad_template_get (&subtitle_template));
+
gst_element_class_set_details (element_class, &mpeg2subt_details);
}
@@ -135,273 +183,811 @@ gst_mpeg2subt_class_init (GstMpeg2SubtClass * klass)
gobject_class->set_property = gst_mpeg2subt_set_property;
gobject_class->get_property = gst_mpeg2subt_get_property;
-
+ gobject_class->finalize = gst_mpeg2subt_finalize;
}
static void
gst_mpeg2subt_init (GstMpeg2Subt * mpeg2subt)
{
- mpeg2subt->videopad = gst_pad_new ("video", GST_PAD_SINK);
+ mpeg2subt->videopad =
+ gst_pad_new_from_template (gst_static_pad_template_get
+ (&video_template), "video");
gst_element_add_pad (GST_ELEMENT (mpeg2subt), mpeg2subt->videopad);
- gst_pad_set_chain_function (mpeg2subt->videopad, gst_mpeg2subt_chain_video);
-
- mpeg2subt->subtitlepad = gst_pad_new ("subtitle", GST_PAD_SINK);
+ gst_pad_set_link_function (mpeg2subt->videopad,
+ GST_DEBUG_FUNCPTR (gst_mpeg2subt_link_video));
+ gst_pad_set_getcaps_function (mpeg2subt->videopad,
+ GST_DEBUG_FUNCPTR (gst_mpeg2subt_getcaps_video));
+
+ mpeg2subt->subtitlepad =
+ gst_pad_new_from_template (gst_static_pad_template_get
+ (&subtitle_template), "subtitle");
gst_element_add_pad (GST_ELEMENT (mpeg2subt), mpeg2subt->subtitlepad);
- gst_pad_set_chain_function (mpeg2subt->subtitlepad,
- gst_mpeg2subt_chain_subtitle);
mpeg2subt->srcpad = gst_pad_new ("src", GST_PAD_SRC);
gst_element_add_pad (GST_ELEMENT (mpeg2subt), mpeg2subt->srcpad);
+ gst_pad_set_getcaps_function (mpeg2subt->srcpad,
+ GST_DEBUG_FUNCPTR (gst_mpeg2subt_getcaps_video));
+ gst_pad_set_link_function (mpeg2subt->srcpad,
+ GST_DEBUG_FUNCPTR (gst_mpeg2subt_link_video));
+ gst_pad_set_event_function (mpeg2subt->srcpad,
+ GST_DEBUG_FUNCPTR (gst_mpeg2subt_src_event));
+
+ gst_element_set_loop_function (GST_ELEMENT (mpeg2subt), gst_mpeg2subt_loop);
+ GST_FLAG_SET (GST_ELEMENT (mpeg2subt), GST_ELEMENT_EVENT_AWARE);
mpeg2subt->partialbuf = NULL;
+ mpeg2subt->hold_frame = NULL;
+ mpeg2subt->still_frame = NULL;
mpeg2subt->have_title = FALSE;
+ mpeg2subt->start_display_time = GST_CLOCK_TIME_NONE;
+ mpeg2subt->end_display_time = GST_CLOCK_TIME_NONE;
+ mpeg2subt->forced_display = FALSE;
+ memset (mpeg2subt->current_clut, 0, 16 * sizeof (guint32));
+ memset (mpeg2subt->subtitle_index, 0, sizeof (mpeg2subt->subtitle_index));
+ memset (mpeg2subt->menu_index, 0, sizeof (mpeg2subt->menu_index));
+ memset (mpeg2subt->subtitle_alpha, 0, sizeof (mpeg2subt->subtitle_alpha));
+ memset (mpeg2subt->menu_alpha, 0, sizeof (mpeg2subt->menu_alpha));
+ memset (mpeg2subt->out_buffers, 0, sizeof (mpeg2subt->out_buffers));
+ mpeg2subt->pending_video_buffer = NULL;
+ mpeg2subt->next_video_time = GST_CLOCK_TIME_NONE;
+ mpeg2subt->pending_subtitle_buffer = NULL;
+ mpeg2subt->next_subtitle_time = GST_CLOCK_TIME_NONE;
}
static void
-gst_mpeg2subt_chain_video (GstPad * pad, GstData * _data)
+gst_mpeg2subt_finalize (GObject * gobject)
+{
+ GstMpeg2Subt *mpeg2subt = GST_MPEG2SUBT (gobject);
+ gint i;
+
+ for (i = 0; i < 3; i++) {
+ if (mpeg2subt->out_buffers[i])
+ g_free (mpeg2subt->out_buffers[i]);
+ }
+
+ if (mpeg2subt->partialbuf)
+ gst_buffer_unref (mpeg2subt->partialbuf);
+}
+
+static GstCaps *
+gst_mpeg2subt_getcaps_video (GstPad * pad)
+{
+ GstMpeg2Subt *mpeg2subt = GST_MPEG2SUBT (gst_pad_get_parent (pad));
+ GstPad *otherpad;
+
+ otherpad =
+ (pad == mpeg2subt->srcpad) ? mpeg2subt->videopad : mpeg2subt->srcpad;
+
+ return gst_pad_get_allowed_caps (otherpad);
+}
+
+static GstPadLinkReturn
+gst_mpeg2subt_link_video (GstPad * pad, const GstCaps * caps)
{
- GstBuffer *buf = GST_BUFFER (_data);
- GstMpeg2Subt *mpeg2subt;
- guchar *data;
- glong size;
+ GstMpeg2Subt *mpeg2subt = GST_MPEG2SUBT (gst_pad_get_parent (pad));
+ GstPad *otherpad;
+ GstPadLinkReturn ret;
+ GstStructure *structure;
+ gint width, height;
+ gint i;
+
+ otherpad =
+ (pad == mpeg2subt->srcpad) ? mpeg2subt->videopad : mpeg2subt->srcpad;
+
+ ret = gst_pad_try_set_caps (otherpad, caps);
+ if (GST_PAD_LINK_FAILED (ret)) {
+ return ret;
+ }
- g_return_if_fail (pad != NULL);
- g_return_if_fail (GST_IS_PAD (pad));
- g_return_if_fail (buf != NULL);
+ structure = gst_caps_get_structure (caps, 0);
- mpeg2subt = GST_MPEG2SUBT (GST_OBJECT_PARENT (pad));
+ if (!gst_structure_get_int (structure, "width", &width) ||
+ !gst_structure_get_int (structure, "height", &height)) {
+ return GST_PAD_LINK_REFUSED;
+ }
- data = GST_BUFFER_DATA (buf);
- size = GST_BUFFER_SIZE (buf);
+ mpeg2subt->in_width = width;
+ mpeg2subt->in_height = height;
- if (mpeg2subt->have_title && mpeg2subt->duration != 0) {
- gst_mpeg2subt_merge_title (mpeg2subt, buf);
- mpeg2subt->duration--;
+ /* Allocate compositing buffers */
+ for (i = 0; i < 3; i++) {
+ if (mpeg2subt->out_buffers[i])
+ g_free (mpeg2subt->out_buffers[i]);
+ mpeg2subt->out_buffers[i] = g_malloc (sizeof (guint16) * width);
}
- gst_pad_push (mpeg2subt->srcpad, GST_DATA (buf));
+ return GST_PAD_LINK_OK;
}
+static void
+gst_mpeg2subt_handle_video (GstMpeg2Subt * mpeg2subt, GstData * _data)
+{
+ if (GST_IS_BUFFER (_data)) {
+ GstBuffer *buf = GST_BUFFER (_data);
+ guchar *data;
+ glong size;
+
+ data = GST_BUFFER_DATA (buf);
+ size = GST_BUFFER_SIZE (buf);
+
+ if (mpeg2subt->still_frame) {
+ gst_buffer_unref (mpeg2subt->still_frame);
+ mpeg2subt->still_frame = NULL;
+ }
+
+ if (!mpeg2subt->hold_frame) {
+ mpeg2subt->hold_frame = buf;
+ } else {
+ GstBuffer *out_buf;
+
+ out_buf = mpeg2subt->hold_frame;
+ mpeg2subt->hold_frame = buf;
+
+ if (mpeg2subt->have_title) {
+ if ((mpeg2subt->forced_display && (mpeg2subt->current_button != 0))
+ ||
+ ((mpeg2subt->start_display_time <= GST_BUFFER_TIMESTAMP (out_buf))
+ && (mpeg2subt->end_display_time >=
+ GST_BUFFER_TIMESTAMP (out_buf)))) {
+ out_buf = gst_buffer_copy_on_write (out_buf);
+ gst_mpeg2subt_merge_title (mpeg2subt, out_buf);
+ }
+ }
+
+ gst_pad_push (mpeg2subt->srcpad, GST_DATA (out_buf));
+ }
+ } else if (GST_IS_EVENT (_data)) {
+ switch (GST_EVENT_TYPE (GST_EVENT (_data))) {
+ case GST_EVENT_ANY:
+ gst_mpeg2subt_handle_dvd_event (mpeg2subt, GST_EVENT (_data), FALSE);
+ gst_data_unref (_data);
+ break;
+ case GST_EVENT_DISCONTINUOUS:
+ /* Turn off forced highlight display */
+ mpeg2subt->forced_display = 0;
+ if (mpeg2subt->still_frame) {
+ gst_buffer_unref (mpeg2subt->still_frame);
+ mpeg2subt->still_frame = NULL;
+ }
+ if (mpeg2subt->hold_frame) {
+ gst_buffer_unref (mpeg2subt->hold_frame);
+ mpeg2subt->hold_frame = NULL;
+ }
+ gst_pad_push (mpeg2subt->srcpad, _data);
+ break;
+ default:
+ gst_pad_push (mpeg2subt->srcpad, _data);
+ break;
+ }
+ } else
+ gst_data_unref (_data);
+}
+
+static gboolean
+gst_mpeg2subt_src_event (GstPad * pad, GstEvent * event)
+{
+ GstMpeg2Subt *mpeg2subt = GST_MPEG2SUBT (gst_pad_get_parent (pad));
+
+ return gst_pad_send_event (GST_PAD_PEER (mpeg2subt->videopad), event);
+}
static void
gst_mpeg2subt_parse_header (GstMpeg2Subt * mpeg2subt)
{
- guchar *buffer = GST_BUFFER_DATA (mpeg2subt->partialbuf);
- guchar dummy;
- guint i;
-
- i = mpeg2subt->data_size + 4;
- while (i < mpeg2subt->packet_size) {
- dummy = buffer[i];
- switch (dummy) {
- case 0x01: /* null packet ? */
- i++;
+#define PARSE_BYTES_NEEDED(x) if ((buf+(x)) >= end) \
+ { GST_WARNING("Subtitle stream broken parsing %d", *buf); \
+ broken = TRUE; break; }
+
+ guchar *buf;
+ guchar *start = GST_BUFFER_DATA (mpeg2subt->partialbuf);
+ guchar *end;
+ gboolean broken = FALSE;
+ gboolean last_seq = FALSE;
+ guchar *next_seq = NULL;
+ guint event_time;
+
+ mpeg2subt->forced_display = FALSE;
+ g_return_if_fail (mpeg2subt->packet_size >= 4);
+
+ buf = start + mpeg2subt->data_size;
+ end = buf + mpeg2subt->packet_size;
+ event_time = GUINT16_FROM_BE (*(guint16 *) (buf));
+ next_seq = start + GUINT16_FROM_BE (*(guint16 *) (buf + 2));
+ /* If the next control sequence is at the current offset, this is
+ * the last one */
+ last_seq = (next_seq == buf);
+ buf += 4;
+
+ while ((buf < end) && (!broken)) {
+ switch (*buf) {
+ case SPU_FORCE_DISPLAY: /* Forced display menu subtitle */
+ mpeg2subt->forced_display = TRUE;
+ buf++;
+ break;
+ case SPU_SHOW: /* Show the subtitle in this packet */
+ mpeg2subt->start_display_time =
+ GST_BUFFER_TIMESTAMP (mpeg2subt->partialbuf) +
+ ((GST_SECOND * event_time) / 90);
+ GST_DEBUG ("Subtitle starts at %" G_GUINT64_FORMAT,
+ mpeg2subt->end_display_time);
+ buf++;
break;
- case 0x02: /* 02 ff (ff) is the end of the packet */
- i = mpeg2subt->packet_size;
+      case SPU_HIDE:  /* 02 ff (ff) is the end of the packet, hide the subtitle */
+ mpeg2subt->end_display_time =
+ GST_BUFFER_TIMESTAMP (mpeg2subt->partialbuf) +
+ ((GST_SECOND * event_time) / 90);
+ GST_DEBUG ("Subtitle ends at %" G_GUINT64_FORMAT,
+ mpeg2subt->end_display_time);
+ buf++;
break;
- case 0x03: /* palette */
- mpeg2subt->color[0] = yuv_color[buffer[i + 1] >> 4];
- mpeg2subt->color[1] = yuv_color[buffer[i + 1] & 0xf];
- mpeg2subt->color[2] = yuv_color[buffer[i + 2] >> 4];
- mpeg2subt->color[3] = yuv_color[buffer[i + 2] & 0xf];
- mpeg2subt->color[4] = yuv_color[0xf];
- GST_DEBUG ("mpeg2subt: colors %d %d %d %d", mpeg2subt->color[0],
- mpeg2subt->color[1], mpeg2subt->color[2], mpeg2subt->color[3]);
- i += 3;
+ case SPU_SET_PALETTE: /* palette */
+ PARSE_BYTES_NEEDED (3);
+
+ mpeg2subt->subtitle_index[3] = buf[1] >> 4;
+ mpeg2subt->subtitle_index[2] = buf[1] & 0xf;
+ mpeg2subt->subtitle_index[1] = buf[2] >> 4;
+ mpeg2subt->subtitle_index[0] = buf[2] & 0xf;
+ buf += 3;
break;
- case 0x04: /* transparency palette */
- mpeg2subt->trans[3] = buffer[i + 1] >> 4;
- mpeg2subt->trans[2] = buffer[i + 1] & 0xf;
- mpeg2subt->trans[1] = buffer[i + 2] >> 4;
- mpeg2subt->trans[0] = buffer[i + 2] & 0xf;
- GST_DEBUG ("mpeg2subt: transparency %d %d %d %d", mpeg2subt->trans[0],
- mpeg2subt->trans[1], mpeg2subt->trans[2], mpeg2subt->trans[3]);
- i += 3;
+ case SPU_SET_ALPHA: /* transparency palette */
+ PARSE_BYTES_NEEDED (3);
+
+ mpeg2subt->subtitle_alpha[3] = buf[1] >> 4;
+ mpeg2subt->subtitle_alpha[2] = buf[1] & 0xf;
+ mpeg2subt->subtitle_alpha[1] = buf[2] >> 4;
+ mpeg2subt->subtitle_alpha[0] = buf[2] & 0xf;
+ buf += 3;
break;
- case 0x05: /* image coordinates */
- mpeg2subt->width = 1 + (((buffer[i + 2] & 0x0f) << 8) + buffer[i + 3])
- - ((((unsigned int) buffer[i + 1]) << 4) + (buffer[i + 2] >> 4));
- mpeg2subt->height = 1 + (((buffer[i + 5] & 0x0f) << 8) + buffer[i + 6])
- - ((((unsigned int) buffer[i + 4]) << 4) + (buffer[i + 5] >> 4));
- i += 7;
+ case SPU_SET_SIZE: /* image coordinates */
+ PARSE_BYTES_NEEDED (7);
+
+ mpeg2subt->left =
+ CLAMP ((((unsigned int) buf[1]) << 4) | (buf[2] >> 4), 0,
+ (mpeg2subt->in_width - 1));
+ mpeg2subt->top =
+ CLAMP ((((unsigned int) buf[4]) << 4) | (buf[5] >> 4), 0,
+ (mpeg2subt->in_height - 1));
+ mpeg2subt->right =
+ CLAMP ((((buf[2] & 0x0f) << 8) | buf[3]), 0,
+ (mpeg2subt->in_width - 1));
+ mpeg2subt->bottom =
+ CLAMP ((((buf[5] & 0x0f) << 8) | buf[6]), 0,
+ (mpeg2subt->in_height - 1));
+
+ GST_DEBUG ("left %d, top %d, right %d, bottom %d", mpeg2subt->left,
+ mpeg2subt->top, mpeg2subt->right, mpeg2subt->bottom);
+ buf += 7;
break;
- case 0x06: /* image 1 / image 2 offsets */
- mpeg2subt->offset[0] =
- (((unsigned int) buffer[i + 1]) << 8) + buffer[i + 2];
- mpeg2subt->offset[1] =
- (((unsigned int) buffer[i + 3]) << 8) + buffer[i + 4];
- i += 5;
+ case SPU_SET_OFFSETS: /* image 1 / image 2 offsets */
+ PARSE_BYTES_NEEDED (5);
+ mpeg2subt->offset[0] = (((unsigned int) buf[1]) << 8) | buf[2];
+ mpeg2subt->offset[1] = (((unsigned int) buf[3]) << 8) | buf[4];
+ GST_DEBUG ("Offset1 %d, Offset2 %d", mpeg2subt->offset[0],
+ mpeg2subt->offset[1]);
+ buf += 5;
break;
- case 0xff: /* "ff xx yy zz uu" with 'zz uu' == start of control packet
- * xx and yy are the end time in 90th/sec
- */
- mpeg2subt->duration =
- (((buffer[i + 1] << 8) + buffer[i + 2]) * 25) / 90;
-
- GST_DEBUG ("duration %d", mpeg2subt->duration);
-
- if ((buffer[i + 3] != buffer[mpeg2subt->data_size + 2])
- || (buffer[i + 4] != buffer[mpeg2subt->data_size + 3])) {
- g_print
- ("mpeg2subt: invalid control header (%.2x%.2x != %.2x%.2x) !\n",
- buffer[i + 3], buffer[i + 4], buffer[mpeg2subt->data_size + 2],
- buffer[mpeg2subt->data_size + 3]);
-/* FIXME */
-/* exit(1); */
+ case SPU_WIPE:
+ {
+ guint length;
+
+ GST_WARNING ("SPU_WIPE not yet implemented");
+ PARSE_BYTES_NEEDED (3);
+
+ length = (buf[1] << 8) | (buf[2]);
+ buf += 1 + length;
+ }
+ break;
+ case SPU_END:
+ buf = (last_seq) ? end : next_seq;
+
+ /* Start a new control sequence */
+ if (buf + 4 < end) {
+ event_time = GUINT16_FROM_BE (*(guint16 *) (buf));
+ next_seq = start + GUINT16_FROM_BE (*(guint16 *) (buf + 2));
+ last_seq = (next_seq == buf);
}
- i += 5;
+ buf += 4;
break;
default:
- g_print ("mpeg2subt: invalid sequence in control header (%.2x) !\n",
- dummy);
+ GST_ERROR
+ ("Invalid sequence in subtitle packet header (%.2x). Skipping",
+ *buf);
+ broken = TRUE;
break;
}
}
+
+ if (!mpeg2subt->forced_display)
+ gst_setup_palette (mpeg2subt, mpeg2subt->subtitle_index,
+ mpeg2subt->subtitle_alpha);
}
-static int
-get_nibble (guchar * buffer, gint * offset, gint id, gint * aligned)
+inline int
+gst_get_nibble (guchar * buffer, RLE_state * state)
{
- static int next;
-
- if (*aligned) {
- next = buffer[offset[id]];
- offset[id]++;
-
- *aligned = 0;
- return next >> 4;
+ if (state->aligned) {
+ state->next = buffer[state->offset[state->id]++];
+ state->aligned = 0;
+ return state->next >> 4;
} else {
- *aligned = 1;
- return next & 0xf;
+ state->aligned = 1;
+ return state->next & 0xf;
}
}
+/* Premultiply the current lookup table into the palette_cache */
static void
-gst_mpeg2subt_merge_title (GstMpeg2Subt * mpeg2subt, GstBuffer * buf)
+gst_setup_palette (GstMpeg2Subt * mpeg2subt, guchar * indexes, guchar * alpha)
{
- gint x = 0, y = 0;
- gint width = mpeg2subt->width;
- gint height = mpeg2subt->height;
- guchar *buffer = GST_BUFFER_DATA (mpeg2subt->partialbuf);
- guchar *target = GST_BUFFER_DATA (buf);
- gint id = 0, aligned = 1;
- gint offset[2];
+ gint i;
+ YUVA_val *target = mpeg2subt->palette_cache;
- offset[0] = mpeg2subt->offset[0];
- offset[1] = mpeg2subt->offset[1];
-#define get_nibble() get_nibble (buffer, offset, id, &aligned)
+ for (i = 0; i < 4; i++, target++) {
+ guint32 col = mpeg2subt->current_clut[indexes[i]];
- GST_DEBUG ("mpeg2subt: merging subtitle");
+ target->Y = (guint16) ((col >> 16) & 0xff) * alpha[i];
+ target->U = (guint16) ((col >> 8) & 0xff) * alpha[i];
+ target->V = (guint16) (col & 0xff) * alpha[i];
+ target->A = alpha[i];
+ }
+}
- while ((offset[1] < mpeg2subt->data_size + 2) && (y < height)) {
- gint code;
- gint length, colorid;
+inline guint
+gst_get_rle_code (guchar * buffer, RLE_state * state)
+{
+ gint code;
+
+ code = gst_get_nibble (buffer, state);
+ if (code < 0x4) { /* 4 .. f */
+ code = (code << 4) | gst_get_nibble (buffer, state);
+ if (code < 0x10) { /* 1x .. 3x */
+ code = (code << 4) | gst_get_nibble (buffer, state);
+ if (code < 0x40) { /* 04x .. 0fx */
+ code = (code << 4) | gst_get_nibble (buffer, state);
+ }
+ }
+ }
+ return code;
+}
- code = get_nibble ();
- if (code >= 0x4) { /* 4 .. f */
- found_code:
- length = code >> 2;
- colorid = code & 3;
- while (length--)
- if (x++ < width) {
- if (mpeg2subt->trans[colorid] != 0x0) {
- *target++ = mpeg2subt->color[colorid];
- } else
- target++;
- }
+/*
+ * This function steps over each run-length segment, drawing
+ * into the YUVA buffers as it goes. UV are composited and then output
+ * at half width/height
+ */
+static void
+gst_draw_rle_line (GstMpeg2Subt * mpeg2subt, guchar * buffer, RLE_state * state)
+{
+ gint length, colourid;
+ gint right = mpeg2subt->right + 1;
+ YUVA_val *colour_entry;
+ guint code;
+ gint x;
+ gboolean in_clip = FALSE;
+ guchar *target_Y;
+ guint16 *target_U;
+ guint16 *target_V;
+ guint16 *target_A;
+
+ target_Y = state->target_Y;
+ target_U = mpeg2subt->out_buffers[0];
+ target_V = mpeg2subt->out_buffers[1];
+ target_A = mpeg2subt->out_buffers[2];
+ x = mpeg2subt->left;
+ while (x < right) {
+ code = gst_get_rle_code (buffer, state);
+ length = code >> 2;
+ colourid = code & 3;
+ colour_entry = mpeg2subt->palette_cache + colourid;
+
+ /* Length = 0 implies fill to the end of the line */
+ if (length == 0)
+ length = right - x;
+ else {
+ /* Restrict the colour run to the end of the line */
+ length = length < (right - x) ? length : (right - x);
+ }
- if (x >= width) {
- if (!aligned)
- get_nibble ();
- goto next_line;
+ /* Check if this run of colour crosses into the clip region */
+ in_clip = (((x + length) >= state->clip_left) && (x <= state->clip_right));
+
+ /* Draw YA onto the frame via target_Y, UVA into the composite buffers */
+ if ((in_clip) && (colour_entry->A)) {
+ guint16 inv_alpha = 0xf - colour_entry->A;
+ gint i;
+
+ for (i = 0; i < length; i++) {
+ *target_Y = ((inv_alpha * (*target_Y)) + colour_entry->Y) / 0xf;
+ *target_U += colour_entry->U;
+ *target_V += colour_entry->V;
+ *target_A += colour_entry->A;
+ target_Y++;
+ target_U++;
+ target_V++;
+ target_A++;
}
- continue;
+ } else {
+ target_Y += length;
+ target_U += length;
+ target_V += length;
+ target_A += length;
}
+ x += length;
+ }
+}
+
+inline void
+gst_merge_uv_data (GstMpeg2Subt * mpeg2subt, guchar * buffer, RLE_state * state)
+{
+ gint x;
+ guchar *target_V;
+ guchar *target_U;
+ gint width = mpeg2subt->right - mpeg2subt->left + 1;
+
+ guint16 *comp_U;
+ guint16 *comp_V;
+ guint16 *comp_A;
+
+ /* The compositing buffers should contain the results of accumulating 2 scanlines of
+ * U, V (premultiplied) and A data. Merge them back into their output buffers at
+ * half width/height.
+ */
+ target_U = state->target_U;
+ target_V = state->target_V;
+ comp_U = mpeg2subt->out_buffers[0];
+ comp_V = mpeg2subt->out_buffers[1];
+ comp_A = mpeg2subt->out_buffers[2];
+
+ for (x = 0; x < width; x += 2) {
+ guint16 temp1, temp2;
+
+ /* Average out the alpha accumulated to compute transparency */
+ guint16 alpha = (comp_A[0] + comp_A[1]);
+
+ if (alpha > 0) {
+ temp1 = (*target_U) * ((4 * 0xf) - alpha) + comp_U[0] + comp_U[1];
+ temp2 = (*target_V) * ((4 * 0xf) - alpha) + comp_V[0] + comp_V[1];
+ *target_U = temp1 / (4 * 0xf);
+ *target_V = temp2 / (4 * 0xf);
+ };
+ comp_U += 2;
+ comp_V += 2;
+ comp_A += 2;
+ target_U++;
+ target_V++;
+ }
+}
+
+/*
+ * Decode the RLE subtitle image and blend with the current
+ * frame buffer.
+ */
+static void
+gst_mpeg2subt_merge_title (GstMpeg2Subt * mpeg2subt, GstBuffer * buf)
+{
+ gint y;
+ gint width = mpeg2subt->right - mpeg2subt->left + 1;
+ gint Y_stride;
+ gint UV_stride;
- code = (code << 4) + get_nibble ();
- if (code >= 0x10) /* 1x .. 3x */
- goto found_code;
+ guchar *buffer = GST_BUFFER_DATA (mpeg2subt->partialbuf);
+ gint last_y;
+ gint first_y;
+ RLE_state state;
+
+ /* Set up the initial offsets, remembering the half-res size for UV in I420 packing
+ * see http://www.fourcc.org for details
+ */
+ Y_stride = mpeg2subt->in_width;
+ UV_stride = (mpeg2subt->in_width + 1) / 2;
+
+ GST_DEBUG ("Merging subtitle on frame at time %" G_GUINT64_FORMAT
+ " using %s colour table", GST_BUFFER_TIMESTAMP (buf),
+ mpeg2subt->forced_display ? "menu" : "subtitle");
+
+ state.id = 0;
+ state.aligned = 1;
+ state.offset[0] = mpeg2subt->offset[0];
+ state.offset[1] = mpeg2subt->offset[1];
+
+ /* skip over lines until we hit the clip region */
+ if (mpeg2subt->forced_display) {
+ state.clip_right = mpeg2subt->clip_right;
+ state.clip_left = mpeg2subt->clip_left;
+ last_y = mpeg2subt->clip_bottom;
+ first_y = mpeg2subt->clip_top;
+ } else {
+ state.clip_right = mpeg2subt->right;
+ state.clip_left = mpeg2subt->left;
+ last_y = mpeg2subt->bottom;
+ first_y = mpeg2subt->top;
+ }
- code = (code << 4) + get_nibble ();
- if (code >= 0x40) /* 04x .. 0fx */
- goto found_code;
+ for (y = mpeg2subt->top; y < first_y; y++) {
+ /* Skip a line of RLE data */
+ gint length;
+ guint code;
+ gint x = 0;
- code = (code << 4) + get_nibble ();
- if (code >= 0x100) /* 01xx .. 03xx */
- goto found_code;
+ while (x < width) {
+ code = gst_get_rle_code (buffer, &state);
+ length = code >> 2;
- /* 00xx - should only happen for 00 00 */
- if (!aligned)
- code = (code << 4) + get_nibble (); /* 0 0x xx */
+ /* Length = 0 implies fill to the end of the line so we're done */
+ if (length == 0)
+ break;
- if (code) {
- g_print
- ("mpeg2subt: got unknown code 00%x (offset %x side %x, x=%d, y=%d)\n",
- code, mpeg2subt->offset[id], id, x, y);
- goto next_line;
+ x += length;
}
- next_line:
- /* aligned 00 00 */
- if (y < height) {
- target += (width - x);
- x = 0;
- y++;
- id = 1 - id;
+ if (!state.aligned)
+ gst_get_nibble (buffer, &state);
+ state.id = !state.id;
+ }
+
+ state.target_Y = GST_BUFFER_DATA (buf) + mpeg2subt->left + (y * Y_stride);
+ state.target_V = GST_BUFFER_DATA (buf) + (Y_stride * mpeg2subt->in_height)
+ + ((mpeg2subt->left) / 2) + ((y / 2) * UV_stride);
+ state.target_U =
+ state.target_V + UV_stride * ((mpeg2subt->in_height + 1) / 2);
+
+ memset (mpeg2subt->out_buffers[0], 0, sizeof (guint16) * Y_stride);
+ memset (mpeg2subt->out_buffers[1], 0, sizeof (guint16) * Y_stride);
+ memset (mpeg2subt->out_buffers[2], 0, sizeof (guint16) * Y_stride);
+
+ /* Now draw scanlines until we hit last_y or end of RLE data */
+ for (; ((state.offset[1] < mpeg2subt->data_size + 2) && (y <= last_y)); y++) {
+ gst_draw_rle_line (mpeg2subt, buffer, &state);
+ if (state.id) {
+ gst_merge_uv_data (mpeg2subt, buffer, &state);
+
+ /* Clear the compositing buffers */
+ memset (mpeg2subt->out_buffers[0], 0, sizeof (guint16) * Y_stride);
+ memset (mpeg2subt->out_buffers[1], 0, sizeof (guint16) * Y_stride);
+ memset (mpeg2subt->out_buffers[2], 0, sizeof (guint16) * Y_stride);
+
+ state.target_U += UV_stride;
+ state.target_V += UV_stride;
}
+ state.target_Y += Y_stride;
+
+ /* Realign the RLE state for the next line */
+ if (!state.aligned)
+ gst_get_nibble (buffer, &state);
+ state.id = !state.id;
}
}
static void
-gst_mpeg2subt_chain_subtitle (GstPad * pad, GstData * _data)
+gst_update_still_frame (GstMpeg2Subt * mpeg2subt)
{
- GstBuffer *buf = GST_BUFFER (_data);
- GstMpeg2Subt *mpeg2subt;
- guchar *data;
- glong size = 0;
+ GstBuffer *out_buf;
+
+ if ((mpeg2subt->still_frame) &&
+ (mpeg2subt->have_title) &&
+ ((mpeg2subt->forced_display && (mpeg2subt->current_button != 0)))) {
+ gst_buffer_ref (mpeg2subt->still_frame);
+ out_buf = gst_buffer_copy_on_write (mpeg2subt->still_frame);
+ gst_mpeg2subt_merge_title (mpeg2subt, out_buf);
+ gst_pad_push (mpeg2subt->srcpad, GST_DATA (out_buf));
+ }
+}
- g_return_if_fail (pad != NULL);
- g_return_if_fail (GST_IS_PAD (pad));
- g_return_if_fail (buf != NULL);
-/* g_return_if_fail(GST_IS_BUFFER(buf)); */
+static void
+gst_mpeg2subt_handle_subtitle (GstMpeg2Subt * mpeg2subt, GstData * _data)
+{
+ g_return_if_fail (_data != NULL);
- mpeg2subt = GST_MPEG2SUBT (GST_OBJECT_PARENT (pad));
+ if (GST_IS_BUFFER (_data)) {
+ GstBuffer *buf = GST_BUFFER (_data);
+ guchar *data;
+ glong size = 0;
- if (mpeg2subt->have_title) {
- gst_buffer_unref (mpeg2subt->partialbuf);
- mpeg2subt->partialbuf = NULL;
- mpeg2subt->have_title = FALSE;
- }
+ if (mpeg2subt->have_title) {
+ gst_buffer_unref (mpeg2subt->partialbuf);
+ mpeg2subt->partialbuf = NULL;
+ mpeg2subt->have_title = FALSE;
+ }
- GST_DEBUG ("presentation time %" G_GUINT64_FORMAT,
- GST_BUFFER_TIMESTAMP (buf));
+ GST_DEBUG ("Got subtitle buffer, pts %" G_GUINT64_FORMAT,
+ GST_BUFFER_TIMESTAMP (buf));
- /* deal with partial frame from previous buffer */
- if (mpeg2subt->partialbuf) {
- GstBuffer *merge;
+ /* deal with partial frame from previous buffer */
+ if (mpeg2subt->partialbuf) {
+ GstBuffer *merge;
- merge = gst_buffer_merge (mpeg2subt->partialbuf, buf);
- gst_buffer_unref (mpeg2subt->partialbuf);
- gst_buffer_unref (buf);
- mpeg2subt->partialbuf = merge;
- } else {
- mpeg2subt->partialbuf = buf;
- }
+ merge = gst_buffer_merge (mpeg2subt->partialbuf, buf);
+ gst_buffer_unref (mpeg2subt->partialbuf);
+ gst_buffer_unref (buf);
+ mpeg2subt->partialbuf = merge;
+ } else {
+ mpeg2subt->partialbuf = buf;
+ }
+
+ data = GST_BUFFER_DATA (mpeg2subt->partialbuf);
+ size = GST_BUFFER_SIZE (mpeg2subt->partialbuf);
- data = GST_BUFFER_DATA (mpeg2subt->partialbuf);
- size = GST_BUFFER_SIZE (mpeg2subt->partialbuf);
+ if (size > 4) {
+ mpeg2subt->packet_size = GUINT16_FROM_BE (*(guint16 *) data);
- mpeg2subt->packet_size = GUINT16_FROM_BE (*(guint16 *) data);
+ if (mpeg2subt->packet_size == size) {
+ GST_LOG ("Subtitle packet size %d, current size %ld",
+ mpeg2subt->packet_size, size);
- if (mpeg2subt->packet_size == size) {
+ mpeg2subt->data_size = GUINT16_FROM_BE (*(guint16 *) (data + 2));
+ mpeg2subt->have_title = TRUE;
- GST_DEBUG ("mpeg2subt: subtitle packet size %d, current size %ld",
- mpeg2subt->packet_size, size);
+ gst_mpeg2subt_parse_header (mpeg2subt);
+ }
+ }
+ } else if (GST_IS_EVENT (_data)) {
+ switch (GST_EVENT_TYPE (GST_EVENT (_data))) {
+ case GST_EVENT_ANY:
+ GST_LOG ("DVD event on subtitle pad with timestamp %llu",
+ GST_EVENT_TIMESTAMP (GST_EVENT (_data)));
+ gst_mpeg2subt_handle_dvd_event (mpeg2subt, GST_EVENT (_data), TRUE);
+ break;
+ case GST_EVENT_EMPTY:
+ if (GST_CLOCK_TIME_IS_VALID (mpeg2subt->next_video_time) &&
+ (mpeg2subt->next_video_time > 0)) {
+ mpeg2subt->next_subtitle_time = mpeg2subt->next_video_time + 1;
+ GST_LOG ("Forwarding subtitle time to %llu",
+ mpeg2subt->next_subtitle_time);
+ }
+ gst_update_still_frame (mpeg2subt);
+ break;
+ default:
+ GST_LOG ("Got event of type %d on subtitle pad",
+ GST_EVENT_TYPE (GST_EVENT (_data)));
+ break;
+ }
+ gst_data_unref (_data);
+ } else
+ gst_data_unref (_data);
+}
- mpeg2subt->data_size = GUINT16_FROM_BE (*(guint16 *) (data + 2));
+static void
+gst_mpeg2subt_handle_dvd_event (GstMpeg2Subt * mpeg2subt, GstEvent * event,
+ gboolean from_sub_pad)
+{
+ GstStructure *structure;
+ const gchar *event_type;
+
+ structure = event->event_data.structure.structure;
+
+ event_type = gst_structure_get_string (structure, "event");
+ g_return_if_fail (event_type != NULL);
+
+ if (from_sub_pad && !strcmp (event_type, "dvd-spu-highlight")) {
+ gint button;
+ gint palette, sx, sy, ex, ey;
+ gint i;
+
+ /* Details for the highlight region to display */
+ if (!gst_structure_get_int (structure, "button", &button) ||
+ !gst_structure_get_int (structure, "palette", &palette) ||
+ !gst_structure_get_int (structure, "sx", &sx) ||
+ !gst_structure_get_int (structure, "sy", &sy) ||
+ !gst_structure_get_int (structure, "ex", &ex) ||
+ !gst_structure_get_int (structure, "ey", &ey)) {
+ GST_ERROR ("Invalid dvd-spu-highlight event received");
+ return;
+ }
+ mpeg2subt->current_button = button;
+ mpeg2subt->clip_left = sx;
+ mpeg2subt->clip_top = sy;
+ mpeg2subt->clip_right = ex;
+ mpeg2subt->clip_bottom = ey;
+ for (i = 0; i < 4; i++) {
+ mpeg2subt->menu_alpha[i] = ((guint32) (palette) >> (i * 4)) & 0x0f;
+ mpeg2subt->menu_index[i] = ((guint32) (palette) >> (16 + (i * 4))) & 0x0f;
+ }
- gst_mpeg2subt_parse_header (mpeg2subt);
- mpeg2subt->have_title = TRUE;
+ GST_DEBUG ("New button activated clip=(%d,%d) to (%d,%d) palette 0x%x", sx,
+ sy, ex, ey, palette);
+ gst_setup_palette (mpeg2subt, mpeg2subt->menu_index, mpeg2subt->menu_alpha);
+
+ gst_update_still_frame (mpeg2subt);
+ } else if (from_sub_pad && !strcmp (event_type, "dvd-spu-clut-change")) {
+ /* Take a copy of the colour table */
+ guchar name[16];
+ int i;
+ gint value;
+
+ GST_LOG ("New colour table recieved");
+ for (i = 0; i < 16; i++) {
+ sprintf (name, "clut%02d", i);
+ if (!gst_structure_get_int (structure, name, &value)) {
+ GST_ERROR ("dvd-spu-clut-change event did not contain %s field", name);
+ break;
+ }
+ mpeg2subt->current_clut[i] = (guint32) (value);
+ }
+
+ if (mpeg2subt->forced_display)
+ gst_setup_palette (mpeg2subt, mpeg2subt->menu_index,
+ mpeg2subt->menu_alpha);
+ else
+ gst_setup_palette (mpeg2subt, mpeg2subt->subtitle_index,
+ mpeg2subt->subtitle_alpha);
+
+ gst_update_still_frame (mpeg2subt);
+ } else if ((from_sub_pad && !strcmp (event_type, "dvd-spu-stream-change"))
+ || (from_sub_pad && !strcmp (event_type, "dvd-spu-reset-highlight"))) {
+ /* Turn off forced highlight display */
+ mpeg2subt->current_button = 0;
+ mpeg2subt->clip_left = mpeg2subt->left;
+ mpeg2subt->clip_top = mpeg2subt->top;
+ mpeg2subt->clip_right = mpeg2subt->right;
+ mpeg2subt->clip_bottom = mpeg2subt->bottom;
+ GST_LOG ("Clearing button state");
+ gst_update_still_frame (mpeg2subt);
+ } else if (!from_sub_pad && !strcmp (event_type, "dvd-spu-still-frame")) {
+ /* Handle a still frame */
+ GST_LOG ("Received still frame notification");
+ if (mpeg2subt->still_frame)
+ gst_buffer_unref (mpeg2subt->still_frame);
+ mpeg2subt->still_frame = mpeg2subt->hold_frame;
+ mpeg2subt->hold_frame = NULL;
+ gst_update_still_frame (mpeg2subt);
+ } else {
+ /* Ignore all other unknown events */
+ GST_LOG ("Ignoring DVD event %s from %s pad", event_type,
+ from_sub_pad ? "sub" : "video");
+ }
+}
+
+static void
+gst_mpeg2subt_loop (GstElement * element)
+{
+ GstMpeg2Subt *mpeg2subt = GST_MPEG2SUBT (element);
+ GstData *data;
+ GstClockTime timestamp = 0;
+
+ /* Process any pending video buffer */
+ if (mpeg2subt->pending_video_buffer) {
+ gst_mpeg2subt_handle_video (mpeg2subt, mpeg2subt->pending_video_buffer);
+ mpeg2subt->pending_video_buffer = NULL;
+ }
+ data = mpeg2subt->pending_video_buffer = gst_pad_pull (mpeg2subt->videopad);
+ if (!data)
+ return;
+
+ if (GST_IS_BUFFER (data)) {
+ timestamp = GST_BUFFER_TIMESTAMP (GST_BUFFER (data));
+ } else if (GST_IS_EVENT (data)) {
+ timestamp = GST_EVENT_TIMESTAMP (GST_EVENT (data));
+ } else {
+ GST_WARNING ("Got GstData of unknown type %d", GST_DATA_TYPE (data));
+ }
+ if (timestamp && GST_CLOCK_TIME_IS_VALID (timestamp) && (timestamp > 0)) {
+ mpeg2subt->next_video_time = timestamp;
+ GST_LOG ("next_video_time = %llu, next_subtitle_time = %llu",
+ mpeg2subt->next_video_time, mpeg2subt->next_subtitle_time);
+ }
+
+ /* Process subtitle buffers until we get one beyond 'next_video_time' */
+ if (mpeg2subt->pending_subtitle_buffer) {
+ gst_mpeg2subt_handle_subtitle (mpeg2subt,
+ mpeg2subt->pending_subtitle_buffer);
+ mpeg2subt->pending_subtitle_buffer = NULL;
+ }
+ data = mpeg2subt->pending_subtitle_buffer =
+ gst_pad_pull (mpeg2subt->subtitlepad);
+ if (!data) {
+ return;
+ }
+
+ if (GST_IS_BUFFER (data)) {
+ timestamp = GST_BUFFER_TIMESTAMP (GST_BUFFER (data));
+ } else if (GST_IS_EVENT (data)) {
+ timestamp = GST_EVENT_TIMESTAMP (GST_EVENT (data));
+ } else {
+ GST_WARNING ("Got GstData of unknown type %d", GST_DATA_TYPE (data));
+ }
+ if (GST_CLOCK_TIME_IS_VALID (timestamp) && (timestamp > 0)) {
+ mpeg2subt->next_subtitle_time = timestamp;
+ GST_LOG ("next_subtitle_time = %llu, next_video_time = %llu",
+ mpeg2subt->next_subtitle_time, mpeg2subt->next_video_time);
}
}
diff --git a/gst/mpeg2sub/gstmpeg2subt.h b/gst/mpeg2sub/gstmpeg2subt.h
index 29f60b27..8a078171 100644
--- a/gst/mpeg2sub/gstmpeg2subt.h
+++ b/gst/mpeg2sub/gstmpeg2subt.h
@@ -44,26 +44,59 @@ extern "C" {
typedef struct _GstMpeg2Subt GstMpeg2Subt;
typedef struct _GstMpeg2SubtClass GstMpeg2SubtClass;
+/* Hold premultiplied colour values */
+typedef struct YUVA_val {
+ guint16 Y;
+ guint16 U;
+ guint16 V;
+ guint16 A;
+} YUVA_val;
+
struct _GstMpeg2Subt {
GstElement element;
GstPad *videopad,*subtitlepad,*srcpad;
- GstBuffer *partialbuf; /* previous buffer (if carryover) */
-
- gboolean have_title;
+ GstBuffer *partialbuf; /* Collect together subtitle buffers until we have a full control sequence */
+ GstBuffer *hold_frame; /* Hold back one frame of video */
+ GstBuffer *still_frame;
guint16 packet_size;
guint16 data_size;
gint offset[2];
- guchar color[5];
- guchar trans[4];
- guint duration;
+ YUVA_val palette_cache[4];
- gint width, height;
+ /*
+ * Store 1 line width of U, V and A respectively.
+ * Y is composited directly onto the frame.
+ */
+ guint16 *out_buffers[3];
+ guchar subtitle_index[4];
+ guchar menu_index[4];
+ guchar subtitle_alpha[4];
+ guchar menu_alpha[4];
+ guint32 current_clut[16];
+
+ gboolean have_title;
+ gboolean forced_display;
+
+ GstClockTime start_display_time;
+ GstClockTime end_display_time;
+ gint left, top,
+ right, bottom;
+ gint clip_left, clip_top,
+ clip_right, clip_bottom;
+
+ gint in_width, in_height;
+ gint current_button;
+
+ GstData *pending_video_buffer;
+ GstClockTime next_video_time;
+ GstData *pending_subtitle_buffer;
+ GstClockTime next_subtitle_time;
};
struct _GstMpeg2SubtClass {
diff --git a/gst/y4m/gsty4mencode.c b/gst/y4m/gsty4mencode.c
index 773f96b8..25e1f94e 100644
--- a/gst/y4m/gsty4mencode.c
+++ b/gst/y4m/gsty4mencode.c
@@ -148,21 +148,19 @@ gst_y4mencode_sinkconnect (GstPad * pad, const GstCaps * caps)
structure = gst_caps_get_structure (caps, 0);
- gst_structure_get_int (structure, "width", &filter->width);
- gst_structure_get_int (structure, "height", &filter->height);
- gst_structure_get_double (structure, "framerate", &fps);
+ if (!gst_structure_get_int (structure, "width", &filter->width) ||
+ !gst_structure_get_int (structure, "height", &filter->height) ||
+ !gst_structure_get_double (structure, "framerate", &fps))
+ return GST_PAD_LINK_REFUSED;
/* find fps idx */
+ idx = 0;
for (i = 1; i < 9; i++) {
- if (idx == -1) {
- idx = i;
- } else {
- gdouble old_diff = fabs (framerates[idx] - fps),
- new_diff = fabs (framerates[i] - fps);
+ gdouble old_diff = fabs (framerates[idx] - fps),
+ new_diff = fabs (framerates[i] - fps);
- if (new_diff < old_diff) {
- idx = i;
- }
+ if (new_diff < old_diff) {
+ idx = i;
}
}
filter->fps_idx = idx;
@@ -209,7 +207,7 @@ gst_y4mencode_chain (GstPad * pad, GstData * _data)
GST_BUFFER_DATA (outbuf) = g_malloc (GST_BUFFER_SIZE (buf) + 256);
if (filter->init) {
- header = "YUV4MPEG %d %d %d\nFRAME\n";
+ header = "YUV4MPEG W%d H%d I? %d\nFRAME\n";
filter->init = FALSE;
} else {
header = "FRAME\n";