/* gstreamer/gst/mpeg2sub/gstmpeg2subt.c */
/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/*#define DEBUG_ENABLED */
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstmpeg2subt.h"
#include <string.h>
/* Forward declarations: GType boilerplate, pad link/caps/event handlers,
 * data-flow entry points, DVD event handling and GObject property access. */
static void gst_mpeg2subt_class_init (GstMpeg2SubtClass * klass);
static void gst_mpeg2subt_base_init (GstMpeg2SubtClass * klass);
static void gst_mpeg2subt_init (GstMpeg2Subt * mpeg2subt);
static void gst_mpeg2subt_loop (GstElement * element);
static GstCaps *gst_mpeg2subt_getcaps_video (GstPad * pad);
static GstPadLinkReturn gst_mpeg2subt_link_video (GstPad * pad,
const GstCaps * caps);
static void gst_mpeg2subt_handle_video (GstMpeg2Subt * mpeg2subt,
GstData * _data);
static gboolean gst_mpeg2subt_src_event (GstPad * pad, GstEvent * event);
static void gst_mpeg2subt_handle_subtitle (GstMpeg2Subt * mpeg2subt,
GstData * _data);
static void gst_mpeg2subt_merge_title (GstMpeg2Subt * mpeg2subt,
GstBuffer * buf);
static void gst_mpeg2subt_handle_dvd_event (GstMpeg2Subt * mpeg2subt,
GstEvent * event, gboolean from_sub_pad);
static void gst_mpeg2subt_finalize (GObject * gobject);
static void gst_mpeg2subt_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_mpeg2subt_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static void gst_setup_palette (GstMpeg2Subt * mpeg2subt, guchar * indexes,
guchar * alpha);
static void gst_update_still_frame (GstMpeg2Subt * mpeg2subt);
/* elementfactory information */
static GstElementDetails mpeg2subt_details = {
"MPEG2 subtitle Decoder",
"Codec/Decoder/Video",
"Decodes and merges MPEG2 subtitles into a video frame",
"Wim Taymans <wim.taymans@chello.be>\n"
"Jan Schmidt <thaytan@mad.scientist.com>"
};
/* Video input pad: only planar I420 is accepted (matches the I420 plane
 * offsets computed in gst_mpeg2subt_merge_title). */
static GstStaticPadTemplate video_template = GST_STATIC_PAD_TEMPLATE ("video",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-raw-yuv, " "format = (fourcc) { I420 }, " /* YV12 later */
"width = (int) [ 16, 4096 ], " "height = (int) [ 16, 4096 ]")
);
/* Output pad: same caps as the video sink; caps are proxied between the
 * two in gst_mpeg2subt_getcaps_video / gst_mpeg2subt_link_video. */
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-raw-yuv, " "format = (fourcc) { I420 }, " /* YV12 later */
"width = (int) [ 16, 4096 ], " "height = (int) [ 16, 4096 ]")
);
/* Subtitle pad: receives raw DVD subpicture (SPU) packets. */
static GstStaticPadTemplate subtitle_template =
GST_STATIC_PAD_TEMPLATE ("subtitle",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-dvd-subpicture")
);
GST_DEBUG_CATEGORY_STATIC (mpeg2subt_debug);
#define GST_CAT_DEFAULT (mpeg2subt_debug)
/* GstMpeg2Subt signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
/* Object properties. */
enum
{
ARG_0,
ARG_SKIP
/* FILL ME */
};
/* DVD subpicture (SPU) display-control command codes, consumed by the
 * control-sequence parser in gst_mpeg2subt_parse_header(). */
enum
{
SPU_FORCE_DISPLAY = 0x00,
SPU_SHOW = 0x01,
SPU_HIDE = 0x02,
SPU_SET_PALETTE = 0x03,
SPU_SET_ALPHA = 0x04,
SPU_SET_SIZE = 0x05,
SPU_SET_OFFSETS = 0x06,
SPU_WIPE = 0x07,
SPU_END = 0xff
};
ext/a52dec/gsta52dec.c: Add some debug output. Check that a discont has a valid time associated. Original commit message from CVS: * ext/a52dec/gsta52dec.c: (gst_a52dec_push), (gst_a52dec_handle_event), (gst_a52dec_chain): Add some debug output. Check that a discont has a valid time associated. * ext/alsa/gstalsasink.c: (gst_alsa_sink_check_event), (gst_alsa_sink_loop): Ignore TAG events. A little extra debug for broken timestamps. * ext/dvdnav/dvdnavsrc.c: (dvdnavsrc_init), (dvdnavsrc_loop), (dvdnavsrc_change_state): Ensure we send a discont to engage the link before we send any other events. * ext/dvdread/dvdreadsrc.c: (dvdreadsrc_init), (dvdreadsrc_finalize), (_close), (_open), (_seek_title), (_seek_chapter), (seek_sector), (dvdreadsrc_get), (dvdreadsrc_uri_get_uri), (dvdreadsrc_uri_set_uri): Handle URI of the form dvd://title[,chapter[,angle]]. Currently only dvd://title works in totem because typefinding sends a seek that ends up going back to chapter 1 regardless. * ext/mpeg2dec/gstmpeg2dec.c: * ext/mpeg2dec/gstmpeg2dec.h: Output correct timestamps and handle disconts. * ext/ogg/gstoggdemux.c: (get_relative): Small guard against a null dereference. * ext/pango/gsttextoverlay.c: (gst_textoverlay_finalize), (gst_textoverlay_set_property): Free memory when done. Don't call gst_event_filler_get_duration on EOS events. Use GST_LOG and GST_WARNING instead of g_message and g_warning. * ext/smoothwave/gstsmoothwave.c: (gst_smoothwave_init), (draw_line), (gst_smoothwave_dispose), (gst_sw_sinklink), (gst_sw_srclink), (gst_smoothwave_chain): Draw solid lines, prettier colours. * gst/mpeg2sub/gstmpeg2subt.c: (gst_mpeg2subt_init): Add a default palette that'll work for some movies. 
* gst/mpegstream/gstdvddemux.c: (gst_dvd_demux_init), (gst_dvd_demux_handle_dvd_event), (gst_dvd_demux_send_discont), (gst_dvd_demux_send_subbuffer), (gst_dvd_demux_reset): * gst/mpegstream/gstdvddemux.h: * gst/mpegstream/gstmpegdemux.c: (gst_mpeg_demux_send_discont), (gst_mpeg_demux_parse_syshead), (gst_mpeg_demux_parse_pes): * gst/mpegstream/gstmpegparse.c: (gst_mpeg_parse_init), (gst_mpeg_parse_handle_discont), (gst_mpeg_parse_parse_packhead): * gst/mpegstream/gstmpegparse.h: Use PTM/NAV events when for timestamp adjustment when connected to dvdnavsrc. Don't use many discont events where one suffices. * gst/playback/gstplaybasebin.c: (group_destroy), (gen_preroll_element), (gst_play_base_bin_add_element): * gst/playback/gstplaybasebin.h: Make sure we remove subtitles from the same bin we put them in. * gst/subparse/gstsubparse.c: (convert_encoding), (parse_subrip), (gst_subparse_buffer_format_autodetect), (gst_subparse_change_state): Fix some memleaks and invalid accesses. * gst/typefind/gsttypefindfunctions.c: (ogganx_type_find), (oggskel_type_find), (cmml_type_find), (plugin_init): Some typefind functions for Annodex v3.0 files * gst/wavparse/gstwavparse.h: GstRiffReadClass is the correct parent class.
2005-01-25 15:34:08 +00:00
static guint32 default_clut[16] = {
0xb48080, 0x248080, 0x628080, 0xd78080,
0x808080, 0x808080, 0x808080, 0x808080,
0x808080, 0x808080, 0x808080, 0x808080,
0x808080, 0x808080, 0x808080, 0x808080
};
/* Decoder state threaded through the RLE scanline routines
 * (gst_get_nibble / gst_get_rle_code / gst_draw_rle_line). */
typedef struct RLE_state
{
gint id; /* which interlaced field's RLE stream is being read (0 or 1) */
gint aligned; /* 1 when the next nibble starts on a byte boundary */
gint offset[2]; /* per-field read offsets into the subtitle packet */
gint clip_left; /* horizontal clip window applied while drawing */
gint clip_right;
guchar *target_Y; /* current write position in the frame's Y plane */
guchar *target_U; /* current write position in the frame's U plane */
guchar *target_V; /* current write position in the frame's V plane */
guchar *target_A; /* NOTE(review): not used by the visible code — confirm */
guchar next; /* byte whose low nibble is still pending when !aligned */
}
RLE_state;
static GstElementClass *parent_class = NULL;
/*static guint gst_mpeg2subt_signals[LAST_SIGNAL] = { 0 };*/
/* Standard GObject boilerplate: lazily register the GstMpeg2Subt type
 * (and its debug category) on first use and return the GType. */
GType
gst_mpeg2subt_get_type (void)
{
static GType mpeg2subt_type = 0;
if (!mpeg2subt_type) {
static const GTypeInfo mpeg2subt_info = {
sizeof (GstMpeg2SubtClass),
(GBaseInitFunc) gst_mpeg2subt_base_init,
NULL,
(GClassInitFunc) gst_mpeg2subt_class_init,
NULL,
NULL,
sizeof (GstMpeg2Subt),
0,
(GInstanceInitFunc) gst_mpeg2subt_init,
};
mpeg2subt_type =
g_type_register_static (GST_TYPE_ELEMENT, "GstMpeg2Subt",
&mpeg2subt_info, 0);
GST_DEBUG_CATEGORY_INIT (mpeg2subt_debug, "mpeg2subt", 0,
"MPEG2 subtitle overlay element");
}
return mpeg2subt_type;
}
/* Register the element's pad templates and factory details on the class. */
static void
gst_mpeg2subt_base_init (GstMpeg2SubtClass * klass)
{
  GstElementClass *eclass = GST_ELEMENT_CLASS (klass);

  gst_element_class_add_pad_template (eclass,
      gst_static_pad_template_get (&src_template));
  gst_element_class_add_pad_template (eclass,
      gst_static_pad_template_get (&video_template));
  gst_element_class_add_pad_template (eclass,
      gst_static_pad_template_get (&subtitle_template));

  gst_element_class_set_details (eclass, &mpeg2subt_details);
}
/* Class initialiser: hook up property access and finalize, install the
 * (placeholder) "skip" property, and take a reference on the parent class. */
static void
gst_mpeg2subt_class_init (GstMpeg2SubtClass * klass)
{
  GObjectClass *gobject_class = G_OBJECT_CLASS (klass);

  parent_class = g_type_class_ref (GST_TYPE_ELEMENT);

  g_object_class_install_property (gobject_class, ARG_SKIP,
      g_param_spec_int ("skip", "skip", "skip",
          G_MININT, G_MAXINT, 0, G_PARAM_READWRITE));    /* CHECKME */

  gobject_class->set_property = gst_mpeg2subt_set_property;
  gobject_class->get_property = gst_mpeg2subt_get_property;
  gobject_class->finalize = gst_mpeg2subt_finalize;
}
/* Instance initialiser: create the three pads, install their handlers,
 * and reset all decoding/compositing state.  (Two spans of commit-log
 * text that had been pasted into this function were scrape residue, not
 * code, and have been removed.) */
static void
gst_mpeg2subt_init (GstMpeg2Subt * mpeg2subt)
{
  /* Video sink pad: caps are proxied to/from the src pad. */
  mpeg2subt->videopad =
      gst_pad_new_from_template (gst_static_pad_template_get
      (&video_template), "video");
  gst_element_add_pad (GST_ELEMENT (mpeg2subt), mpeg2subt->videopad);
  gst_pad_set_link_function (mpeg2subt->videopad,
      GST_DEBUG_FUNCPTR (gst_mpeg2subt_link_video));
  gst_pad_set_getcaps_function (mpeg2subt->videopad,
      GST_DEBUG_FUNCPTR (gst_mpeg2subt_getcaps_video));

  /* Subtitle sink pad: receives DVD subpicture packets. */
  mpeg2subt->subtitlepad =
      gst_pad_new_from_template (gst_static_pad_template_get
      (&subtitle_template), "subtitle");
  gst_element_add_pad (GST_ELEMENT (mpeg2subt), mpeg2subt->subtitlepad);

  /* Source pad: pushes (possibly composited) video frames downstream. */
  mpeg2subt->srcpad =
      gst_pad_new_from_template (gst_static_pad_template_get
      (&src_template), "src");
  gst_element_add_pad (GST_ELEMENT (mpeg2subt), mpeg2subt->srcpad);
  gst_pad_set_getcaps_function (mpeg2subt->srcpad,
      GST_DEBUG_FUNCPTR (gst_mpeg2subt_getcaps_video));
  gst_pad_set_link_function (mpeg2subt->srcpad,
      GST_DEBUG_FUNCPTR (gst_mpeg2subt_link_video));
  gst_pad_set_event_function (mpeg2subt->srcpad,
      GST_DEBUG_FUNCPTR (gst_mpeg2subt_src_event));

  gst_element_set_loop_function (GST_ELEMENT (mpeg2subt), gst_mpeg2subt_loop);
  GST_OBJECT_FLAG_SET (GST_ELEMENT (mpeg2subt), GST_ELEMENT_EVENT_AWARE);

  mpeg2subt->partialbuf = NULL;
  mpeg2subt->hold_frame = NULL;
  mpeg2subt->still_frame = NULL;
  mpeg2subt->have_title = FALSE;
  mpeg2subt->start_display_time = GST_CLOCK_TIME_NONE;
  mpeg2subt->end_display_time = GST_CLOCK_TIME_NONE;
  mpeg2subt->forced_display = FALSE;

  /* Start from the default CLUT so subtitles are visible on discs that
   * never deliver a real colour table. */
  memcpy (mpeg2subt->current_clut, default_clut, sizeof (guint32) * 16);
  gst_setup_palette (mpeg2subt, mpeg2subt->menu_index, mpeg2subt->menu_alpha);
  gst_setup_palette (mpeg2subt, mpeg2subt->subtitle_index,
      mpeg2subt->subtitle_alpha);

  memset (mpeg2subt->out_buffers, 0, sizeof (mpeg2subt->out_buffers));
  mpeg2subt->pending_video_buffer = NULL;
  mpeg2subt->next_video_time = GST_CLOCK_TIME_NONE;
  mpeg2subt->pending_subtitle_buffer = NULL;
  mpeg2subt->next_subtitle_time = GST_CLOCK_TIME_NONE;
}
/* GObject finalize: release compositing buffers and any buffers still
 * referenced, then chain up to the parent class.
 *
 * Fixes vs. the original: the parent finalize was never chained up to
 * (required by GObject), and hold_frame / still_frame were never unreffed
 * (buffer leak when the element is destroyed while holding frames). */
static void
gst_mpeg2subt_finalize (GObject * gobject)
{
  GstMpeg2Subt *mpeg2subt = GST_MPEG2SUBT (gobject);
  gint i;

  for (i = 0; i < 3; i++) {
    /* g_free (NULL) is a no-op, so no guard is needed. */
    g_free (mpeg2subt->out_buffers[i]);
    mpeg2subt->out_buffers[i] = NULL;
  }
  if (mpeg2subt->partialbuf)
    gst_buffer_unref (mpeg2subt->partialbuf);
  if (mpeg2subt->hold_frame)
    gst_buffer_unref (mpeg2subt->hold_frame);
  if (mpeg2subt->still_frame)
    gst_buffer_unref (mpeg2subt->still_frame);

  G_OBJECT_CLASS (parent_class)->finalize (gobject);
}
/* Proxy getcaps between the src pad and the video sink pad: whatever one
 * side may produce is constrained by what the other side's peer accepts. */
static GstCaps *
gst_mpeg2subt_getcaps_video (GstPad * pad)
{
  GstMpeg2Subt *mpeg2subt = GST_MPEG2SUBT (gst_pad_get_parent (pad));
  GstPad *opposite;

  opposite =
      (pad == mpeg2subt->srcpad) ? mpeg2subt->videopad : mpeg2subt->srcpad;
  return gst_pad_get_allowed_caps (opposite);
}
/* Link function shared by the src and video pads: mirror the caps onto
 * the opposite pad, record the frame size, and (re)allocate one
 * scanline's worth of U/V/A compositing space. */
static GstPadLinkReturn
gst_mpeg2subt_link_video (GstPad * pad, const GstCaps * caps)
{
  GstMpeg2Subt *mpeg2subt = GST_MPEG2SUBT (gst_pad_get_parent (pad));
  GstPad *opposite;
  GstPadLinkReturn result;
  GstStructure *s;
  gint width, height;
  gint plane;

  opposite =
      (pad == mpeg2subt->srcpad) ? mpeg2subt->videopad : mpeg2subt->srcpad;
  result = gst_pad_try_set_caps (opposite, caps);
  if (GST_PAD_LINK_FAILED (result))
    return result;

  s = gst_caps_get_structure (caps, 0);
  if (!gst_structure_get_int (s, "width", &width)
      || !gst_structure_get_int (s, "height", &height))
    return GST_PAD_LINK_REFUSED;

  mpeg2subt->in_width = width;
  mpeg2subt->in_height = height;

  /* Allocate compositing buffers (g_free (NULL) is a no-op). */
  for (plane = 0; plane < 3; plane++) {
    g_free (mpeg2subt->out_buffers[plane]);
    mpeg2subt->out_buffers[plane] = g_malloc (sizeof (guint16) * width);
  }
  return GST_PAD_LINK_OK;
}
/* Video-stream handler: keeps a one-frame delay (hold_frame), blends the
 * current subtitle into each outgoing frame while it is active, and
 * handles/forwards events.  Ownership: a held buffer is owned by the
 * element until pushed or dropped. */
static void
gst_mpeg2subt_handle_video (GstMpeg2Subt * mpeg2subt, GstData * _data)
{
if (GST_IS_BUFFER (_data)) {
GstBuffer *buf = GST_BUFFER (_data);
guchar *data;
glong size;
data = GST_BUFFER_DATA (buf);
size = GST_BUFFER_SIZE (buf);
/* A new moving frame supersedes any held still frame. */
if (mpeg2subt->still_frame) {
gst_buffer_unref (mpeg2subt->still_frame);
mpeg2subt->still_frame = NULL;
}
if (!mpeg2subt->hold_frame) {
/* First frame after a flush: just hold it until the next arrives. */
mpeg2subt->hold_frame = buf;
} else {
GstBuffer *out_buf;
out_buf = mpeg2subt->hold_frame;
mpeg2subt->hold_frame = buf;
if (mpeg2subt->have_title) {
/* Blend when forced (menu) display with an active button is on, or
 * when the frame timestamp lies inside the display window. */
if ((mpeg2subt->forced_display && (mpeg2subt->current_button != 0))
||
((mpeg2subt->start_display_time <= GST_BUFFER_TIMESTAMP (out_buf))
&& (mpeg2subt->end_display_time >=
GST_BUFFER_TIMESTAMP (out_buf)))) {
/* Make the frame writable before drawing into it. */
out_buf = gst_buffer_copy_on_write (out_buf);
gst_mpeg2subt_merge_title (mpeg2subt, out_buf);
}
}
gst_pad_push (mpeg2subt->srcpad, GST_DATA (out_buf));
}
} else if (GST_IS_EVENT (_data)) {
switch (GST_EVENT_TYPE (GST_EVENT (_data))) {
case GST_EVENT_ANY:
/* DVD navigation events arrive wrapped in ANY events; consumed here. */
gst_mpeg2subt_handle_dvd_event (mpeg2subt, GST_EVENT (_data), FALSE);
gst_data_unref (_data);
break;
case GST_EVENT_DISCONTINUOUS:
/* Turn off forced highlight display */
mpeg2subt->forced_display = 0;
/* Drop held frames on discont; the event itself is forwarded below. */
if (mpeg2subt->still_frame) {
gst_buffer_unref (mpeg2subt->still_frame);
mpeg2subt->still_frame = NULL;
}
if (mpeg2subt->hold_frame) {
gst_buffer_unref (mpeg2subt->hold_frame);
mpeg2subt->hold_frame = NULL;
}
gst_pad_push (mpeg2subt->srcpad, _data);
break;
default:
gst_pad_push (mpeg2subt->srcpad, _data);
break;
}
} else
gst_data_unref (_data);
}
/* Forward upstream (src-pad) events to the video sink pad's peer.
 *
 * Fix vs. the original: if the video pad is not linked, GST_PAD_PEER()
 * is NULL and gst_pad_send_event() would be handed a NULL pad; the event
 * was also leaked in that case.  Now the event is unreffed and FALSE is
 * returned instead. */
static gboolean
gst_mpeg2subt_src_event (GstPad * pad, GstEvent * event)
{
  GstMpeg2Subt *mpeg2subt = GST_MPEG2SUBT (gst_pad_get_parent (pad));
  GstPad *peer = GST_PAD_PEER (mpeg2subt->videopad);

  if (peer == NULL) {
    gst_event_unref (event);
    return FALSE;
  }
  return gst_pad_send_event (peer, event);
}
/* Parse the SPU control sequences at the end of the assembled subtitle
 * packet (partialbuf): display window, palette/alpha, field offsets and
 * the show/hide display times (given in 90 kHz ticks relative to the
 * packet timestamp).
 *
 * Fix vs. the original: the SPU_SHOW debug message printed
 * end_display_time instead of the start_display_time it had just set.
 *
 * NOTE(review): event_time/next_seq are read through casted guint16
 * pointers, which assumes the buffer is 2-byte aligned — consider
 * GST_READ_UINT16_BE; left as-is here to keep this change minimal. */
static void
gst_mpeg2subt_parse_header (GstMpeg2Subt * mpeg2subt)
{
#define PARSE_BYTES_NEEDED(x) if ((buf+(x)) >= end) \
  { GST_WARNING("Subtitle stream broken parsing %d", *buf); \
    broken = TRUE; break; }
  guchar *buf;
  guchar *start = GST_BUFFER_DATA (mpeg2subt->partialbuf);
  guchar *end;
  gboolean broken = FALSE;
  gboolean last_seq = FALSE;
  guchar *next_seq = NULL;
  guint event_time;

  mpeg2subt->forced_display = FALSE;
  g_return_if_fail (mpeg2subt->packet_size >= 4);

  buf = start + mpeg2subt->data_size;
  end = buf + mpeg2subt->packet_size;
  event_time = GUINT16_FROM_BE (*(guint16 *) (buf));
  next_seq = start + GUINT16_FROM_BE (*(guint16 *) (buf + 2));
  /* If the next control sequence is at the current offset, this is
   * the last one */
  last_seq = (next_seq == buf);
  buf += 4;

  while ((buf < end) && (!broken)) {
    switch (*buf) {
      case SPU_FORCE_DISPLAY:  /* Forced display menu subtitle */
        mpeg2subt->forced_display = TRUE;
        buf++;
        break;
      case SPU_SHOW:           /* Show the subtitle in this packet */
        mpeg2subt->start_display_time =
            GST_BUFFER_TIMESTAMP (mpeg2subt->partialbuf) +
            ((GST_SECOND * event_time) / 90);
        GST_DEBUG ("Subtitle starts at %" G_GUINT64_FORMAT,
            mpeg2subt->start_display_time);
        buf++;
        break;
      case SPU_HIDE:           /* 02 ff (ff) is the end of the packet, hide the */
        mpeg2subt->end_display_time =
            GST_BUFFER_TIMESTAMP (mpeg2subt->partialbuf) +
            ((GST_SECOND * event_time) / 90);
        GST_DEBUG ("Subtitle ends at %" G_GUINT64_FORMAT,
            mpeg2subt->end_display_time);
        buf++;
        break;
      case SPU_SET_PALETTE:    /* palette */
        PARSE_BYTES_NEEDED (3);
        mpeg2subt->subtitle_index[3] = buf[1] >> 4;
        mpeg2subt->subtitle_index[2] = buf[1] & 0xf;
        mpeg2subt->subtitle_index[1] = buf[2] >> 4;
        mpeg2subt->subtitle_index[0] = buf[2] & 0xf;
        buf += 3;
        break;
      case SPU_SET_ALPHA:      /* transparency palette */
        PARSE_BYTES_NEEDED (3);
        mpeg2subt->subtitle_alpha[3] = buf[1] >> 4;
        mpeg2subt->subtitle_alpha[2] = buf[1] & 0xf;
        mpeg2subt->subtitle_alpha[1] = buf[2] >> 4;
        mpeg2subt->subtitle_alpha[0] = buf[2] & 0xf;
        buf += 3;
        break;
      case SPU_SET_SIZE:       /* image coordinates */
        PARSE_BYTES_NEEDED (7);
        mpeg2subt->left =
            CLAMP ((((unsigned int) buf[1]) << 4) | (buf[2] >> 4), 0,
            (mpeg2subt->in_width - 1));
        mpeg2subt->top =
            CLAMP ((((unsigned int) buf[4]) << 4) | (buf[5] >> 4), 0,
            (mpeg2subt->in_height - 1));
        mpeg2subt->right =
            CLAMP ((((buf[2] & 0x0f) << 8) | buf[3]), 0,
            (mpeg2subt->in_width - 1));
        mpeg2subt->bottom =
            CLAMP ((((buf[5] & 0x0f) << 8) | buf[6]), 0,
            (mpeg2subt->in_height - 1));
        GST_DEBUG ("left %d, top %d, right %d, bottom %d", mpeg2subt->left,
            mpeg2subt->top, mpeg2subt->right, mpeg2subt->bottom);
        buf += 7;
        break;
      case SPU_SET_OFFSETS:    /* image 1 / image 2 offsets */
        PARSE_BYTES_NEEDED (5);
        mpeg2subt->offset[0] = (((unsigned int) buf[1]) << 8) | buf[2];
        mpeg2subt->offset[1] = (((unsigned int) buf[3]) << 8) | buf[4];
        GST_DEBUG ("Offset1 %d, Offset2 %d", mpeg2subt->offset[0],
            mpeg2subt->offset[1]);
        buf += 5;
        break;
      case SPU_WIPE:
      {
        guint length;

        GST_WARNING ("SPU_WIPE not yet implemented");
        PARSE_BYTES_NEEDED (3);
        length = (buf[1] << 8) | (buf[2]);
        buf += 1 + length;
      }
        break;
      case SPU_END:
        buf = (last_seq) ? end : next_seq;
        /* Start a new control sequence */
        if (buf + 4 < end) {
          event_time = GUINT16_FROM_BE (*(guint16 *) (buf));
          next_seq = start + GUINT16_FROM_BE (*(guint16 *) (buf + 2));
          last_seq = (next_seq == buf);
        }
        buf += 4;
        break;
      default:
        GST_ERROR
            ("Invalid sequence in subtitle packet header (%.2x). Skipping",
            *buf);
        broken = TRUE;
        break;
    }
  }

  /* Menu (forced) packets use the palette delivered via DVD events, so
   * only rebuild the cache from the in-stream palette otherwise. */
  if (!mpeg2subt->forced_display)
    gst_setup_palette (mpeg2subt, mpeg2subt->subtitle_index,
        mpeg2subt->subtitle_alpha);
}
/* Return the next 4-bit value from the RLE stream for the current field,
 * fetching a fresh byte when the previous one has been fully consumed. */
inline int
gst_get_nibble (guchar * buffer, RLE_state * state)
{
  if (!state->aligned) {
    /* Low nibble of the byte fetched on the previous call. */
    state->aligned = 1;
    return state->next & 0xf;
  }
  /* Fetch the next byte and hand out its high nibble first. */
  state->next = buffer[state->offset[state->id]++];
  state->aligned = 0;
  return state->next >> 4;
}
/* Premultiply the current lookup table into the palette_cache: each of
 * the 4 active palette slots gets its CLUT colour (packed 0xYYUUVV)
 * scaled by the 4-bit alpha so drawing can blend without re-multiplying. */
static void
gst_setup_palette (GstMpeg2Subt * mpeg2subt, guchar * indexes, guchar * alpha)
{
  gint n;

  for (n = 0; n < 4; n++) {
    guint32 entry = mpeg2subt->current_clut[indexes[n]];
    YUVA_val *dest = &mpeg2subt->palette_cache[n];

    dest->Y = (guint16) ((entry >> 16) & 0xff) * alpha[n];
    dest->U = (guint16) ((entry >> 8) & 0xff) * alpha[n];
    dest->V = (guint16) (entry & 0xff) * alpha[n];
    dest->A = alpha[n];
  }
}
/* Decode one variable-length RLE code (4, 8, 12 or 16 bits).  A small
 * leading value means the code continues into the following nibble(s). */
inline guint
gst_get_rle_code (guchar * buffer, RLE_state * state)
{
  gint code = gst_get_nibble (buffer, state);

  if (code < 0x4) {
    code = (code << 4) | gst_get_nibble (buffer, state);
    if (code < 0x10) {
      code = (code << 4) | gst_get_nibble (buffer, state);
      if (code < 0x40)
        code = (code << 4) | gst_get_nibble (buffer, state);
    }
  }
  return code;
}
/*
* This function steps over each run-length segment, drawing
* into the YUVA buffers as it goes. UV are composited and then output
* at half width/height
*/
static void
gst_draw_rle_line (GstMpeg2Subt * mpeg2subt, guchar * buffer, RLE_state * state)
{
gint length, colourid;
gint right = mpeg2subt->right + 1;
YUVA_val *colour_entry;
guint code;
gint x;
gboolean in_clip = FALSE;
guchar *target_Y;
guint16 *target_U;
guint16 *target_V;
guint16 *target_A;
/* Y is written straight into the frame; U/V/A accumulate in the
 * per-scanline compositing buffers for later half-res merging. */
target_Y = state->target_Y;
target_U = mpeg2subt->out_buffers[0];
target_V = mpeg2subt->out_buffers[1];
target_A = mpeg2subt->out_buffers[2];
x = mpeg2subt->left;
while (x < right) {
/* RLE code packs (run length << 2) | 2-bit palette index. */
code = gst_get_rle_code (buffer, state);
length = code >> 2;
colourid = code & 3;
colour_entry = mpeg2subt->palette_cache + colourid;
/* Length = 0 implies fill to the end of the line */
if (length == 0)
length = right - x;
else {
/* Restrict the colour run to the end of the line */
length = length < (right - x) ? length : (right - x);
}
/* Check if this run of colour crosses into the clip region */
in_clip = (((x + length) >= state->clip_left) && (x <= state->clip_right));
/* Draw YA onto the frame via target_Y, UVA into the composite buffers */
if ((in_clip) && (colour_entry->A)) {
/* 4-bit alpha blend: palette Y/U/V are premultiplied by A already
 * (see gst_setup_palette), so only the inverse term is scaled here. */
guint16 inv_alpha = 0xf - colour_entry->A;
gint i;
for (i = 0; i < length; i++) {
*target_Y = ((inv_alpha * (*target_Y)) + colour_entry->Y) / 0xf;
*target_U += colour_entry->U;
*target_V += colour_entry->V;
*target_A += colour_entry->A;
target_Y++;
target_U++;
target_V++;
target_A++;
}
} else {
/* Fully transparent or clipped-out run: skip ahead. */
target_Y += length;
target_U += length;
target_V += length;
target_A += length;
}
x += length;
}
}
/* Fold two accumulated scanlines of premultiplied U/V (and A) from the
 * compositing buffers into the frame's half-resolution U/V planes. */
inline void
gst_merge_uv_data (GstMpeg2Subt * mpeg2subt, guchar * buffer, RLE_state * state)
{
gint x;
guchar *target_V;
guchar *target_U;
gint width = mpeg2subt->right - mpeg2subt->left + 1;
guint16 *comp_U;
guint16 *comp_V;
guint16 *comp_A;
/* The compositing buffers should contain the results of accumulating 2 scanlines of
* U, V (premultiplied) and A data. Merge them back into their output buffers at
* half width/height.
*/
target_U = state->target_U;
target_V = state->target_V;
comp_U = mpeg2subt->out_buffers[0];
comp_V = mpeg2subt->out_buffers[1];
comp_A = mpeg2subt->out_buffers[2];
for (x = 0; x < width; x += 2) {
guint16 temp1, temp2;
/* Average out the alpha accumulated to compute transparency */
guint16 alpha = (comp_A[0] + comp_A[1]);
/* NOTE(review): alpha sums 2 of the 4 covered samples (2x2 block over
 * two lines); the 4*0xf scale below treats it as a 4-sample total —
 * confirm against the accumulation in gst_draw_rle_line. */
if (alpha > 0) {
temp1 = (*target_U) * ((4 * 0xf) - alpha) + comp_U[0] + comp_U[1];
temp2 = (*target_V) * ((4 * 0xf) - alpha) + comp_V[0] + comp_V[1];
*target_U = temp1 / (4 * 0xf);
*target_V = temp2 / (4 * 0xf);
};
comp_U += 2;
comp_V += 2;
comp_A += 2;
target_U++;
target_V++;
}
}
/*
* Decode the RLE subtitle image and blend with the current
* frame buffer.
*/
static void
gst_mpeg2subt_merge_title (GstMpeg2Subt * mpeg2subt, GstBuffer * buf)
{
gint y;
gint width = mpeg2subt->right - mpeg2subt->left + 1;
gint Y_stride;
gint UV_stride;
guchar *buffer = GST_BUFFER_DATA (mpeg2subt->partialbuf);
gint last_y;
gint first_y;
RLE_state state;
/* Set up the initial offsets, remembering the half-res size for UV in I420 packing
* see http://www.fourcc.org for details
*/
Y_stride = mpeg2subt->in_width;
UV_stride = (mpeg2subt->in_width + 1) / 2;
GST_DEBUG ("Merging subtitle on frame at time %" G_GUINT64_FORMAT
" using %s colour table", GST_BUFFER_TIMESTAMP (buf),
mpeg2subt->forced_display ? "menu" : "subtitle");
state.id = 0;
state.aligned = 1;
state.offset[0] = mpeg2subt->offset[0];
state.offset[1] = mpeg2subt->offset[1];
/* skip over lines until we hit the clip region */
if (mpeg2subt->forced_display) {
/* Menu highlight: draw only within the button's clip rectangle. */
state.clip_right = mpeg2subt->clip_right;
state.clip_left = mpeg2subt->clip_left;
last_y = mpeg2subt->clip_bottom;
first_y = mpeg2subt->clip_top;
} else {
/* Regular subtitle: the clip region is the full image rectangle. */
state.clip_right = mpeg2subt->right;
state.clip_left = mpeg2subt->left;
last_y = mpeg2subt->bottom;
first_y = mpeg2subt->top;
}
for (y = mpeg2subt->top; y < first_y; y++) {
/* Skip a line of RLE data */
gint length;
guint code;
gint x = 0;
while (x < width) {
code = gst_get_rle_code (buffer, &state);
length = code >> 2;
/* Length = 0 implies fill to the end of the line so we're done */
if (length == 0)
break;
x += length;
}
/* Each RLE line is nibble-padded to a byte boundary, and the two
 * interlaced fields alternate line by line. */
if (!state.aligned)
gst_get_nibble (buffer, &state);
state.id = !state.id;
}
/* I420 plane layout: Y plane, then V at Y_stride*height, then U another
 * half-height UV plane further on; UV are half resolution. */
state.target_Y = GST_BUFFER_DATA (buf) + mpeg2subt->left + (y * Y_stride);
state.target_V = GST_BUFFER_DATA (buf) + (Y_stride * mpeg2subt->in_height)
+ ((mpeg2subt->left) / 2) + ((y / 2) * UV_stride);
state.target_U =
state.target_V + UV_stride * ((mpeg2subt->in_height + 1) / 2);
memset (mpeg2subt->out_buffers[0], 0, sizeof (guint16) * Y_stride);
memset (mpeg2subt->out_buffers[1], 0, sizeof (guint16) * Y_stride);
memset (mpeg2subt->out_buffers[2], 0, sizeof (guint16) * Y_stride);
/* Now draw scanlines until we hit last_y or end of RLE data */
/* NOTE(review): offset[1] < data_size + 2 as the end-of-RLE test looks
 * fragile — confirm against how data_size/packet_size partition the
 * packet in the subtitle assembly code. */
for (; ((state.offset[1] < mpeg2subt->data_size + 2) && (y <= last_y)); y++) {
gst_draw_rle_line (mpeg2subt, buffer, &state);
if (state.id) {
/* Two Y lines accumulated: merge U/V/A down to one half-res line. */
gst_merge_uv_data (mpeg2subt, buffer, &state);
/* Clear the compositing buffers */
memset (mpeg2subt->out_buffers[0], 0, sizeof (guint16) * Y_stride);
memset (mpeg2subt->out_buffers[1], 0, sizeof (guint16) * Y_stride);
memset (mpeg2subt->out_buffers[2], 0, sizeof (guint16) * Y_stride);
state.target_U += UV_stride;
state.target_V += UV_stride;
}
state.target_Y += Y_stride;
/* Realign the RLE state for the next line */
if (!state.aligned)
gst_get_nibble (buffer, &state);
state.id = !state.id;
}
}
/* Re-render the held still frame with the current menu highlight and push
 * it, but only while a still frame is held, a title is loaded, and a
 * forced (menu) display with an active button is in effect. */
static void
gst_update_still_frame (GstMpeg2Subt * mpeg2subt)
{
  GstBuffer *composed;

  if (!mpeg2subt->still_frame || !mpeg2subt->have_title)
    return;
  if (!(mpeg2subt->forced_display && (mpeg2subt->current_button != 0)))
    return;

  /* Keep our reference to the still frame; blend into a writable copy. */
  gst_buffer_ref (mpeg2subt->still_frame);
  composed = gst_buffer_copy_on_write (mpeg2subt->still_frame);
  gst_mpeg2subt_merge_title (mpeg2subt, composed);
  gst_pad_push (mpeg2subt->srcpad, GST_DATA (composed));
}
/* Sink handler for the subtitle pad.  Buffers are accumulated in
 * 'partialbuf' until a complete SPU packet is present, at which point the
 * packet header is parsed.  DVD navigation events are dispatched to
 * gst_mpeg2subt_handle_dvd_event().  Takes ownership of _data. */
static void
gst_mpeg2subt_handle_subtitle (GstMpeg2Subt * mpeg2subt, GstData * _data)
{
  g_return_if_fail (_data != NULL);

  if (GST_IS_BUFFER (_data)) {
    GstBuffer *buf = GST_BUFFER (_data);
    guchar *data;
    glong size = 0;

    /* A new packet arriving invalidates any previously parsed title */
    if (mpeg2subt->have_title) {
      gst_buffer_unref (mpeg2subt->partialbuf);
      mpeg2subt->partialbuf = NULL;
      mpeg2subt->have_title = FALSE;
    }

    GST_DEBUG ("Got subtitle buffer, pts %" G_GUINT64_FORMAT,
        GST_BUFFER_TIMESTAMP (buf));

    /* deal with partial frame from previous buffer */
    if (mpeg2subt->partialbuf) {
      GstBuffer *merge;

      merge = gst_buffer_merge (mpeg2subt->partialbuf, buf);
      gst_buffer_unref (mpeg2subt->partialbuf);
      gst_buffer_unref (buf);
      mpeg2subt->partialbuf = merge;
    } else {
      mpeg2subt->partialbuf = buf;
    }

    data = GST_BUFFER_DATA (mpeg2subt->partialbuf);
    size = GST_BUFFER_SIZE (mpeg2subt->partialbuf);

    if (size > 4) {
      /* First two bytes of an SPU packet hold the total packet size
       * (big endian); unaligned-safe read via GST_READ_UINT16_BE. */
      mpeg2subt->packet_size = GST_READ_UINT16_BE (data);

      /* Only parse once the whole packet has been accumulated */
      if (mpeg2subt->packet_size == size) {
        GST_LOG ("Subtitle packet size %d, current size %ld",
            mpeg2subt->packet_size, size);

        /* Bytes 2-3: offset of the control sequence, i.e. the size of
         * the RLE pixel-data block preceding it. */
        mpeg2subt->data_size = GST_READ_UINT16_BE (data + 2);

        mpeg2subt->have_title = TRUE;
        gst_mpeg2subt_parse_header (mpeg2subt);
      }
    }
  } else if (GST_IS_EVENT (_data)) {
    switch (GST_EVENT_TYPE (GST_EVENT (_data))) {
      case GST_EVENT_ANY:
        /* Use G_GUINT64_FORMAT: %llu is wrong where guint64 is 'long' */
        GST_LOG ("DVD event on subtitle pad with timestamp %" G_GUINT64_FORMAT,
            GST_EVENT_TIMESTAMP (GST_EVENT (_data)));
        gst_mpeg2subt_handle_dvd_event (mpeg2subt, GST_EVENT (_data), TRUE);
        break;
      case GST_EVENT_EMPTY:
        /* No subtitle data available: advance the subtitle clock past the
         * video clock so the loop does not stall waiting for subtitles. */
        if (GST_CLOCK_TIME_IS_VALID (mpeg2subt->next_video_time) &&
            (mpeg2subt->next_video_time > 0)) {
          mpeg2subt->next_subtitle_time = mpeg2subt->next_video_time + 1;
          GST_LOG ("Forwarding subtitle time to %" G_GUINT64_FORMAT,
              mpeg2subt->next_subtitle_time);
        }
        gst_update_still_frame (mpeg2subt);
        break;
      default:
        GST_LOG ("Got event of type %d on subtitle pad",
            GST_EVENT_TYPE (GST_EVENT (_data)));
        break;
    }
    gst_data_unref (_data);
  } else
    gst_data_unref (_data);
}
/* Process a DVD navigation event (a GST_EVENT_ANY carrying a GstStructure
 * with an "event" field).  from_sub_pad tells which pad delivered it, as
 * some event types are only honoured from one side.  Does not consume the
 * event; the caller retains ownership. */
static void
gst_mpeg2subt_handle_dvd_event (GstMpeg2Subt * mpeg2subt, GstEvent * event,
    gboolean from_sub_pad)
{
  GstStructure *structure;
  const gchar *event_type;

  structure = event->event_data.structure.structure;
  event_type = gst_structure_get_string (structure, "event");
  g_return_if_fail (event_type != NULL);

  if (from_sub_pad && !strcmp (event_type, "dvd-spu-highlight")) {
    gint button;
    gint palette, sx, sy, ex, ey;
    gint i;

    /* Details for the highlight region to display */
    if (!gst_structure_get_int (structure, "button", &button) ||
        !gst_structure_get_int (structure, "palette", &palette) ||
        !gst_structure_get_int (structure, "sx", &sx) ||
        !gst_structure_get_int (structure, "sy", &sy) ||
        !gst_structure_get_int (structure, "ex", &ex) ||
        !gst_structure_get_int (structure, "ey", &ey)) {
      GST_ERROR ("Invalid dvd-spu-highlight event received");
      return;
    }
    mpeg2subt->current_button = button;
    mpeg2subt->clip_left = sx;
    mpeg2subt->clip_top = sy;
    mpeg2subt->clip_right = ex;
    mpeg2subt->clip_bottom = ey;
    /* The palette word packs four 4-bit alpha values in the low 16 bits
     * and four 4-bit colour indices in the high 16 bits. */
    for (i = 0; i < 4; i++) {
      mpeg2subt->menu_alpha[i] = ((guint32) (palette) >> (i * 4)) & 0x0f;
      mpeg2subt->menu_index[i] = ((guint32) (palette) >> (16 + (i * 4))) & 0x0f;
    }
    GST_DEBUG ("New button activated clip=(%d,%d) to (%d,%d) palette 0x%x", sx,
        sy, ex, ey, palette);
    gst_setup_palette (mpeg2subt, mpeg2subt->menu_index, mpeg2subt->menu_alpha);

    gst_update_still_frame (mpeg2subt);
  } else if (from_sub_pad && !strcmp (event_type, "dvd-spu-clut-change")) {
    /* Take a copy of the colour table */
    gchar name[16];             /* gchar, not guchar: snprintf and the
                                 * structure API take char pointers */
    int i;
    gint value;

    GST_LOG ("New colour table received");
    for (i = 0; i < 16; i++) {
      /* Bounded write; field names are "clut00" .. "clut15" */
      snprintf (name, sizeof (name), "clut%02d", i);
      if (!gst_structure_get_int (structure, name, &value)) {
        GST_ERROR ("dvd-spu-clut-change event did not contain %s field", name);
        break;
      }
      mpeg2subt->current_clut[i] = (guint32) (value);
    }

    /* Re-derive the active palette from the new CLUT */
    if (mpeg2subt->forced_display)
      gst_setup_palette (mpeg2subt, mpeg2subt->menu_index,
          mpeg2subt->menu_alpha);
    else
      gst_setup_palette (mpeg2subt, mpeg2subt->subtitle_index,
          mpeg2subt->subtitle_alpha);

    gst_update_still_frame (mpeg2subt);
  } else if ((from_sub_pad && !strcmp (event_type, "dvd-spu-stream-change"))
      || (from_sub_pad && !strcmp (event_type, "dvd-spu-reset-highlight"))) {
    /* Turn off forced highlight display */
    mpeg2subt->current_button = 0;
    mpeg2subt->clip_left = mpeg2subt->left;
    mpeg2subt->clip_top = mpeg2subt->top;
    mpeg2subt->clip_right = mpeg2subt->right;
    mpeg2subt->clip_bottom = mpeg2subt->bottom;
    GST_LOG ("Clearing button state");
    gst_update_still_frame (mpeg2subt);
  } else if (!from_sub_pad && !strcmp (event_type, "dvd-spu-still-frame")) {
    /* Handle a still frame: promote the held frame to the still frame */
    GST_LOG ("Received still frame notification");
    if (mpeg2subt->still_frame)
      gst_buffer_unref (mpeg2subt->still_frame);
    mpeg2subt->still_frame = mpeg2subt->hold_frame;
    mpeg2subt->hold_frame = NULL;
    gst_update_still_frame (mpeg2subt);
  } else {
    /* Ignore all other unknown events */
    GST_LOG ("Ignoring DVD event %s from %s pad", event_type,
        from_sub_pad ? "sub" : "video");
  }
}
/* Main element loop.  Each iteration processes the previously pulled
 * ("pending") video and subtitle data, then pulls one new datum from each
 * pad and records its timestamp, so the stream clocks are known before the
 * datum is handled on the next iteration. */
static void
gst_mpeg2subt_loop (GstElement * element)
{
  GstMpeg2Subt *mpeg2subt = GST_MPEG2SUBT (element);
  GstData *data;
  GstClockTime timestamp = 0;

  /* Process any pending video buffer */
  if (mpeg2subt->pending_video_buffer) {
    gst_mpeg2subt_handle_video (mpeg2subt, mpeg2subt->pending_video_buffer);
    mpeg2subt->pending_video_buffer = NULL;
  }

  data = mpeg2subt->pending_video_buffer = gst_pad_pull (mpeg2subt->videopad);
  if (!data)
    return;

  if (GST_IS_BUFFER (data)) {
    timestamp = GST_BUFFER_TIMESTAMP (GST_BUFFER (data));
  } else if (GST_IS_EVENT (data)) {
    timestamp = GST_EVENT_TIMESTAMP (GST_EVENT (data));
  } else {
    GST_WARNING ("Got GstData of unknown type %d", GST_DATA_TYPE (data));
  }

  if (GST_CLOCK_TIME_IS_VALID (timestamp) && (timestamp > 0)) {
    mpeg2subt->next_video_time = timestamp;
    GST_LOG ("next_video_time = %" G_GUINT64_FORMAT
        ", next_subtitle_time = %" G_GUINT64_FORMAT,
        mpeg2subt->next_video_time, mpeg2subt->next_subtitle_time);
  }

  /* Process subtitle buffers until we get one beyond 'next_video_time' */
  if (mpeg2subt->pending_subtitle_buffer) {
    gst_mpeg2subt_handle_subtitle (mpeg2subt,
        mpeg2subt->pending_subtitle_buffer);
    mpeg2subt->pending_subtitle_buffer = NULL;
  }

  data = mpeg2subt->pending_subtitle_buffer =
      gst_pad_pull (mpeg2subt->subtitlepad);
  if (!data) {
    return;
  }

  /* Reset so an unknown-typed datum cannot reuse the stale video
   * timestamp for the subtitle clock (bug in the original). */
  timestamp = 0;
  if (GST_IS_BUFFER (data)) {
    timestamp = GST_BUFFER_TIMESTAMP (GST_BUFFER (data));
  } else if (GST_IS_EVENT (data)) {
    timestamp = GST_EVENT_TIMESTAMP (GST_EVENT (data));
  } else {
    GST_WARNING ("Got GstData of unknown type %d", GST_DATA_TYPE (data));
  }

  if (GST_CLOCK_TIME_IS_VALID (timestamp) && (timestamp > 0)) {
    mpeg2subt->next_subtitle_time = timestamp;
    GST_LOG ("next_subtitle_time = %" G_GUINT64_FORMAT
        ", next_video_time = %" G_GUINT64_FORMAT,
        mpeg2subt->next_subtitle_time, mpeg2subt->next_video_time);
  }
}
/* GObject property setter.  The element installs no properties yet, so
 * this stub only validates the object type. */
static void
gst_mpeg2subt_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstMpeg2Subt *element;

  /* Reject foreign object types early */
  g_return_if_fail (GST_IS_MPEG2SUBT (object));
  element = GST_MPEG2SUBT (object);
  (void) element;               /* kept for future property handling */

  switch (prop_id) {
    default:
      /* no properties installed */
      break;
  }
}
/* GObject property getter.  The element installs no properties yet, so
 * this stub only validates the object type. */
static void
gst_mpeg2subt_get_property (GObject * object, guint prop_id, GValue * value,
    GParamSpec * pspec)
{
  GstMpeg2Subt *element;

  /* Reject foreign object types early */
  g_return_if_fail (GST_IS_MPEG2SUBT (object));
  element = GST_MPEG2SUBT (object);
  (void) element;               /* kept for future property handling */

  switch (prop_id) {
    default:
      /* no properties installed */
      break;
  }
}
/* Plugin entry point: register the mpeg2subt element with the core.
 * Returns FALSE if registration fails, aborting the plugin load. */
static gboolean
plugin_init (GstPlugin * plugin)
{
  gboolean registered;

  registered = gst_element_register (plugin, "mpeg2subt",
      GST_RANK_NONE, GST_TYPE_MPEG2SUBT);
  return registered;
}
/* Plugin descriptor: exports the metadata and plugin_init hook that the
 * GStreamer core reads when loading this shared object. */
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
    GST_VERSION_MINOR,
    "mpeg2sub",
    "MPEG-2 video subtitle parser",
    plugin_init, VERSION, "LGPL", GST_PACKAGE, GST_ORIGIN)