ext/pango/gsttextoverlay.c: Refactoring of textoverlay without collectpads. This now supports sparse subtitles coming...

Original commit message from CVS:
2006-02-18  Julien MOUTTE  <julien@moutte.net>

* ext/pango/gsttextoverlay.c: (gst_text_overlay_base_init),
(gst_text_overlay_finalize), (gst_text_overlay_init),
(gst_text_overlay_setcaps), (gst_text_overlay_src_event),
(gst_text_overlay_render_text),
(gst_text_overlay_text_pad_link),
(gst_text_overlay_text_pad_unlink),
(gst_text_overlay_text_event),
(gst_text_overlay_video_event), (gst_text_overlay_pop_text),
(gst_text_overlay_text_chain), (gst_text_overlay_video_chain),
(gst_text_overlay_change_state): Refactoring of textoverlay
without collectpads. This now supports sparse subtitles coming
from a demuxer instead of a sub file. Seeking is still broken,
though; we need to discuss with wtay some more how to handle
seeking correctly.
* ext/pango/gsttextoverlay.h:
* gst/playback/gstplaybin.c: (setup_sinks): Support linking with
subtitles coming from the demuxer.
This commit is contained in:
Julien Moutte 2006-02-18 19:10:35 +00:00
parent 9a55cf386e
commit 3281418028
3 changed files with 353 additions and 212 deletions

View file

@ -147,14 +147,30 @@ static GstStaticPadTemplate text_sink_template_factory =
#define I420_SIZE(w,h) (I420_V_OFFSET(w,h)+(I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2)) #define I420_SIZE(w,h) (I420_V_OFFSET(w,h)+(I420_V_ROWSTRIDE(w)*GST_ROUND_UP_2(h)/2))
#define GST_TEXT_OVERLAY_GET_COND(ov) (((GstTextOverlay *)ov)->cond)
#define GST_TEXT_OVERLAY_WAIT(ov) (g_cond_wait (GST_TEXT_OVERLAY_GET_COND (ov), GST_OBJECT_GET_LOCK (ov)))
#define GST_TEXT_OVERLAY_SIGNAL(ov) (g_cond_signal (GST_TEXT_OVERLAY_GET_COND (ov)))
#define GST_TEXT_OVERLAY_BROADCAST(ov)(g_cond_broadcast (GST_TEXT_OVERLAY_GET_COND (ov)))
static GstStateChangeReturn gst_text_overlay_change_state (GstElement * element, static GstStateChangeReturn gst_text_overlay_change_state (GstElement * element,
GstStateChange transition); GstStateChange transition);
static GstCaps *gst_text_overlay_getcaps (GstPad * pad); static GstCaps *gst_text_overlay_getcaps (GstPad * pad);
static gboolean gst_text_overlay_setcaps (GstPad * pad, GstCaps * caps); static gboolean gst_text_overlay_setcaps (GstPad * pad, GstCaps * caps);
static gboolean gst_text_overlay_src_event (GstPad * pad, GstEvent * event); static gboolean gst_text_overlay_src_event (GstPad * pad, GstEvent * event);
static GstFlowReturn gst_text_overlay_collected (GstCollectPads * pads,
gpointer data); static gboolean gst_text_overlay_video_event (GstPad * pad, GstEvent * event);
static GstFlowReturn gst_text_overlay_video_chain (GstPad * pad,
GstBuffer * buffer);
static gboolean gst_text_overlay_text_event (GstPad * pad, GstEvent * event);
static GstFlowReturn gst_text_overlay_text_chain (GstPad * pad,
GstBuffer * buffer);
static GstPadLinkReturn gst_text_overlay_text_pad_link (GstPad * pad,
GstPad * peer);
static void gst_text_overlay_text_pad_unlink (GstPad * pad);
static void gst_text_overlay_pop_text (GstTextOverlay * overlay);
static void gst_text_overlay_finalize (GObject * object); static void gst_text_overlay_finalize (GObject * object);
static void gst_text_overlay_set_property (GObject * object, guint prop_id, static void gst_text_overlay_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec); const GValue * value, GParamSpec * pspec);
@ -263,14 +279,22 @@ gst_text_overlay_finalize (GObject * object)
{ {
GstTextOverlay *overlay = GST_TEXT_OVERLAY (object); GstTextOverlay *overlay = GST_TEXT_OVERLAY (object);
gst_object_unref (overlay->collect);
g_free (overlay->default_text); g_free (overlay->default_text);
g_free (overlay->bitmap.buffer); g_free (overlay->bitmap.buffer);
if (overlay->layout) if (overlay->layout)
g_object_unref (overlay->layout); g_object_unref (overlay->layout);
if (overlay->segment) {
gst_segment_free (overlay->segment);
overlay->segment = NULL;
}
if (overlay->cond) {
g_cond_free (overlay->cond);
overlay->cond = NULL;
}
G_OBJECT_CLASS (parent_class)->finalize (object); G_OBJECT_CLASS (parent_class)->finalize (object);
} }
@ -285,6 +309,10 @@ gst_text_overlay_init (GstTextOverlay * overlay, GstTextOverlayClass * klass)
GST_DEBUG_FUNCPTR (gst_text_overlay_getcaps)); GST_DEBUG_FUNCPTR (gst_text_overlay_getcaps));
gst_pad_set_setcaps_function (overlay->video_sinkpad, gst_pad_set_setcaps_function (overlay->video_sinkpad,
GST_DEBUG_FUNCPTR (gst_text_overlay_setcaps)); GST_DEBUG_FUNCPTR (gst_text_overlay_setcaps));
gst_pad_set_event_function (overlay->video_sinkpad,
GST_DEBUG_FUNCPTR (gst_text_overlay_video_event));
gst_pad_set_chain_function (overlay->video_sinkpad,
GST_DEBUG_FUNCPTR (gst_text_overlay_video_chain));
gst_element_add_pad (GST_ELEMENT (overlay), overlay->video_sinkpad); gst_element_add_pad (GST_ELEMENT (overlay), overlay->video_sinkpad);
if (!GST_IS_TIME_OVERLAY_CLASS (klass) && !GST_IS_CLOCK_OVERLAY_CLASS (klass)) { if (!GST_IS_TIME_OVERLAY_CLASS (klass) && !GST_IS_CLOCK_OVERLAY_CLASS (klass)) {
@ -292,6 +320,14 @@ gst_text_overlay_init (GstTextOverlay * overlay, GstTextOverlayClass * klass)
overlay->text_sinkpad = overlay->text_sinkpad =
gst_pad_new_from_template (gst_static_pad_template_get gst_pad_new_from_template (gst_static_pad_template_get
(&text_sink_template_factory), "text_sink"); (&text_sink_template_factory), "text_sink");
gst_pad_set_event_function (overlay->text_sinkpad,
GST_DEBUG_FUNCPTR (gst_text_overlay_text_event));
gst_pad_set_chain_function (overlay->text_sinkpad,
GST_DEBUG_FUNCPTR (gst_text_overlay_text_chain));
gst_pad_set_link_function (overlay->text_sinkpad,
GST_DEBUG_FUNCPTR (gst_text_overlay_text_pad_link));
gst_pad_set_unlink_function (overlay->text_sinkpad,
GST_DEBUG_FUNCPTR (gst_text_overlay_text_pad_unlink));
gst_element_add_pad (GST_ELEMENT (overlay), overlay->text_sinkpad); gst_element_add_pad (GST_ELEMENT (overlay), overlay->text_sinkpad);
} }
@ -327,16 +363,15 @@ gst_text_overlay_init (GstTextOverlay * overlay, GstTextOverlayClass * klass)
overlay->fps_n = 0; overlay->fps_n = 0;
overlay->fps_d = 1; overlay->fps_d = 1;
overlay->collect = gst_collect_pads_new (); overlay->text_buffer = NULL;
overlay->text_linked = FALSE;
gst_collect_pads_set_function (overlay->collect, overlay->cond = g_cond_new ();
GST_DEBUG_FUNCPTR (gst_text_overlay_collected), overlay); overlay->segment = gst_segment_new ();
if (overlay->segment) {
overlay->video_collect_data = gst_collect_pads_add_pad (overlay->collect, gst_segment_init (overlay->segment, GST_FORMAT_TIME);
overlay->video_sinkpad, sizeof (GstCollectData)); } else {
if (overlay->text_sinkpad) { GST_WARNING_OBJECT (overlay, "segment creation failed");
overlay->text_collect_data = gst_collect_pads_add_pad (overlay->collect, g_assert_not_reached ();
overlay->text_sinkpad, sizeof (GstCollectData));
} }
} }
@ -392,6 +427,8 @@ gst_text_overlay_setcaps (GstPad * pad, GstCaps * caps)
GST_OBJECT_UNLOCK (overlay); GST_OBJECT_UNLOCK (overlay);
} }
gst_object_unref (overlay);
return ret; return ret;
} }
@ -515,7 +552,7 @@ gst_text_overlay_src_event (GstPad * pad, GstEvent * event)
switch (GST_EVENT_TYPE (event)) { switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_SEEK: case GST_EVENT_SEEK:
/* We don't handle seek if we have not text pad */ /* We don't handle seek if we have not text pad */
if (!overlay->text_sinkpad) { if (!overlay->text_linked) {
ret = gst_pad_push_event (overlay->video_sinkpad, event); ret = gst_pad_push_event (overlay->video_sinkpad, event);
goto beach; goto beach;
} }
@ -525,10 +562,18 @@ gst_text_overlay_src_event (GstPad * pad, GstEvent * event)
/* Flush downstream */ /* Flush downstream */
gst_pad_push_event (overlay->srcpad, gst_event_new_flush_start ()); gst_pad_push_event (overlay->srcpad, gst_event_new_flush_start ());
/* Stopping collect pads */ /* Mark our sink pads as flushing to acquire stream lock */
gst_collect_pads_stop (overlay->collect); GST_OBJECT_LOCK (overlay->video_sinkpad);
GST_PAD_SET_FLUSHING (overlay->video_sinkpad);
GST_OBJECT_UNLOCK (overlay->video_sinkpad);
GST_OBJECT_LOCK (overlay->text_sinkpad);
GST_PAD_SET_FLUSHING (overlay->text_sinkpad);
GST_OBJECT_UNLOCK (overlay->text_sinkpad);
/* Acquire stream lock */ /* Unblock the text chain if it's waiting */
gst_text_overlay_pop_text (overlay);
/* Take the stream locks */
GST_PAD_STREAM_LOCK (overlay->video_sinkpad); GST_PAD_STREAM_LOCK (overlay->video_sinkpad);
GST_PAD_STREAM_LOCK (overlay->text_sinkpad); GST_PAD_STREAM_LOCK (overlay->text_sinkpad);
@ -541,17 +586,14 @@ gst_text_overlay_src_event (GstPad * pad, GstEvent * event)
gst_event_unref (event); gst_event_unref (event);
} }
/* Start collect pads again */ /* Release the locks */
gst_collect_pads_start (overlay->collect);
/* Release stream lock */
GST_PAD_STREAM_UNLOCK (overlay->video_sinkpad); GST_PAD_STREAM_UNLOCK (overlay->video_sinkpad);
GST_PAD_STREAM_UNLOCK (overlay->text_sinkpad); GST_PAD_STREAM_UNLOCK (overlay->text_sinkpad);
break; break;
default: default:
gst_event_ref (event); gst_event_ref (event);
ret = gst_pad_push_event (overlay->video_sinkpad, event); ret = gst_pad_push_event (overlay->video_sinkpad, event);
if (overlay->text_sinkpad) { if (overlay->text_linked) {
ret = gst_pad_push_event (overlay->text_sinkpad, event); ret = gst_pad_push_event (overlay->text_sinkpad, event);
} }
} }
@ -767,10 +809,16 @@ gst_text_overlay_render_text (GstTextOverlay * overlay,
PangoRectangle ink_rect, logical_rect; PangoRectangle ink_rect, logical_rect;
gchar *string; gchar *string;
if (textlen < 0) /* -1 is the whole string */
if (text != NULL && textlen < 0) {
textlen = strlen (text); textlen = strlen (text);
}
string = g_strndup (text, textlen); if (text != NULL) {
string = g_strndup (text, textlen);
} else { /* empty string */
string = g_strdup (" ");
}
g_strdelimit (string, "\n\r\t", ' '); g_strdelimit (string, "\n\r\t", ' ');
textlen = strlen (string); textlen = strlen (string);
@ -851,217 +899,297 @@ gst_text_overlay_push_frame (GstTextOverlay * overlay, GstBuffer * video_frame)
return gst_pad_push (overlay->srcpad, video_frame); return gst_pad_push (overlay->srcpad, video_frame);
} }
static void static GstPadLinkReturn
gst_text_overlay_pop_video (GstTextOverlay * overlay) gst_text_overlay_text_pad_link (GstPad * pad, GstPad * peer)
{ {
GstBuffer *buf; GstTextOverlay *overlay;
buf = gst_collect_pads_pop (overlay->collect, overlay->video_collect_data); overlay = GST_TEXT_OVERLAY (gst_pad_get_parent (pad));
g_return_if_fail (buf != NULL);
gst_buffer_unref (buf); GST_DEBUG_OBJECT (overlay, "Text pad linked");
overlay->text_linked = TRUE;
gst_object_unref (overlay);
return GST_PAD_LINK_OK;
} }
static void
gst_text_overlay_text_pad_unlink (GstPad * pad)
{
GstTextOverlay *overlay;
/* don't use gst_pad_get_parent() here, will deadlock */
overlay = GST_TEXT_OVERLAY (GST_PAD_PARENT (pad));
GST_DEBUG_OBJECT (overlay, "Text pad unlinked");
overlay->text_linked = FALSE;
}
static gboolean
gst_text_overlay_text_event (GstPad * pad, GstEvent * event)
{
gboolean ret = FALSE;
GstTextOverlay *overlay = NULL;
overlay = GST_TEXT_OVERLAY (gst_pad_get_parent (pad));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
case GST_EVENT_NEWSEGMENT:
case GST_EVENT_FLUSH_START:
case GST_EVENT_FLUSH_STOP:
/* We just ignore those events from the text pad */
gst_event_unref (event);
ret = TRUE;
break;
default:
ret = gst_pad_event_default (pad, event);
goto beach;
}
beach:
gst_object_unref (overlay);
return ret;
}
static gboolean
gst_text_overlay_video_event (GstPad * pad, GstEvent * event)
{
gboolean ret = FALSE;
GstTextOverlay *overlay = NULL;
overlay = GST_TEXT_OVERLAY (gst_pad_get_parent (pad));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_NEWSEGMENT:
{
GstFormat format;
gdouble rate;
gint64 start, stop, time;
gboolean update;
GST_DEBUG_OBJECT (overlay, "received new segment");
gst_event_parse_new_segment (event, &update, &rate, &format, &start,
&stop, &time);
/* now copy over the values */
gst_segment_set_newsegment (overlay->segment, update, rate, format,
start, stop, time);
ret = gst_pad_event_default (pad, event);
break;
}
case GST_EVENT_FLUSH_START:
case GST_EVENT_FLUSH_STOP:
case GST_EVENT_EOS:
default:
ret = gst_pad_event_default (pad, event);
}
gst_object_unref (overlay);
return ret;
}
/* Called with lock held */
static void static void
gst_text_overlay_pop_text (GstTextOverlay * overlay) gst_text_overlay_pop_text (GstTextOverlay * overlay)
{ {
GstBuffer *buf; g_return_if_fail (GST_IS_TEXT_OVERLAY (overlay));
if (overlay->text_collect_data) { if (overlay->text_buffer) {
buf = gst_collect_pads_pop (overlay->collect, overlay->text_collect_data); GST_DEBUG_OBJECT (overlay, "releasing text buffer %p",
g_return_if_fail (buf != NULL); overlay->text_buffer);
gst_buffer_unref (buf); gst_buffer_unref (overlay->text_buffer);
overlay->text_buffer = NULL;
} }
overlay->need_render = TRUE; /* Let the text task know we used that buffer */
GST_TEXT_OVERLAY_BROADCAST (overlay);
} }
/* This function is called when there is data on all pads */ /* We receive text buffers here. If they are out of segment we just ignore them.
If the buffer is in our segment we keep it internally except if another one
is already waiting here, in that case we wait that it gets kicked out */
static GstFlowReturn static GstFlowReturn
gst_text_overlay_collected (GstCollectPads * pads, gpointer data) gst_text_overlay_text_chain (GstPad * pad, GstBuffer * buffer)
{ {
GstTextOverlayClass *klass;
GstTextOverlay *overlay;
GstFlowReturn ret = GST_FLOW_OK; GstFlowReturn ret = GST_FLOW_OK;
GstClockTime now, txt_end, frame_end; GstTextOverlay *overlay = NULL;
GstBuffer *video_frame = NULL; gboolean in_seg = FALSE;
GstBuffer *text_buf = NULL; gint64 clip_start = 0, clip_stop = 0;
gchar *text;
overlay = GST_TEXT_OVERLAY (data); overlay = GST_TEXT_OVERLAY (gst_pad_get_parent (pad));
klass = GST_TEXT_OVERLAY_GET_CLASS (data);
GST_DEBUG ("Collecting"); GST_OBJECT_LOCK (overlay);
if (overlay->video_collect_data->abidata.ABI.new_segment) { in_seg = gst_segment_clip (overlay->segment, GST_FORMAT_TIME,
GST_BUFFER_TIMESTAMP (buffer),
GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer),
&clip_start, &clip_stop);
GST_DEBUG ("generating newsegment, start %" GST_TIME_FORMAT if (in_seg) {
", stop %" GST_TIME_FORMAT, GST_BUFFER_TIMESTAMP (buffer) = clip_start;
GST_TIME_ARGS (overlay->video_collect_data->segment.start), GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
GST_TIME_ARGS (overlay->video_collect_data->segment.stop));
gst_pad_push_event (overlay->srcpad, gst_event_new_new_segment (FALSE, /* Wait for the previous buffer to go away */
overlay->video_collect_data->segment.rate, GST_FORMAT_TIME, while (overlay->text_buffer != NULL) {
overlay->video_collect_data->segment.start, GST_DEBUG ("Pad %s:%s has a buffer queued, waiting",
overlay->video_collect_data->segment.stop, GST_DEBUG_PAD_NAME (pad));
overlay->video_collect_data->segment.last_stop)); GST_TEXT_OVERLAY_WAIT (overlay);
GST_DEBUG ("Pad %s:%s resuming", GST_DEBUG_PAD_NAME (pad));
overlay->video_collect_data->abidata.ABI.new_segment = FALSE;
}
video_frame = gst_collect_pads_peek (overlay->collect,
overlay->video_collect_data);
/* send EOS if video stream EOSed regardless of text stream */
if (video_frame == NULL) {
GST_DEBUG ("Video stream at EOS");
if (overlay->text_collect_data) {
text_buf = gst_collect_pads_pop (overlay->collect,
overlay->text_collect_data);
} }
gst_pad_push_event (overlay->srcpad, gst_event_new_eos ());
ret = GST_FLOW_UNEXPECTED; overlay->text_buffer = buffer;
goto done; /* That's a new text buffer we need to render */
overlay->need_render = TRUE;
} }
if (GST_BUFFER_TIMESTAMP (video_frame) == GST_CLOCK_TIME_NONE) { GST_OBJECT_UNLOCK (overlay);
g_warning ("%s: video frame has invalid timestamp", G_STRLOC);
}
now = GST_BUFFER_TIMESTAMP (video_frame); gst_object_unref (overlay);
if (GST_BUFFER_DURATION (video_frame) != GST_CLOCK_TIME_NONE) { return ret;
frame_end = now + GST_BUFFER_DURATION (video_frame); }
} else if (overlay->fps_n > 0) {
frame_end = now + gst_util_uint64_scale_int (GST_SECOND,
overlay->fps_d, overlay->fps_n);
} else {
/* magic value, does not really matter since texts
* tend to span quite a few frames in practice anyway */
frame_end = now + GST_SECOND / 25;
}
GST_DEBUG ("Got video frame: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT, static GstFlowReturn
GST_TIME_ARGS (now), GST_TIME_ARGS (frame_end)); gst_text_overlay_video_chain (GstPad * pad, GstBuffer * buffer)
{
GstFlowReturn ret = GST_FLOW_OK;
GstTextOverlay *overlay = NULL;
gboolean in_seg = FALSE;
gint64 clip_start = 0, clip_stop = 0;
GstTextOverlayClass *klass = NULL;
/* text pad not linked? */ overlay = GST_TEXT_OVERLAY (gst_pad_get_parent (pad));
if (overlay->text_collect_data == NULL) { klass = GST_TEXT_OVERLAY_GET_CLASS (overlay);
gchar *txt;
if (klass->get_text) GST_OBJECT_LOCK (overlay);
txt = klass->get_text (overlay, video_frame);
else
txt = g_strdup (overlay->default_text);
GST_DEBUG ("Text pad not linked, rendering default text: '%s'", in_seg = gst_segment_clip (overlay->segment, GST_FORMAT_TIME,
GST_STR_NULL (txt)); GST_BUFFER_TIMESTAMP (buffer),
if (txt != NULL && *txt != '\0') { GST_BUFFER_TIMESTAMP (buffer) + GST_BUFFER_DURATION (buffer),
gst_text_overlay_render_text (overlay, txt, -1); &clip_start, &clip_stop);
ret = gst_text_overlay_push_frame (overlay, video_frame);
if (in_seg) {
gchar *text = NULL;
GST_BUFFER_TIMESTAMP (buffer) = clip_start;
GST_BUFFER_DURATION (buffer) = clip_stop - clip_start;
/* Text pad not linked, rendering internal text */
if (!overlay->text_linked) {
if (klass->get_text) {
text = klass->get_text (overlay, buffer);
} else {
text = g_strdup (overlay->default_text);
}
GST_DEBUG_OBJECT (overlay, "Text pad not linked, rendering default "
"text: '%s'", GST_STR_NULL (text));
GST_OBJECT_UNLOCK (overlay);
if (text != NULL && *text != '\0') {
/* Render and push */
gst_text_overlay_render_text (overlay, text, -1);
ret = gst_text_overlay_push_frame (overlay, buffer);
} else {
/* Invalid or empty string */
ret = gst_pad_push (overlay->srcpad, buffer);
}
} else { } else {
ret = gst_pad_push (overlay->srcpad, video_frame); if (overlay->text_buffer) {
gboolean pop_text = FALSE;
gint64 text_end = 0;
/* if the text buffer isn't stamped right, pop it off the
* queue and display it for the current video frame only */
if (GST_BUFFER_TIMESTAMP (overlay->text_buffer) == GST_CLOCK_TIME_NONE
|| GST_BUFFER_DURATION (overlay->text_buffer) ==
GST_CLOCK_TIME_NONE) {
GST_WARNING_OBJECT (overlay,
"Got text buffer with invalid time " "stamp or duration");
gst_buffer_stamp (overlay->text_buffer, buffer);
pop_text = TRUE;
}
text_end = GST_BUFFER_TIMESTAMP (overlay->text_buffer) +
GST_BUFFER_DURATION (overlay->text_buffer);
/* Text too old or in the future */
if ((text_end < clip_start) ||
(clip_stop < GST_BUFFER_TIMESTAMP (overlay->text_buffer))) {
if (text_end < clip_start) {
/* Get rid of it, if it's too old only */
pop_text = FALSE;
gst_text_overlay_pop_text (overlay);
}
GST_OBJECT_UNLOCK (overlay);
/* Push the video frame */
ret = gst_pad_push (overlay->srcpad, buffer);
} else {
/* Get the string */
text = g_strndup ((gchar *) GST_BUFFER_DATA (overlay->text_buffer),
GST_BUFFER_SIZE (overlay->text_buffer));
if (text != NULL && *text != '\0') {
gint text_len = strlen (text);
while (text_len > 0 && (text[text_len - 1] == '\n' ||
text[text_len - 1] == '\r')) {
--text_len;
}
GST_DEBUG_OBJECT (overlay, "Rendering text '%*s'", text_len, text);
gst_text_overlay_render_text (overlay, text, text_len);
} else {
GST_DEBUG_OBJECT (overlay, "No text to render (empty buffer)");
gst_text_overlay_render_text (overlay, " ", 1);
}
GST_OBJECT_UNLOCK (overlay);
ret = gst_text_overlay_push_frame (overlay, buffer);
}
} else {
/* No text to overlay, push the frame as is */
GST_OBJECT_UNLOCK (overlay);
ret = gst_pad_push (overlay->srcpad, buffer);
}
} }
gst_text_overlay_pop_video (overlay);
video_frame = NULL; g_free (text);
goto done;
/* Update last_stop */
gst_segment_set_last_stop (overlay->segment, GST_FORMAT_TIME,
GST_BUFFER_TIMESTAMP (buffer));
} else { /* Out of segment */
GST_OBJECT_UNLOCK (overlay);
GST_DEBUG_OBJECT (overlay, "buffer out of segment discarding");
gst_buffer_unref (buffer);
} }
text_buf = gst_collect_pads_peek (overlay->collect, gst_object_unref (overlay);
overlay->text_collect_data);
/* just push the video frame if the text stream has EOSed */ return ret;
if (text_buf == NULL) {
GST_DEBUG ("Text pad EOSed, just pushing video frame as is");
ret = gst_pad_push (overlay->srcpad, video_frame);
gst_text_overlay_pop_video (overlay);
video_frame = NULL;
goto done;
}
/* if the text buffer isn't stamped right, pop it off the
* queue and display it for the current video frame only */
if (GST_BUFFER_TIMESTAMP (text_buf) == GST_CLOCK_TIME_NONE ||
GST_BUFFER_DURATION (text_buf) == GST_CLOCK_TIME_NONE) {
GST_WARNING ("Got text buffer with invalid time stamp or duration");
gst_text_overlay_pop_text (overlay);
GST_BUFFER_TIMESTAMP (text_buf) = now;
GST_BUFFER_DURATION (text_buf) = frame_end - now;
}
txt_end = GST_BUFFER_TIMESTAMP (text_buf) + GST_BUFFER_DURATION (text_buf);
GST_DEBUG ("Got text buffer: %" GST_TIME_FORMAT " - %" GST_TIME_FORMAT,
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (text_buf)), GST_TIME_ARGS (txt_end));
/* if the text buffer is too old, pop it off the
* queue and return so we get a new one next time */
if (txt_end < now) {
GST_DEBUG ("Text buffer too old, popping off the queue");
gst_text_overlay_pop_text (overlay);
ret = GST_FLOW_OK;
goto done;
}
/* if the video frame ends before the text even starts,
* just push it out as is and pop it off the queue */
if (frame_end < GST_BUFFER_TIMESTAMP (text_buf)) {
GST_DEBUG ("Video buffer before text, pushing out and popping off queue");
ret = gst_pad_push (overlay->srcpad, video_frame);
gst_text_overlay_pop_video (overlay);
video_frame = NULL;
goto done;
}
/* text duration overlaps video frame duration */
text = g_strndup ((gchar *) GST_BUFFER_DATA (text_buf),
GST_BUFFER_SIZE (text_buf));
if (text != NULL && *text != '\0') {
gint text_len = strlen (text);
while (text_len > 0 && (text[text_len - 1] == '\n' ||
text[text_len - 1] == '\r')) {
--text_len;
}
GST_DEBUG ("Rendering text '%*s'", text_len, text);;
gst_text_overlay_render_text (overlay, text, text_len);
} else {
GST_DEBUG ("No text to render (empty buffer)");
gst_text_overlay_render_text (overlay, " ", 1);
}
g_free (text);
gst_text_overlay_pop_video (overlay);
ret = gst_text_overlay_push_frame (overlay, video_frame);
video_frame = NULL;
goto done;
done:
{
if (text_buf)
gst_buffer_unref (text_buf);
if (video_frame)
gst_buffer_unref (video_frame);
return ret;
}
} }
static GstStateChangeReturn static GstStateChangeReturn
gst_text_overlay_change_state (GstElement * element, GstStateChange transition) gst_text_overlay_change_state (GstElement * element, GstStateChange transition)
{ {
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS; GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
GstTextOverlay *overlay = GST_TEXT_OVERLAY (element);
/*GstTextOverlay *overlay = GST_TEXT_OVERLAY (element); */
switch (transition) { switch (transition) {
case GST_STATE_CHANGE_READY_TO_PAUSED: case GST_STATE_CHANGE_READY_TO_PAUSED:
gst_collect_pads_start (overlay->collect);
break; break;
case GST_STATE_CHANGE_PAUSED_TO_READY: case GST_STATE_CHANGE_PAUSED_TO_READY:
/* need to unblock the collectpads before calling the
* parent change_state so that streaming can finish */
gst_collect_pads_stop (overlay->collect);
break; break;
default: default:
break; break;

View file

@ -2,7 +2,6 @@
#define __GST_TEXT_OVERLAY_H__ #define __GST_TEXT_OVERLAY_H__
#include <gst/gst.h> #include <gst/gst.h>
#include <gst/base/gstcollectpads.h>
#include <pango/pangoft2.h> #include <pango/pangoft2.h>
G_BEGIN_DECLS G_BEGIN_DECLS
@ -53,9 +52,11 @@ struct _GstTextOverlay {
GstPad *text_sinkpad; GstPad *text_sinkpad;
GstPad *srcpad; GstPad *srcpad;
GstCollectPads *collect; GstSegment *segment;
GstCollectData *video_collect_data; GstBuffer *text_buffer;
GstCollectData *text_collect_data; gboolean text_linked;
GCond *cond; /* to signal removal of data */
gint width; gint width;
gint height; gint height;

View file

@ -1154,25 +1154,34 @@ setup_sinks (GstPlayBaseBin * play_base_bin, GstPlayBaseGroup * group)
goto beach; goto beach;
} }
ghost = gst_ghost_pad_new ("text_src", textsrcpad); /* We ghost the pad on subtitle_bin only, if the text pad is from the
if (!GST_IS_PAD (ghost)) { media demuxer we keep it as it is */
GST_WARNING_OBJECT (textsrcpad, "failed creating ghost pad for " if (!GST_IS_PLAY_BIN (grandparent)) {
"subtitle-bin"); GST_DEBUG_OBJECT (textsrcpad, "this subtitle pad is from a subtitle "
gst_object_unref (parent); "file, ghosting to a suitable hierarchy");
gst_object_unref (grandparent); ghost = gst_ghost_pad_new ("text_src", textsrcpad);
gst_object_unref (textsrcpad); if (!GST_IS_PAD (ghost)) {
textsrcpad = NULL; GST_WARNING_OBJECT (textsrcpad, "failed creating ghost pad for "
goto beach; "subtitle-bin");
} gst_object_unref (parent);
gst_object_unref (grandparent);
gst_object_unref (textsrcpad);
textsrcpad = NULL;
goto beach;
}
if (gst_element_add_pad (GST_ELEMENT (grandparent), ghost)) { if (gst_element_add_pad (GST_ELEMENT (grandparent), ghost)) {
gst_object_unref (textsrcpad); gst_object_unref (textsrcpad);
textsrcpad = ghost; textsrcpad = gst_object_ref (ghost);
} else {
GST_WARNING_OBJECT (ghost, "failed adding ghost pad on subtitle-bin");
gst_object_unref (ghost);
gst_object_unref (textsrcpad);
textsrcpad = NULL;
}
} else { } else {
GST_WARNING_OBJECT (ghost, "failed adding ghost pad on subtitle-bin"); GST_DEBUG_OBJECT (textsrcpad, "this subtitle pad is from the demuxer "
gst_object_unref (ghost); "no changes to hierarchy needed");
gst_object_unref (textsrcpad);
textsrcpad = NULL;
} }
gst_object_unref (parent); gst_object_unref (parent);
@ -1187,6 +1196,9 @@ setup_sinks (GstPlayBaseBin * play_base_bin, GstPlayBaseGroup * group)
"src"); "src");
res = add_sink (play_bin, sink, pad, textsrcpad); res = add_sink (play_bin, sink, pad, textsrcpad);
gst_object_unref (pad); gst_object_unref (pad);
if (textsrcpad) {
gst_object_unref (textsrcpad);
}
} }
/* remove the sinks now, pipeline get_state will now wait for the /* remove the sinks now, pipeline get_state will now wait for the