camerabin2: Fix A/V sync for captures

The audio src was pushing newsegments starting from 0 even when its buffers
did not have 0 as their first timestamp. Add a buffer probe that checks the
timestamp of the first buffer and uses it to push a newsegment, just like we
already do for video in wrappercamerabinsrc.
Thiago Santos 2011-12-19 16:49:08 -03:00
parent 24407f0aa6
commit 0ce5021058
2 changed files with 42 additions and 10 deletions
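For context, the core of the fix is a GStreamer 0.10 pad data probe that drops the audio source's own newsegment and instead opens the segment at the first buffer's timestamp. The following is a condensed, standalone sketch of that technique, not the patch itself; the probe name, the first_newseg_sent flag, and the attachment snippet in the comments are illustrative assumptions.

#include <gst/gst.h>

/* Illustrative only: a single global flag standing in for the bin's
 * audio_send_newseg field. */
static gboolean first_newseg_sent = FALSE;

static gboolean
fix_audio_newsegment_probe (GstPad * pad, GstMiniObject * obj, gpointer data)
{
  if (GST_IS_BUFFER (obj)) {
    if (!first_newseg_sent) {
      GstClockTime ts = GST_BUFFER_TIMESTAMP (GST_BUFFER_CAST (obj));
      GstPad *peer = gst_pad_get_peer (pad);

      if (!GST_CLOCK_TIME_IS_VALID (ts))
        ts = 0;

      /* Open the audio segment at the first buffer's timestamp so its
       * running time lines up with the video branch. */
      if (peer) {
        gst_pad_send_event (peer, gst_event_new_new_segment (FALSE, 1.0,
                GST_FORMAT_TIME, ts, -1, 0));
        gst_object_unref (peer);
      }
      first_newseg_sent = TRUE;
    }
  } else if (GST_EVENT_TYPE (GST_EVENT_CAST (obj)) == GST_EVENT_NEWSEGMENT) {
    /* Drop the source's own newsegment, which would start from 0. */
    return FALSE;
  }
  return TRUE;                  /* let all other data through */
}

/* It would be attached to the audio source's src pad with something like:
 *   srcpad = gst_element_get_static_pad (audiosrc, "src");
 *   gst_pad_add_data_probe (srcpad,
 *       G_CALLBACK (fix_audio_newsegment_probe), NULL);
 *   gst_object_unref (srcpad);
 */

Dropping the original newsegment and re-sending one whose start equals the first buffer timestamp is what keeps the muxer's audio and video running times aligned; the patch below does the same from inside camerabin2.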

gst/camerabin2/gstcamerabin2.c

@@ -430,6 +430,7 @@ gst_camera_bin_start_capture (GstCameraBin2 * camerabin)
   g_signal_emit_by_name (camerabin->src, "start-capture", NULL);
   if (camerabin->mode == MODE_VIDEO) {
+    camerabin->audio_send_newseg = TRUE;
     if (camerabin->audio_src)
       gst_element_set_state (camerabin->audio_src, GST_STATE_PLAYING);
@@ -1436,18 +1437,46 @@ gst_camera_bin_image_sink_event_probe (GstPad * pad, GstEvent * event,
 }
 
 static gboolean
-gst_camera_bin_audio_src_event_probe (GstPad * pad, GstEvent * event,
+gst_camera_bin_audio_src_data_probe (GstPad * pad, GstMiniObject * obj,
     gpointer data)
 {
   GstCameraBin2 *camera = data;
   gboolean ret = TRUE;
 
-  if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
-    /* we only let an EOS pass when the user is stopping a capture */
-    if (camera->audio_drop_eos) {
+  if (GST_IS_BUFFER (obj)) {
+    if (G_UNLIKELY (camera->audio_send_newseg)) {
+      GstBuffer *buf = GST_BUFFER_CAST (obj);
+      GstClockTime ts = GST_BUFFER_TIMESTAMP (buf);
+      GstPad *peer;
+
+      if (!GST_CLOCK_TIME_IS_VALID (ts)) {
+        ts = 0;
+      }
+
+      peer = gst_pad_get_peer (pad);
+      g_return_val_if_fail (peer != NULL, TRUE);
+
+      gst_pad_send_event (peer, gst_event_new_new_segment (FALSE, 1.0,
+              GST_FORMAT_TIME, ts, -1, 0));
+      gst_object_unref (peer);
+
+      camera->audio_send_newseg = FALSE;
+    }
+  } else {
+    GstEvent *event = GST_EVENT_CAST (obj);
+
+    if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
+      /* we only let an EOS pass when the user is stopping a capture */
+      if (camera->audio_drop_eos) {
+        ret = FALSE;
+      } else {
+        camera->audio_drop_eos = TRUE;
+        /* should already be false, but reinforce in case no buffers get
+         * pushed */
+        camera->audio_send_newseg = FALSE;
+      }
+    } else if (GST_EVENT_TYPE (event) == GST_EVENT_NEWSEGMENT) {
       ret = FALSE;
-    } else {
-      camera->audio_drop_eos = TRUE;
     }
   }
@@ -1803,10 +1832,11 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
     srcpad = gst_element_get_static_pad (camera->audio_src, "src");
 
-    /* drop EOS for audiosrc elements that push them on state_changes
-     * (basesrc does this) */
-    gst_pad_add_event_probe (srcpad,
-        (GCallback) gst_camera_bin_audio_src_event_probe, camera);
+    /* 1) drop EOS for audiosrc elements that push them on state_changes
+     * (basesrc does this)
+     * 2) Fix newsegment events to have start time = first buffer ts */
+    gst_pad_add_data_probe (srcpad,
+        (GCallback) gst_camera_bin_audio_src_data_probe, camera);
     gst_object_unref (srcpad);
   }
@@ -1864,6 +1894,7 @@ gst_camera_bin_change_state (GstElement * element, GstStateChange trans)
     case GST_STATE_CHANGE_READY_TO_PAUSED:
       GST_CAMERA_BIN2_RESET_PROCESSING_COUNTER (camera);
       camera->audio_drop_eos = TRUE;
+      camera->audio_send_newseg = FALSE;
       break;
     case GST_STATE_CHANGE_PAUSED_TO_READY:
       if (GST_STATE (camera->videosink) >= GST_STATE_PAUSED)

gst/camerabin2/gstcamerabin2.h

@@ -126,6 +126,7 @@ struct _GstCameraBin2
   gboolean image_profile_switch;
 
   gboolean audio_drop_eos;
+  gboolean audio_send_newseg;
 
   GMutex *video_capture_mutex;
   GCond *video_state_cond;