avwait: Add recording property

It works like a valve placed in front of the actual avwait logic. When
recording == TRUE, the other rules (mode, target timecode, etc.) are
examined. When recording == FALSE, nothing passes through.

https://bugzilla.gnome.org/show_bug.cgi?id=796836
This commit is contained in:
Vivia Nikolaidou 2018-07-19 18:34:40 +03:00
parent b0ae6a5221
commit 854baf4fdb
4 changed files with 632 additions and 48 deletions

View file

@ -29,8 +29,15 @@
* time has been reached. It will then pass-through both audio and video,
* starting from that specific timecode or running time, making sure that
* audio starts as early as possible after the video (or at the same time as
* the video). In the "audio-after-video" mode, it only drops audio buffers
* until video has started.
* the video). In the "video-first" mode, it only drops audio buffers until
* video has started.
*
* The "recording" property acts essentially like a valve connected before
* everything else. If recording is FALSE, all buffers are dropped regardless
* of settings. If recording is TRUE, the other settings (mode,
* target-timecode, target-running-time, etc) are taken into account. Audio
* will always start and end together with the video, as long as the stream
* itself doesn't start too late or end too early.
*
* ## Example launch line
* |[
@ -85,6 +92,7 @@ enum
PROP_TARGET_TIME_CODE_STRING,
PROP_TARGET_RUNNING_TIME,
PROP_END_TIME_CODE,
PROP_RECORDING,
PROP_MODE
};
@ -159,20 +167,24 @@ gst_avwait_class_init (GstAvWaitClass * klass)
g_param_spec_boxed ("target-timecode", "Target timecode (object)",
"Timecode to wait for in timecode mode (object)",
GST_TYPE_VIDEO_TIME_CODE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_TARGET_RUNNING_TIME,
g_param_spec_uint64 ("target-running-time", "Target running time",
"Running time to wait for in running-time mode",
0, G_MAXUINT64,
DEFAULT_TARGET_RUNNING_TIME,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_MODE,
g_param_spec_enum ("mode", "Mode",
"Operation mode: What to wait for",
GST_TYPE_AVWAIT_MODE,
DEFAULT_MODE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
DEFAULT_MODE,
GST_PARAM_MUTABLE_READY | G_PARAM_READWRITE |
G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_END_TIME_CODE,
g_param_spec_boxed ("end-timecode", "End timecode (object)",
@ -180,6 +192,13 @@ gst_avwait_class_init (GstAvWaitClass * klass)
GST_TYPE_VIDEO_TIME_CODE,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_RECORDING,
g_param_spec_boolean ("recording",
"Recording state",
"Whether the element is stopped or recording. "
"If set to FALSE, all buffers will be dropped regardless of settings.",
TRUE, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
gobject_class->finalize = gst_avwait_finalize;
gstelement_class->change_state = gst_avwait_change_state;
@ -252,6 +271,9 @@ gst_avwait_init (GstAvWait * self)
self->tc = gst_video_time_code_new_empty ();
self->end_tc = NULL;
self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
self->audio_running_time_to_end_at = GST_CLOCK_TIME_NONE;
self->recording = TRUE;
self->target_running_time = DEFAULT_TARGET_RUNNING_TIME;
self->mode = DEFAULT_MODE;
@ -271,6 +293,7 @@ gst_avwait_send_element_message (GstAvWait * self, gboolean dropping,
"dropping", G_TYPE_BOOLEAN, dropping,
"running-time", GST_TYPE_CLOCK_TIME, running_time, NULL)))) {
GST_ERROR_OBJECT (self, "Unable to send element message!");
g_assert_not_reached ();
}
}
@ -306,6 +329,8 @@ gst_avwait_change_state (GstElement * element, GstStateChange transition)
GST_DEBUG_OBJECT (self, "First time reset in paused to ready");
self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
self->audio_running_time_to_end_at = GST_CLOCK_TIME_NONE;
}
if (!self->dropping) {
self->dropping = TRUE;
@ -317,6 +342,8 @@ gst_avwait_change_state (GstElement * element, GstStateChange transition)
self->vsegment.position = GST_CLOCK_TIME_NONE;
gst_video_info_init (&self->vinfo);
self->last_seen_video_running_time = GST_CLOCK_TIME_NONE;
if (self->last_seen_tc)
gst_video_time_code_free (self->last_seen_tc);
self->last_seen_tc = NULL;
g_mutex_unlock (&self->mutex);
break;
@ -374,6 +401,10 @@ gst_avwait_get_property (GObject * object, guint prop_id,
g_value_set_uint64 (value, self->target_running_time);
break;
}
case PROP_RECORDING:{
g_value_set_boolean (value, self->recording);
break;
}
case PROP_MODE:{
g_value_set_enum (value, self->mode);
break;
@ -490,9 +521,11 @@ gst_avwait_set_property (GObject * object, guint prop_id,
self->target_running_time = g_value_get_uint64 (value);
if (self->mode == MODE_RUNNING_TIME) {
self->running_time_to_wait_for = self->target_running_time;
if (self->recording) {
self->audio_running_time_to_wait_for = self->running_time_to_wait_for;
}
if (self->target_running_time < self->last_seen_video_running_time) {
self->dropping = TRUE;
gst_avwait_send_element_message (self, TRUE, GST_CLOCK_TIME_NONE);
}
}
break;
@ -508,14 +541,16 @@ gst_avwait_set_property (GObject * object, guint prop_id,
self->tc) < 0) {
self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
self->dropping = TRUE;
gst_avwait_send_element_message (self, TRUE, GST_CLOCK_TIME_NONE);
}
break;
case MODE_RUNNING_TIME:
self->running_time_to_wait_for = self->target_running_time;
if (self->recording) {
self->audio_running_time_to_wait_for =
self->running_time_to_wait_for;
}
if (self->target_running_time < self->last_seen_video_running_time) {
self->dropping = TRUE;
gst_avwait_send_element_message (self, TRUE, GST_CLOCK_TIME_NONE);
}
break;
/* Let the chain functions handle the rest */
@ -527,6 +562,12 @@ gst_avwait_set_property (GObject * object, guint prop_id,
}
break;
}
case PROP_RECORDING:{
g_mutex_lock (&self->mutex);
self->recording = g_value_get_boolean (value);
g_mutex_unlock (&self->mutex);
break;
}
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
@ -553,6 +594,8 @@ gst_avwait_vsink_event (GstPad * pad, GstObject * parent, GstEvent * event)
GST_DEBUG_OBJECT (self, "First time reset in video segment");
self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
self->audio_running_time_to_end_at = GST_CLOCK_TIME_NONE;
if (!self->dropping) {
self->dropping = TRUE;
gst_avwait_send_element_message (self, TRUE, GST_CLOCK_TIME_NONE);
@ -576,6 +619,8 @@ gst_avwait_vsink_event (GstPad * pad, GstObject * parent, GstEvent * event)
GST_DEBUG_OBJECT (self, "First time reset in video flush");
self->running_time_to_wait_for = GST_CLOCK_TIME_NONE;
self->running_time_to_end_at = GST_CLOCK_TIME_NONE;
self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE;
self->audio_running_time_to_end_at = GST_CLOCK_TIME_NONE;
if (!self->dropping) {
self->dropping = TRUE;
gst_avwait_send_element_message (self, TRUE, GST_CLOCK_TIME_NONE);
@ -685,7 +730,10 @@ gst_avwait_vsink_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf)
tc_meta = gst_buffer_get_video_time_code_meta (inbuf);
if (tc_meta) {
tc = &tc_meta->tc;
tc = gst_video_time_code_copy (&tc_meta->tc);
if (self->last_seen_tc) {
gst_video_time_code_free (self->last_seen_tc);
}
self->last_seen_tc = tc;
}
switch (self->mode) {
@ -707,6 +755,10 @@ gst_avwait_vsink_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf)
self->running_time_to_wait_for =
gst_segment_to_running_time (&self->vsegment, GST_FORMAT_TIME,
self->vsegment.position);
if (self->recording) {
self->audio_running_time_to_wait_for =
self->running_time_to_wait_for;
}
}
if (self->end_tc && gst_video_time_code_compare (tc, self->end_tc) >= 0) {
if (self->running_time_to_end_at == GST_CLOCK_TIME_NONE) {
@ -716,12 +768,15 @@ gst_avwait_vsink_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf)
self->running_time_to_end_at =
gst_segment_to_running_time (&self->vsegment, GST_FORMAT_TIME,
self->vsegment.position);
gst_avwait_send_element_message (self, TRUE,
self->running_time_to_end_at);
if (self->recording) {
self->audio_running_time_to_end_at = self->running_time_to_end_at;
gst_avwait_send_element_message (self, TRUE,
self->running_time_to_end_at);
}
}
gst_buffer_unref (inbuf);
inbuf = NULL;
} else if (emit_passthrough_signal) {
} else if (emit_passthrough_signal && self->recording) {
gst_avwait_send_element_message (self, FALSE,
self->running_time_to_wait_for);
}
@ -739,7 +794,8 @@ gst_avwait_vsink_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf)
} else {
if (self->dropping) {
self->dropping = FALSE;
gst_avwait_send_element_message (self, FALSE, running_time);
if (self->recording)
gst_avwait_send_element_message (self, FALSE, running_time);
}
GST_INFO_OBJECT (self,
"Have %" GST_TIME_FORMAT ", waiting for %" GST_TIME_FORMAT,
@ -755,15 +811,72 @@ gst_avwait_vsink_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf)
self->vsegment.position);
GST_DEBUG_OBJECT (self, "First video running time is %" GST_TIME_FORMAT,
GST_TIME_ARGS (self->running_time_to_wait_for));
if (self->recording) {
self->audio_running_time_to_wait_for = self->running_time_to_wait_for;
}
if (self->dropping) {
self->dropping = FALSE;
gst_avwait_send_element_message (self, FALSE,
self->running_time_to_wait_for);
if (self->recording)
gst_avwait_send_element_message (self, FALSE,
self->running_time_to_wait_for);
}
}
break;
}
}
if (!self->recording) {
if (self->was_recording) {
GST_INFO_OBJECT (self, "Recording stopped at %" GST_TIME_FORMAT,
GST_TIME_ARGS (running_time));
if (running_time > self->running_time_to_wait_for
&& running_time <= self->running_time_to_end_at) {
/* We just stopped recording: synchronise the audio */
self->audio_running_time_to_end_at = running_time;
gst_avwait_send_element_message (self, TRUE, running_time);
} else if (running_time < self->running_time_to_wait_for
&& self->running_time_to_wait_for != GST_CLOCK_TIME_NONE) {
/* We should set audio_running_time_to_wait_for to a value far enough
* in the future, so that it will never be reached. However, setting
* it to GST_CLOCK_TIME_NONE would eternally trigger the g_cond_wait
* in the audio chain function, causing audio upstream to be queued up
* forever. There is already code in place to ensure that audio will
* not exceed the video at the same place, so we just set it to
* GST_CLOCK_TIME_NONE - 1 here to ensure it will never be reached,
* but still not trigger the eternal waiting code */
self->audio_running_time_to_wait_for = GST_CLOCK_TIME_NONE - 1;
}
}
/* Recording is FALSE: we drop all buffers */
if (inbuf) {
gst_buffer_unref (inbuf);
inbuf = NULL;
}
} else {
if (!self->was_recording) {
GST_INFO_OBJECT (self,
"Recording started at %" GST_TIME_FORMAT " waiting for %"
GST_TIME_FORMAT " inbuf %p", GST_TIME_ARGS (running_time),
GST_TIME_ARGS (self->running_time_to_wait_for), inbuf);
if (running_time < self->running_time_to_end_at ||
self->running_time_to_end_at == GST_CLOCK_TIME_NONE) {
/* We are before the end of the recording. Check if we just actually
* started */
if (running_time > self->running_time_to_wait_for) {
/* We just started recording: synchronise the audio */
self->audio_running_time_to_wait_for = running_time;
gst_avwait_send_element_message (self, FALSE, running_time);
} else {
/* We will start in the future when running_time_to_wait_for is
* reached */
self->audio_running_time_to_wait_for = self->running_time_to_wait_for;
}
self->audio_running_time_to_end_at = self->running_time_to_end_at;
}
}
}
self->was_recording = self->recording;
g_cond_signal (&self->cond);
g_mutex_unlock (&self->mutex);
if (inbuf)
@ -825,30 +938,6 @@ gst_avwait_asink_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf)
video_running_time = GST_CLOCK_TIME_NONE;
}
}
while (!(self->video_eos_flag || self->audio_flush_flag
|| self->shutdown_flag) &&
/* Start at timecode */
/* Wait if we haven't received video yet */
(video_running_time == GST_CLOCK_TIME_NONE
/* Wait if audio is after the video: dunno what to do */
|| gst_avwait_compare_guint64_with_signs (asign,
current_running_time, vsign, video_running_time) == 1
/* Wait if we don't even know what to wait for yet */
|| self->running_time_to_wait_for == GST_CLOCK_TIME_NONE)) {
g_cond_wait (&self->cond, &self->mutex);
vsign =
gst_segment_to_running_time_full (&self->vsegment, GST_FORMAT_TIME,
self->vsegment.position, &video_running_time);
if (vsign == 0) {
video_running_time = GST_CLOCK_TIME_NONE;
}
}
if (self->audio_flush_flag || self->shutdown_flag) {
GST_DEBUG_OBJECT (self, "Shutting down, ignoring frame");
gst_buffer_unref (inbuf);
g_mutex_unlock (&self->mutex);
return GST_FLOW_FLUSHING;
}
duration =
gst_util_uint64_scale (gst_buffer_get_size (inbuf) / self->ainfo.bpf,
GST_SECOND, self->ainfo.rate);
@ -863,36 +952,60 @@ gst_avwait_asink_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf)
return GST_FLOW_ERROR;
}
}
if (self->running_time_to_wait_for == GST_CLOCK_TIME_NONE
while (!(self->video_eos_flag || self->audio_flush_flag
|| self->shutdown_flag) &&
/* Start at timecode */
/* Wait if we haven't received video yet */
(video_running_time == GST_CLOCK_TIME_NONE
/* Wait if audio is after the video: dunno what to do */
|| gst_avwait_compare_guint64_with_signs (asign,
running_time_at_end, vsign, video_running_time) == 1
/* Wait if we don't even know what to wait for yet */
|| self->audio_running_time_to_wait_for == GST_CLOCK_TIME_NONE)) {
g_cond_wait (&self->cond, &self->mutex);
vsign =
gst_segment_to_running_time_full (&self->vsegment, GST_FORMAT_TIME,
self->vsegment.position, &video_running_time);
if (vsign == 0) {
video_running_time = GST_CLOCK_TIME_NONE;
}
}
if (self->audio_flush_flag || self->shutdown_flag) {
GST_DEBUG_OBJECT (self, "Shutting down, ignoring frame");
gst_buffer_unref (inbuf);
g_mutex_unlock (&self->mutex);
return GST_FLOW_FLUSHING;
}
if (self->audio_running_time_to_wait_for == GST_CLOCK_TIME_NONE
/* Audio ends before start : drop */
|| gst_avwait_compare_guint64_with_signs (esign,
running_time_at_end, 1, self->running_time_to_wait_for) == -1
running_time_at_end, 1, self->audio_running_time_to_wait_for) == -1
/* Audio starts after end: drop */
|| current_running_time >= self->running_time_to_end_at) {
|| current_running_time >= self->audio_running_time_to_end_at) {
GST_DEBUG_OBJECT (self,
"Dropped an audio buf at %" GST_TIME_FORMAT " waiting for %"
GST_TIME_FORMAT " video time %" GST_TIME_FORMAT,
GST_TIME_ARGS (current_running_time),
GST_TIME_ARGS (self->running_time_to_wait_for),
GST_TIME_ARGS (self->audio_running_time_to_wait_for),
GST_TIME_ARGS (video_running_time));
GST_DEBUG_OBJECT (self, "Would have ended at %i %" GST_TIME_FORMAT,
esign, GST_TIME_ARGS (running_time_at_end));
gst_buffer_unref (inbuf);
inbuf = NULL;
} else if (gst_avwait_compare_guint64_with_signs (esign, running_time_at_end,
1, self->running_time_to_wait_for) >= 0
1, self->audio_running_time_to_wait_for) >= 0
&& gst_avwait_compare_guint64_with_signs (esign, running_time_at_end, 1,
self->running_time_to_end_at) == -1) {
self->audio_running_time_to_end_at) == -1) {
/* Audio ends after start, but before end: clip */
GstSegment asegment2 = self->asegment;
gst_segment_set_running_time (&asegment2, GST_FORMAT_TIME,
self->running_time_to_wait_for);
self->audio_running_time_to_wait_for);
inbuf =
gst_audio_buffer_clip (inbuf, &asegment2, self->ainfo.rate,
self->ainfo.bpf);
} else if (gst_avwait_compare_guint64_with_signs (esign, running_time_at_end,
1, self->running_time_to_end_at) >= 0) {
1, self->audio_running_time_to_end_at) >= 0) {
/* Audio starts after start, but before end: clip from the other side */
GstSegment asegment2 = self->asegment;
guint64 stop;
@ -900,7 +1013,7 @@ gst_avwait_asink_chain (GstPad * pad, GstObject * parent, GstBuffer * inbuf)
ssign =
gst_segment_position_from_running_time_full (&asegment2,
GST_FORMAT_TIME, self->running_time_to_end_at, &stop);
GST_FORMAT_TIME, self->audio_running_time_to_end_at, &stop);
if (ssign > 0) {
asegment2.stop = stop;
} else {

View file

@ -65,11 +65,22 @@ struct _GstAvWait
GstClockTime last_seen_video_running_time;
GstVideoTimeCode *last_seen_tc;
/* If running_time_to_wait_for has been reached but we are
* not recording, audio shouldn't start running. It should
* instead start synchronised with the video when we start
* recording. Similarly when stopping recording manually vs
* when the target timecode has been reached. So we use
* different variables for the audio */
GstClockTime audio_running_time_to_wait_for;
GstClockTime audio_running_time_to_end_at;
gboolean video_eos_flag;
gboolean audio_flush_flag;
gboolean shutdown_flag;
gboolean dropping;
gboolean recording;
gboolean was_recording;
GCond cond;
GMutex mutex;

View file

@ -257,6 +257,7 @@ check_PROGRAMS = \
elements/videoframe-audiolevel \
elements/autoconvert \
elements/autovideoconvert \
elements/avwait \
elements/asfmux \
elements/camerabin \
elements/gdppay \
@ -403,6 +404,13 @@ elements_videoframe_audiolevel_LDADD = \
$(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_LIBS) $(LDADD) \
$(GST_AUDIO_LIBS)
elements_avwait_CFLAGS = \
$(GST_PLUGINS_BASE_CFLAGS) \
$(GST_BASE_CFLAGS) $(GST_CFLAGS) $(AM_CFLAGS)
elements_avwait_LDADD = \
$(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_LIBS) $(LDADD) \
$(GST_AUDIO_LIBS) $(GST_VIDEO_LIBS)
elements_faad_CFLAGS = \
$(GST_PLUGINS_BASE_CFLAGS) \
$(GST_BASE_CFLAGS) $(GST_CFLAGS) $(AM_CFLAGS)

View file

@ -0,0 +1,452 @@
/* GStreamer unit test for avwait
*
* Copyright (C) 2018 Vivia Nikolaidou <vivia@toolsonair.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301, USA.
*/
#ifdef HAVE_CONFIG_H
# include <config.h>
#endif
#include <gst/check/gstcheck.h>
#include <gst/audio/audio.h>
#include <gst/video/video.h>
typedef enum _SwitchType
{
DO_NOT_SWITCH = -1,
SWITCH_FALSE = 0,
SWITCH_TRUE = 1
} SwitchType;
/* Shared test state: configured by set_default_params() / each test case,
 * written by the output chain functions, asserted on after the threads join. */
static guint audio_buffer_count, video_buffer_count;    /* buffers that passed avwait */
static SwitchType switch_after_2s;      /* recording flip at 2 s, or DO_NOT_SWITCH */
static GstVideoTimeCode *target_tc;     /* "target-timecode" property, if non-NULL */
static GstVideoTimeCode *end_tc;        /* "end-timecode" property, if non-NULL */
static GstClockTime target_running_time;        /* "target-running-time" property */
static gboolean recording;              /* initial "recording" property value */
static gint mode;                       /* "mode" property (0/1/2, see tests) */
static GstAudioInfo ainfo;              /* audio format pushed by push_abuffers */
static guint n_abuffers, n_vbuffers;    /* how many buffers each thread pushes */
static GstClockTime first_audio_timestamp, last_audio_timestamp;
static GstClockTime first_video_timestamp, last_video_timestamp;
/* Argument bundle for the video push thread: which element to flip the
 * "recording" property on, which pad to push into, and whether to flip. */
typedef struct _ElementPadAndSwitchType
{
  GstElement *element;          /* the avwait instance under test */
  GstPad *pad;                  /* avwait's vsink pad */
  SwitchType switch_after_2s;   /* recording flip behaviour at the 2 s mark */
} ElementPadAndSwitchType;
/* Reset all test-configuration globals to their defaults.  Every test case
 * calls this first and then overrides only the fields it needs.
 * NOTE(review): a previously-set target_tc/end_tc is not freed here; that is
 * fine as long as each test runs in its own forked process (check's default).
 * Fix: removed the stray ";" that followed the closing brace — an empty
 * top-level declaration is not valid ISO C. */
static void
set_default_params (void)
{
  n_abuffers = 16;
  n_vbuffers = 160;
  switch_after_2s = DO_NOT_SWITCH;
  target_tc = NULL;
  end_tc = NULL;
  target_running_time = GST_CLOCK_TIME_NONE;
  recording = TRUE;
  mode = 2;                     /* avwait default; tests use 0 = timecode, 1 = running-time */
  first_audio_timestamp = GST_CLOCK_TIME_NONE;
  last_audio_timestamp = GST_CLOCK_TIME_NONE;
  first_video_timestamp = GST_CLOCK_TIME_NONE;
  last_video_timestamp = GST_CLOCK_TIME_NONE;
}
/* Sink chain for avwait's audio source pad: records the first and last
 * audio timestamps that made it through, and counts the buffers. */
static GstFlowReturn
output_achain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstClockTime ts = GST_BUFFER_TIMESTAMP (buffer);
  gsize n_samples = gst_buffer_get_size (buffer) / ainfo.bpf;
  GstClockTime dur = gst_util_uint64_scale (n_samples, GST_SECOND, ainfo.rate);

  if (first_audio_timestamp == GST_CLOCK_TIME_NONE)
    first_audio_timestamp = ts;
  last_audio_timestamp = ts + dur;
  audio_buffer_count++;
  gst_buffer_unref (buffer);
  return GST_FLOW_OK;
}
/* Sink chain for avwait's video source pad: records the first and last
 * video timestamps that made it through, and counts the buffers. */
static GstFlowReturn
output_vchain (GstPad * pad, GstObject * parent, GstBuffer * buffer)
{
  GstClockTime ts = GST_BUFFER_TIMESTAMP (buffer);

  if (first_video_timestamp == GST_CLOCK_TIME_NONE)
    first_video_timestamp = ts;
  last_video_timestamp = ts + GST_BUFFER_DURATION (buffer);
  video_buffer_count++;
  gst_buffer_unref (buffer);
  return GST_FLOW_OK;
}
/* Audio push thread: sends stream-start/caps/segment, then n_abuffers
 * one-second S8 buffers (rate == buf_size, so 1000 samples == 1 s),
 * followed by EOS.  `data` is avwait's asink pad.
 * Fix: loop index changed from gint to guint — it is compared against the
 * unsigned global n_abuffers (signed/unsigned comparison, -Wsign-compare). */
static gpointer
push_abuffers (gpointer data)
{
  GstSegment segment;
  GstPad *pad = data;
  guint i;
  GstClockTime timestamp = 0;
  GstCaps *caps;
  guint buf_size = 1000;
  guint channels = 2;
  gst_pad_send_event (pad, gst_event_new_stream_start ("test"));
  gst_audio_info_set_format (&ainfo, GST_AUDIO_FORMAT_S8, buf_size, channels,
      NULL);
  caps = gst_audio_info_to_caps (&ainfo);
  gst_pad_send_event (pad, gst_event_new_caps (caps));
  gst_caps_unref (caps);
  gst_segment_init (&segment, GST_FORMAT_TIME);
  gst_pad_send_event (pad, gst_event_new_segment (&segment));
  for (i = 0; i < n_abuffers; i++) {
    GstBuffer *buf = gst_buffer_new_and_alloc (channels * buf_size);
    gst_buffer_memset (buf, 0, 0, channels * buf_size);
    GST_BUFFER_TIMESTAMP (buf) = timestamp;
    timestamp += 1 * GST_SECOND;
    GST_BUFFER_DURATION (buf) = timestamp - GST_BUFFER_TIMESTAMP (buf);
    fail_unless (gst_pad_chain (pad, buf) == GST_FLOW_OK);
  }
  gst_pad_send_event (pad, gst_event_new_eos ());
  return NULL;
}
/* Video push thread: sends stream-start/segment, then n_vbuffers 25 ms
 * buffers (40 fps) each carrying an incrementing 40/1 timecode meta,
 * followed by EOS.  When the running time reaches exactly 2 s and the test
 * asked for it, flips avwait's "recording" property mid-stream. */
static gpointer
push_vbuffers (gpointer data)
{
  GstSegment segment;
  ElementPadAndSwitchType *e = data;
  GstPad *pad = e->pad;
  gint i;
  GstClockTime timestamp = 0;
  GstVideoTimeCode *tc;
  gst_pad_send_event (pad, gst_event_new_stream_start ("test"));
  gst_segment_init (&segment, GST_FORMAT_TIME);
  gst_pad_send_event (pad, gst_event_new_segment (&segment));
  /* 40 fps timecode starting at 00:00:00:00, matching the 25 ms buffers */
  tc = gst_video_time_code_new (40, 1, NULL, GST_VIDEO_TIME_CODE_FLAGS_NONE, 0,
      0, 0, 0, 0);
  for (i = 0; i < n_vbuffers; i++) {
    GstBuffer *buf = gst_buffer_new_and_alloc (1000);
    gst_buffer_memset (buf, 0, i, 1);
    GST_BUFFER_TIMESTAMP (buf) = timestamp;
    timestamp += 25 * GST_MSECOND;
    GST_BUFFER_DURATION (buf) = timestamp - GST_BUFFER_TIMESTAMP (buf);
    /* meta copies tc; incrementing afterwards prepares the next frame */
    gst_buffer_add_video_time_code_meta (buf, tc);
    gst_video_time_code_increment_frame (tc);
    fail_unless (gst_pad_chain (pad, buf) == GST_FLOW_OK);
    /* after the buffer ending at exactly 2 s was chained, optionally flip
     * "recording" (! ! normalizes the enum value to a gboolean) */
    if (timestamp == 2 * GST_SECOND && e->switch_after_2s != DO_NOT_SWITCH) {
      g_object_set (e->element, "recording", ! !e->switch_after_2s, NULL);
    }
  }
  gst_pad_send_event (pad, gst_event_new_eos ());
  gst_video_time_code_free (tc);
  return NULL;
}
/* Shared test driver: builds an avwait instance configured from the global
 * test parameters, wires fake sink pads to its asrc/vsrc pads, pushes audio
 * and video from two threads, then tears everything down.  The test cases
 * assert on the recorded first/last timestamps afterwards. */
static void
test_avwait_generic (void)
{
  GstElement *avwait;
  GstPad *asink, *vsink, *asrc, *vsrc, *aoutput_sink, *voutput_sink;
  GThread *athread, *vthread;
  GstBus *bus;
  ElementPadAndSwitchType *e;
  audio_buffer_count = 0;
  video_buffer_count = 0;
  avwait = gst_element_factory_make ("avwait", NULL);
  fail_unless (avwait != NULL);
  /* configure from the globals each test case set up beforehand */
  g_object_set (avwait, "mode", mode,
      "target-running-time", target_running_time, "recording", recording, NULL);
  if (target_tc != NULL)
    g_object_set (avwait, "target-timecode", target_tc, NULL);
  if (end_tc != NULL)
    g_object_set (avwait, "end-timecode", end_tc, NULL);
  bus = gst_bus_new ();
  gst_element_set_bus (avwait, bus);
  asink = gst_element_get_static_pad (avwait, "asink");
  fail_unless (asink != NULL);
  vsink = gst_element_get_static_pad (avwait, "vsink");
  fail_unless (vsink != NULL);
  /* attach probe sinks that record what comes out of avwait */
  asrc = gst_element_get_static_pad (avwait, "asrc");
  aoutput_sink = gst_pad_new ("sink", GST_PAD_SINK);
  fail_unless (aoutput_sink != NULL);
  fail_unless (gst_pad_link (asrc, aoutput_sink) == GST_PAD_LINK_OK);
  vsrc = gst_element_get_static_pad (avwait, "vsrc");
  voutput_sink = gst_pad_new ("sink", GST_PAD_SINK);
  fail_unless (voutput_sink != NULL);
  fail_unless (gst_pad_link (vsrc, voutput_sink) == GST_PAD_LINK_OK);
  gst_pad_set_chain_function (aoutput_sink, output_achain);
  gst_pad_set_chain_function (voutput_sink, output_vchain);
  gst_pad_set_active (aoutput_sink, TRUE);
  gst_pad_set_active (voutput_sink, TRUE);
  fail_unless (gst_element_set_state (avwait,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_SUCCESS);
  e = g_new0 (ElementPadAndSwitchType, 1);
  e->element = avwait;
  e->pad = vsink;
  e->switch_after_2s = switch_after_2s;
  /* push audio and video concurrently, as a real pipeline would */
  athread = g_thread_new ("athread", (GThreadFunc) push_abuffers, asink);
  vthread = g_thread_new ("vthread", (GThreadFunc) push_vbuffers, e);
  g_thread_join (vthread);
  g_thread_join (athread);
  /* teardown */
  gst_element_set_state (avwait, GST_STATE_NULL);
  gst_bus_set_flushing (bus, TRUE);
  gst_object_unref (bus);
  g_free (e);
  gst_pad_unlink (asrc, aoutput_sink);
  gst_object_unref (asrc);
  gst_pad_unlink (vsrc, voutput_sink);
  gst_object_unref (vsrc);
  gst_object_unref (asink);
  gst_object_unref (vsink);
  gst_pad_set_active (aoutput_sink, FALSE);
  gst_object_unref (aoutput_sink);
  gst_pad_set_active (voutput_sink, FALSE);
  gst_object_unref (voutput_sink);
  gst_object_unref (avwait);
}
/* Start not recording, flip to recording at 2 s: output begins at 2 s. */
GST_START_TEST (test_avwait_switch_to_true)
{
  set_default_params ();
  recording = FALSE;
  switch_after_2s = SWITCH_TRUE;
  test_avwait_generic ();
  fail_unless_equals_uint64 (first_audio_timestamp, 2 * GST_SECOND);
  fail_unless_equals_uint64 (first_video_timestamp, 2 * GST_SECOND);
}

GST_END_TEST;

/* Start recording, flip off at 2 s: output runs from 0 s to 2 s. */
GST_START_TEST (test_avwait_switch_to_false)
{
  set_default_params ();
  recording = TRUE;
  switch_after_2s = SWITCH_FALSE;
  test_avwait_generic ();
  fail_unless_equals_uint64 (first_audio_timestamp, 0);
  fail_unless_equals_uint64 (first_video_timestamp, 0);
  fail_unless_equals_uint64 (last_video_timestamp, 2 * GST_SECOND);
  fail_unless_equals_uint64 (last_audio_timestamp, 2 * GST_SECOND);
}

GST_END_TEST;

/* Running-time mode with a 1 s target, recording turned on at 2 s:
 * the later of the two (the 2 s switch) wins. */
GST_START_TEST (test_avwait_1s_switch_to_true)
{
  set_default_params ();
  recording = FALSE;
  switch_after_2s = SWITCH_TRUE;
  mode = 1;
  target_running_time = 1 * GST_SECOND;
  test_avwait_generic ();
  fail_unless_equals_uint64 (first_audio_timestamp, 2 * GST_SECOND);
  fail_unless_equals_uint64 (first_video_timestamp, 2 * GST_SECOND);
}

GST_END_TEST;

/* Running-time mode, 1 s target, recording switched off at 2 s:
 * output runs from 1 s to 2 s. */
GST_START_TEST (test_avwait_1s_switch_to_false)
{
  set_default_params ();
  recording = TRUE;
  switch_after_2s = SWITCH_FALSE;
  mode = 1;
  target_running_time = 1 * GST_SECOND;
  test_avwait_generic ();
  fail_unless_equals_uint64 (first_audio_timestamp, 1 * GST_SECOND);
  fail_unless_equals_uint64 (first_video_timestamp, 1 * GST_SECOND);
  fail_unless_equals_uint64 (last_video_timestamp, 2 * GST_SECOND);
  fail_unless_equals_uint64 (last_audio_timestamp, 2 * GST_SECOND);
}

GST_END_TEST;

/* Running-time mode, 3 s target, recording turned on at 2 s:
 * the later of the two (the 3 s target) wins. */
GST_START_TEST (test_avwait_3s_switch_to_true)
{
  set_default_params ();
  recording = FALSE;
  switch_after_2s = SWITCH_TRUE;
  mode = 1;
  target_running_time = 3 * GST_SECOND;
  test_avwait_generic ();
  fail_unless_equals_uint64 (first_audio_timestamp, 3 * GST_SECOND);
  fail_unless_equals_uint64 (first_video_timestamp, 3 * GST_SECOND);
}

GST_END_TEST;

/* Running-time mode, 3 s target, recording switched off at 2 s:
 * recording stops before the target is ever reached, so nothing passes. */
GST_START_TEST (test_avwait_3s_switch_to_false)
{
  set_default_params ();
  recording = TRUE;
  switch_after_2s = SWITCH_FALSE;
  mode = 1;
  target_running_time = 3 * GST_SECOND;
  test_avwait_generic ();
  fail_unless_equals_uint64 (first_audio_timestamp, GST_CLOCK_TIME_NONE);
  fail_unless_equals_uint64 (first_video_timestamp, GST_CLOCK_TIME_NONE);
  fail_unless_equals_uint64 (last_audio_timestamp, GST_CLOCK_TIME_NONE);
  fail_unless_equals_uint64 (last_video_timestamp, GST_CLOCK_TIME_NONE);
}

GST_END_TEST;

/* Timecode mode, target 00:00:01:00 / end 00:00:03:00 (40 fps), recording
 * turned on at 2 s: output runs from 2 s (the switch) to 3 s (the end TC). */
GST_START_TEST (test_avwait_1stc_switch_to_true)
{
  set_default_params ();
  recording = FALSE;
  switch_after_2s = SWITCH_TRUE;
  mode = 0;
  target_tc =
      gst_video_time_code_new (40, 1, NULL, GST_VIDEO_TIME_CODE_FLAGS_NONE, 0,
      0, 1, 0, 0);
  end_tc =
      gst_video_time_code_new (40, 1, NULL, GST_VIDEO_TIME_CODE_FLAGS_NONE, 0,
      0, 3, 0, 0);
  test_avwait_generic ();
  fail_unless_equals_uint64 (first_audio_timestamp, 2 * GST_SECOND);
  fail_unless_equals_uint64 (first_video_timestamp, 2 * GST_SECOND);
  fail_unless_equals_uint64 (last_video_timestamp, 3 * GST_SECOND);
  fail_unless_equals_uint64 (last_audio_timestamp, 3 * GST_SECOND);
}

GST_END_TEST;

/* Timecode mode, target 00:00:01:00 / end 00:00:03:00, recording switched
 * off at 2 s: output runs from 1 s (the target TC) to 2 s (the switch). */
GST_START_TEST (test_avwait_1stc_switch_to_false)
{
  set_default_params ();
  recording = TRUE;
  switch_after_2s = SWITCH_FALSE;
  mode = 0;
  target_tc =
      gst_video_time_code_new (40, 1, NULL, GST_VIDEO_TIME_CODE_FLAGS_NONE, 0,
      0, 1, 0, 0);
  end_tc =
      gst_video_time_code_new (40, 1, NULL, GST_VIDEO_TIME_CODE_FLAGS_NONE, 0,
      0, 3, 0, 0);
  test_avwait_generic ();
  fail_unless_equals_uint64 (first_audio_timestamp, 1 * GST_SECOND);
  fail_unless_equals_uint64 (first_video_timestamp, 1 * GST_SECOND);
  fail_unless_equals_uint64 (last_video_timestamp, 2 * GST_SECOND);
  fail_unless_equals_uint64 (last_audio_timestamp, 2 * GST_SECOND);
}

GST_END_TEST;

/* Timecode mode, target 00:00:03:00, recording turned on at 2 s:
 * the later of the two (the 3 s timecode) wins. */
GST_START_TEST (test_avwait_3stc_switch_to_true)
{
  set_default_params ();
  recording = FALSE;
  switch_after_2s = SWITCH_TRUE;
  mode = 0;
  target_tc =
      gst_video_time_code_new (40, 1, NULL, GST_VIDEO_TIME_CODE_FLAGS_NONE, 0,
      0, 3, 0, 0);
  test_avwait_generic ();
  fail_unless_equals_uint64 (first_audio_timestamp, 3 * GST_SECOND);
  fail_unless_equals_uint64 (first_video_timestamp, 3 * GST_SECOND);
}

GST_END_TEST;

/* Timecode mode, target 00:00:03:00, recording switched off at 2 s:
 * recording stops before the timecode is reached, so nothing passes. */
GST_START_TEST (test_avwait_3stc_switch_to_false)
{
  set_default_params ();
  recording = TRUE;
  switch_after_2s = SWITCH_FALSE;
  mode = 0;
  target_tc =
      gst_video_time_code_new (40, 1, NULL, GST_VIDEO_TIME_CODE_FLAGS_NONE, 0,
      0, 3, 0, 0);
  test_avwait_generic ();
  fail_unless_equals_uint64 (first_audio_timestamp, GST_CLOCK_TIME_NONE);
  fail_unless_equals_uint64 (first_video_timestamp, GST_CLOCK_TIME_NONE);
  fail_unless_equals_uint64 (last_audio_timestamp, GST_CLOCK_TIME_NONE);
  fail_unless_equals_uint64 (last_video_timestamp, GST_CLOCK_TIME_NONE);
}

GST_END_TEST;
/* Assemble the check suite containing all avwait recording-property tests. */
static Suite *
avwait_suite (void)
{
  Suite *s = suite_create ("avwait");
  TCase *tc_chain;
  tc_chain = tcase_create ("avwait");
  tcase_add_test (tc_chain, test_avwait_switch_to_true);
  tcase_add_test (tc_chain, test_avwait_switch_to_false);
  tcase_add_test (tc_chain, test_avwait_1s_switch_to_true);
  tcase_add_test (tc_chain, test_avwait_1s_switch_to_false);
  tcase_add_test (tc_chain, test_avwait_3s_switch_to_true);
  tcase_add_test (tc_chain, test_avwait_3s_switch_to_false);
  tcase_add_test (tc_chain, test_avwait_1stc_switch_to_true);
  tcase_add_test (tc_chain, test_avwait_1stc_switch_to_false);
  tcase_add_test (tc_chain, test_avwait_3stc_switch_to_true);
  tcase_add_test (tc_chain, test_avwait_3stc_switch_to_false);
  suite_add_tcase (s, tc_chain);
  return s;
}

/* Generates main() running the suite above. */
GST_CHECK_MAIN (avwait);