mirror of
https://gitlab.freedesktop.org/gstreamer/gstreamer.git
synced 2024-11-23 18:21:04 +00:00
Merge branch 'master' into 0.11
Conflicts: ext/resindvd/rsnwrappedbuffer.c
This commit is contained in:
commit
ad06ac2b4b
49 changed files with 11299 additions and 308 deletions
21
configure.ac
21
configure.ac
|
@ -362,6 +362,7 @@ AG_GST_CHECK_PLUGIN(pcapparse)
|
|||
AG_GST_CHECK_PLUGIN(pnm)
|
||||
AG_GST_CHECK_PLUGIN(rawparse)
|
||||
AG_GST_CHECK_PLUGIN(real)
|
||||
AG_GST_CHECK_PLUGIN(removesilence)
|
||||
AG_GST_CHECK_PLUGIN(rtpmux)
|
||||
AG_GST_CHECK_PLUGIN(rtpvp8)
|
||||
AG_GST_CHECK_PLUGIN(scaletempo)
|
||||
|
@ -389,11 +390,6 @@ if test "x$HAVE_CPU_I386" != "xyes" && test "x$HAVE_CPU_X86_64" != "xyes"; then
|
|||
AG_GST_DISABLE_PLUGIN(real)
|
||||
fi
|
||||
|
||||
dnl disable experimental plug-ins
|
||||
if test "x$BUILD_EXPERIMENTAL" != "xyes"; then
|
||||
AG_GST_DISABLE_PLUGIN(camerabin2)
|
||||
fi
|
||||
|
||||
# This will always succeed because we depend on GLib >= 2.16
|
||||
PKG_CHECK_MODULES(GIO, gio-2.0 >= 2.16, HAVE_GIO=yes, HAVE_GIO=no)
|
||||
AC_SUBST(GIO_CFLAGS)
|
||||
|
@ -1708,6 +1704,20 @@ AG_GST_CHECK_FEATURE(VP8, [VP8 decoder], vp8, [
|
|||
HAVE_VP8=yes
|
||||
AC_DEFINE(HAVE_VP8_ENCODER, 1, [Defined if the VP8 encoder is available])
|
||||
VPX_LIBS="-lvpx"
|
||||
|
||||
AC_MSG_CHECKING([for VP8E_SET_TUNING presence])
|
||||
AC_TRY_COMPILE([
|
||||
#include <vpx/vpx_encoder.h>
|
||||
#include <vpx/vp8cx.h>
|
||||
int foo=VP8E_SET_TUNING;
|
||||
], [
|
||||
return 0;
|
||||
], [
|
||||
AC_DEFINE(HAVE_VP8ENC_TUNING, 1, [Defined if the VP8 encoder has tuning API])
|
||||
AC_MSG_RESULT(yes)
|
||||
], [
|
||||
AC_MSG_RESULT(no)
|
||||
])
|
||||
])
|
||||
AC_CHECK_LIB(vpx, vpx_codec_vp8_dx_algo, [
|
||||
HAVE_VP8=yes
|
||||
|
@ -1929,6 +1939,7 @@ gst/pcapparse/Makefile
|
|||
gst/pnm/Makefile
|
||||
gst/rawparse/Makefile
|
||||
gst/real/Makefile
|
||||
gst/removesilence/Makefile
|
||||
gst/rtpmux/Makefile
|
||||
gst/rtpvp8/Makefile
|
||||
gst/scaletempo/Makefile
|
||||
|
|
|
@ -62,10 +62,10 @@ gst_kate_spu_decode_alpha (GstKateEnc * ke, const guint8 * ptr)
|
|||
static void
|
||||
gst_kate_spu_decode_area (GstKateEnc * ke, const guint8 * ptr)
|
||||
{
|
||||
ke->spu_left = ((((guint16) ptr[0]) & 0x3f) << 4) | (ptr[1] >> 4);
|
||||
ke->spu_top = ((((guint16) ptr[3]) & 0x3f) << 4) | (ptr[4] >> 4);
|
||||
ke->spu_right = ((((guint16) ptr[1]) & 0x03) << 8) | ptr[2];
|
||||
ke->spu_bottom = ((((guint16) ptr[4]) & 0x03) << 8) | ptr[5];
|
||||
ke->spu_left = ((((guint16) ptr[0]) & 0xff) << 4) | (ptr[1] >> 4);
|
||||
ke->spu_top = ((((guint16) ptr[3]) & 0xff) << 4) | (ptr[4] >> 4);
|
||||
ke->spu_right = ((((guint16) ptr[1]) & 0x0f) << 8) | ptr[2];
|
||||
ke->spu_bottom = ((((guint16) ptr[4]) & 0x0f) << 8) | ptr[5];
|
||||
GST_DEBUG_OBJECT (ke, "SPU area %u %u -> %u %d", ke->spu_left, ke->spu_top,
|
||||
ke->spu_right, ke->spu_bottom);
|
||||
}
|
||||
|
@ -494,9 +494,14 @@ gst_kate_spu_decode_spu (GstKateEnc * ke, GstBuffer * buf, kate_region * kr,
|
|||
if (G_UNLIKELY (ke->spu_right - ke->spu_left < 0
|
||||
|| ke->spu_bottom - ke->spu_top < 0 || ke->spu_pix_data[0] == 0
|
||||
|| ke->spu_pix_data[1] == 0)) {
|
||||
GST_ELEMENT_ERROR (ke, STREAM, ENCODE, (NULL),
|
||||
("SPU area is empty, nothing to encode"));
|
||||
return GST_FLOW_ERROR;
|
||||
GST_DEBUG_OBJECT (ke,
|
||||
"left %d, right %d, top %d, bottom %d, pix data %d %d", ke->spu_left,
|
||||
ke->spu_right, ke->spu_top, ke->spu_bottom, ke->spu_pix_data[0],
|
||||
ke->spu_pix_data[1]);
|
||||
GST_WARNING_OBJECT (ke, "SPU area is empty, nothing to encode");
|
||||
kate_bitmap_init (kb);
|
||||
kb->width = kb->height = 0;
|
||||
return GST_FLOW_OK;
|
||||
}
|
||||
|
||||
/* create the palette */
|
||||
|
|
|
@ -226,6 +226,18 @@ gst_kate_util_decoder_base_get_property (GstKateDecoderBase * decoder,
|
|||
return res;
|
||||
}
|
||||
|
||||
static inline gboolean
|
||||
gst_kate_util_is_utf8_string (const char *value, size_t len)
|
||||
{
|
||||
if (len == 0)
|
||||
return FALSE;
|
||||
if (memchr (value, 0, len - 1))
|
||||
return FALSE;
|
||||
if (value[len - 1])
|
||||
return FALSE;
|
||||
return (kate_text_validate (kate_utf8, value, len) >= 0);
|
||||
}
|
||||
|
||||
GstFlowReturn
|
||||
gst_kate_util_decoder_base_chain_kate_packet (GstKateDecoderBase * decoder,
|
||||
GstElement * element, GstPad * pad, GstBuffer * buf, GstPad * srcpad,
|
||||
|
@ -371,6 +383,40 @@ gst_kate_util_decoder_base_chain_kate_packet (GstKateDecoderBase * decoder,
|
|||
break;
|
||||
}
|
||||
}
|
||||
#if ((KATE_VERSION_MAJOR<<16)|(KATE_VERSION_MINOR<<8)|KATE_VERSION_PATCH) >= 0x000400
|
||||
else if (*ev && (*ev)->meta) {
|
||||
int count = kate_meta_query_count ((*ev)->meta);
|
||||
if (count > 0) {
|
||||
GstTagList *evtags = gst_tag_list_new ();
|
||||
int idx;
|
||||
GST_DEBUG_OBJECT (decoder, "Kate event has %d attached metadata", count);
|
||||
for (idx = 0; idx < count; ++idx) {
|
||||
const char *tag, *value;
|
||||
size_t len;
|
||||
if (kate_meta_query ((*ev)->meta, idx, &tag, &value, &len) < 0) {
|
||||
GST_WARNING_OBJECT (decoder, "Failed to retrieve metadata %d", idx);
|
||||
} else {
|
||||
if (gst_kate_util_is_utf8_string (value, len)) {
|
||||
gchar *compound = g_strdup_printf ("%s=%s", tag, value);
|
||||
GST_DEBUG_OBJECT (decoder, "Metadata %d: %s=%s (%zu bytes)", idx,
|
||||
tag, value, len);
|
||||
gst_tag_list_add (evtags, GST_TAG_MERGE_APPEND,
|
||||
GST_TAG_EXTENDED_COMMENT, compound, NULL);
|
||||
g_free (compound);
|
||||
} else {
|
||||
GST_INFO_OBJECT (decoder,
|
||||
"Metadata %d, (%s, %zu bytes) is binary, ignored", idx, tag,
|
||||
len);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (gst_tag_list_is_empty (evtags))
|
||||
gst_tag_list_free (evtags);
|
||||
else
|
||||
gst_element_found_tags_for_pad (element, tagpad, evtags);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
return rflow;
|
||||
}
|
||||
|
|
|
@ -115,9 +115,11 @@ static void
|
|||
create_fingerprint (GstOFA * ofa)
|
||||
{
|
||||
GstBuffer *buf;
|
||||
gint rate = GST_AUDIO_FILTER (ofa)->format.rate;
|
||||
gint channels = GST_AUDIO_FILTER (ofa)->format.channels;
|
||||
gint endianness;
|
||||
GstAudioFilter *ofa_filter = GST_AUDIO_FILTER (ofa);
|
||||
gint rate = ofa_filter->format.rate;
|
||||
gint channels = ofa_filter->format.channels;
|
||||
gint endianness =
|
||||
ofa_filter->format.bigend ? OFA_BIG_ENDIAN : OFA_LITTLE_ENDIAN;
|
||||
GstTagList *tags;
|
||||
guint available;
|
||||
|
||||
|
@ -135,7 +137,8 @@ create_fingerprint (GstOFA * ofa)
|
|||
endianness = OFA_LITTLE_ENDIAN;
|
||||
|
||||
|
||||
GST_DEBUG ("Generating fingerprint");
|
||||
GST_DEBUG_OBJECT (ofa, "Generating fingerprint for %u samples",
|
||||
available / 2);
|
||||
|
||||
buf = gst_adapter_take_buffer (ofa->adapter, available);
|
||||
|
||||
|
@ -143,16 +146,22 @@ create_fingerprint (GstOFA * ofa)
|
|||
endianness, GST_BUFFER_SIZE (buf) / 2, rate,
|
||||
(channels == 2) ? 1 : 0));
|
||||
|
||||
GST_DEBUG ("Generated fingerprint");
|
||||
if (ofa->fingerprint) {
|
||||
GST_INFO_OBJECT (ofa, "Generated fingerprint: %s", ofa->fingerprint);
|
||||
} else {
|
||||
GST_WARNING_OBJECT (ofa, "Failed to generate fingerprint");
|
||||
}
|
||||
|
||||
gst_buffer_unref (buf);
|
||||
|
||||
if (ofa->fingerprint) {
|
||||
tags = gst_tag_list_new ();
|
||||
gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
|
||||
GST_TAG_OFA_FINGERPRINT, ofa->fingerprint, NULL);
|
||||
gst_element_found_tags (GST_ELEMENT (ofa), tags);
|
||||
|
||||
g_object_notify (G_OBJECT (ofa), "fingerprint");
|
||||
}
|
||||
|
||||
ofa->record = FALSE;
|
||||
}
|
||||
|
@ -165,7 +174,8 @@ gst_ofa_event (GstBaseTransform * trans, GstEvent * event)
|
|||
switch (GST_EVENT_TYPE (event)) {
|
||||
case GST_EVENT_FLUSH_STOP:
|
||||
case GST_EVENT_NEWSEGMENT:
|
||||
GST_DEBUG ("Got %s event, clearing buffer", GST_EVENT_TYPE_NAME (event));
|
||||
GST_DEBUG_OBJECT (ofa, "Got %s event, clearing buffer",
|
||||
GST_EVENT_TYPE_NAME (event));
|
||||
gst_adapter_clear (ofa->adapter);
|
||||
ofa->record = TRUE;
|
||||
g_free (ofa->fingerprint);
|
||||
|
@ -175,7 +185,7 @@ gst_ofa_event (GstBaseTransform * trans, GstEvent * event)
|
|||
/* we got to the end of the stream but never generated a fingerprint
|
||||
* (probably under 135 seconds)
|
||||
*/
|
||||
if (!ofa->fingerprint)
|
||||
if (!ofa->fingerprint && ofa->record)
|
||||
create_fingerprint (ofa);
|
||||
break;
|
||||
default:
|
||||
|
@ -200,10 +210,11 @@ static GstFlowReturn
|
|||
gst_ofa_transform_ip (GstBaseTransform * trans, GstBuffer * buf)
|
||||
{
|
||||
GstOFA *ofa = GST_OFA (trans);
|
||||
GstAudioFilter *ofa_filter = GST_AUDIO_FILTER (ofa);
|
||||
guint64 nframes;
|
||||
GstClockTime duration;
|
||||
gint rate = GST_AUDIO_FILTER (ofa)->format.rate;
|
||||
gint channels = GST_AUDIO_FILTER (ofa)->format.channels;
|
||||
gint rate = ofa_filter->format.rate;
|
||||
gint channels = ofa_filter->format.channels;
|
||||
|
||||
g_return_val_if_fail (rate > 0 && channels > 0, GST_FLOW_NOT_NEGOTIATED);
|
||||
|
||||
|
|
|
@ -75,6 +75,7 @@ rsn_meta_wrapped_unwrap_and_unref (GstBuffer * wrap_buf, RsnMetaWrapped * meta)
|
|||
g_return_val_if_fail (meta->wrapped_buffer != NULL, NULL);
|
||||
|
||||
buf = gst_buffer_ref (meta->wrapped_buffer);
|
||||
buf = gst_buffer_make_metadata_writable (buf);
|
||||
|
||||
/* Copy changed metadata back to the wrapped buffer from the wrapper,
|
||||
* except the the read-only flag and the caps. */
|
||||
|
|
|
@ -141,7 +141,7 @@ gst_rsvg_dec_reset (GstRsvgDec * dec)
|
|||
dec->width = dec->height = 0;
|
||||
dec->fps_n = 0;
|
||||
dec->fps_d = 1;
|
||||
dec->timestamp_offset = GST_CLOCK_TIME_NONE;
|
||||
dec->first_timestamp = GST_CLOCK_TIME_NONE;
|
||||
dec->frame_count = 0;
|
||||
|
||||
gst_segment_init (&dec->segment, GST_FORMAT_UNDEFINED);
|
||||
|
@ -341,11 +341,18 @@ gst_rsvg_dec_chain (GstPad * pad, GstBuffer * buffer)
|
|||
guint size;
|
||||
gboolean ret = GST_FLOW_OK;
|
||||
|
||||
if (rsvg->timestamp_offset == GST_CLOCK_TIME_NONE) {
|
||||
/* first_timestamp is used slightly differently where a framerate
|
||||
is given or not.
|
||||
If there is a frame rate, it will be used as a base.
|
||||
If there is not, it will be used to keep track of the timestamp
|
||||
of the first buffer, to be used as the timestamp of the output
|
||||
buffer. When a buffer is output, first timestamp will resync to
|
||||
the next buffer's timestamp. */
|
||||
if (rsvg->first_timestamp == GST_CLOCK_TIME_NONE) {
|
||||
if (GST_BUFFER_TIMESTAMP_IS_VALID (buffer))
|
||||
rsvg->timestamp_offset = GST_BUFFER_TIMESTAMP (buffer);
|
||||
else
|
||||
rsvg->timestamp_offset = 0;
|
||||
rsvg->first_timestamp = GST_BUFFER_TIMESTAMP (buffer);
|
||||
else if (rsvg->fps_n != 0)
|
||||
rsvg->first_timestamp = 0;
|
||||
}
|
||||
|
||||
gst_adapter_push (rsvg->adapter, buffer);
|
||||
|
@ -377,15 +384,33 @@ gst_rsvg_dec_chain (GstPad * pad, GstBuffer * buffer)
|
|||
break;
|
||||
|
||||
|
||||
if (rsvg->fps_n != 0) {
|
||||
if (rsvg->first_timestamp != GST_CLOCK_TIME_NONE) {
|
||||
GST_BUFFER_TIMESTAMP (outbuf) = rsvg->first_timestamp;
|
||||
GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
|
||||
if (GST_BUFFER_DURATION_IS_VALID (buffer)) {
|
||||
GstClockTime end =
|
||||
GST_BUFFER_TIMESTAMP_IS_VALID (buffer) ?
|
||||
GST_BUFFER_TIMESTAMP (buffer) : rsvg->first_timestamp;
|
||||
end += GST_BUFFER_DURATION (buffer);
|
||||
GST_BUFFER_DURATION (outbuf) = end - GST_BUFFER_TIMESTAMP (outbuf);
|
||||
}
|
||||
if (rsvg->fps_n == 0) {
|
||||
rsvg->first_timestamp = GST_CLOCK_TIME_NONE;
|
||||
} else {
|
||||
GST_BUFFER_DURATION (outbuf) =
|
||||
gst_util_uint64_scale (rsvg->frame_count, rsvg->fps_d,
|
||||
rsvg->fps_n * GST_SECOND);
|
||||
}
|
||||
} else if (rsvg->fps_n != 0) {
|
||||
GST_BUFFER_TIMESTAMP (outbuf) =
|
||||
rsvg->timestamp_offset + gst_util_uint64_scale (rsvg->frame_count,
|
||||
rsvg->first_timestamp + gst_util_uint64_scale (rsvg->frame_count,
|
||||
rsvg->fps_d, rsvg->fps_n * GST_SECOND);
|
||||
GST_BUFFER_DURATION (outbuf) =
|
||||
gst_util_uint64_scale (rsvg->frame_count, rsvg->fps_d,
|
||||
rsvg->fps_n * GST_SECOND);
|
||||
} else {
|
||||
GST_BUFFER_TIMESTAMP (outbuf) = 0;
|
||||
GST_BUFFER_TIMESTAMP (outbuf) = rsvg->first_timestamp;
|
||||
GST_BUFFER_DURATION (outbuf) = GST_CLOCK_TIME_NONE;
|
||||
}
|
||||
rsvg->frame_count++;
|
||||
|
||||
|
|
|
@ -58,7 +58,7 @@ struct _GstRsvgDec
|
|||
GstTagList *pending_tags;
|
||||
|
||||
gint fps_n, fps_d;
|
||||
GstClockTime timestamp_offset;
|
||||
GstClockTime first_timestamp;
|
||||
guint64 frame_count;
|
||||
|
||||
GstSegment segment;
|
||||
|
|
|
@ -101,7 +101,13 @@ gst_vp8_enc_coder_hook_free (GstVP8EncCoderHook * hook)
|
|||
#define DEFAULT_LAG_IN_FRAMES 0
|
||||
#define DEFAULT_SHARPNESS 0
|
||||
#define DEFAULT_NOISE_SENSITIVITY 0
|
||||
#ifdef HAVE_VP8ENC_TUNING
|
||||
#define DEFAULT_TUNE VP8_TUNE_PSNR
|
||||
#else
|
||||
typedef enum
|
||||
{ VP8_TUNE_NONE } vp8e_tuning;
|
||||
#define DEFAULT_TUNE VP8_TUNE_NONE
|
||||
#endif
|
||||
#define DEFAULT_STATIC_THRESHOLD 0
|
||||
#define DEFAULT_DROP_FRAME 0
|
||||
#define DEFAULT_RESIZE_ALLOWED TRUE
|
||||
|
@ -186,8 +192,12 @@ static GType
|
|||
gst_vp8_enc_tune_get_type (void)
|
||||
{
|
||||
static const GEnumValue values[] = {
|
||||
#ifdef HAVE_VP8ENC_TUNING
|
||||
{VP8_TUNE_PSNR, "Tune for PSNR", "psnr"},
|
||||
{VP8_TUNE_SSIM, "Tune for SSIM", "ssim"},
|
||||
#else
|
||||
{VP8_TUNE_NONE, "none", "none"},
|
||||
#endif
|
||||
{0, NULL, NULL}
|
||||
};
|
||||
static volatile GType id = 0;
|
||||
|
@ -542,7 +552,12 @@ gst_vp8_enc_set_property (GObject * object, guint prop_id,
|
|||
gst_vp8_enc->noise_sensitivity = g_value_get_int (value);
|
||||
break;
|
||||
case PROP_TUNE:
|
||||
#ifdef HAVE_VP8ENC_TUNING
|
||||
gst_vp8_enc->tuning = g_value_get_enum (value);
|
||||
#else
|
||||
GST_WARNING_OBJECT (gst_vp8_enc,
|
||||
"The tuning property is unsupported by this libvpx");
|
||||
#endif
|
||||
break;
|
||||
case PROP_STATIC_THRESHOLD:
|
||||
gst_vp8_enc->static_threshold = g_value_get_int (value);
|
||||
|
@ -626,7 +641,12 @@ gst_vp8_enc_get_property (GObject * object, guint prop_id, GValue * value,
|
|||
g_value_set_int (value, gst_vp8_enc->noise_sensitivity);
|
||||
break;
|
||||
case PROP_TUNE:
|
||||
#ifdef HAVE_VP8ENC_TUNING
|
||||
g_value_set_enum (value, gst_vp8_enc->tuning);
|
||||
#else
|
||||
GST_WARNING_OBJECT (gst_vp8_enc,
|
||||
"The tuning property is unsupported by this libvpx");
|
||||
#endif
|
||||
break;
|
||||
case PROP_STATIC_THRESHOLD:
|
||||
g_value_set_int (value, gst_vp8_enc->static_threshold);
|
||||
|
@ -800,8 +820,10 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
|
|||
status = vpx_codec_control (&encoder->encoder, VP8E_SET_ARNR_TYPE,
|
||||
encoder->arnr_type);
|
||||
#endif
|
||||
#ifdef HAVE_VP8ENC_TUNING
|
||||
status = vpx_codec_control (&encoder->encoder, VP8E_SET_TUNING,
|
||||
encoder->tuning);
|
||||
#endif
|
||||
|
||||
status =
|
||||
vpx_codec_control (&encoder->encoder, VP8E_SET_ENABLEAUTOALTREF,
|
||||
|
@ -857,7 +879,6 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
|
|||
GstStructure *s;
|
||||
GstBuffer *stream_hdr, *vorbiscomment;
|
||||
const GstTagList *iface_tags;
|
||||
GstTagList *tags;
|
||||
GValue array = { 0, };
|
||||
GValue value = { 0, };
|
||||
s = gst_caps_get_structure (caps, 0);
|
||||
|
@ -892,8 +913,8 @@ gst_vp8_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
|
|||
gst_tag_setter_get_tag_list (GST_TAG_SETTER (base_video_encoder));
|
||||
if (iface_tags) {
|
||||
vorbiscomment =
|
||||
gst_tag_list_to_vorbiscomment_buffer ((iface_tags) ? iface_tags :
|
||||
tags, (const guint8 *) "OVP80\2 ", 7,
|
||||
gst_tag_list_to_vorbiscomment_buffer (iface_tags,
|
||||
(const guint8 *) "OVP80\2 ", 7,
|
||||
"Encoded with GStreamer vp8enc " PACKAGE_VERSION);
|
||||
|
||||
GST_BUFFER_FLAG_SET (vorbiscomment, GST_BUFFER_FLAG_IN_CAPS);
|
||||
|
|
|
@ -78,7 +78,9 @@ struct _GstVP8Enc
|
|||
unsigned int lag_in_frames;
|
||||
int sharpness;
|
||||
int noise_sensitivity;
|
||||
#ifdef HAVE_VP8ENC_TUNING
|
||||
vp8e_tuning tuning;
|
||||
#endif
|
||||
int static_threshold;
|
||||
gboolean drop_frame;
|
||||
gboolean resize_allowed;
|
||||
|
|
|
@ -1,8 +1,5 @@
|
|||
if BUILD_EXPERIMENTAL
|
||||
EXPERIMENTAL_LIBS=basecamerabinsrc
|
||||
endif
|
||||
|
||||
SUBDIRS = interfaces signalprocessor video $(EXPERIMENTAL_LIBS)
|
||||
SUBDIRS = interfaces signalprocessor video basecamerabinsrc
|
||||
|
||||
noinst_HEADERS = gst-i18n-plugin.h gettext.h
|
||||
DIST_SUBDIRS = interfaces signalprocessor video basecamerabinsrc
|
||||
|
|
|
@ -97,6 +97,7 @@ gst_base_video_codec_init (GstBaseVideoCodec * base_video_codec,
|
|||
|
||||
gst_segment_init (&base_video_codec->segment, GST_FORMAT_TIME);
|
||||
|
||||
g_static_rec_mutex_init (&base_video_codec->stream_lock);
|
||||
}
|
||||
|
||||
static void
|
||||
|
@ -106,24 +107,28 @@ gst_base_video_codec_reset (GstBaseVideoCodec * base_video_codec)
|
|||
|
||||
GST_DEBUG_OBJECT (base_video_codec, "reset");
|
||||
|
||||
GST_OBJECT_LOCK (base_video_codec);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_codec);
|
||||
for (g = base_video_codec->frames; g; g = g_list_next (g)) {
|
||||
gst_base_video_codec_free_frame ((GstVideoFrame *) g->data);
|
||||
}
|
||||
g_list_free (base_video_codec->frames);
|
||||
base_video_codec->frames = NULL;
|
||||
GST_OBJECT_UNLOCK (base_video_codec);
|
||||
|
||||
base_video_codec->bytes = 0;
|
||||
base_video_codec->time = 0;
|
||||
|
||||
gst_buffer_replace (&base_video_codec->state.codec_data, NULL);
|
||||
gst_caps_replace (&base_video_codec->state.caps, NULL);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_codec);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_base_video_codec_finalize (GObject * object)
|
||||
{
|
||||
GstBaseVideoCodec *base_video_codec = GST_BASE_VIDEO_CODEC (object);
|
||||
|
||||
g_static_rec_mutex_free (&base_video_codec->stream_lock);
|
||||
|
||||
G_OBJECT_CLASS (parent_class)->finalize (object);
|
||||
}
|
||||
|
||||
|
@ -170,8 +175,10 @@ gst_base_video_codec_new_frame (GstBaseVideoCodec * base_video_codec)
|
|||
|
||||
frame = g_slice_new0 (GstVideoFrame);
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_codec);
|
||||
frame->system_frame_number = base_video_codec->system_frame_number;
|
||||
base_video_codec->system_frame_number++;
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_codec);
|
||||
|
||||
return frame;
|
||||
}
|
||||
|
|
|
@ -79,6 +79,9 @@ G_BEGIN_DECLS
|
|||
*/
|
||||
#define GST_BASE_VIDEO_CODEC_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS
|
||||
|
||||
#define GST_BASE_VIDEO_CODEC_STREAM_LOCK(codec) g_static_rec_mutex_lock (&GST_BASE_VIDEO_CODEC (codec)->stream_lock)
|
||||
#define GST_BASE_VIDEO_CODEC_STREAM_UNLOCK(codec) g_static_rec_mutex_unlock (&GST_BASE_VIDEO_CODEC (codec)->stream_lock)
|
||||
|
||||
typedef struct _GstVideoState GstVideoState;
|
||||
typedef struct _GstVideoFrame GstVideoFrame;
|
||||
typedef struct _GstBaseVideoCodec GstBaseVideoCodec;
|
||||
|
@ -145,6 +148,11 @@ struct _GstBaseVideoCodec
|
|||
GstPad *sinkpad;
|
||||
GstPad *srcpad;
|
||||
|
||||
/* protects all data processing, i.e. is locked
|
||||
* in the chain function, finish_frame and when
|
||||
* processing serialized events */
|
||||
GStaticRecMutex stream_lock;
|
||||
|
||||
guint64 system_frame_number;
|
||||
|
||||
GList *frames; /* Protected with OBJECT_LOCK */
|
||||
|
|
|
@ -196,8 +196,6 @@ gst_base_video_decoder_class_init (GstBaseVideoDecoderClass * klass)
|
|||
|
||||
gstelement_class->change_state =
|
||||
GST_DEBUG_FUNCPTR (gst_base_video_decoder_change_state);
|
||||
|
||||
parent_class = g_type_class_peek_parent (klass);
|
||||
}
|
||||
|
||||
static void
|
||||
|
@ -257,8 +255,10 @@ gst_base_video_decoder_push_src_event (GstBaseVideoDecoder * decoder,
|
|||
|| GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP)
|
||||
return gst_pad_push_event (decoder->base_video_codec.srcpad, event);
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (decoder);
|
||||
decoder->current_frame_events =
|
||||
g_list_prepend (decoder->current_frame_events, event);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (decoder);
|
||||
|
||||
return TRUE;
|
||||
}
|
||||
|
@ -279,6 +279,8 @@ gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
|
|||
|
||||
GST_DEBUG_OBJECT (base_video_decoder, "setcaps %" GST_PTR_FORMAT, caps);
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
|
||||
memset (&state, 0, sizeof (state));
|
||||
|
||||
state.caps = gst_caps_ref (caps);
|
||||
|
@ -322,6 +324,7 @@ gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
|
|||
gst_caps_replace (&state.caps, NULL);
|
||||
}
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
g_object_unref (base_video_decoder);
|
||||
|
||||
return ret;
|
||||
|
@ -404,7 +407,8 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
|
|||
case GST_EVENT_EOS:
|
||||
{
|
||||
GstFlowReturn flow_ret;
|
||||
;
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
if (!base_video_decoder->packetized) {
|
||||
do {
|
||||
flow_ret =
|
||||
|
@ -420,8 +424,9 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
|
|||
|
||||
if (flow_ret == GST_FLOW_OK)
|
||||
ret = gst_base_video_decoder_push_src_event (base_video_decoder, event);
|
||||
}
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
break;
|
||||
}
|
||||
case GST_EVENT_NEWSEGMENT:
|
||||
{
|
||||
gboolean update;
|
||||
|
@ -432,6 +437,7 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
|
|||
gint64 pos;
|
||||
GstSegment *segment = &GST_BASE_VIDEO_CODEC (base_video_decoder)->segment;
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
gst_event_parse_new_segment_full (event, &update, &rate,
|
||||
&arate, &format, &start, &stop, &pos);
|
||||
|
||||
|
@ -468,6 +474,7 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
|
|||
event = gst_event_new_new_segment_full (update, rate, arate,
|
||||
GST_FORMAT_TIME, start, stop, pos);
|
||||
} else {
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
goto newseg_wrong_format;
|
||||
}
|
||||
}
|
||||
|
@ -482,12 +489,15 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
|
|||
update, rate, arate, format, start, stop, pos);
|
||||
|
||||
ret = gst_base_video_decoder_push_src_event (base_video_decoder, event);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
break;
|
||||
}
|
||||
case GST_EVENT_FLUSH_STOP:
|
||||
{
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
/* well, this is kind of worse than a DISCONT */
|
||||
gst_base_video_decoder_flush (base_video_decoder, TRUE);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
}
|
||||
default:
|
||||
/* FIXME this changes the order of events */
|
||||
|
@ -930,6 +940,8 @@ gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder,
|
|||
{
|
||||
GST_DEBUG_OBJECT (base_video_decoder, "reset full %d", full);
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
|
||||
if (full) {
|
||||
gst_segment_init (&GST_BASE_VIDEO_CODEC (base_video_decoder)->segment,
|
||||
GST_FORMAT_UNDEFINED);
|
||||
|
@ -965,6 +977,7 @@ gst_base_video_decoder_reset (GstBaseVideoDecoder * base_video_decoder,
|
|||
GST_CLOCK_TIME_NONE;
|
||||
GST_BASE_VIDEO_CODEC (base_video_decoder)->proportion = 0.5;
|
||||
GST_OBJECT_UNLOCK (base_video_decoder);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
|
@ -1211,6 +1224,7 @@ static GstFlowReturn
|
|||
gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
|
||||
{
|
||||
GstBaseVideoDecoder *base_video_decoder;
|
||||
GstFlowReturn ret = GST_FLOW_OK;
|
||||
|
||||
base_video_decoder = GST_BASE_VIDEO_DECODER (GST_PAD_PARENT (pad));
|
||||
|
||||
|
@ -1219,6 +1233,8 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
|
|||
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
|
||||
GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_BUFFER_SIZE (buf));
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
|
||||
/* NOTE:
|
||||
* requiring the pad to be negotiated makes it impossible to use
|
||||
* oggdemux or filesrc ! decoder */
|
||||
|
@ -1242,7 +1258,8 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
|
|||
ret = gst_base_video_decoder_push_src_event (base_video_decoder, event);
|
||||
if (!ret) {
|
||||
GST_ERROR_OBJECT (base_video_decoder, "new segment event ret=%d", ret);
|
||||
return GST_FLOW_ERROR;
|
||||
ret = GST_FLOW_ERROR;
|
||||
goto done;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1270,9 +1287,13 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
|
|||
}
|
||||
|
||||
if (GST_BASE_VIDEO_CODEC (base_video_decoder)->segment.rate > 0.0)
|
||||
return gst_base_video_decoder_chain_forward (base_video_decoder, buf);
|
||||
ret = gst_base_video_decoder_chain_forward (base_video_decoder, buf);
|
||||
else
|
||||
return gst_base_video_decoder_chain_reverse (base_video_decoder, buf);
|
||||
ret = gst_base_video_decoder_chain_reverse (base_video_decoder, buf);
|
||||
|
||||
done:
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
return ret;
|
||||
}
|
||||
|
||||
static GstStateChangeReturn
|
||||
|
@ -1302,11 +1323,14 @@ gst_base_video_decoder_change_state (GstElement * element,
|
|||
if (base_video_decoder_class->stop) {
|
||||
base_video_decoder_class->stop (base_video_decoder);
|
||||
}
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
gst_base_video_decoder_reset (base_video_decoder, TRUE);
|
||||
g_list_foreach (base_video_decoder->current_frame_events,
|
||||
(GFunc) gst_event_unref, NULL);
|
||||
g_list_free (base_video_decoder->current_frame_events);
|
||||
base_video_decoder->current_frame_events = NULL;
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
break;
|
||||
default:
|
||||
break;
|
||||
|
@ -1320,6 +1344,7 @@ gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder)
|
|||
{
|
||||
GstVideoFrame *frame;
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
frame =
|
||||
gst_base_video_codec_new_frame (GST_BASE_VIDEO_CODEC
|
||||
(base_video_decoder));
|
||||
|
@ -1335,6 +1360,8 @@ gst_base_video_decoder_new_frame (GstBaseVideoDecoder * base_video_decoder)
|
|||
frame->events = base_video_decoder->current_frame_events;
|
||||
base_video_decoder->current_frame_events = NULL;
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
|
||||
return frame;
|
||||
}
|
||||
|
||||
|
@ -1360,13 +1387,13 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
|
|||
GList *l, *events = NULL;
|
||||
|
||||
GST_LOG_OBJECT (base_video_decoder, "finish frame");
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
|
||||
#ifndef GST_DISABLE_GST_DEBUG
|
||||
GST_OBJECT_LOCK (base_video_decoder);
|
||||
GST_LOG_OBJECT (base_video_decoder, "n %d in %d out %d",
|
||||
g_list_length (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames),
|
||||
gst_adapter_available (base_video_decoder->input_adapter),
|
||||
gst_adapter_available (base_video_decoder->output_adapter));
|
||||
GST_OBJECT_UNLOCK (base_video_decoder);
|
||||
#endif
|
||||
|
||||
GST_LOG_OBJECT (base_video_decoder,
|
||||
|
@ -1374,7 +1401,6 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
|
|||
GST_TIME_ARGS (frame->presentation_timestamp));
|
||||
|
||||
/* Push all pending events that arrived before this frame */
|
||||
GST_OBJECT_LOCK (base_video_decoder);
|
||||
for (l = base_video_decoder->base_video_codec.frames; l; l = l->next) {
|
||||
GstVideoFrame *tmp = l->data;
|
||||
|
||||
|
@ -1390,7 +1416,6 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
|
|||
if (tmp == frame)
|
||||
break;
|
||||
}
|
||||
GST_OBJECT_UNLOCK (base_video_decoder);
|
||||
|
||||
for (l = g_list_last (events); l; l = l->next)
|
||||
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder),
|
||||
|
@ -1544,7 +1569,8 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
|
|||
GST_TIME_ARGS (segment->start),
|
||||
GST_TIME_ARGS (segment->stop), GST_TIME_ARGS (segment->time));
|
||||
gst_buffer_unref (src_buffer);
|
||||
return GST_FLOW_OK;
|
||||
ret = GST_FLOW_OK;
|
||||
goto done;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1562,12 +1588,12 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
|
|||
}
|
||||
|
||||
done:
|
||||
GST_OBJECT_LOCK (base_video_decoder);
|
||||
GST_BASE_VIDEO_CODEC (base_video_decoder)->frames =
|
||||
g_list_remove (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames, frame);
|
||||
GST_OBJECT_UNLOCK (base_video_decoder);
|
||||
gst_base_video_codec_free_frame (frame);
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
@ -1589,6 +1615,7 @@ gst_base_video_decoder_add_to_frame (GstBaseVideoDecoder * base_video_decoder,
|
|||
if (n_bytes == 0)
|
||||
return;
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
if (gst_adapter_available (base_video_decoder->output_adapter) == 0) {
|
||||
base_video_decoder->frame_offset = base_video_decoder->input_offset -
|
||||
gst_adapter_available (base_video_decoder->input_adapter);
|
||||
|
@ -1596,6 +1623,7 @@ gst_base_video_decoder_add_to_frame (GstBaseVideoDecoder * base_video_decoder,
|
|||
buf = gst_adapter_take_buffer (base_video_decoder->input_adapter, n_bytes);
|
||||
|
||||
gst_adapter_push (base_video_decoder->output_adapter, buf);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
}
|
||||
|
||||
static guint64
|
||||
|
@ -1670,9 +1698,12 @@ gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder)
|
|||
int n_available;
|
||||
GstClockTime timestamp;
|
||||
GstClockTime duration;
|
||||
GstFlowReturn ret = GST_FLOW_OK;
|
||||
|
||||
GST_LOG_OBJECT (base_video_decoder, "have_frame");
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
|
||||
n_available = gst_adapter_available (base_video_decoder->output_adapter);
|
||||
if (n_available) {
|
||||
buffer = gst_adapter_take_buffer (base_video_decoder->output_adapter,
|
||||
|
@ -1693,7 +1724,11 @@ gst_base_video_decoder_have_frame (GstBaseVideoDecoder * base_video_decoder)
|
|||
"ts %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT,
|
||||
n_available, GST_TIME_ARGS (timestamp), GST_TIME_ARGS (duration));
|
||||
|
||||
return gst_base_video_decoder_have_frame_2 (base_video_decoder);
|
||||
ret = gst_base_video_decoder_have_frame_2 (base_video_decoder);
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
|
@ -1729,10 +1764,8 @@ gst_base_video_decoder_have_frame_2 (GstBaseVideoDecoder * base_video_decoder)
|
|||
GST_TIME_ARGS (frame->decode_timestamp));
|
||||
GST_LOG_OBJECT (base_video_decoder, "dist %d", frame->distance_from_sync);
|
||||
|
||||
GST_OBJECT_LOCK (base_video_decoder);
|
||||
GST_BASE_VIDEO_CODEC (base_video_decoder)->frames =
|
||||
g_list_append (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames, frame);
|
||||
GST_OBJECT_UNLOCK (base_video_decoder);
|
||||
|
||||
frame->deadline =
|
||||
gst_segment_to_running_time (&GST_BASE_VIDEO_CODEC
|
||||
|
@ -1779,11 +1812,14 @@ gst_base_video_decoder_lost_sync (GstBaseVideoDecoder * base_video_decoder)
|
|||
|
||||
GST_DEBUG_OBJECT (base_video_decoder, "lost_sync");
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
|
||||
if (gst_adapter_available (base_video_decoder->input_adapter) >= 1) {
|
||||
gst_adapter_flush (base_video_decoder->input_adapter, 1);
|
||||
}
|
||||
|
||||
base_video_decoder->have_sync = FALSE;
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
}
|
||||
|
||||
/* FIXME not quite exciting; get rid of this ? */
|
||||
|
@ -1798,8 +1834,10 @@ gst_base_video_decoder_set_sync_point (GstBaseVideoDecoder * base_video_decoder)
|
|||
{
|
||||
GST_DEBUG_OBJECT (base_video_decoder, "set_sync_point");
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
base_video_decoder->current_frame->is_sync_point = TRUE;
|
||||
base_video_decoder->distance_from_sync = 0;
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -1814,9 +1852,9 @@ gst_base_video_decoder_get_oldest_frame (GstBaseVideoDecoder *
|
|||
{
|
||||
GList *g;
|
||||
|
||||
GST_OBJECT_LOCK (base_video_decoder);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
g = g_list_first (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames);
|
||||
GST_OBJECT_UNLOCK (base_video_decoder);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
|
||||
if (g == NULL)
|
||||
return NULL;
|
||||
|
@ -1837,7 +1875,7 @@ gst_base_video_decoder_get_frame (GstBaseVideoDecoder * base_video_decoder,
|
|||
GList *g;
|
||||
GstVideoFrame *frame = NULL;
|
||||
|
||||
GST_OBJECT_LOCK (base_video_decoder);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
for (g = g_list_first (GST_BASE_VIDEO_CODEC (base_video_decoder)->frames);
|
||||
g; g = g_list_next (g)) {
|
||||
GstVideoFrame *tmp = g->data;
|
||||
|
@ -1847,7 +1885,7 @@ gst_base_video_decoder_get_frame (GstBaseVideoDecoder * base_video_decoder,
|
|||
break;
|
||||
}
|
||||
}
|
||||
GST_OBJECT_UNLOCK (base_video_decoder);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
|
||||
return frame;
|
||||
}
|
||||
|
@ -1871,6 +1909,8 @@ gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder)
|
|||
g_return_val_if_fail (state->width != 0, FALSE);
|
||||
g_return_val_if_fail (state->height != 0, FALSE);
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
|
||||
/* sanitize */
|
||||
if (state->fps_n == 0 || state->fps_d == 0) {
|
||||
state->fps_n = 0;
|
||||
|
@ -1898,6 +1938,8 @@ gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder)
|
|||
state->bytes_per_picture =
|
||||
gst_video_format_get_size (state->format, state->width, state->height);
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
@ -1922,6 +1964,9 @@ gst_base_video_decoder_alloc_src_buffer (GstBaseVideoDecoder *
|
|||
|
||||
GST_DEBUG ("alloc src buffer caps=%" GST_PTR_FORMAT,
|
||||
GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder)));
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
|
||||
flow_ret =
|
||||
gst_pad_alloc_buffer_and_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD
|
||||
(base_video_decoder), GST_BUFFER_OFFSET_NONE, num_bytes,
|
||||
|
@ -1934,6 +1979,7 @@ gst_base_video_decoder_alloc_src_buffer (GstBaseVideoDecoder *
|
|||
GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder)));
|
||||
}
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
return buffer;
|
||||
}
|
||||
|
||||
|
@ -1962,6 +2008,8 @@ gst_base_video_decoder_alloc_src_frame (GstBaseVideoDecoder *
|
|||
(base_video_decoder)) != NULL, GST_FLOW_ERROR);
|
||||
|
||||
GST_LOG_OBJECT (base_video_decoder, "alloc buffer size %d", num_bytes);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
|
||||
|
||||
flow_ret =
|
||||
gst_pad_alloc_buffer_and_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD
|
||||
(base_video_decoder), GST_BUFFER_OFFSET_NONE, num_bytes,
|
||||
|
@ -1973,6 +2021,8 @@ gst_base_video_decoder_alloc_src_frame (GstBaseVideoDecoder *
|
|||
gst_flow_get_name (flow_ret));
|
||||
}
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
|
||||
|
||||
return flow_ret;
|
||||
}
|
||||
|
||||
|
@ -1995,6 +2045,7 @@ gst_base_video_decoder_get_max_decode_time (GstBaseVideoDecoder *
|
|||
GstClockTimeDiff deadline;
|
||||
GstClockTime earliest_time;
|
||||
|
||||
GST_OBJECT_LOCK (base_video_decoder);
|
||||
earliest_time = GST_BASE_VIDEO_CODEC (base_video_decoder)->earliest_time;
|
||||
if (GST_CLOCK_TIME_IS_VALID (earliest_time))
|
||||
deadline = GST_CLOCK_DIFF (earliest_time, frame->deadline);
|
||||
|
@ -2006,6 +2057,8 @@ gst_base_video_decoder_get_max_decode_time (GstBaseVideoDecoder *
|
|||
GST_TIME_ARGS (earliest_time), GST_TIME_ARGS (frame->deadline),
|
||||
GST_TIME_ARGS (deadline));
|
||||
|
||||
GST_OBJECT_UNLOCK (base_video_decoder);
|
||||
|
||||
return deadline;
|
||||
}
|
||||
|
||||
|
|
|
@ -169,13 +169,13 @@ gst_base_video_encoder_class_init (GstBaseVideoEncoderClass * klass)
|
|||
|
||||
gstelement_class->change_state =
|
||||
GST_DEBUG_FUNCPTR (gst_base_video_encoder_change_state);
|
||||
|
||||
parent_class = g_type_class_peek_parent (klass);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_base_video_encoder_reset (GstBaseVideoEncoder * base_video_encoder)
|
||||
{
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);
|
||||
|
||||
base_video_encoder->presentation_frame_number = 0;
|
||||
base_video_encoder->distance_from_sync = 0;
|
||||
base_video_encoder->force_keyframe = FALSE;
|
||||
|
@ -193,6 +193,8 @@ gst_base_video_encoder_reset (GstBaseVideoEncoder * base_video_encoder)
|
|||
(GFunc) gst_event_unref, NULL);
|
||||
g_list_free (base_video_encoder->current_frame_events);
|
||||
base_video_encoder->current_frame_events = NULL;
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);
|
||||
}
|
||||
|
||||
static void
|
||||
|
@ -285,6 +287,8 @@ gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
|
|||
|
||||
GST_DEBUG_OBJECT (base_video_encoder, "setcaps %" GST_PTR_FORMAT, caps);
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);
|
||||
|
||||
state = &GST_BASE_VIDEO_CODEC (base_video_encoder)->state;
|
||||
memset (&tmp_state, 0, sizeof (tmp_state));
|
||||
|
||||
|
@ -351,13 +355,15 @@ gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
|
|||
}
|
||||
|
||||
exit:
|
||||
g_object_unref (base_video_encoder);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);
|
||||
|
||||
if (!ret) {
|
||||
GST_WARNING_OBJECT (base_video_encoder, "rejected caps %" GST_PTR_FORMAT,
|
||||
caps);
|
||||
}
|
||||
|
||||
g_object_unref (base_video_encoder);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
@ -452,6 +458,7 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder,
|
|||
{
|
||||
GstFlowReturn flow_ret;
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);
|
||||
base_video_encoder->a.at_eos = TRUE;
|
||||
|
||||
if (base_video_encoder_class->finish) {
|
||||
|
@ -461,6 +468,7 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder,
|
|||
}
|
||||
|
||||
ret = (flow_ret == GST_BASE_VIDEO_ENCODER_FLOW_DROPPED);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);
|
||||
break;
|
||||
}
|
||||
case GST_EVENT_NEWSEGMENT:
|
||||
|
@ -473,6 +481,7 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder,
|
|||
gint64 stop;
|
||||
gint64 position;
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);
|
||||
gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
|
||||
&format, &start, &stop, &position);
|
||||
|
||||
|
@ -484,6 +493,7 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder,
|
|||
|
||||
if (format != GST_FORMAT_TIME) {
|
||||
GST_DEBUG_OBJECT (base_video_encoder, "received non TIME newsegment");
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);
|
||||
break;
|
||||
}
|
||||
|
||||
|
@ -492,6 +502,7 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder,
|
|||
gst_segment_set_newsegment_full (&GST_BASE_VIDEO_CODEC
|
||||
(base_video_encoder)->segment, update, rate, applied_rate, format,
|
||||
start, stop, position);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);
|
||||
break;
|
||||
}
|
||||
case GST_EVENT_CUSTOM_DOWNSTREAM:
|
||||
|
@ -556,8 +567,10 @@ gst_base_video_encoder_sink_event (GstPad * pad, GstEvent * event)
|
|||
|| GST_EVENT_TYPE (event) == GST_EVENT_FLUSH_STOP) {
|
||||
ret = gst_pad_push_event (enc->base_video_codec.srcpad, event);
|
||||
} else {
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (enc);
|
||||
enc->current_frame_events =
|
||||
g_list_prepend (enc->current_frame_events, event);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (enc);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -699,8 +712,11 @@ gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf)
|
|||
|
||||
g_return_val_if_fail (klass->handle_frame != NULL, GST_FLOW_ERROR);
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);
|
||||
|
||||
if (!GST_PAD_CAPS (pad)) {
|
||||
return GST_FLOW_NOT_NEGOTIATED;
|
||||
ret = GST_FLOW_NOT_NEGOTIATED;
|
||||
goto done;
|
||||
}
|
||||
|
||||
GST_LOG_OBJECT (base_video_encoder,
|
||||
|
@ -710,7 +726,8 @@ gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf)
|
|||
GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
|
||||
|
||||
if (base_video_encoder->a.at_eos) {
|
||||
return GST_FLOW_UNEXPECTED;
|
||||
ret = GST_FLOW_UNEXPECTED;
|
||||
goto done;
|
||||
}
|
||||
|
||||
if (base_video_encoder->sink_clipping) {
|
||||
|
@ -746,10 +763,8 @@ gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf)
|
|||
frame->force_keyframe = base_video_encoder->force_keyframe;
|
||||
base_video_encoder->force_keyframe = FALSE;
|
||||
|
||||
GST_OBJECT_LOCK (base_video_encoder);
|
||||
GST_BASE_VIDEO_CODEC (base_video_encoder)->frames =
|
||||
g_list_append (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame);
|
||||
GST_OBJECT_UNLOCK (base_video_encoder);
|
||||
|
||||
/* new data, more finish needed */
|
||||
base_video_encoder->drained = FALSE;
|
||||
|
@ -760,6 +775,8 @@ gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf)
|
|||
ret = klass->handle_frame (base_video_encoder, frame);
|
||||
|
||||
done:
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);
|
||||
|
||||
g_object_unref (base_video_encoder);
|
||||
|
||||
return ret;
|
||||
|
@ -831,6 +848,8 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
|
|||
GST_LOG_OBJECT (base_video_encoder,
|
||||
"finish frame fpn %d", frame->presentation_frame_number);
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);
|
||||
|
||||
/* Push all pending events that arrived before this frame */
|
||||
for (l = base_video_encoder->base_video_codec.frames; l; l = l->next) {
|
||||
GstVideoFrame *tmp = l->data;
|
||||
|
@ -945,13 +964,13 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
|
|||
|
||||
done:
|
||||
/* handed out */
|
||||
GST_OBJECT_LOCK (base_video_encoder);
|
||||
GST_BASE_VIDEO_CODEC (base_video_encoder)->frames =
|
||||
g_list_remove (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames, frame);
|
||||
GST_OBJECT_UNLOCK (base_video_encoder);
|
||||
|
||||
gst_base_video_codec_free_frame (frame);
|
||||
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
@ -1025,9 +1044,9 @@ gst_base_video_encoder_get_oldest_frame (GstBaseVideoEncoder *
|
|||
{
|
||||
GList *g;
|
||||
|
||||
GST_OBJECT_LOCK (base_video_encoder);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);
|
||||
g = g_list_first (GST_BASE_VIDEO_CODEC (base_video_encoder)->frames);
|
||||
GST_OBJECT_UNLOCK (base_video_encoder);
|
||||
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);
|
||||
|
||||
if (g == NULL)
|
||||
return NULL;
|
||||
|
|
|
@ -23,7 +23,7 @@
|
|||
* elements, providing an API for controlling a digital camera.
|
||||
*
|
||||
* <note>
|
||||
* Note that camerabin2 is still UNSTABLE, EXPERIMENTAL and under
|
||||
* Note that camerabin2 is still UNSTABLE and under
|
||||
* development.
|
||||
* </note>
|
||||
*
|
||||
|
@ -270,6 +270,9 @@ gst_cam_flags_get_type (void)
|
|||
"elements", "no-audio-conversion"},
|
||||
{C_FLAGS (GST_CAM_FLAG_NO_VIDEO_CONVERSION), "Do not use video conversion "
|
||||
"elements", "no-video-conversion"},
|
||||
{C_FLAGS (GST_CAM_FLAG_NO_VIEWFINDER_CONVERSION),
|
||||
"Do not use viewfinder conversion " "elements",
|
||||
"no-viewfinder-conversion"},
|
||||
{0, NULL, NULL}
|
||||
};
|
||||
static volatile GType id = 0;
|
||||
|
@ -439,6 +442,7 @@ gst_camera_bin_stop_capture (GstCameraBin2 * camerabin)
|
|||
g_signal_emit_by_name (camerabin->src, "stop-capture", NULL);
|
||||
|
||||
if (camerabin->mode == MODE_VIDEO && camerabin->audio_src) {
|
||||
camerabin->audio_drop_eos = FALSE;
|
||||
gst_element_send_event (camerabin->audio_src, gst_event_new_eos ());
|
||||
}
|
||||
}
|
||||
|
@ -1213,6 +1217,25 @@ gst_camera_bin_image_sink_event_probe (GstPad * pad, GstEvent * event,
|
|||
return TRUE;
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_camera_bin_audio_src_event_probe (GstPad * pad, GstEvent * event,
|
||||
gpointer data)
|
||||
{
|
||||
GstCameraBin2 *camera = data;
|
||||
gboolean ret = TRUE;
|
||||
|
||||
if (GST_EVENT_TYPE (event) == GST_EVENT_EOS) {
|
||||
/* we only let an EOS pass when the user is stopping a capture */
|
||||
if (camera->audio_drop_eos) {
|
||||
ret = FALSE;
|
||||
} else {
|
||||
camera->audio_drop_eos = TRUE;
|
||||
}
|
||||
}
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
/**
|
||||
* gst_camera_bin_create_elements:
|
||||
* @param camera: the #GstCameraBin2
|
||||
|
@ -1253,14 +1276,6 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
|
|||
g_signal_connect (camera->video_encodebin, "element-added",
|
||||
(GCallback) encodebin_element_added, camera);
|
||||
|
||||
/* propagate the flags property by translating appropriate values
|
||||
* to GstEncFlags values */
|
||||
if (camera->flags & GST_CAM_FLAG_NO_AUDIO_CONVERSION)
|
||||
encbin_flags |= (1 << 0);
|
||||
if (camera->flags & GST_CAM_FLAG_NO_VIDEO_CONVERSION)
|
||||
encbin_flags |= (1 << 1);
|
||||
g_object_set (camera->video_encodebin, "flags", encbin_flags, NULL);
|
||||
|
||||
camera->videosink =
|
||||
gst_element_factory_make ("filesink", "videobin-filesink");
|
||||
if (!camera->videosink) {
|
||||
|
@ -1362,11 +1377,14 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
|
|||
gst_object_ref (camera->imagesink),
|
||||
gst_object_ref (camera->viewfinderbin_queue), NULL);
|
||||
|
||||
/* Linking can be optimized TODO */
|
||||
gst_element_link (camera->video_encodebin, camera->videosink);
|
||||
gst_element_link (camera->image_encodebin, camera->imagesink);
|
||||
gst_element_link_many (camera->viewfinderbin_queue,
|
||||
camera->viewfinderbin_capsfilter, camera->viewfinderbin, NULL);
|
||||
gst_element_link_pads_full (camera->video_encodebin, "src",
|
||||
camera->videosink, "sink", GST_PAD_LINK_CHECK_NOTHING);
|
||||
gst_element_link_pads_full (camera->image_encodebin, "src",
|
||||
camera->imagesink, "sink", GST_PAD_LINK_CHECK_NOTHING);
|
||||
gst_element_link_pads_full (camera->viewfinderbin_queue, "src",
|
||||
camera->viewfinderbin_capsfilter, "sink", GST_PAD_LINK_CHECK_CAPS);
|
||||
gst_element_link_pads_full (camera->viewfinderbin_capsfilter, "src",
|
||||
camera->viewfinderbin, "sink", GST_PAD_LINK_CHECK_CAPS);
|
||||
|
||||
{
|
||||
/* set an event probe to watch for custom location changes */
|
||||
|
@ -1396,6 +1414,17 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
|
|||
g_object_set (camera->imagesink, "location", camera->location, NULL);
|
||||
}
|
||||
|
||||
/* propagate the flags property by translating appropriate values
|
||||
* to GstEncFlags values */
|
||||
if (camera->flags & GST_CAM_FLAG_NO_AUDIO_CONVERSION)
|
||||
encbin_flags |= (1 << 0);
|
||||
if (camera->flags & GST_CAM_FLAG_NO_VIDEO_CONVERSION)
|
||||
encbin_flags |= (1 << 1);
|
||||
g_object_set (camera->video_encodebin, "flags", encbin_flags, NULL);
|
||||
|
||||
g_object_set (camera->viewfinderbin, "disable-converters",
|
||||
camera->flags & GST_CAM_FLAG_NO_VIEWFINDER_CONVERSION, NULL);
|
||||
|
||||
if (camera->video_profile_switch) {
|
||||
GST_DEBUG_OBJECT (camera, "Switching encodebin's profile");
|
||||
g_object_set (camera->video_encodebin, "profile", camera->video_profile,
|
||||
|
@ -1529,6 +1558,8 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
|
|||
}
|
||||
|
||||
if (new_audio_src) {
|
||||
GstPad *srcpad;
|
||||
|
||||
if (g_object_class_find_property (G_OBJECT_GET_CLASS (camera->audio_src),
|
||||
"provide-clock")) {
|
||||
g_object_set (camera->audio_src, "provide-clock", FALSE, NULL);
|
||||
|
@ -1540,6 +1571,15 @@ gst_camera_bin_create_elements (GstCameraBin2 * camera)
|
|||
|
||||
gst_element_link_many (camera->audio_src, camera->audio_volume,
|
||||
camera->audio_capsfilter, NULL);
|
||||
|
||||
srcpad = gst_element_get_static_pad (camera->audio_src, "src");
|
||||
|
||||
/* drop EOS for audiosrc elements that push them on state_changes
|
||||
* (basesrc does this) */
|
||||
gst_pad_add_event_probe (srcpad,
|
||||
(GCallback) gst_camera_bin_audio_src_event_probe, camera);
|
||||
|
||||
gst_object_unref (srcpad);
|
||||
}
|
||||
if (has_audio) {
|
||||
gst_camera_bin_check_and_replace_filter (camera, &camera->audio_filter,
|
||||
|
@ -1578,6 +1618,7 @@ gst_camera_bin_change_state (GstElement * element, GstStateChange trans)
|
|||
GstStateChangeReturn ret = GST_STATE_CHANGE_SUCCESS;
|
||||
GstCameraBin2 *camera = GST_CAMERA_BIN2_CAST (element);
|
||||
|
||||
|
||||
switch (trans) {
|
||||
case GST_STATE_CHANGE_NULL_TO_READY:
|
||||
if (!gst_camera_bin_create_elements (camera)) {
|
||||
|
@ -1586,6 +1627,7 @@ gst_camera_bin_change_state (GstElement * element, GstStateChange trans)
|
|||
break;
|
||||
case GST_STATE_CHANGE_READY_TO_PAUSED:
|
||||
GST_CAMERA_BIN2_RESET_PROCESSING_COUNTER (camera);
|
||||
camera->audio_drop_eos = TRUE;
|
||||
break;
|
||||
case GST_STATE_CHANGE_PAUSED_TO_READY:
|
||||
if (GST_STATE (camera->videosink) >= GST_STATE_PAUSED)
|
||||
|
@ -1734,8 +1776,12 @@ gst_camera_bin_set_property (GObject * object, guint prop_id,
|
|||
"Setting audio capture caps to %" GST_PTR_FORMAT,
|
||||
gst_value_get_caps (value));
|
||||
|
||||
if (G_LIKELY (camera->audio_capsfilter)) {
|
||||
g_object_set (camera->audio_capsfilter, "caps",
|
||||
gst_value_get_caps (value), NULL);
|
||||
} else {
|
||||
GST_WARNING_OBJECT (camera, "Audio capsfilter missing");
|
||||
}
|
||||
}
|
||||
break;
|
||||
case PROP_IMAGE_CAPTURE_CAPS:{
|
||||
|
@ -1750,13 +1796,18 @@ gst_camera_bin_set_property (GObject * object, guint prop_id,
|
|||
"Setting image capture caps to %" GST_PTR_FORMAT,
|
||||
gst_value_get_caps (value));
|
||||
|
||||
/* set the capsfilter caps and notify the src to renegotiate */
|
||||
if (G_LIKELY (camera->imagebin_capsfilter)) {
|
||||
g_object_set (camera->imagebin_capsfilter, "caps",
|
||||
gst_value_get_caps (value), NULL);
|
||||
} else {
|
||||
GST_WARNING_OBJECT (camera, "Image capsfilter missing");
|
||||
}
|
||||
|
||||
/* set the capsfilter caps and notify the src to renegotiate */
|
||||
if (pad) {
|
||||
GST_DEBUG_OBJECT (camera, "Pushing renegotiate on %s",
|
||||
GST_PAD_NAME (pad));
|
||||
GST_PAD_EVENTFUNC (pad) (pad, gst_camera_bin_new_event_renegotiate ());
|
||||
gst_pad_send_event (pad, gst_camera_bin_new_event_renegotiate ());
|
||||
gst_object_unref (pad);
|
||||
}
|
||||
}
|
||||
|
@ -1774,12 +1825,17 @@ gst_camera_bin_set_property (GObject * object, guint prop_id,
|
|||
gst_value_get_caps (value));
|
||||
|
||||
/* set the capsfilter caps and notify the src to renegotiate */
|
||||
if (G_LIKELY (camera->videobin_capsfilter)) {
|
||||
g_object_set (camera->videobin_capsfilter, "caps",
|
||||
gst_value_get_caps (value), NULL);
|
||||
} else {
|
||||
GST_WARNING_OBJECT (camera, "Video capsfilter missing");
|
||||
}
|
||||
|
||||
if (pad) {
|
||||
GST_DEBUG_OBJECT (camera, "Pushing renegotiate on %s",
|
||||
GST_PAD_NAME (pad));
|
||||
GST_PAD_EVENTFUNC (pad) (pad, gst_camera_bin_new_event_renegotiate ());
|
||||
gst_pad_send_event (pad, gst_camera_bin_new_event_renegotiate ());
|
||||
gst_object_unref (pad);
|
||||
}
|
||||
}
|
||||
|
@ -1797,12 +1853,17 @@ gst_camera_bin_set_property (GObject * object, guint prop_id,
|
|||
gst_value_get_caps (value));
|
||||
|
||||
/* set the capsfilter caps and notify the src to renegotiate */
|
||||
if (G_LIKELY (camera->viewfinderbin_capsfilter)) {
|
||||
g_object_set (camera->viewfinderbin_capsfilter, "caps",
|
||||
gst_value_get_caps (value), NULL);
|
||||
} else {
|
||||
GST_WARNING_OBJECT (camera, "Viewfinder capsfilter missing");
|
||||
}
|
||||
|
||||
if (pad) {
|
||||
GST_DEBUG_OBJECT (camera, "Pushing renegotiate on %s",
|
||||
GST_PAD_NAME (pad));
|
||||
GST_PAD_EVENTFUNC (pad) (pad, gst_camera_bin_new_event_renegotiate ());
|
||||
gst_pad_send_event (pad, gst_camera_bin_new_event_renegotiate ());
|
||||
gst_object_unref (pad);
|
||||
}
|
||||
}
|
||||
|
@ -1970,28 +2031,44 @@ gst_camera_bin_get_property (GObject * object, guint prop_id,
|
|||
break;
|
||||
case PROP_AUDIO_CAPTURE_CAPS:{
|
||||
GstCaps *caps = NULL;
|
||||
if (G_LIKELY (camera->audio_capsfilter)) {
|
||||
g_object_get (camera->audio_capsfilter, "caps", &caps, NULL);
|
||||
} else {
|
||||
GST_WARNING ("Missing audio capsfilter");
|
||||
}
|
||||
gst_value_set_caps (value, caps);
|
||||
gst_caps_unref (caps);
|
||||
}
|
||||
break;
|
||||
case PROP_IMAGE_CAPTURE_CAPS:{
|
||||
GstCaps *caps = NULL;
|
||||
if (G_LIKELY (camera->imagebin_capsfilter)) {
|
||||
g_object_get (camera->imagebin_capsfilter, "caps", &caps, NULL);
|
||||
} else {
|
||||
GST_WARNING ("Missing imagebin capsfilter");
|
||||
}
|
||||
gst_value_set_caps (value, caps);
|
||||
gst_caps_unref (caps);
|
||||
}
|
||||
break;
|
||||
case PROP_VIDEO_CAPTURE_CAPS:{
|
||||
GstCaps *caps = NULL;
|
||||
if (G_LIKELY (camera->videobin_capsfilter)) {
|
||||
g_object_get (camera->videobin_capsfilter, "caps", &caps, NULL);
|
||||
} else {
|
||||
GST_WARNING ("Missing imagebin capsfilter");
|
||||
}
|
||||
gst_value_set_caps (value, caps);
|
||||
gst_caps_unref (caps);
|
||||
}
|
||||
break;
|
||||
case PROP_VIEWFINDER_CAPS:{
|
||||
GstCaps *caps = NULL;
|
||||
if (G_LIKELY (camera->viewfinderbin_capsfilter)) {
|
||||
g_object_get (camera->viewfinderbin_capsfilter, "caps", &caps, NULL);
|
||||
} else {
|
||||
GST_WARNING ("Missing imagebin capsfilter");
|
||||
}
|
||||
gst_value_set_caps (value, caps);
|
||||
gst_caps_unref (caps);
|
||||
}
|
||||
|
|
|
@ -36,7 +36,9 @@ typedef enum
|
|||
/* matches GstEncFlags GST_ENC_FLAG_NO_AUDIO_CONVERSION in encodebin */
|
||||
GST_CAM_FLAG_NO_AUDIO_CONVERSION = (1 << 0),
|
||||
/* matches GstEncFlags GST_ENC_FLAG_NO_VIDEO_CONVERSION in encodebin */
|
||||
GST_CAM_FLAG_NO_VIDEO_CONVERSION = (1 << 1)
|
||||
GST_CAM_FLAG_NO_VIDEO_CONVERSION = (1 << 1),
|
||||
/* maps to 'disable-converters' property in viewfinderbin */
|
||||
GST_CAM_FLAG_NO_VIEWFINDER_CONVERSION = (1 << 2)
|
||||
} GstCamFlags;
|
||||
|
||||
|
||||
|
@ -92,6 +94,8 @@ struct _GstCameraBin2
|
|||
gboolean video_profile_switch;
|
||||
gboolean image_profile_switch;
|
||||
|
||||
gboolean audio_drop_eos;
|
||||
|
||||
/* properties */
|
||||
gint mode;
|
||||
gchar *location;
|
||||
|
|
|
@ -43,15 +43,15 @@
|
|||
GST_DEBUG_CATEGORY_STATIC (gst_viewfinder_bin_debug);
|
||||
#define GST_CAT_DEFAULT gst_viewfinder_bin_debug
|
||||
|
||||
/* prototypes */
|
||||
|
||||
|
||||
enum
|
||||
{
|
||||
PROP_0,
|
||||
PROP_VIDEO_SINK,
|
||||
PROP_DISABLE_CONVERTERS
|
||||
};
|
||||
|
||||
#define DEFAULT_DISABLE_CONVERTERS FALSE
|
||||
|
||||
/* pad templates */
|
||||
|
||||
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
|
||||
|
@ -128,6 +128,12 @@ gst_viewfinder_bin_class_init (GstViewfinderBinClass * klass)
|
|||
g_param_spec_object ("video-sink", "Video Sink",
|
||||
"the video output element to use (NULL = default)",
|
||||
GST_TYPE_ELEMENT, G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
|
||||
|
||||
g_object_class_install_property (gobject_klass, PROP_DISABLE_CONVERTERS,
|
||||
g_param_spec_boolean ("disable-converters", "Disable conversion elements",
|
||||
"If video converters should be disabled (must be set on NULL)",
|
||||
DEFAULT_DISABLE_CONVERTERS,
|
||||
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
|
||||
}
|
||||
|
||||
static void
|
||||
|
@ -140,6 +146,8 @@ gst_viewfinder_bin_init (GstViewfinderBin * viewfinderbin,
|
|||
gst_object_unref (templ);
|
||||
gst_element_add_pad (GST_ELEMENT_CAST (viewfinderbin),
|
||||
viewfinderbin->ghostpad);
|
||||
|
||||
viewfinderbin->disable_converters = DEFAULT_DISABLE_CONVERTERS;
|
||||
}
|
||||
|
||||
static gboolean
|
||||
|
@ -147,42 +155,15 @@ gst_viewfinder_bin_create_elements (GstViewfinderBin * vfbin)
|
|||
{
|
||||
GstElement *csp = NULL;
|
||||
GstElement *videoscale = NULL;
|
||||
GstPad *pad = NULL;
|
||||
GstPad *firstpad = NULL;
|
||||
const gchar *missing_element_name;
|
||||
gboolean newsink = FALSE;
|
||||
gboolean updated_converters = FALSE;
|
||||
|
||||
GST_DEBUG_OBJECT (vfbin, "Creating internal elements");
|
||||
|
||||
if (!vfbin->elements_created) {
|
||||
/* create elements */
|
||||
csp =
|
||||
gst_camerabin_create_and_add_element (GST_BIN (vfbin),
|
||||
"ffmpegcolorspace", "vfbin-csp");
|
||||
if (!csp) {
|
||||
missing_element_name = "ffmpegcolorspace";
|
||||
goto missing_element;
|
||||
}
|
||||
|
||||
videoscale =
|
||||
gst_camerabin_create_and_add_element (GST_BIN (vfbin), "videoscale",
|
||||
"vfbin-videoscale");
|
||||
if (!videoscale) {
|
||||
missing_element_name = "videoscale";
|
||||
goto missing_element;
|
||||
}
|
||||
|
||||
/* add ghostpad */
|
||||
pad = gst_element_get_static_pad (csp, "sink");
|
||||
if (!gst_ghost_pad_set_target (GST_GHOST_PAD (vfbin->ghostpad), pad))
|
||||
goto error;
|
||||
gst_object_unref (pad);
|
||||
pad = NULL;
|
||||
|
||||
vfbin->elements_created = TRUE;
|
||||
GST_DEBUG_OBJECT (vfbin, "Elements succesfully created and linked");
|
||||
}
|
||||
|
||||
/* First check if we need to add/replace the internal sink */
|
||||
if (vfbin->video_sink) {
|
||||
/* check if we need to replace the current one */
|
||||
if (vfbin->user_video_sink && vfbin->video_sink != vfbin->user_video_sink) {
|
||||
gst_bin_remove (GST_BIN_CAST (vfbin), vfbin->video_sink);
|
||||
gst_object_unref (vfbin->video_sink);
|
||||
|
@ -203,20 +184,88 @@ gst_viewfinder_bin_create_elements (GstViewfinderBin * vfbin)
|
|||
}
|
||||
|
||||
gst_bin_add (GST_BIN_CAST (vfbin), gst_object_ref (vfbin->video_sink));
|
||||
newsink = TRUE;
|
||||
}
|
||||
|
||||
if (!videoscale)
|
||||
videoscale = gst_bin_get_by_name (GST_BIN_CAST (vfbin),
|
||||
/* check if we want to add/remove the conversion elements */
|
||||
if (vfbin->elements_created && vfbin->disable_converters) {
|
||||
/* remove the elements, user doesn't want them */
|
||||
|
||||
gst_ghost_pad_set_target (GST_GHOST_PAD (vfbin->ghostpad), NULL);
|
||||
csp = gst_bin_get_by_name (GST_BIN (vfbin), "vfbin-csp");
|
||||
videoscale = gst_bin_get_by_name (GST_BIN (vfbin), "vfbin-videoscale");
|
||||
|
||||
gst_bin_remove (GST_BIN (vfbin), csp);
|
||||
gst_bin_remove (GST_BIN (vfbin), videoscale);
|
||||
|
||||
gst_object_unref (csp);
|
||||
gst_object_unref (videoscale);
|
||||
|
||||
updated_converters = TRUE;
|
||||
} else if (!vfbin->elements_created && !vfbin->disable_converters) {
|
||||
gst_ghost_pad_set_target (GST_GHOST_PAD (vfbin->ghostpad), NULL);
|
||||
|
||||
/* add the elements, user wants them */
|
||||
csp =
|
||||
gst_camerabin_create_and_add_element (GST_BIN (vfbin),
|
||||
"ffmpegcolorspace", "vfbin-csp");
|
||||
if (!csp) {
|
||||
missing_element_name = "ffmpegcolorspace";
|
||||
goto missing_element;
|
||||
}
|
||||
|
||||
videoscale =
|
||||
gst_camerabin_create_and_add_element (GST_BIN (vfbin), "videoscale",
|
||||
"vfbin-videoscale");
|
||||
if (!videoscale) {
|
||||
missing_element_name = "videoscale";
|
||||
goto missing_element;
|
||||
}
|
||||
|
||||
vfbin->elements_created = TRUE;
|
||||
GST_DEBUG_OBJECT (vfbin, "Elements succesfully created and linked");
|
||||
|
||||
updated_converters = TRUE;
|
||||
}
|
||||
/* otherwise, just leave it as is */
|
||||
|
||||
/* if sink was replaced -> link it to the internal converters */
|
||||
if (newsink && !vfbin->disable_converters) {
|
||||
gboolean unref = FALSE;
|
||||
if (!videoscale) {
|
||||
videoscale = gst_bin_get_by_name (GST_BIN_CAST (vfbin),
|
||||
"vfbin-videscale");
|
||||
unref = TRUE;
|
||||
}
|
||||
|
||||
if (!gst_element_link_pads (videoscale, "src", vfbin->video_sink, "sink")) {
|
||||
GST_ELEMENT_ERROR (vfbin, CORE, NEGOTIATION, (NULL),
|
||||
("linking videoscale and viewfindersink failed"));
|
||||
}
|
||||
|
||||
/* prevent it from being removed from the bin at this point */
|
||||
if (unref)
|
||||
gst_object_unref (videoscale);
|
||||
videoscale = NULL;
|
||||
}
|
||||
|
||||
/* Check if we need a new ghostpad target */
|
||||
if (updated_converters || (newsink && vfbin->disable_converters)) {
|
||||
if (vfbin->disable_converters) {
|
||||
firstpad = gst_element_get_static_pad (vfbin->video_sink, "sink");
|
||||
} else {
|
||||
/* csp should always exist at this point */
|
||||
firstpad = gst_element_get_static_pad (csp, "sink");
|
||||
}
|
||||
}
|
||||
|
||||
/* need to change the ghostpad target if firstpad is set */
|
||||
if (firstpad) {
|
||||
if (!gst_ghost_pad_set_target (GST_GHOST_PAD (vfbin->ghostpad), firstpad))
|
||||
goto error;
|
||||
gst_object_unref (firstpad);
|
||||
firstpad = NULL;
|
||||
}
|
||||
|
||||
return TRUE;
|
||||
|
||||
missing_element:
|
||||
|
@ -230,8 +279,8 @@ missing_element:
|
|||
|
||||
error:
|
||||
GST_WARNING_OBJECT (vfbin, "Creating internal elements failed");
|
||||
if (pad)
|
||||
gst_object_unref (pad);
|
||||
if (firstpad)
|
||||
gst_object_unref (firstpad);
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
|
@ -288,6 +337,9 @@ gst_viewfinder_bin_set_property (GObject * object, guint prop_id,
|
|||
case PROP_VIDEO_SINK:
|
||||
gst_viewfinder_bin_set_video_sink (vfbin, g_value_get_object (value));
|
||||
break;
|
||||
case PROP_DISABLE_CONVERTERS:
|
||||
vfbin->disable_converters = g_value_get_boolean (value);
|
||||
break;
|
||||
default:
|
||||
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
|
||||
break;
|
||||
|
@ -304,6 +356,9 @@ gst_viewfinder_bin_get_property (GObject * object, guint prop_id,
|
|||
case PROP_VIDEO_SINK:
|
||||
g_value_set_object (value, vfbin->video_sink);
|
||||
break;
|
||||
case PROP_DISABLE_CONVERTERS:
|
||||
g_value_set_boolean (value, vfbin->disable_converters);
|
||||
break;
|
||||
default:
|
||||
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
|
||||
break;
|
||||
|
|
|
@ -43,6 +43,8 @@ struct _GstViewfinderBin
|
|||
GstElement *user_video_sink;
|
||||
|
||||
gboolean elements_created;
|
||||
|
||||
gboolean disable_converters;
|
||||
};
|
||||
|
||||
struct _GstViewfinderBinClass
|
||||
|
|
|
@ -187,10 +187,10 @@ gst_dvd_spu_exec_cmd_blk (GstDVDSpu * dvdspu, guint8 * data, guint8 * end)
|
|||
if (G_UNLIKELY (data + 7 >= end))
|
||||
return; /* Invalid SET_DAREA cmd at the end of the blk */
|
||||
|
||||
r->top = ((data[4] & 0x3f) << 4) | ((data[5] & 0xe0) >> 4);
|
||||
r->left = ((data[1] & 0x3f) << 4) | ((data[2] & 0xf0) >> 4);
|
||||
r->right = ((data[2] & 0x03) << 8) | data[3];
|
||||
r->bottom = ((data[5] & 0x03) << 8) | data[6];
|
||||
r->top = ((data[4] & 0xff) << 4) | ((data[5] & 0xf0) >> 4);
|
||||
r->left = ((data[1] & 0xff) << 4) | ((data[2] & 0xf0) >> 4);
|
||||
r->right = ((data[2] & 0x0f) << 8) | data[3];
|
||||
r->bottom = ((data[5] & 0x0f) << 8) | data[6];
|
||||
|
||||
GST_DEBUG_OBJECT (dvdspu,
|
||||
" Set Display Area top %u left %u bottom %u right %u", r->top,
|
||||
|
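The mask changes above follow from how SET_DAREA packs its four 12-bit coordinates into six bytes: left/right share data[1]..data[3] and top/bottom share data[4]..data[6], with the first value in each triple taking the high 12 bits and the second the low 12 bits. A minimal standalone sketch of that unpacking; the unpack_set_darea() helper is illustrative only and not part of the patch:

#include <glib.h>

/* Unpack the four 12-bit SET_DAREA coordinates (hypothetical helper,
 * mirroring the bit layout used by the patch above). */
static void
unpack_set_darea (const guint8 * data, guint16 * left, guint16 * right,
    guint16 * top, guint16 * bottom)
{
  *left = (data[1] << 4) | (data[2] >> 4);          /* high 12 bits   */
  *right = ((data[2] & 0x0f) << 8) | data[3];       /* low 12 bits    */
  *top = (data[4] << 4) | (data[5] >> 4);
  *bottom = ((data[5] & 0x0f) << 8) | data[6];
}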
|
|
@ -240,10 +240,9 @@ gst_hls_demux_init (GstHLSDemux * demux, GstHLSDemuxClass * klass)
|
|||
demux->fetcher_lock = g_mutex_new ();
|
||||
demux->queue = g_queue_new ();
|
||||
g_static_rec_mutex_init (&demux->task_lock);
|
||||
/* FIXME: This really should be a pad task instead */
|
||||
demux->task = gst_task_create ((GstTaskFunction) gst_hls_demux_loop, demux);
|
||||
gst_task_set_lock (demux->task, &demux->task_lock);
|
||||
|
||||
demux->position = 0;
|
||||
}
|
||||
|
||||
static void
|
||||
|
@ -314,11 +313,95 @@ gst_hls_demux_change_state (GstElement * element, GstStateChange transition)
|
|||
static gboolean
|
||||
gst_hls_demux_src_event (GstPad * pad, GstEvent * event)
|
||||
{
|
||||
GstHLSDemux *demux;
|
||||
|
||||
demux = GST_HLS_DEMUX (gst_pad_get_element_private (pad));
|
||||
|
||||
switch (event->type) {
|
||||
/* FIXME: ignore seek event for the moment */
|
||||
case GST_EVENT_SEEK:
|
||||
gst_event_unref (event);
|
||||
{
|
||||
gdouble rate;
|
||||
GstFormat format;
|
||||
GstSeekFlags flags;
|
||||
GstSeekType start_type, stop_type;
|
||||
gint64 start, stop;
|
||||
GList *walk;
|
||||
gint current_pos;
|
||||
gint current_sequence;
|
||||
gint target_second;
|
||||
GstM3U8MediaFile *file;
|
||||
|
||||
GST_INFO_OBJECT (demux, "Received GST_EVENT_SEEK");
|
||||
|
||||
if (gst_m3u8_client_is_live (demux->client)) {
|
||||
GST_WARNING_OBJECT (demux, "Received seek event for live stream");
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
|
||||
&stop_type, &stop);
|
||||
|
||||
if (format != GST_FORMAT_TIME)
|
||||
return FALSE;
|
||||
|
||||
GST_DEBUG_OBJECT (demux, "seek event, rate: %f start: %" GST_TIME_FORMAT
|
||||
" stop: %" GST_TIME_FORMAT, rate, GST_TIME_ARGS (start),
|
||||
GST_TIME_ARGS (stop));
|
||||
|
||||
file = GST_M3U8_MEDIA_FILE (demux->client->current->files->data);
|
||||
current_sequence = file->sequence;
|
||||
current_pos = 0;
|
||||
target_second = start / GST_SECOND;
|
||||
GST_DEBUG_OBJECT (demux, "Target seek to %d", target_second);
|
||||
for (walk = demux->client->current->files; walk; walk = walk->next) {
|
||||
file = walk->data;
|
||||
|
||||
current_sequence = file->sequence;
|
||||
if (current_pos <= target_second
|
||||
&& target_second < current_pos + file->duration) {
|
||||
break;
|
||||
}
|
||||
current_pos += file->duration;
|
||||
}
|
||||
|
||||
if (walk == NULL) {
|
||||
GST_WARNING_OBJECT (demux, "Could not find seeked fragment");
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
if (flags & GST_SEEK_FLAG_FLUSH) {
|
||||
GST_DEBUG_OBJECT (demux, "sending flush start");
|
||||
gst_pad_push_event (demux->srcpad, gst_event_new_flush_start ());
|
||||
}
|
||||
|
||||
gst_hls_demux_stop_fetcher (demux, TRUE);
|
||||
gst_task_pause (demux->task);
|
||||
g_cond_signal (demux->thread_cond);
|
||||
|
||||
/* wait for streaming to finish */
|
||||
g_static_rec_mutex_lock (&demux->task_lock);
|
||||
|
||||
demux->need_cache = TRUE;
|
||||
while (!g_queue_is_empty (demux->queue)) {
|
||||
GstBuffer *buf = g_queue_pop_head (demux->queue);
|
||||
gst_buffer_unref (buf);
|
||||
}
|
||||
|
||||
GST_DEBUG_OBJECT (demux, "seeking to sequence %d", current_sequence);
|
||||
demux->client->sequence = current_sequence;
|
||||
demux->position = start;
|
||||
demux->need_segment = TRUE;
|
||||
|
||||
if (flags & GST_SEEK_FLAG_FLUSH) {
|
||||
GST_DEBUG_OBJECT (demux, "sending flush stop");
|
||||
gst_pad_push_event (demux->srcpad, gst_event_new_flush_stop ());
|
||||
}
|
||||
|
||||
gst_task_start (demux->task);
|
||||
g_static_rec_mutex_unlock (&demux->task_lock);
|
||||
|
||||
return TRUE;
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
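The seek path above locates the target fragment by accumulating per-fragment durations until the requested second falls inside one of them. A standalone sketch of that lookup under the same assumption of integer per-second fragment durations; the Fragment type and find_fragment_for_time() helper are illustrative only, the real code walks a GList of GstM3U8MediaFile entries:

#include <glib.h>

typedef struct {
  gint sequence;   /* media sequence number */
  gint duration;   /* fragment duration in seconds */
} Fragment;

/* Returns the index of the fragment containing target_second, or -1
 * when the target lies beyond the end of the playlist. */
static gint
find_fragment_for_time (const Fragment * fragments, guint n,
    gint target_second)
{
  gint current_pos = 0;
  guint i;

  for (i = 0; i < n; i++) {
    if (current_pos <= target_second &&
        target_second < current_pos + fragments[i].duration)
      return (gint) i;
    current_pos += fragments[i].duration;
  }
  return -1;
}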
@ -405,7 +488,7 @@ gst_hls_demux_src_query (GstPad * pad, GstQuery * query)
|
|||
|
||||
switch (query->type) {
|
||||
case GST_QUERY_DURATION:{
|
||||
GstClockTime duration;
|
||||
GstClockTime duration = -1;
|
||||
GstFormat fmt;
|
||||
|
||||
gst_query_parse_duration (query, &fmt, NULL);
|
||||
|
@ -416,6 +499,8 @@ gst_hls_demux_src_query (GstPad * pad, GstQuery * query)
|
|||
ret = TRUE;
|
||||
}
|
||||
}
|
||||
GST_INFO_OBJECT (hlsdemux, "GST_QUERY_DURATION returns %s with duration %"
|
||||
GST_TIME_FORMAT, ret ? "TRUE" : "FALSE", GST_TIME_ARGS (duration));
|
||||
break;
|
||||
}
|
||||
case GST_QUERY_URI:
|
||||
|
@ -428,18 +513,24 @@ gst_hls_demux_src_query (GstPad * pad, GstQuery * query)
|
|||
break;
|
||||
case GST_QUERY_SEEKING:{
|
||||
GstFormat fmt;
|
||||
gint stop = -1;
|
||||
gint64 stop = -1;
|
||||
|
||||
gst_query_parse_seeking (query, &fmt, NULL, NULL, NULL);
|
||||
GST_INFO_OBJECT (hlsdemux, "Received GST_QUERY_SEEKING with format %d",
|
||||
fmt);
|
||||
if (fmt == GST_FORMAT_TIME) {
|
||||
GstClockTime duration;
|
||||
|
||||
duration = gst_m3u8_client_get_duration (hlsdemux->client);
|
||||
if (GST_CLOCK_TIME_IS_VALID (duration) && duration > 0)
|
||||
stop = duration;
|
||||
}
|
||||
gst_query_set_seeking (query, fmt, FALSE, 0, stop);
|
||||
|
||||
gst_query_set_seeking (query, fmt,
|
||||
!gst_m3u8_client_is_live (hlsdemux->client), 0, stop);
|
||||
ret = TRUE;
|
||||
GST_INFO_OBJECT (hlsdemux, "GST_QUERY_SEEKING returning with stop : %"
|
||||
GST_TIME_FORMAT, GST_TIME_ARGS (stop));
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
|
@ -570,8 +661,8 @@ switch_pads (GstHLSDemux * demux, GstCaps * newcaps)
|
|||
GST_DEBUG_FUNCPTR (gst_hls_demux_src_query));
|
||||
gst_pad_set_element_private (demux->srcpad, demux);
|
||||
gst_pad_set_active (demux->srcpad, TRUE);
|
||||
gst_element_add_pad (GST_ELEMENT (demux), demux->srcpad);
|
||||
gst_pad_set_caps (demux->srcpad, newcaps);
|
||||
gst_element_add_pad (GST_ELEMENT (demux), demux->srcpad);
|
||||
|
||||
gst_element_no_more_pads (GST_ELEMENT (demux));
|
||||
|
||||
|
@ -608,23 +699,26 @@ gst_hls_demux_loop (GstHLSDemux * demux)
|
|||
if (demux->end_of_playlist)
|
||||
goto end_of_playlist;
|
||||
|
||||
GST_TASK_WAIT (demux->task);
|
||||
/* If the queue is still empty check again if it's the end of the
|
||||
* playlist in case we reached it after being woken up */
|
||||
if (g_queue_is_empty (demux->queue) && demux->end_of_playlist)
|
||||
goto end_of_playlist;
|
||||
goto empty_queue;
|
||||
}
|
||||
|
||||
buf = g_queue_pop_head (demux->queue);
|
||||
|
||||
/* Figure out if we need to create/switch pads */
|
||||
if (G_UNLIKELY (!demux->srcpad
|
||||
|| GST_BUFFER_CAPS (buf) != GST_PAD_CAPS (demux->srcpad))) {
|
||||
|| GST_BUFFER_CAPS (buf) != GST_PAD_CAPS (demux->srcpad)
|
||||
|| demux->need_segment)) {
|
||||
switch_pads (demux, GST_BUFFER_CAPS (buf));
|
||||
demux->need_segment = TRUE;
|
||||
}
|
||||
if (demux->need_segment) {
|
||||
/* And send a newsegment */
|
||||
GST_DEBUG_OBJECT (demux, "Sending new-segment. Segment start:%"
|
||||
GST_TIME_FORMAT, GST_TIME_ARGS (demux->position));
|
||||
gst_pad_push_event (demux->srcpad,
|
||||
gst_event_new_new_segment (0, 1.0, GST_FORMAT_TIME, demux->position,
|
||||
gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, demux->position,
|
||||
GST_CLOCK_TIME_NONE, demux->position));
|
||||
demux->need_segment = FALSE;
|
||||
}
|
||||
|
||||
if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buf)))
|
||||
|
@ -655,9 +749,16 @@ cache_error:
|
|||
error:
|
||||
{
|
||||
/* FIXME: handle error */
|
||||
GST_DEBUG_OBJECT (demux, "error, stopping task");
|
||||
gst_hls_demux_stop (demux);
|
||||
return;
|
||||
}
|
||||
|
||||
empty_queue:
|
||||
{
|
||||
gst_task_pause (demux->task);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
static GstBusSyncReply
|
||||
|
@ -735,6 +836,9 @@ gst_hls_demux_reset (GstHLSDemux * demux, gboolean dispose)
|
|||
gst_buffer_unref (buf);
|
||||
}
|
||||
g_queue_clear (demux->queue);
|
||||
|
||||
demux->position = 0;
|
||||
demux->need_segment = TRUE;
|
||||
}
|
||||
|
||||
static gboolean
|
||||
|
@ -834,7 +938,7 @@ gst_hls_demux_cache_fragments (GstHLSDemux * demux)
|
|||
/* If this playlist is a variant playlist, select the first one
|
||||
* and update it */
|
||||
if (gst_m3u8_client_has_variant_playlist (demux->client)) {
|
||||
GstM3U8 *child = demux->client->main->lists->data;
|
||||
GstM3U8 *child = demux->client->main->current_variant->data;
|
||||
gst_m3u8_client_set_current (demux->client, child);
|
||||
if (!gst_hls_demux_update_playlist (demux, FALSE)) {
|
||||
GST_ERROR_OBJECT (demux, "Could not fetch the child playlist %s",
|
||||
|
@ -854,7 +958,13 @@ gst_hls_demux_cache_fragments (GstHLSDemux * demux)
|
|||
}
|
||||
|
||||
/* Cache the first fragments */
|
||||
for (i = 0; i < demux->fragments_cache - 1; i++) {
|
||||
for (i = 0; i < demux->fragments_cache; i++) {
|
||||
gst_element_post_message (GST_ELEMENT (demux),
|
||||
gst_message_new_buffering (GST_OBJECT (demux),
|
||||
100 * i / demux->fragments_cache));
|
||||
g_get_current_time (&demux->next_update);
|
||||
g_time_val_add (&demux->next_update,
|
||||
demux->client->current->targetduration * 1000000);
|
||||
if (!gst_hls_demux_get_next_fragment (demux, FALSE)) {
|
||||
if (!demux->cancelled)
|
||||
GST_ERROR_OBJECT (demux, "Error caching the first fragments");
|
||||
|
@ -863,7 +973,10 @@ gst_hls_demux_cache_fragments (GstHLSDemux * demux)
|
|||
/* make sure we stop caching fragments if something cancelled it */
|
||||
if (demux->cancelled)
|
||||
return FALSE;
|
||||
gst_hls_demux_switch_playlist (demux);
|
||||
}
|
||||
gst_element_post_message (GST_ELEMENT (demux),
|
||||
gst_message_new_buffering (GST_OBJECT (demux), 100));
|
||||
|
||||
g_get_current_time (&demux->next_update);
|
||||
|
||||
|
@ -968,17 +1081,17 @@ gst_hls_demux_change_playlist (GstHLSDemux * demux, gboolean is_fast)
|
|||
GstStructure *s;
|
||||
|
||||
if (is_fast)
|
||||
list = g_list_next (demux->client->main->lists);
|
||||
list = g_list_next (demux->client->main->current_variant);
|
||||
else
|
||||
list = g_list_previous (demux->client->main->lists);
|
||||
list = g_list_previous (demux->client->main->current_variant);
|
||||
|
||||
/* Don't do anything else if the playlist is the same */
|
||||
if (!list || list->data == demux->client->current)
|
||||
return TRUE;
|
||||
|
||||
demux->client->main->lists = list;
|
||||
demux->client->main->current_variant = list;
|
||||
|
||||
gst_m3u8_client_set_current (demux->client, demux->client->main->lists->data);
|
||||
gst_m3u8_client_set_current (demux->client, list->data);
|
||||
gst_hls_demux_update_playlist (demux, TRUE);
|
||||
GST_INFO_OBJECT (demux, "Client is %s, switching to bitrate %d",
|
||||
is_fast ? "fast" : "slow", demux->client->current->bandwidth);
|
||||
|
@ -1065,13 +1178,14 @@ gst_hls_demux_get_next_fragment (GstHLSDemux * demux, gboolean retry)
|
|||
guint avail;
|
||||
const gchar *next_fragment_uri;
|
||||
GstClockTime duration;
|
||||
GstClockTime timestamp;
|
||||
gboolean discont;
|
||||
|
||||
if (!gst_m3u8_client_get_next_fragment (demux->client, &discont,
|
||||
&next_fragment_uri, &duration)) {
|
||||
&next_fragment_uri, &duration, ×tamp)) {
|
||||
GST_INFO_OBJECT (demux, "This playlist doesn't contain more fragments");
|
||||
demux->end_of_playlist = TRUE;
|
||||
GST_TASK_SIGNAL (demux->task);
|
||||
gst_task_start (demux->task);
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
|
@ -1083,6 +1197,7 @@ gst_hls_demux_get_next_fragment (GstHLSDemux * demux, gboolean retry)
|
|||
avail = gst_adapter_available (demux->download);
|
||||
buf = gst_adapter_take_buffer (demux->download, avail);
|
||||
GST_BUFFER_DURATION (buf) = duration;
|
||||
GST_BUFFER_TIMESTAMP (buf) = timestamp;
|
||||
|
||||
/* We actually need to do this every time we switch bitrate */
|
||||
if (G_UNLIKELY (demux->do_typefind)) {
|
||||
|
@ -1105,7 +1220,7 @@ gst_hls_demux_get_next_fragment (GstHLSDemux * demux, gboolean retry)
|
|||
}
|
||||
|
||||
g_queue_push_tail (demux->queue, buf);
|
||||
GST_TASK_SIGNAL (demux->task);
|
||||
gst_task_start (demux->task);
|
||||
gst_adapter_clear (demux->download);
|
||||
return TRUE;
|
||||
}
|
||||
|
|
|
@ -89,6 +89,7 @@ struct _GstHLSDemux
|
|||
|
||||
/* Position in the stream */
|
||||
GstClockTime position;
|
||||
gboolean need_segment;
|
||||
};
|
||||
|
||||
struct _GstHLSDemuxClass
|
||||
|
|
|
@ -206,6 +206,7 @@ gst_m3u8_update (GstM3U8 * self, gchar * data, gboolean * updated)
|
|||
|
||||
if (!g_str_has_prefix (data, "#EXTM3U")) {
|
||||
GST_WARNING ("Data doesn't start with #EXTM3U");
|
||||
*updated = FALSE;
|
||||
g_free (data);
|
||||
return FALSE;
|
||||
}
|
||||
|
@ -350,11 +351,22 @@ gst_m3u8_update (GstM3U8 * self, gchar * data, gboolean * updated)
|
|||
}
|
||||
|
||||
/* reorder playlists by bitrate */
|
||||
if (self->lists)
|
||||
if (self->lists) {
|
||||
gchar *top_variant_uri = NULL;
|
||||
|
||||
if (!self->current_variant)
|
||||
top_variant_uri = GST_M3U8 (self->lists->data)->uri;
|
||||
else
|
||||
top_variant_uri = GST_M3U8 (self->current_variant->data)->uri;
|
||||
|
||||
self->lists =
|
||||
g_list_sort (self->lists,
|
||||
(GCompareFunc) gst_m3u8_compare_playlist_by_bitrate);
|
||||
|
||||
self->current_variant = g_list_find_custom (self->lists, top_variant_uri,
|
||||
(GCompareFunc) _m3u8_compare_uri);
|
||||
}
|
||||
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
|
@ -416,7 +428,7 @@ gst_m3u8_client_update (GstM3U8Client * self, gchar * data)
|
|||
/* select the first playlist, for now */
|
||||
if (!self->current) {
|
||||
if (self->main->lists) {
|
||||
self->current = g_list_first (self->main->lists)->data;
|
||||
self->current = self->main->current_variant->data;
|
||||
} else {
|
||||
self->current = self->main;
|
||||
}
|
||||
|
@ -442,9 +454,11 @@ _find_next (GstM3U8MediaFile * file, GstM3U8Client * client)
|
|||
|
||||
gboolean
|
||||
gst_m3u8_client_get_next_fragment (GstM3U8Client * client,
|
||||
gboolean * discontinuity, const gchar ** uri, GstClockTime * duration)
|
||||
gboolean * discontinuity, const gchar ** uri, GstClockTime * duration,
|
||||
GstClockTime * timestamp)
|
||||
{
|
||||
GList *l;
|
||||
GList *walk;
|
||||
GstM3U8MediaFile *file;
|
||||
|
||||
g_return_val_if_fail (client != NULL, FALSE);
|
||||
|
@ -464,6 +478,15 @@ gst_m3u8_client_get_next_fragment (GstM3U8Client * client,
|
|||
|
||||
*uri = file->uri;
|
||||
*duration = file->duration * GST_SECOND;
|
||||
|
||||
*timestamp = 0;
|
||||
for (walk = client->current->files; walk; walk = walk->next) {
|
||||
if (walk == l)
|
||||
break;
|
||||
*timestamp += GST_M3U8_MEDIA_FILE (walk->data)->duration;
|
||||
}
|
||||
*timestamp *= GST_SECOND;
|
||||
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
|
|
|
@ -29,6 +29,7 @@ G_BEGIN_DECLS typedef struct _GstM3U8 GstM3U8;
|
|||
typedef struct _GstM3U8MediaFile GstM3U8MediaFile;
|
||||
typedef struct _GstM3U8Client GstM3U8Client;
|
||||
|
||||
#define GST_M3U8(m) ((GstM3U8*)m)
|
||||
#define GST_M3U8_MEDIA_FILE(f) ((GstM3U8MediaFile*)f)
|
||||
|
||||
struct _GstM3U8
|
||||
|
@ -50,6 +51,7 @@ struct _GstM3U8
|
|||
/*< private > */
|
||||
gchar *last_data;
|
||||
GList *lists; /* list of GstM3U8 from the main playlist */
|
||||
GList *current_variant; /* Current variant playlist used */
|
||||
GstM3U8 *parent; /* main playlist (if any) */
|
||||
guint mediasequence; /* EXT-X-MEDIA-SEQUENCE & increased with new media file */
|
||||
};
|
||||
|
@ -76,7 +78,8 @@ void gst_m3u8_client_free (GstM3U8Client * client);
|
|||
gboolean gst_m3u8_client_update (GstM3U8Client * client, gchar * data);
|
||||
void gst_m3u8_client_set_current (GstM3U8Client * client, GstM3U8 * m3u8);
|
||||
gboolean gst_m3u8_client_get_next_fragment (GstM3U8Client * client,
|
||||
gboolean * discontinuity, const gchar ** uri, GstClockTime * duration);
|
||||
gboolean * discontinuity, const gchar ** uri, GstClockTime * duration,
|
||||
GstClockTime * timestamp);
|
||||
GstClockTime gst_m3u8_client_get_duration (GstM3U8Client * client);
|
||||
const gchar *gst_m3u8_client_get_uri(GstM3U8Client * client);
|
||||
gboolean gst_m3u8_client_has_variant_playlist(GstM3U8Client * client);
|
||||
|
|
|
@ -215,6 +215,8 @@ mpegts_base_reset (MpegTSBase * base)
|
|||
base->mode = BASE_MODE_STREAMING;
|
||||
base->seen_pat = FALSE;
|
||||
base->first_pat_offset = -1;
|
||||
base->in_gap = 0;
|
||||
base->first_buf_ts = GST_CLOCK_TIME_NONE;
|
||||
|
||||
if (klass->reset)
|
||||
klass->reset (base);
|
||||
|
@ -1180,14 +1182,22 @@ mpegts_base_sink_event (GstPad * pad, GstEvent * event)
|
|||
gst_segment_set_newsegment_full (&base->segment, update, rate,
|
||||
applied_rate, format, start, stop, position);
|
||||
gst_event_unref (event);
|
||||
base->in_gap = GST_CLOCK_TIME_NONE;
|
||||
base->first_buf_ts = GST_CLOCK_TIME_NONE;
|
||||
}
|
||||
break;
|
||||
case GST_EVENT_EOS:
|
||||
res = gst_mpegts_base_handle_eos (base);
|
||||
break;
|
||||
case GST_EVENT_FLUSH_START:
|
||||
gst_segment_init (&base->segment, GST_FORMAT_UNDEFINED);
|
||||
mpegts_packetizer_flush (base->packetizer);
|
||||
res = GST_MPEGTS_BASE_GET_CLASS (base)->push_event (base, event);
|
||||
gst_event_unref (event);
|
||||
break;
|
||||
case GST_EVENT_FLUSH_STOP:
|
||||
gst_segment_init (&base->segment, GST_FORMAT_UNDEFINED);
|
||||
base->seen_pat = FALSE;
|
||||
base->first_pat_offset = -1;
|
||||
/* Passthrough */
|
||||
default:
|
||||
res = GST_MPEGTS_BASE_GET_CLASS (base)->push_event (base, event);
|
||||
|
@ -1226,6 +1236,13 @@ mpegts_base_chain (GstPad * pad, GstBuffer * buf)
|
|||
base = GST_MPEGTS_BASE (gst_object_get_parent (GST_OBJECT (pad)));
|
||||
packetizer = base->packetizer;
|
||||
|
||||
if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (base->first_buf_ts)) &&
|
||||
GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
|
||||
base->first_buf_ts = GST_BUFFER_TIMESTAMP (buf);
|
||||
GST_DEBUG_OBJECT (base, "first buffer timestamp %" GST_TIME_FORMAT,
|
||||
GST_TIME_ARGS (base->first_buf_ts));
|
||||
}
|
||||
|
||||
mpegts_packetizer_push (base->packetizer, buf);
|
||||
while (((pret =
|
||||
mpegts_packetizer_next_packet (base->packetizer,
|
||||
|
@ -1399,6 +1416,13 @@ mpegts_base_handle_seek_event (MpegTSBase * base, GstPad * pad,
|
|||
if (format != GST_FORMAT_TIME)
|
||||
return FALSE;
|
||||
|
||||
/* First try if upstream supports seeking in TIME format */
|
||||
if (gst_pad_push_event (base->sinkpad, gst_event_ref (event))) {
|
||||
GST_DEBUG ("upstream handled SEEK event");
|
||||
gst_event_unref (event);
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
GST_DEBUG ("seek event, rate: %f start: %" GST_TIME_FORMAT
|
||||
" stop: %" GST_TIME_FORMAT, rate, GST_TIME_ARGS (start),
|
||||
GST_TIME_ARGS (stop));
|
||||
|
|
|
@ -130,6 +130,10 @@ struct _MpegTSBase {
|
|||
/* Offset from the origin to the first PAT (pullmode) */
|
||||
guint64 first_pat_offset;
|
||||
|
||||
/* interpolation gap between the upstream timestamp and the pts */
|
||||
GstClockTime in_gap;
|
||||
GstClockTime first_buf_ts;
|
||||
|
||||
/* Upstream segment */
|
||||
GstSegment segment;
|
||||
};
|
||||
|
|
|
@ -387,14 +387,17 @@ gst_ts_demux_srcpad_query (GstPad * pad, GstQuery * query)
|
|||
gboolean res = TRUE;
|
||||
GstFormat format;
|
||||
GstTSDemux *demux;
|
||||
MpegTSBase *base;
|
||||
|
||||
demux = GST_TS_DEMUX (gst_pad_get_parent (pad));
|
||||
base = GST_MPEGTS_BASE (demux);
|
||||
|
||||
switch (GST_QUERY_TYPE (query)) {
|
||||
case GST_QUERY_DURATION:
|
||||
GST_DEBUG ("query duration");
|
||||
gst_query_parse_duration (query, &format, NULL);
|
||||
if (format == GST_FORMAT_TIME) {
|
||||
if (!gst_pad_peer_query (base->sinkpad, query))
|
||||
gst_query_set_duration (query, GST_FORMAT_TIME,
|
||||
demux->segment.duration);
|
||||
} else {
|
||||
|
@ -406,6 +409,14 @@ gst_ts_demux_srcpad_query (GstPad * pad, GstQuery * query)
|
|||
GST_DEBUG ("query seeking");
|
||||
gst_query_parse_seeking (query, &format, NULL, NULL, NULL);
|
||||
if (format == GST_FORMAT_TIME) {
|
||||
gboolean seekable = FALSE;
|
||||
|
||||
if (gst_pad_peer_query (base->sinkpad, query))
|
||||
gst_query_parse_seeking (query, NULL, &seekable, NULL, NULL);
|
||||
|
||||
/* If upstream is not seekable in TIME format we use
|
||||
* our own values here */
|
||||
if (!seekable)
|
||||
gst_query_set_seeking (query, GST_FORMAT_TIME,
|
||||
demux->parent.mode != BASE_MODE_PUSHING, 0,
|
||||
demux->segment.duration);
|
||||
|
@ -871,9 +882,10 @@ gst_ts_demux_srcpad_event (GstPad * pad, GstEvent * event)
|
|||
switch (GST_EVENT_TYPE (event)) {
|
||||
case GST_EVENT_SEEK:
|
||||
res = mpegts_base_handle_seek_event ((MpegTSBase *) demux, pad, event);
|
||||
if (!res) {
|
||||
if (res)
|
||||
demux->need_newsegment = TRUE;
|
||||
else
|
||||
GST_WARNING ("seeking failed");
|
||||
}
|
||||
gst_event_unref (event);
|
||||
break;
|
||||
default:
|
||||
|
@ -1217,13 +1229,24 @@ gst_ts_demux_stream_added (MpegTSBase * base, MpegTSBaseStream * bstream,
|
|||
static void
|
||||
gst_ts_demux_stream_removed (MpegTSBase * base, MpegTSBaseStream * bstream)
|
||||
{
|
||||
GstTSDemux *demux = GST_TS_DEMUX (base);
|
||||
TSDemuxStream *stream = (TSDemuxStream *) bstream;
|
||||
|
||||
if (stream->pad) {
|
||||
if (gst_pad_is_active (stream->pad)) {
|
||||
GST_DEBUG_OBJECT (stream->pad, "Flushing out pending data");
|
||||
gboolean need_newsegment = demux->need_newsegment;
|
||||
|
||||
/* We must not send the newsegment when flushing the pending data
|
||||
on the removed stream. We should only push it when the newly added
|
||||
stream finishes parsing its PTS */
|
||||
demux->need_newsegment = FALSE;
|
||||
|
||||
/* Flush out all data */
|
||||
GST_DEBUG_OBJECT (stream->pad, "Flushing out pending data");
|
||||
gst_ts_demux_push_pending_data ((GstTSDemux *) base, stream);
|
||||
|
||||
demux->need_newsegment = need_newsegment;
|
||||
|
||||
GST_DEBUG_OBJECT (stream->pad, "Pushing out EOS");
|
||||
gst_pad_push_event (stream->pad, gst_event_new_eos ());
|
||||
GST_DEBUG_OBJECT (stream->pad, "Deactivating and removing pad");
|
||||
|
@ -1570,6 +1593,7 @@ find_timestamps (MpegTSBase * base, guint64 initoff, guint64 * offset)
|
|||
gst_structure_free (base->pat);
|
||||
base->pat = NULL;
|
||||
mpegts_base_remove_program (base, demux->current_program_number);
|
||||
demux->program = NULL;
|
||||
|
||||
/* Find end position */
|
||||
if (G_UNLIKELY (!gst_pad_query_peer_duration (base->sinkpad, &format,
|
||||
|
@ -1628,6 +1652,7 @@ beach:
|
|||
base->pat = NULL;
|
||||
}
|
||||
mpegts_base_remove_program (base, demux->current_program_number);
|
||||
demux->program = NULL;
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
@ -1750,6 +1775,7 @@ beach:
|
|||
pcroffset->pcr = pcrs[nbpcr - 1];
|
||||
pcroffset->offset = pcroffs[nbpcr - 1];
|
||||
}
|
||||
if (nbpcr > 1) {
|
||||
GST_DEBUG ("pcrdiff:%" GST_TIME_FORMAT " offsetdiff %" G_GUINT64_FORMAT,
|
||||
GST_TIME_ARGS (PCRTIME_TO_GSTTIME (pcrs[nbpcr - 1] - pcrs[0])),
|
||||
pcroffs[nbpcr - 1] - pcroffs[0]);
|
||||
|
@ -1759,6 +1785,7 @@ beach:
|
|||
GST_DEBUG ("Average PCR interval %" G_GUINT64_FORMAT,
|
||||
(pcroffs[nbpcr - 1] - pcroffs[0]) / nbpcr);
|
||||
}
|
||||
}
|
||||
/* Swallow any errors if it happened during the end scanning */
|
||||
if (!isinitial)
|
||||
ret = GST_FLOW_OK;
|
||||
|
@ -1897,6 +1924,7 @@ TSPcrOffset_find_offset (gconstpointer a, gconstpointer b, gpointer user_data)
|
|||
static GstFlowReturn
|
||||
gst_ts_demux_parse_pes_header (GstTSDemux * demux, TSDemuxStream * stream)
|
||||
{
|
||||
MpegTSBase *base = (MpegTSBase *) demux;
|
||||
PESHeader header;
|
||||
GstFlowReturn res = GST_FLOW_OK;
|
||||
gint offset = 0;
|
||||
|
@ -1945,7 +1973,38 @@ gst_ts_demux_parse_pes_header (GstTSDemux * demux, TSDemuxStream * stream)
|
|||
#endif
|
||||
|
||||
stream->pts = time = MPEGTIME_TO_GSTTIME (header.PTS);
|
||||
GST_BUFFER_TIMESTAMP (stream->pendingbuffers[0]) = time;
|
||||
GST_DEBUG_OBJECT (base, "stream PTS %" GST_TIME_FORMAT,
|
||||
GST_TIME_ARGS (stream->pts));
|
||||
|
||||
/* safe default if insufficient upstream info */
|
||||
if (G_UNLIKELY (!GST_CLOCK_TIME_IS_VALID (base->in_gap) &&
|
||||
GST_CLOCK_TIME_IS_VALID (base->first_buf_ts) &&
|
||||
base->mode == BASE_MODE_PUSHING &&
|
||||
base->segment.format == GST_FORMAT_TIME)) {
|
||||
/* Find the earliest current PTS we're going to push */
|
||||
GstClockTime firstpts = GST_CLOCK_TIME_NONE;
|
||||
GList *tmp;
|
||||
|
||||
for (tmp = demux->program->stream_list; tmp; tmp = tmp->next) {
|
||||
TSDemuxStream *pstream = (TSDemuxStream *) tmp->data;
|
||||
if (!GST_CLOCK_TIME_IS_VALID (firstpts) || pstream->pts < firstpts)
|
||||
firstpts = pstream->pts;
|
||||
}
|
||||
|
||||
base->in_gap = base->first_buf_ts - firstpts;
|
||||
GST_DEBUG_OBJECT (base, "upstream segment start %" GST_TIME_FORMAT
|
||||
", first buffer timestamp: %" GST_TIME_FORMAT
|
||||
", first PTS: %" GST_TIME_FORMAT
|
||||
", interpolation gap: %" GST_TIME_FORMAT,
|
||||
GST_TIME_ARGS (base->segment.start),
|
||||
GST_TIME_ARGS (base->first_buf_ts), GST_TIME_ARGS (firstpts),
|
||||
GST_TIME_ARGS (base->in_gap));
|
||||
}
|
||||
|
||||
if (!GST_CLOCK_TIME_IS_VALID (base->in_gap))
|
||||
base->in_gap = 0;
|
||||
|
||||
GST_BUFFER_TIMESTAMP (stream->pendingbuffers[0]) = time + base->in_gap;
|
||||
}
|
||||
|
||||
if (header.DTS != -1)
|
||||
|
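In push mode the hunk above derives an interpolation gap once, from the first upstream buffer timestamp and the earliest PTS seen across the program's streams, and then shifts every outgoing timestamp by it. Read as a formula (an interpretation of the code, not text from the patch):

\mathrm{in\_gap} \;=\; \mathrm{first\_buf\_ts} \;-\; \min_{s \,\in\, \text{program}} \mathrm{PTS}_s,
\qquad
\mathrm{out\_ts} \;=\; \mathrm{PTS} \;+\; \mathrm{in\_gap}

so the first pushed buffer lines up with the upstream timestamp while the relative PTS spacing within the program is preserved; if no usable gap can be computed, in_gap falls back to 0.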
@ -2038,12 +2097,12 @@ static void
|
|||
calculate_and_push_newsegment (GstTSDemux * demux, TSDemuxStream * stream)
|
||||
{
|
||||
MpegTSBase *base = (MpegTSBase *) demux;
|
||||
GstClockTime firstpts = GST_CLOCK_TIME_NONE;
|
||||
GstEvent *newsegmentevent;
|
||||
GList *tmp;
|
||||
gint64 start, stop, position;
|
||||
GstClockTime firstpts = GST_CLOCK_TIME_NONE;
|
||||
GList *tmp;
|
||||
|
||||
GST_DEBUG ("Creating new newsegment");
|
||||
GST_DEBUG ("Creating new newsegment for stream %p", stream);
|
||||
|
||||
/* Outgoing newsegment values
|
||||
* start : The first/start PTS
|
||||
|
@ -2055,9 +2114,9 @@ calculate_and_push_newsegment (GstTSDemux * demux, TSDemuxStream * stream)
|
|||
* PTS to that remote clock).
|
||||
*/
|
||||
|
||||
/* Find the earliest current PTS we're going to push */
|
||||
for (tmp = demux->program->stream_list; tmp; tmp = tmp->next) {
|
||||
TSDemuxStream *pstream = (TSDemuxStream *) tmp->data;
|
||||
|
||||
if (!GST_CLOCK_TIME_IS_VALID (firstpts) || pstream->pts < firstpts)
|
||||
firstpts = pstream->pts;
|
||||
}
|
||||
|
@ -2077,13 +2136,14 @@ calculate_and_push_newsegment (GstTSDemux * demux, TSDemuxStream * stream)
|
|||
GST_TIME_ARGS (demux->segment.duration),
|
||||
GST_TIME_ARGS (demux->segment.time));
|
||||
|
||||
if (demux->segment.time == 0 && base->segment.format == GST_FORMAT_TIME)
|
||||
demux->segment.time = base->segment.time;
|
||||
GST_DEBUG ("stream pts: %" GST_TIME_FORMAT " first pts: %" GST_TIME_FORMAT,
|
||||
GST_TIME_ARGS (stream->pts), GST_TIME_ARGS (firstpts));
|
||||
|
||||
start = firstpts;
|
||||
stop = GST_CLOCK_TIME_NONE;
|
||||
position = demux->segment.time ? firstpts - demux->segment.time : 0;
|
||||
demux->segment.time = start;
|
||||
start = base->segment.start;
|
||||
stop = base->segment.stop;
|
||||
/* Shift the start depending on our position in the stream */
|
||||
start += firstpts + base->in_gap - base->first_buf_ts;
|
||||
position = start;
|
||||
} else {
|
||||
/* pull mode */
|
||||
GST_DEBUG ("pull-based. Segment start:%" GST_TIME_FORMAT " duration:%"
|
||||
|
@ -2107,7 +2167,7 @@ calculate_and_push_newsegment (GstTSDemux * demux, TSDemuxStream * stream)
|
|||
GST_TIME_FORMAT " time: %" GST_TIME_FORMAT, GST_TIME_ARGS (start),
|
||||
GST_TIME_ARGS (stop), GST_TIME_ARGS (position));
|
||||
newsegmentevent =
|
||||
gst_event_new_new_segment (0, 1.0, GST_FORMAT_TIME, start, stop,
|
||||
gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, start, stop,
|
||||
position);
|
||||
|
||||
push_event ((MpegTSBase *) demux, newsegmentevent);
|
||||
|
|
|
@ -687,7 +687,6 @@ mpegtsmux_collected (GstCollectPads * pads, MpegTsMux * mux)
|
|||
}
|
||||
|
||||
if (G_UNLIKELY (prog->pcr_stream == NULL)) {
|
||||
if (best) {
|
||||
/* Take the first data stream for the PCR */
|
||||
GST_DEBUG_OBJECT (COLLECT_DATA_PAD (best),
|
||||
"Use stream (pid=%d) from pad as PCR for program (prog_id = %d)",
|
||||
|
@ -696,7 +695,6 @@ mpegtsmux_collected (GstCollectPads * pads, MpegTsMux * mux)
|
|||
/* Set the chosen PCR stream */
|
||||
tsmux_program_set_pcr_stream (prog, best->stream);
|
||||
}
|
||||
}
|
||||
|
||||
g_return_val_if_fail (buf != NULL, GST_FLOW_ERROR);
|
||||
if (best->stream->is_video_stream)
|
||||
|
|
12
gst/removesilence/Makefile.am
Normal file
|
@ -0,0 +1,12 @@
|
|||
|
||||
plugin_LTLIBRARIES = libgstremovesilence.la
|
||||
|
||||
libgstremovesilence_la_SOURCES = gstremovesilence.c vad_private.c
|
||||
libgstremovesilence_la_CFLAGS = $(GST_PLUGINS_BASE_CFLAGS) $(GST_CFLAGS)
|
||||
libgstremovesilence_la_LIBADD = $(GST_PLUGINS_BASE_LIBS) $(GST_BASE_LIBS) $(GST_LIBS)
|
||||
libgstremovesilence_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
|
||||
libgstremovesilence_la_LIBTOOLFLAGS = --tag=disable-static
|
||||
|
||||
noinst_HEADERS = \
|
||||
gstremovesilence.h \
|
||||
vad_private.h
|
262
gst/removesilence/gstremovesilence.c
Normal file
|
@ -0,0 +1,262 @@
|
|||
/* GStreamer
|
||||
* Copyright (C) 2011 Tiago Katcipis <tiagokatcipis@gmail.com>
|
||||
* Copyright (C) 2011 Paulo Pizarro <paulo.pizarro@gmail.com>
|
||||
*
|
||||
* This library is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Library General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2 of the License, or (at your option) any later version.
|
||||
*
|
||||
* This library is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Library General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Library General Public
|
||||
* License along with this library; if not, write to the
|
||||
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
|
||||
* Boston, MA 02111-1307, USA.
|
||||
*/
|
||||
|
||||
/**
|
||||
* SECTION:element-removesilence
|
||||
*
|
||||
* Removes all silence periods from an audio stream, dropping silence buffers.
|
||||
*
|
||||
* <refsect2>
|
||||
* <title>Example launch line</title>
|
||||
* |[
|
||||
* gst-launch -v -m filesrc location="audiofile" ! decodebin2 ! removesilence remove=true ! wavenc ! filesink location=without_audio.wav
|
||||
* ]|
|
||||
* </refsect2>
|
||||
*/
|
||||
|
||||
#ifdef HAVE_CONFIG_H
|
||||
#include "config.h"
|
||||
#endif
|
||||
|
||||
#include <gst/gst.h>
|
||||
#include <gst/base/gstbasetransform.h>
|
||||
#include <gst/controller/gstcontroller.h>
|
||||
|
||||
#include "gstremovesilence.h"
|
||||
|
||||
|
||||
GST_DEBUG_CATEGORY_STATIC (gst_remove_silence_debug);
|
||||
#define GST_CAT_DEFAULT gst_remove_silence_debug
|
||||
#define DEFAULT_VAD_HYSTERESIS 480 /* 60 mseg */
|
||||
|
||||
/* Filter signals and args */
|
||||
enum
|
||||
{
|
||||
/* FILL ME */
|
||||
LAST_SIGNAL
|
||||
};
|
||||
|
||||
enum
|
||||
{
|
||||
PROP_0,
|
||||
PROP_REMOVE,
|
||||
PROP_HYSTERESIS
|
||||
};
|
||||
|
||||
|
||||
static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
|
||||
GST_PAD_SINK,
|
||||
GST_PAD_ALWAYS,
|
||||
GST_STATIC_CAPS
|
||||
("audio/x-raw-int, "
|
||||
"rate=[1, MAX], channels=1, endianness=BYTE_ORDER, "
|
||||
"width=16, depth=16, signed=true"));
|
||||
|
||||
static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
|
||||
GST_PAD_SRC,
|
||||
GST_PAD_ALWAYS,
|
||||
GST_STATIC_CAPS
|
||||
("audio/x-raw-int, "
|
||||
"rate=[1, MAX], channels=1, endianness=BYTE_ORDER, "
|
||||
"width=16, depth=16, signed=true"));
|
||||
|
||||
|
||||
#define DEBUG_INIT(bla) \
|
||||
GST_DEBUG_CATEGORY_INIT (gst_remove_silence_debug, "removesilence", 0, "removesilence element")
|
||||
|
||||
GST_BOILERPLATE_FULL (GstRemoveSilence, gst_remove_silence, GstBaseTransform,
|
||||
GST_TYPE_BASE_TRANSFORM, DEBUG_INIT);
|
||||
|
||||
|
||||
static void gst_remove_silence_set_property (GObject * object, guint prop_id,
|
||||
const GValue * value, GParamSpec * pspec);
|
||||
static void gst_remove_silence_get_property (GObject * object, guint prop_id,
|
||||
GValue * value, GParamSpec * pspec);
|
||||
|
||||
static GstFlowReturn gst_remove_silence_transform_ip (GstBaseTransform * base,
|
||||
GstBuffer * buf);
|
||||
static void gst_remove_silence_finalize (GObject * obj);
|
||||
static void gst_remove_silence_reset (GstRemoveSilence * filter);
|
||||
|
||||
/* GObject vmethod implementations */
|
||||
|
||||
static void
|
||||
gst_remove_silence_base_init (gpointer gclass)
|
||||
{
|
||||
GstElementClass *element_class = GST_ELEMENT_CLASS (gclass);
|
||||
|
||||
gst_element_class_set_details_simple (element_class,
|
||||
"RemoveSilence",
|
||||
"Filter/Effect/Audio",
|
||||
"Removes all the silence periods from the audio stream.",
|
||||
"Tiago Katcipis <tiagokatcipis@gmail.com>\n \
|
||||
Paulo Pizarro <paulo.pizarro@gmail.com>");
|
||||
|
||||
gst_element_class_add_pad_template (element_class,
|
||||
gst_static_pad_template_get (&src_template));
|
||||
gst_element_class_add_pad_template (element_class,
|
||||
gst_static_pad_template_get (&sink_template));
|
||||
}
|
||||
|
||||
/* initialize the removesilence's class */
|
||||
static void
|
||||
gst_remove_silence_class_init (GstRemoveSilenceClass * klass)
|
||||
{
|
||||
GObjectClass *gobject_class;
|
||||
|
||||
gobject_class = (GObjectClass *) klass;
|
||||
gobject_class->finalize = gst_remove_silence_finalize;
|
||||
gobject_class->set_property = gst_remove_silence_set_property;
|
||||
gobject_class->get_property = gst_remove_silence_get_property;
|
||||
|
||||
g_object_class_install_property (gobject_class, PROP_REMOVE,
|
||||
g_param_spec_boolean ("remove", "Remove",
|
||||
"Set to true to remove silence from the stream, false otherwhise",
|
||||
FALSE, G_PARAM_READWRITE));
|
||||
|
||||
g_object_class_install_property (gobject_class, PROP_HYSTERESIS,
|
||||
g_param_spec_uint64 ("hysteresis",
|
||||
"Hysteresis",
|
||||
"Set the hysteresis (on samples) used on the internal VAD",
|
||||
1, G_MAXUINT64, DEFAULT_VAD_HYSTERESIS, G_PARAM_READWRITE));
|
||||
|
||||
|
||||
GST_BASE_TRANSFORM_CLASS (klass)->transform_ip =
|
||||
GST_DEBUG_FUNCPTR (gst_remove_silence_transform_ip);
|
||||
}
|
||||
|
||||
/* initialize the new element
|
||||
* instantiate pads and add them to element
|
||||
* set pad callback functions
|
||||
* initialize instance structure
|
||||
*/
|
||||
static void
|
||||
gst_remove_silence_init (GstRemoveSilence * filter,
|
||||
GstRemoveSilenceClass * gclass)
|
||||
{
|
||||
filter->vad = vad_new (DEFAULT_VAD_HYSTERESIS);
|
||||
filter->remove = FALSE;
|
||||
|
||||
if (!filter->vad) {
|
||||
GST_DEBUG ("Error initializing VAD !!");
|
||||
return;
|
||||
}
|
||||
|
||||
gst_remove_silence_reset (filter);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_remove_silence_reset (GstRemoveSilence * filter)
|
||||
{
|
||||
GST_DEBUG ("Reseting VAD");
|
||||
if (filter->vad) {
|
||||
vad_reset (filter->vad);
|
||||
}
|
||||
GST_DEBUG ("VAD Reseted");
|
||||
}
|
||||
|
||||
static void
|
||||
gst_remove_silence_finalize (GObject * obj)
|
||||
{
|
||||
GstRemoveSilence *filter = GST_REMOVE_SILENCE (obj);
|
||||
GST_DEBUG ("Destroying VAD");
|
||||
vad_destroy (filter->vad);
|
||||
filter->vad = NULL;
|
||||
GST_DEBUG ("VAD Destroyed");
|
||||
G_OBJECT_CLASS (parent_class)->finalize (obj);
|
||||
}
|
||||
|
||||
static void
|
||||
gst_remove_silence_set_property (GObject * object, guint prop_id,
|
||||
const GValue * value, GParamSpec * pspec)
|
||||
{
|
||||
GstRemoveSilence *filter = GST_REMOVE_SILENCE (object);
|
||||
|
||||
switch (prop_id) {
|
||||
case PROP_REMOVE:
|
||||
filter->remove = g_value_get_boolean (value);
|
||||
break;
|
||||
case PROP_HYSTERESIS:
|
||||
vad_set_hysteresis (filter->vad, g_value_get_uint64 (value));
|
||||
break;
|
||||
default:
|
||||
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
static void
|
||||
gst_remove_silence_get_property (GObject * object, guint prop_id,
|
||||
GValue * value, GParamSpec * pspec)
|
||||
{
|
||||
GstRemoveSilence *filter = GST_REMOVE_SILENCE (object);
|
||||
|
||||
switch (prop_id) {
|
||||
case PROP_REMOVE:
|
||||
g_value_set_boolean (value, filter->remove);
|
||||
break;
|
||||
case PROP_HYSTERESIS:
|
||||
g_value_set_uint64 (value, vad_get_hysteresis (filter->vad));
|
||||
break;
|
||||
default:
|
||||
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_remove_silence_transform_ip (GstBaseTransform * trans, GstBuffer * inbuf)
|
||||
{
|
||||
GstRemoveSilence *filter = NULL;
|
||||
int frame_type;
|
||||
|
||||
filter = GST_REMOVE_SILENCE (trans);
|
||||
|
||||
frame_type =
|
||||
vad_update (filter->vad, (gint16 *) GST_BUFFER_DATA (inbuf),
|
||||
GST_BUFFER_SIZE (inbuf) / sizeof (gint16));
|
||||
|
||||
if (frame_type == VAD_SILENCE) {
|
||||
|
||||
GST_DEBUG ("Silence detected");
|
||||
|
||||
if (filter->remove) {
|
||||
GST_DEBUG ("Removing silence");
|
||||
return GST_BASE_TRANSFORM_FLOW_DROPPED;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return GST_FLOW_OK;
|
||||
}
|
||||
|
||||
/* Plugin init functions */
|
||||
static gboolean
|
||||
plugin_init (GstPlugin * plugin)
|
||||
{
|
||||
return gst_element_register (plugin, "removesilence", GST_RANK_NONE,
|
||||
gst_remove_silence_get_type ());
|
||||
}
|
||||
|
||||
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
|
||||
GST_VERSION_MINOR,
|
||||
"removesilence",
|
||||
"Removes silence from an audio stream",
|
||||
plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN);
|
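Beyond the gst-launch line in the element documentation above, the new element can also be driven from application code. A minimal sketch with a hypothetical test pipeline, assuming a GStreamer 0.10 environment with the plugin installed; the pipeline string and property values are illustrative, not part of the patch:

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *pipeline, *rs;
  GError *error = NULL;

  gst_init (&argc, &argv);

  /* Hypothetical test pipeline feeding mono 16-bit audio into removesilence */
  pipeline = gst_parse_launch (
      "audiotestsrc wave=silence num-buffers=100 ! audioconvert ! "
      "audio/x-raw-int,channels=1,width=16,depth=16 ! "
      "removesilence name=rs ! fakesink", &error);
  if (!pipeline) {
    g_printerr ("Failed to build pipeline: %s\n", error->message);
    return 1;
  }

  /* Drop silent buffers and use a longer hysteresis than the default */
  rs = gst_bin_get_by_name (GST_BIN (pipeline), "rs");
  g_object_set (rs, "remove", TRUE, "hysteresis", (guint64) 960, NULL);
  gst_object_unref (rs);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  /* ... run a main loop, wait for EOS ... */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}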
56
gst/removesilence/gstremovesilence.h
Normal file
|
@ -0,0 +1,56 @@
|
|||
/* GStreamer
|
||||
* Copyright (C) 2011 Tiago Katcipis <tiagokatcipis@gmail.com>
|
||||
* Copyright (C) 2011 Paulo Pizarro <paulo.pizarro@gmail.com>
|
||||
*
|
||||
*
|
||||
* This library is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Library General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2 of the License, or (at your option) any later version.
|
||||
*
|
||||
* This library is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Library General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Library General Public
|
||||
* License along with this library; if not, write to the
|
||||
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
|
||||
* Boston, MA 02111-1307, USA.
|
||||
*/
|
||||
|
||||
#ifndef __GST_REMOVE_SILENCE_H__
|
||||
#define __GST_REMOVE_SILENCE_H__
|
||||
|
||||
#include <gst/gst.h>
|
||||
#include <gst/base/gstbasetransform.h>
|
||||
#include "vad_private.h"
|
||||
|
||||
G_BEGIN_DECLS
|
||||
|
||||
#define GST_TYPE_REMOVE_SILENCE \
|
||||
(gst_remove_silence_get_type())
|
||||
#define GST_REMOVE_SILENCE(obj) \
|
||||
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_REMOVE_SILENCE,GstRemoveSilence))
|
||||
#define GST_REMOVE_SILENCE_CLASS(klass) \
|
||||
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_REMOVE_SILENCE,GstRemoveSilenceClass))
|
||||
#define GST_IS_REMOVESILENCE(obj) \
|
||||
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_REMOVE_SILENCE))
|
||||
#define GST_IS_REMOVESILENCE_CLASS(klass) \
|
||||
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_REMOVE_SILENCE))
|
||||
|
||||
typedef struct _GstRemoveSilence {
|
||||
GstBaseTransform parent;
|
||||
VADFilter* vad;
|
||||
gboolean remove;
|
||||
} GstRemoveSilence;
|
||||
|
||||
typedef struct _GstRemoveSilenceClass {
|
||||
GstBaseTransformClass parent_class;
|
||||
} GstRemoveSilenceClass;
|
||||
|
||||
GType gst_remove_silence_get_type (void);
|
||||
|
||||
G_END_DECLS
|
||||
|
||||
#endif /* __GST_REMOVE_SILENCE_H__ */
|
152
gst/removesilence/vad_private.c
Normal file
|
@ -0,0 +1,152 @@
|
|||
/* GStreamer
|
||||
* Copyright (C) 2009 Tiago Katcipis <tiagokatcipis@gmail.com>
|
||||
* Copyright (C) 2009 Paulo Pizarro <paulo.pizarro@gmail.com>
|
||||
* Copyright (C) 2009 Rogério Santos <rogerio.santos@digitro.com.br>
|
||||
*
|
||||
* This library is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Library General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2 of the License, or (at your option) any later version.
|
||||
*
|
||||
* This library is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Library General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Library General Public
|
||||
* License along with this library; if not, write to the
|
||||
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
|
||||
* Boston, MA 02111-1307, USA.
|
||||
*/
|
||||
|
||||
#include <string.h>
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <glib.h>
|
||||
#include "vad_private.h"
|
||||
|
||||
#define VAD_POWER_ALPHA 0x0800 /* Q16 */
|
||||
#define VAD_POWER_THRESHOLD 0x000010C7 /* -60 dB (square wave) */
|
||||
#define VAD_ZCR_THRESHOLD 0
|
||||
#define VAD_BUFFER_SIZE 256
|
||||
|
||||
|
||||
union pgen
|
||||
{
|
||||
guint64 a;
|
||||
gpointer v;
|
||||
guint64 *l;
|
||||
guchar *b;
|
||||
guint16 *w;
|
||||
gint16 *s;
|
||||
};
|
||||
|
||||
struct _cqueue_s
|
||||
{
|
||||
union pgen base;
|
||||
union pgen tail;
|
||||
union pgen head;
|
||||
gint size;
|
||||
};
|
||||
|
||||
typedef struct _cqueue_s cqueue_t;
|
||||
|
||||
struct _vad_s
|
||||
{
|
||||
gint16 vad_buffer[VAD_BUFFER_SIZE];
|
||||
cqueue_t cqueue;
|
||||
gint vad_state;
|
||||
guint64 hysteresis;
|
||||
guint64 vad_samples;
|
||||
guint64 vad_power;
|
||||
long vad_zcr;
|
||||
};
|
||||
|
||||
VADFilter *
|
||||
vad_new (guint64 hysteresis)
|
||||
{
|
||||
VADFilter *vad = malloc (sizeof (VADFilter));
|
||||
vad_reset (vad);
|
||||
vad->hysteresis = hysteresis;
|
||||
return vad;
|
||||
}
|
||||
|
||||
void
|
||||
vad_reset (VADFilter * vad)
|
||||
{
|
||||
memset (vad, 0, sizeof (*vad));
|
||||
vad->cqueue.base.s = vad->vad_buffer;
|
||||
vad->cqueue.tail.a = vad->cqueue.head.a = 0;
|
||||
vad->cqueue.size = VAD_BUFFER_SIZE;
|
||||
vad->vad_state = VAD_SILENCE;
|
||||
}
|
||||
|
||||
void
|
||||
vad_destroy (VADFilter * p)
|
||||
{
|
||||
free (p);
|
||||
}
|
||||
|
||||
void
|
||||
vad_set_hysteresis (struct _vad_s *p, guint64 hysteresis)
|
||||
{
|
||||
p->hysteresis = hysteresis;
|
||||
}
|
||||
|
||||
guint64
|
||||
vad_get_hysteresis (struct _vad_s *p)
|
||||
{
|
||||
return p->hysteresis;
|
||||
}
|
||||
|
||||
gint
|
||||
vad_update (struct _vad_s * p, gint16 * data, gint len)
|
||||
{
|
||||
guint64 tail;
|
||||
gint frame_type;
|
||||
gint16 sample;
|
||||
gint i;
|
||||
|
||||
for (i = 0; i < len; i++) {
|
||||
p->vad_power = VAD_POWER_ALPHA * ((data[i] * data[i] >> 14) & 0xFFFF) +
|
||||
(0xFFFF - VAD_POWER_ALPHA) * (p->vad_power >> 16) +
|
||||
((0xFFFF - VAD_POWER_ALPHA) * (p->vad_power & 0xFFFF) >> 16);
|
||||
/* Update VAD buffer */
|
||||
p->cqueue.base.s[p->cqueue.head.a] = data[i];
|
||||
p->cqueue.head.a = (p->cqueue.head.a + 1) & (p->cqueue.size - 1);
|
||||
if (p->cqueue.head.a == p->cqueue.tail.a)
|
||||
p->cqueue.tail.a = (p->cqueue.tail.a + 1) & (p->cqueue.size - 1);
|
||||
}
|
||||
|
||||
tail = p->cqueue.tail.a;
|
||||
p->vad_zcr = 0;
|
||||
for (;;) {
|
||||
sample = p->cqueue.base.s[tail];
|
||||
tail = (tail + 1) & (p->cqueue.size - 1);
|
||||
if (tail == p->cqueue.head.a)
|
||||
break;
|
||||
p->vad_zcr +=
|
||||
((sample & 0x8000) != (p->cqueue.base.s[tail] & 0x8000)) ? 1 : -1;
|
||||
}
|
||||
|
||||
frame_type = (p->vad_power > VAD_POWER_THRESHOLD
|
||||
&& p->vad_zcr < VAD_ZCR_THRESHOLD) ? VAD_VOICE : VAD_SILENCE;
|
||||
|
||||
if (p->vad_state != frame_type) {
|
||||
/* Voice to silence transition */
|
||||
if (p->vad_state == VAD_VOICE) {
|
||||
p->vad_samples += len;
|
||||
if (p->vad_samples >= p->hysteresis) {
|
||||
p->vad_state = frame_type;
|
||||
p->vad_samples = 0;
|
||||
}
|
||||
} else {
|
||||
p->vad_state = frame_type;
|
||||
p->vad_samples = 0;
|
||||
}
|
||||
} else {
|
||||
p->vad_samples = 0;
|
||||
}
|
||||
|
||||
return p->vad_state;
|
||||
}
|
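For orientation, the Q16 arithmetic in vad_update() above amounts to a first-order IIR energy estimate plus a zero-crossing score; the real-valued form below is a reading of the constants in this file, not text from the patch:

E_n \;=\; \alpha \, \frac{x_n^2}{2^{14}} \;+\; (1 - \alpha)\, E_{n-1},
\qquad
\alpha \;=\; \frac{\texttt{0x0800}}{2^{16}} \;=\; \frac{1}{32}

with the stored vad_power holding 2^{16} E_n. A frame is classified VAD_VOICE when that value exceeds VAD_POWER_THRESHOLD = 0x10C7 (about -60 dB relative to a full-scale square wave, since 2^{32} \cdot 10^{-6} \approx 4295) and the zero-crossing score stays below VAD_ZCR_THRESHOLD; the hysteresis counter then delays the voice-to-silence transition by the configured number of samples.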
44
gst/removesilence/vad_private.h
Normal file
|
@ -0,0 +1,44 @@
|
|||
/* GStreamer
|
||||
* Copyright (C) 2009 Tiago Katcipis <tiagokatcipis@gmail.com>
|
||||
* Copyright (C) 2009 Paulo Pizarro <paulo.pizarro@gmail.com>
|
||||
* Copyright (C) 2009 Rogério Santos <rogerio.santos@digitro.com.br>
|
||||
*
|
||||
* This library is free software; you can redistribute it and/or
|
||||
* modify it under the terms of the GNU Library General Public
|
||||
* License as published by the Free Software Foundation; either
|
||||
* version 2 of the License, or (at your option) any later version.
|
||||
*
|
||||
* This library is distributed in the hope that it will be useful,
|
||||
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
* Library General Public License for more details.
|
||||
*
|
||||
* You should have received a copy of the GNU Library General Public
|
||||
* License along with this library; if not, write to the
|
||||
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
|
||||
* Boston, MA 02111-1307, USA.
|
||||
*/
|
||||
|
||||
|
||||
#ifndef __VAD_FILTER_H__
|
||||
#define __VAD_FILTER_H__
|
||||
|
||||
#define VAD_SILENCE 0
|
||||
#define VAD_VOICE 1
|
||||
|
||||
|
||||
typedef struct _vad_s VADFilter;
|
||||
|
||||
gint vad_update(VADFilter *p, gint16 *data, gint len);
|
||||
|
||||
void vad_set_hysteresis(VADFilter *p, guint64 hysteresis);
|
||||
|
||||
guint64 vad_get_hysteresis(VADFilter *p);
|
||||
|
||||
VADFilter* vad_new(guint64 hysteresis);
|
||||
|
||||
void vad_reset(VADFilter *p);
|
||||
|
||||
void vad_destroy(VADFilter *p);
|
||||
|
||||
#endif /* __VAD_FILTER__ */
|
|
@ -17,13 +17,18 @@ libgstdecklink_la_SOURCES = \
|
|||
gstdecklinksink.cpp \
|
||||
gstdecklink.cpp \
|
||||
capture.cpp \
|
||||
DeckLinkAPIDispatch.cpp
|
||||
linux/DeckLinkAPIDispatch.cpp
|
||||
|
||||
noinst_HEADERS = \
|
||||
gstdecklink.h \
|
||||
gstdecklinksrc.h \
|
||||
gstdecklinksink.h \
|
||||
capture.h \
|
||||
DeckLinkAPI.h \
|
||||
LinuxCOM.h
|
||||
linux/DeckLinkAPI.h \
|
||||
linux/LinuxCOM.h
|
||||
|
||||
EXTRA_DIST = \
|
||||
win/DeckLinkAPI.h \
|
||||
win/DeckLinkAPIDispatch.cpp \
|
||||
win/DeckLinkAPI_i.c
|
||||
|
||||
|
|
|
@ -28,41 +28,37 @@
|
|||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <pthread.h>
|
||||
#include <unistd.h>
|
||||
#include <fcntl.h>
|
||||
|
||||
#include "gstdecklinksrc.h"
|
||||
|
||||
#include "DeckLinkAPI.h"
|
||||
#include "capture.h"
|
||||
|
||||
|
||||
int videoOutputFile = -1;
|
||||
int audioOutputFile = -1;
|
||||
#define GST_CAT_DEFAULT gst_decklink_src_debug_category
|
||||
|
||||
IDeckLink *deckLink;
|
||||
IDeckLinkInput *deckLinkInput;
|
||||
IDeckLinkDisplayModeIterator *displayModeIterator;
|
||||
|
||||
static BMDTimecodeFormat g_timecodeFormat = 0;
|
||||
static BMDTimecodeFormat g_timecodeFormat = (BMDTimecodeFormat)0;
|
||||
|
||||
DeckLinkCaptureDelegate::DeckLinkCaptureDelegate ():m_refCount (0)
|
||||
{
|
||||
pthread_mutex_init (&m_mutex, NULL);
|
||||
m_mutex = g_mutex_new();
|
||||
}
|
||||
|
||||
DeckLinkCaptureDelegate::~DeckLinkCaptureDelegate ()
|
||||
{
|
||||
pthread_mutex_destroy (&m_mutex);
|
||||
g_mutex_free (m_mutex);
|
||||
}
|
||||
|
||||
ULONG
|
||||
DeckLinkCaptureDelegate::AddRef (void)
|
||||
{
|
||||
pthread_mutex_lock (&m_mutex);
|
||||
g_mutex_lock (m_mutex);
|
||||
m_refCount++;
|
||||
pthread_mutex_unlock (&m_mutex);
|
||||
g_mutex_unlock (m_mutex);
|
||||
|
||||
return (ULONG) m_refCount;
|
||||
}
|
||||
|
@ -70,9 +66,9 @@ DeckLinkCaptureDelegate::AddRef (void)
|
|||
ULONG
|
||||
DeckLinkCaptureDelegate::Release (void)
|
||||
{
|
||||
pthread_mutex_lock (&m_mutex);
|
||||
g_mutex_lock (m_mutex);
|
||||
m_refCount--;
|
||||
pthread_mutex_unlock (&m_mutex);
|
||||
g_mutex_unlock (m_mutex);
|
||||
|
||||
if (m_refCount == 0) {
|
||||
delete this;
|
||||
|
@ -103,6 +99,7 @@ HRESULT
|
|||
IDeckLinkTimecode *timecode;
|
||||
if (videoFrame->GetTimecode (g_timecodeFormat, &timecode) == S_OK) {
|
||||
timecode->GetString (&timecodeString);
|
||||
CONVERT_COM_STRING (timecodeString);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -111,7 +108,7 @@ HRESULT
|
|||
"Valid Frame", videoFrame->GetRowBytes () * videoFrame->GetHeight ());
|
||||
|
||||
if (timecodeString)
|
||||
free ((void *) timecodeString);
|
||||
FREE_COM_STRING (timecodeString);
|
||||
|
||||
g_mutex_lock (decklinksrc->mutex);
|
||||
if (decklinksrc->video_frame != NULL) {
|
||||
|
@ -124,6 +121,10 @@ HRESULT
|
|||
decklinksrc->audio_frame = audioFrame;
|
||||
}
|
||||
}
|
||||
|
||||
/* increment regardless of whether the frame was dropped or not */
|
||||
decklinksrc->frame_num++;
|
||||
|
||||
g_cond_signal (decklinksrc->cond);
|
||||
g_mutex_unlock (decklinksrc->mutex);
|
||||
}
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
#ifndef __CAPTURE_H__
|
||||
#define __CAPTURE_H__
|
||||
|
||||
#include "DeckLinkAPI.h"
|
||||
#include "gstdecklink.h"
|
||||
|
||||
class DeckLinkCaptureDelegate : public IDeckLinkInputCallback
|
||||
{
|
||||
|
@ -19,7 +19,7 @@ class DeckLinkCaptureDelegate : public IDeckLinkInputCallback
|
|||
|
||||
private:
|
||||
ULONG m_refCount;
|
||||
pthread_mutex_t m_mutex;
|
||||
GMutex *m_mutex;
|
||||
};
|
||||
|
||||
#endif
|
||||
|
|
|
@ -21,8 +21,25 @@
|
|||
#define _GST_DECKLINK_H_
|
||||
|
||||
#include <gst/gst.h>
|
||||
#include "DeckLinkAPI.h"
|
||||
#ifdef G_OS_UNIX
|
||||
#include "linux/DeckLinkAPI.h"
|
||||
#endif
|
||||
|
||||
#ifdef G_OS_WIN32
|
||||
#include "win/DeckLinkAPI.h"
|
||||
|
||||
#include <comutil.h>
|
||||
|
||||
#define bool BOOL
|
||||
|
||||
#define COMSTR_T BSTR*
|
||||
#define FREE_COM_STRING(s) delete[] s;
|
||||
#define CONVERT_COM_STRING(s) BSTR _s = (BSTR)s; s = _com_util::ConvertBSTRToString(_s); ::SysFreeString(_s);
|
||||
#else
|
||||
#define COMSTR_T char*
|
||||
#define FREE_COM_STRING(s) free ((void *) s)
|
||||
#define CONVERT_COM_STRING(s)
|
||||
#endif /* _MSC_VER */
|
||||
|
||||
typedef enum {
|
||||
GST_DECKLINK_MODE_NTSC,
|
||||
|
|
|
@ -40,6 +40,7 @@
|
|||
#include "gstdecklinksink.h"
|
||||
#include <string.h>
|
||||
|
||||
|
||||
GST_DEBUG_CATEGORY_STATIC (gst_decklink_sink_debug_category);
|
||||
#define GST_CAT_DEFAULT gst_decklink_sink_debug_category
|
||||
|
||||
|
@ -116,6 +117,10 @@ static GstFlowReturn gst_decklink_sink_audiosink_bufferalloc (GstPad * pad,
|
|||
guint64 offset, guint size, GstCaps * caps, GstBuffer ** buf);
|
||||
static GstIterator *gst_decklink_sink_audiosink_iterintlink (GstPad * pad);
|
||||
|
||||
#ifdef _MSC_VER
|
||||
/* COM initialization/uninitialization thread */
|
||||
static void gst_decklink_sink_com_thread (GstDecklinkSink * src);
|
||||
#endif /* _MSC_VER */
|
||||
|
||||
enum
|
||||
{
|
||||
|
@ -268,6 +273,24 @@ gst_decklink_sink_init (GstDecklinkSink * decklinksink,
|
|||
|
||||
decklinksink->callback = new Output;
|
||||
decklinksink->callback->decklinksink = decklinksink;
|
||||
|
||||
#ifdef _MSC_VER
|
||||
decklinksink->com_init_lock = g_mutex_new();
|
||||
decklinksink->com_deinit_lock = g_mutex_new();
|
||||
decklinksink->com_initialized = g_cond_new();
|
||||
decklinksink->com_uninitialize = g_cond_new();
|
||||
decklinksink->com_uninitialized = g_cond_new();
|
||||
|
||||
g_mutex_lock (decklinksink->com_init_lock);
|
||||
|
||||
/* create the COM initialization thread */
|
||||
g_thread_create ((GThreadFunc)gst_decklink_sink_com_thread,
|
||||
decklinksink, FALSE, NULL);
|
||||
|
||||
/* wait until the COM thread signals that COM has been initialized */
|
||||
g_cond_wait (decklinksink->com_initialized, decklinksink->com_init_lock);
|
||||
g_mutex_unlock (decklinksink->com_init_lock);
|
||||
#endif /* _MSC_VER */
|
||||
}
|
||||
|
||||
void
|
||||
|
@ -308,6 +331,45 @@ gst_decklink_sink_get_property (GObject * object, guint property_id,
|
|||
}
|
||||
}
|
||||
|
||||
#ifdef _MSC_VER
|
||||
static void
|
||||
gst_decklink_sink_com_thread (GstDecklinkSink * src)
|
||||
{
|
||||
HRESULT res;
|
||||
|
||||
g_mutex_lock (src->com_init_lock);
|
||||
|
||||
/* Initialize COM with an MTA for this process. This thread will
* be the first one to enter the apartment and the last one to leave
* it, uninitializing COM properly */
|
||||
|
||||
res = CoInitializeEx (0, COINIT_MULTITHREADED);
|
||||
if (res == S_FALSE)
|
||||
GST_WARNING_OBJECT (src, "COM has been already initialized in the same process");
|
||||
else if (res == RPC_E_CHANGED_MODE)
|
||||
GST_WARNING_OBJECT (src, "The concurrency model of COM has changed.");
|
||||
else
|
||||
GST_INFO_OBJECT (src, "COM intialized succesfully");
|
||||
|
||||
src->comInitialized = TRUE;
|
||||
|
||||
/* Signal other threads waiting on this condition that COM was initialized */
|
||||
g_cond_signal (src->com_initialized);
|
||||
|
||||
g_mutex_unlock (src->com_init_lock);
|
||||
|
||||
/* Wait until the uninitialize condition is met to leave the COM apartment */
|
||||
g_mutex_lock (src->com_deinit_lock);
|
||||
g_cond_wait (src->com_uninitialize, src->com_deinit_lock);
|
||||
|
||||
CoUninitialize ();
|
||||
GST_INFO_OBJECT (src, "COM unintialized succesfully");
|
||||
src->comInitialized = FALSE;
|
||||
g_cond_signal (src->com_uninitialized);
|
||||
g_mutex_unlock (src->com_deinit_lock);
|
||||
}
|
||||
#endif /* _MSC_VER */
|
||||
|
||||
void
|
||||
gst_decklink_sink_dispose (GObject * object)
|
||||
{
|
||||
|
@ -333,6 +395,22 @@ gst_decklink_sink_finalize (GObject * object)
|
|||
|
||||
delete decklinksink->callback;
|
||||
|
||||
#ifdef _MSC_VER
|
||||
/* signal the COM thread that it should uninitialize COM */
|
||||
if (decklinksink->comInitialized) {
|
||||
g_mutex_lock (decklinksink->com_deinit_lock);
|
||||
g_cond_signal (decklinksink->com_uninitialize);
|
||||
g_cond_wait (decklinksink->com_uninitialized, decklinksink->com_deinit_lock);
|
||||
g_mutex_unlock (decklinksink->com_deinit_lock);
|
||||
}
|
||||
|
||||
g_mutex_free (decklinksink->com_init_lock);
|
||||
g_mutex_free (decklinksink->com_deinit_lock);
|
||||
g_cond_free (decklinksink->com_initialized);
|
||||
g_cond_free (decklinksink->com_uninitialize);
|
||||
g_cond_free (decklinksink->com_uninitialized);
|
||||
#endif /* _MSC_VER */
|
||||
|
||||
G_OBJECT_CLASS (parent_class)->finalize (object);
|
||||
}
|
||||
|
||||
|
@ -342,6 +420,7 @@ gst_decklink_sink_start (GstDecklinkSink * decklinksink)
|
|||
IDeckLinkIterator *iterator;
|
||||
HRESULT ret;
|
||||
const GstDecklinkMode *mode;
|
||||
BMDAudioSampleType sample_depth;
|
||||
|
||||
iterator = CreateDeckLinkIteratorInstance ();
|
||||
if (iterator == NULL) {
|
||||
|
@ -377,8 +456,9 @@ gst_decklink_sink_start (GstDecklinkSink * decklinksink)
|
|||
decklinksink->output->
|
||||
SetScheduledFrameCompletionCallback (decklinksink->callback);
|
||||
|
||||
sample_depth = bmdAudioSampleType16bitInteger;
|
||||
ret = decklinksink->output->EnableAudioOutput (bmdAudioSampleRate48kHz,
|
||||
16, 2, bmdAudioOutputStreamContinuous);
|
||||
sample_depth, 2, bmdAudioOutputStreamContinuous);
|
||||
if (ret != S_OK) {
|
||||
GST_ERROR ("failed to enable audio output");
|
||||
return FALSE;
|
||||
|
@ -1003,10 +1083,8 @@ HRESULT Output::ScheduledPlaybackHasStopped ()
|
|||
|
||||
HRESULT Output::RenderAudioSamples (bool preroll)
|
||||
{
|
||||
guint
|
||||
samplesWritten;
|
||||
GstBuffer *
|
||||
buffer;
|
||||
uint32_t samplesWritten;
|
||||
GstBuffer * buffer;
|
||||
|
||||
// guint64 samplesToWrite;
|
||||
|
||||
|
@ -1016,7 +1094,9 @@ HRESULT Output::RenderAudioSamples (bool preroll)
|
|||
// running = true;
|
||||
} else {
|
||||
g_mutex_lock (decklinksink->audio_mutex);
|
||||
decklinksink->output->ScheduleAudioSamples (GST_BUFFER_DATA (decklinksink->audio_buffer), GST_BUFFER_SIZE (decklinksink->audio_buffer) / 4, // 2 bytes per sample, stereo
|
||||
decklinksink->output->ScheduleAudioSamples (
|
||||
GST_BUFFER_DATA (decklinksink->audio_buffer),
|
||||
GST_BUFFER_SIZE (decklinksink->audio_buffer) / 4, // 2 bytes per sample, stereo
|
||||
0, 0, &samplesWritten);
|
||||
|
||||
buffer =
|
||||
|
|
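The divide-by-four in the ScheduleAudioSamples() call above converts the buffer's byte count into a count of sample frames: 16-bit samples across 2 channels occupy 4 bytes per frame. A small hedged arithmetic sketch (buffer size chosen only for illustration):

/* 2 bytes per 16-bit sample * 2 channels = 4 bytes per sample frame */
guint buffer_bytes = 192000;                   /* one second at 48 kHz, illustrative */
guint bytes_per_frame = (16 / 8) * 2;          /* = 4 */
guint32 n_sample_frames = buffer_bytes / bytes_per_frame;   /* = 48000 frames */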
|
@ -22,7 +22,6 @@
|
|||
|
||||
#include <gst/gst.h>
|
||||
#include "gstdecklink.h"
|
||||
#include "DeckLinkAPI.h"
|
||||
|
||||
G_BEGIN_DECLS
|
||||
|
||||
|
@ -76,6 +75,14 @@ struct _GstDecklinkSink
|
|||
/* properties */
|
||||
GstDecklinkModeEnum mode;
|
||||
|
||||
#ifdef _MSC_VER
|
||||
gboolean comInitialized;
|
||||
GMutex *com_init_lock;
|
||||
GMutex *com_deinit_lock;
|
||||
GCond *com_initialized;
|
||||
GCond *com_uninitialize;
|
||||
GCond *com_uninitialized;
|
||||
#endif /* _MSC_VER */
|
||||
};
|
||||
|
||||
struct _GstDecklinkSinkClass
|
||||
|
|
|
@ -41,7 +41,7 @@
|
|||
#include "capture.h"
|
||||
#include <string.h>
|
||||
|
||||
GST_DEBUG_CATEGORY_STATIC (gst_decklink_src_debug_category);
|
||||
GST_DEBUG_CATEGORY (gst_decklink_src_debug_category);
|
||||
#define GST_CAT_DEFAULT gst_decklink_src_debug_category
|
||||
|
||||
/* prototypes */
|
||||
|
@ -115,12 +115,18 @@ static GstIterator *gst_decklink_src_video_src_iterintlink (GstPad * pad);
|
|||
|
||||
static void gst_decklink_src_task (void *priv);
|
||||
|
||||
#ifdef _MSC_VER
|
||||
/* COM initialization/uninitialization thread */
|
||||
static void gst_decklink_src_com_thread (GstDecklinkSrc * src);
|
||||
#endif /* _MSC_VER */
|
||||
|
||||
enum
|
||||
{
|
||||
PROP_0,
|
||||
PROP_MODE,
|
||||
PROP_CONNECTION,
|
||||
PROP_AUDIO_INPUT
|
||||
PROP_AUDIO_INPUT,
|
||||
PROP_SUBDEVICE
|
||||
};
|
||||
|
||||
/* pad templates */
|
||||
|
@ -195,7 +201,14 @@ gst_decklink_src_class_init (GstDecklinkSrcClass * klass)
|
|||
|
||||
g_object_class_install_property (gobject_class, PROP_AUDIO_INPUT,
|
||||
g_param_spec_enum ("audio-input", "Audio Input", "Audio Input Connection",
|
||||
GST_TYPE_DECKLINK_AUDIO_CONNECTION, GST_DECKLINK_AUDIO_CONNECTION_AUTO,
|
||||
GST_TYPE_DECKLINK_AUDIO_CONNECTION,
|
||||
GST_DECKLINK_AUDIO_CONNECTION_AUTO,
|
||||
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
|
||||
G_PARAM_CONSTRUCT)));
|
||||
|
||||
g_object_class_install_property (gobject_class, PROP_SUBDEVICE,
|
||||
g_param_spec_int ("subdevice", "Subdevice", "Subdevice",
|
||||
0, 3, 0,
|
||||
(GParamFlags) (G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS |
|
||||
G_PARAM_CONSTRUCT)));
|
||||
}
|
||||
|
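The new "subdevice" property registered above selects which DeckLink card the element opens; gst_decklink_src_start() skips that many entries of the device iterator before taking one. A minimal hedged usage sketch, assuming the element is registered under the name "decklinksrc":

/* Sketch: pick the second DeckLink sub-device before starting the
 * pipeline.  The valid range is 0-3 per the param spec above. */
GstElement *src;

src = gst_element_factory_make ("decklinksrc", NULL);
g_object_set (G_OBJECT (src), "subdevice", 1, NULL);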
@ -239,9 +252,9 @@ gst_decklink_src_init (GstDecklinkSrc * decklinksrc,
|
|||
|
||||
|
||||
|
||||
decklinksrc->videosrcpad = gst_pad_new_from_template (
|
||||
gst_element_class_get_pad_template(GST_ELEMENT_CLASS(decklinksrc_class),
|
||||
"videosrc"), "videosrc");
|
||||
decklinksrc->videosrcpad =
|
||||
gst_pad_new_from_template (gst_element_class_get_pad_template
|
||||
(GST_ELEMENT_CLASS (decklinksrc_class), "videosrc"), "videosrc");
|
||||
gst_pad_set_getcaps_function (decklinksrc->videosrcpad,
|
||||
GST_DEBUG_FUNCPTR (gst_decklink_src_video_src_getcaps));
|
||||
gst_pad_set_setcaps_function (decklinksrc->videosrcpad,
|
||||
|
@ -276,7 +289,30 @@ gst_decklink_src_init (GstDecklinkSrc * decklinksrc,
|
|||
decklinksrc->mode = GST_DECKLINK_MODE_NTSC;
|
||||
decklinksrc->connection = GST_DECKLINK_CONNECTION_SDI;
|
||||
decklinksrc->audio_connection = GST_DECKLINK_AUDIO_CONNECTION_AUTO;
|
||||
decklinksrc->subdevice = 0;
|
||||
|
||||
decklinksrc->stop = FALSE;
|
||||
decklinksrc->dropped_frames = 0;
|
||||
decklinksrc->dropped_frames_old = 0;
|
||||
decklinksrc->frame_num = -1; /* -1 so will be 0 after incrementing */
|
||||
|
||||
#ifdef _MSC_VER
|
||||
decklinksrc->com_init_lock = g_mutex_new();
|
||||
decklinksrc->com_deinit_lock = g_mutex_new();
|
||||
decklinksrc->com_initialized = g_cond_new();
|
||||
decklinksrc->com_uninitialize = g_cond_new();
|
||||
decklinksrc->com_uninitialized = g_cond_new();
|
||||
|
||||
g_mutex_lock (decklinksrc->com_init_lock);
|
||||
|
||||
/* create the COM initialization thread */
|
||||
g_thread_create ((GThreadFunc)gst_decklink_src_com_thread,
|
||||
decklinksrc, FALSE, NULL);
|
||||
|
||||
/* wait until the COM thread signals that COM has been initialized */
|
||||
g_cond_wait (decklinksrc->com_initialized, decklinksrc->com_init_lock);
|
||||
g_mutex_unlock (decklinksrc->com_init_lock);
|
||||
#endif /* _MSC_VER */
|
||||
}
|
||||
|
||||
void
|
||||
|
@ -300,6 +336,9 @@ gst_decklink_src_set_property (GObject * object, guint property_id,
|
|||
decklinksrc->audio_connection =
|
||||
(GstDecklinkAudioConnectionEnum) g_value_get_enum (value);
|
||||
break;
|
||||
case PROP_SUBDEVICE:
|
||||
decklinksrc->subdevice = g_value_get_int (value);
|
||||
break;
|
||||
default:
|
||||
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
|
||||
break;
|
||||
|
@ -325,12 +364,54 @@ gst_decklink_src_get_property (GObject * object, guint property_id,
|
|||
case PROP_AUDIO_INPUT:
|
||||
g_value_set_enum (value, decklinksrc->audio_connection);
|
||||
break;
|
||||
case PROP_SUBDEVICE:
|
||||
g_value_set_int (value, decklinksrc->subdevice);
|
||||
break;
|
||||
default:
|
||||
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
#ifdef _MSC_VER
|
||||
static void
|
||||
gst_decklink_src_com_thread (GstDecklinkSrc * src)
|
||||
{
|
||||
HRESULT res;
|
||||
|
||||
g_mutex_lock (src->com_init_lock);
|
||||
|
||||
/* Initialize COM with an MTA for this process. This thread will
* be the first one to enter the apartment and the last one to leave
* it, uninitializing COM properly */
|
||||
|
||||
res = CoInitializeEx (0, COINIT_MULTITHREADED);
|
||||
if (res == S_FALSE)
|
||||
GST_WARNING_OBJECT (src, "COM has been already initialized in the same process");
|
||||
else if (res == RPC_E_CHANGED_MODE)
|
||||
GST_WARNING_OBJECT (src, "The concurrency model of COM has changed.");
|
||||
else
|
||||
GST_INFO_OBJECT (src, "COM intialized succesfully");
|
||||
|
||||
src->comInitialized = TRUE;
|
||||
|
||||
/* Signal other threads waiting on this condition that COM was initialized */
|
||||
g_cond_signal (src->com_initialized);
|
||||
|
||||
g_mutex_unlock (src->com_init_lock);
|
||||
|
||||
/* Wait until the uninitialize condition is met to leave the COM apartment */
|
||||
g_mutex_lock (src->com_deinit_lock);
|
||||
g_cond_wait (src->com_uninitialize, src->com_deinit_lock);
|
||||
|
||||
CoUninitialize ();
|
||||
GST_INFO_OBJECT (src, "COM unintialized succesfully");
|
||||
src->comInitialized = FALSE;
|
||||
g_cond_signal (src->com_uninitialized);
|
||||
g_mutex_unlock (src->com_deinit_lock);
|
||||
}
|
||||
#endif /* _MSC_VER */
|
||||
|
||||
void
|
||||
gst_decklink_src_dispose (GObject * object)
|
||||
{
|
||||
|
@ -362,6 +443,22 @@ gst_decklink_src_finalize (GObject * object)
|
|||
gst_caps_unref (decklinksrc->video_caps);
|
||||
}
|
||||
|
||||
#ifdef _MSC_VER
|
||||
/* signal the COM thread that it should uninitialize COM */
|
||||
if (decklinksrc->comInitialized) {
|
||||
g_mutex_lock (decklinksrc->com_deinit_lock);
|
||||
g_cond_signal (decklinksrc->com_uninitialize);
|
||||
g_cond_wait (decklinksrc->com_uninitialized, decklinksrc->com_deinit_lock);
|
||||
g_mutex_unlock (decklinksrc->com_deinit_lock);
|
||||
}
|
||||
|
||||
g_mutex_free (decklinksrc->com_init_lock);
|
||||
g_mutex_free (decklinksrc->com_deinit_lock);
|
||||
g_cond_free (decklinksrc->com_initialized);
|
||||
g_cond_free (decklinksrc->com_uninitialize);
|
||||
g_cond_free (decklinksrc->com_uninitialized);
|
||||
#endif /* _MSC_VER */
|
||||
|
||||
G_OBJECT_CLASS (parent_class)->finalize (object);
|
||||
}
|
||||
|
||||
|
@ -389,13 +486,14 @@ gst_decklink_src_start (GstElement * element)
|
|||
DeckLinkCaptureDelegate *delegate;
|
||||
//IDeckLinkDisplayModeIterator *mode_iterator;
|
||||
//IDeckLinkDisplayMode *mode;
|
||||
int sample_depth;
|
||||
BMDAudioSampleType sample_depth;
|
||||
int channels;
|
||||
HRESULT ret;
|
||||
const GstDecklinkMode *mode;
|
||||
IDeckLinkConfiguration *config;
|
||||
BMDVideoConnection conn;
|
||||
BMDAudioConnection aconn;
|
||||
int i;
|
||||
|
||||
GST_DEBUG_OBJECT (decklinksrc, "start");
|
||||
|
||||
|
@ -410,6 +508,13 @@ gst_decklink_src_start (GstElement * element)
|
|||
GST_ERROR ("no card");
|
||||
return FALSE;
|
||||
}
|
||||
for (i = 0; i < decklinksrc->subdevice; i++) {
|
||||
ret = iterator->Next (&decklinksrc->decklink);
|
||||
if (ret != S_OK) {
|
||||
GST_ERROR ("no card");
|
||||
return FALSE;
|
||||
}
|
||||
}
|
||||
|
||||
ret = decklinksrc->decklink->QueryInterface (IID_IDeckLinkInput,
|
||||
(void **) &decklinksrc->input);
|
||||
|
@ -491,7 +596,6 @@ gst_decklink_src_start (GstElement * element)
|
|||
GST_ERROR ("set configuration (audio input connection)");
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
#if 0
|
||||
ret = decklinksrc->input->GetDisplayModeIterator (&mode_iterator);
|
||||
if (ret != S_OK) {
|
||||
|
@ -520,7 +624,7 @@ gst_decklink_src_start (GstElement * element)
|
|||
return FALSE;
|
||||
}
|
||||
|
||||
sample_depth = 16;
|
||||
sample_depth = bmdAudioSampleType16bitInteger;
|
||||
channels = 2;
|
||||
ret = decklinksrc->input->EnableAudioInput (bmdAudioSampleRate48kHz,
|
||||
sample_depth, channels);
|
||||
|
@ -1056,7 +1160,6 @@ gst_decklink_src_task (void *priv)
|
|||
GstBuffer *audio_buffer;
|
||||
IDeckLinkVideoInputFrame *video_frame;
|
||||
IDeckLinkAudioInputPacket *audio_frame;
|
||||
int dropped_frames;
|
||||
void *data;
|
||||
int n_samples;
|
||||
GstFlowReturn ret;
|
||||
|
@ -1070,7 +1173,6 @@ gst_decklink_src_task (void *priv)
|
|||
}
|
||||
video_frame = decklinksrc->video_frame;
|
||||
audio_frame = decklinksrc->audio_frame;
|
||||
dropped_frames = decklinksrc->dropped_frames;
|
||||
decklinksrc->video_frame = NULL;
|
||||
decklinksrc->audio_frame = NULL;
|
||||
g_mutex_unlock (decklinksrc->mutex);
|
||||
|
@ -1080,10 +1182,14 @@ gst_decklink_src_task (void *priv)
|
|||
return;
|
||||
}
|
||||
|
||||
if (dropped_frames > 0) {
|
||||
GST_ELEMENT_ERROR (decklinksrc, RESOURCE, READ, (NULL), (NULL));
|
||||
/* ERROR */
|
||||
return;
|
||||
/* warning on dropped frames */
|
||||
if (decklinksrc->dropped_frames - decklinksrc->dropped_frames_old > 0) {
|
||||
GST_ELEMENT_WARNING (decklinksrc, RESOURCE, READ,
|
||||
("Dropped %d frame(s), for a total of %d frame(s)",
|
||||
decklinksrc->dropped_frames - decklinksrc->dropped_frames_old,
|
||||
decklinksrc->dropped_frames),
|
||||
(NULL));
|
||||
decklinksrc->dropped_frames_old = decklinksrc->dropped_frames;
|
||||
}
|
||||
|
||||
mode = gst_decklink_get_mode (decklinksrc->mode);
|
||||
|
@ -1106,16 +1212,28 @@ gst_decklink_src_task (void *priv)
|
|||
}
|
||||
|
||||
GST_BUFFER_TIMESTAMP (buffer) =
|
||||
gst_util_uint64_scale_int (decklinksrc->num_frames * GST_SECOND,
|
||||
gst_util_uint64_scale_int (decklinksrc->frame_num * GST_SECOND,
|
||||
mode->fps_d, mode->fps_n);
|
||||
GST_BUFFER_DURATION (buffer) =
|
||||
gst_util_uint64_scale_int ((decklinksrc->num_frames + 1) * GST_SECOND,
|
||||
gst_util_uint64_scale_int ((decklinksrc->frame_num + 1) * GST_SECOND,
|
||||
mode->fps_d, mode->fps_n) - GST_BUFFER_TIMESTAMP (buffer);
|
||||
GST_BUFFER_OFFSET (buffer) = decklinksrc->num_frames;
|
||||
if (decklinksrc->num_frames == 0) {
|
||||
GST_BUFFER_OFFSET (buffer) = decklinksrc->frame_num;
|
||||
GST_BUFFER_OFFSET_END (buffer) = decklinksrc->frame_num;
|
||||
if (decklinksrc->frame_num == 0) {
|
||||
GstEvent *event;
|
||||
gboolean ret;
|
||||
|
||||
GST_BUFFER_FLAG_SET (buffer, GST_BUFFER_FLAG_DISCONT);
|
||||
|
||||
event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
|
||||
GST_CLOCK_TIME_NONE, 0);
|
||||
|
||||
ret = gst_pad_push_event (decklinksrc->videosrcpad, event);
|
||||
if (!ret) {
|
||||
GST_ERROR_OBJECT (decklinksrc, "new segment event ret=%d", ret);
|
||||
return;
|
||||
}
|
||||
}
|
||||
decklinksrc->num_frames++;
|
||||
|
||||
if (decklinksrc->video_caps == NULL) {
|
||||
decklinksrc->video_caps = gst_decklink_mode_get_caps (decklinksrc->mode);
|
||||
|
@ -1144,7 +1262,7 @@ gst_decklink_src_task (void *priv)
|
|||
|
||||
if (decklinksrc->audio_caps == NULL) {
|
||||
decklinksrc->audio_caps = gst_caps_new_simple ("audio/x-raw-int",
|
||||
"endianness", G_TYPE_INT, LITTLE_ENDIAN,
|
||||
"endianness", G_TYPE_INT, G_LITTLE_ENDIAN,
|
||||
"signed", G_TYPE_BOOLEAN, TRUE,
|
||||
"depth", G_TYPE_INT, 16,
|
||||
"width", G_TYPE_INT, 16,
|
||||
|
|
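The timestamp and duration set on each video buffer in gst_decklink_src_task() above follow directly from the frame counter and the mode's frame rate; computing the duration as the difference between consecutive frame timestamps avoids accumulating rounding error. A hedged worked example with NTSC numbers (fps_n = 30000, fps_d = 1001):

/* gst_util_uint64_scale_int (v, num, denom) computes v * num / denom
 * without 64-bit overflow; same call as in the hunk above. */
guint64 frame_num = 30;                 /* illustrative frame counter value */
int fps_n = 30000, fps_d = 1001;        /* 29.97 fps */

GstClockTime ts = gst_util_uint64_scale_int (frame_num * GST_SECOND,
    fps_d, fps_n);                      /* 30 * 1001 / 30000 s = 1.001 s */
GstClockTime dur = gst_util_uint64_scale_int ((frame_num + 1) * GST_SECOND,
    fps_d, fps_n) - ts;                 /* 1001 / 30000 s, about 33.4 ms */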
|
@ -22,10 +22,9 @@
|
|||
|
||||
#include <gst/gst.h>
|
||||
#include "gstdecklink.h"
|
||||
#include "DeckLinkAPI.h"
|
||||
|
||||
G_BEGIN_DECLS
|
||||
|
||||
GST_DEBUG_CATEGORY_EXTERN (gst_decklink_src_debug_category);
|
||||
#define GST_TYPE_DECKLINK_SRC (gst_decklink_src_get_type())
|
||||
#define GST_DECKLINK_SRC(obj) (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_DECKLINK_SRC,GstDecklinkSrc))
|
||||
#define GST_DECKLINK_SRC_CLASS(klass) (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_DECKLINK_SRC,GstDecklinkSrcClass))
|
||||
|
@ -50,6 +49,7 @@ struct _GstDecklinkSrc
|
|||
GMutex *mutex;
|
||||
GCond *cond;
|
||||
int dropped_frames;
|
||||
int dropped_frames_old;
|
||||
gboolean stop;
|
||||
IDeckLinkVideoInputFrame *video_frame;
|
||||
IDeckLinkAudioInputPacket * audio_frame;
|
||||
|
@ -60,7 +60,7 @@ struct _GstDecklinkSrc
|
|||
guint64 num_audio_samples;
|
||||
|
||||
GstCaps *video_caps;
|
||||
guint64 num_frames;
|
||||
guint64 frame_num;
|
||||
int fps_n;
|
||||
int fps_d;
|
||||
int width;
|
||||
|
@ -73,6 +73,16 @@ struct _GstDecklinkSrc
|
|||
GstDecklinkModeEnum mode;
|
||||
GstDecklinkConnectionEnum connection;
|
||||
GstDecklinkAudioConnectionEnum audio_connection;
|
||||
int subdevice;
|
||||
|
||||
#ifdef _MSC_VER
|
||||
gboolean comInitialized;
|
||||
GMutex *com_init_lock;
|
||||
GMutex *com_deinit_lock;
|
||||
GCond *com_initialized;
|
||||
GCond *com_uninitialize;
|
||||
GCond *com_uninitialized;
|
||||
#endif /* _MSC_VER */
|
||||
};
|
||||
|
||||
struct _GstDecklinkSrcClass
|
||||
|
|
9217 sys/decklink/win/DeckLinkAPI.h (new file; diff suppressed because it is too large)
40 sys/decklink/win/DeckLinkAPIDispatch.cpp (new file)
|
@ -0,0 +1,40 @@
|
|||
/* -LICENSE-START-
|
||||
** Copyright (c) 2011 Blackmagic Design
|
||||
**
|
||||
** Permission is hereby granted, free of charge, to any person or organization
|
||||
** obtaining a copy of the software and accompanying documentation covered by
|
||||
** this license (the "Software") to use, reproduce, display, distribute,
|
||||
** execute, and transmit the Software, and to prepare derivative works of the
|
||||
** Software, and to permit third-parties to whom the Software is furnished to
|
||||
** do so, all subject to the following:
|
||||
**
|
||||
** The copyright notices in the Software and this entire statement, including
|
||||
** the above license grant, this restriction and the following disclaimer,
|
||||
** must be included in all copies of the Software, in whole or in part, and
|
||||
** all derivative works of the Software, unless such copies or derivative
|
||||
** works are solely in the form of machine-executable object code generated by
|
||||
** a source language processor.
|
||||
**
|
||||
** THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
** IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
** FITNESS FOR A PARTICULAR PURPOSE, TITLE AND NON-INFRINGEMENT. IN NO EVENT
|
||||
** SHALL THE COPYRIGHT HOLDERS OR ANYONE DISTRIBUTING THE SOFTWARE BE LIABLE
|
||||
** FOR ANY DAMAGES OR OTHER LIABILITY, WHETHER IN CONTRACT, TORT OR OTHERWISE,
|
||||
** ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
** DEALINGS IN THE SOFTWARE.
|
||||
** -LICENSE-END-
|
||||
**/
|
||||
|
||||
#include "DeckLinkAPI.h"
|
||||
|
||||
extern "C" {
|
||||
|
||||
IDeckLinkIterator* CreateDeckLinkIteratorInstance (void)
|
||||
{
|
||||
IDeckLinkIterator *deckLinkIterator = NULL;
|
||||
HRESULT res = CoCreateInstance(CLSID_CDeckLinkIterator, NULL, CLSCTX_ALL,
|
||||
IID_IDeckLinkIterator, (void**)&deckLinkIterator);
|
||||
return deckLinkIterator;
|
||||
}
|
||||
|
||||
};
|
319 sys/decklink/win/DeckLinkAPI_i.c (new file)
|
@ -0,0 +1,319 @@
|
|||
|
||||
|
||||
/* this ALWAYS GENERATED file contains the IIDs and CLSIDs */
|
||||
|
||||
/* link this file in with the server and any clients */
|
||||
|
||||
|
||||
/* File created by MIDL compiler version 7.00.0500 */
|
||||
/* at Fri Jun 17 10:43:51 2011
|
||||
*/
|
||||
/* Compiler settings for .\win\DeckLinkAPI.idl:
|
||||
Oicf, W1, Zp8, env=Win32 (32b run)
|
||||
protocol : dce , ms_ext, c_ext, robust
|
||||
error checks: allocation ref bounds_check enum stub_data
|
||||
VC __declspec() decoration level:
|
||||
__declspec(uuid()), __declspec(selectany), __declspec(novtable)
|
||||
DECLSPEC_UUID(), MIDL_INTERFACE()
|
||||
*/
|
||||
//@@MIDL_FILE_HEADING( )
|
||||
|
||||
#pragma warning( disable: 4049 ) /* more than 64k source lines */
|
||||
|
||||
|
||||
#ifdef __cplusplus
|
||||
extern "C"
|
||||
{
|
||||
#endif
|
||||
|
||||
|
||||
#include <rpc.h>
|
||||
#include <rpcndr.h>
|
||||
|
||||
#ifdef _MIDL_USE_GUIDDEF_
|
||||
|
||||
#ifndef INITGUID
|
||||
#define INITGUID
|
||||
#include <guiddef.h>
|
||||
#undef INITGUID
|
||||
#else
|
||||
#include <guiddef.h>
|
||||
#endif
|
||||
|
||||
#define MIDL_DEFINE_GUID(type,name,l,w1,w2,b1,b2,b3,b4,b5,b6,b7,b8) \
|
||||
DEFINE_GUID(name,l,w1,w2,b1,b2,b3,b4,b5,b6,b7,b8)
|
||||
|
||||
#else // !_MIDL_USE_GUIDDEF_
|
||||
|
||||
#ifndef __IID_DEFINED__
|
||||
#define __IID_DEFINED__
|
||||
|
||||
typedef struct _IID
|
||||
{
|
||||
unsigned long x;
|
||||
unsigned short s1;
|
||||
unsigned short s2;
|
||||
unsigned char c[8];
|
||||
} IID;
|
||||
|
||||
#endif // __IID_DEFINED__
|
||||
|
||||
#ifndef CLSID_DEFINED
|
||||
#define CLSID_DEFINED
|
||||
typedef IID CLSID;
|
||||
#endif // CLSID_DEFINED
|
||||
|
||||
#define MIDL_DEFINE_GUID(type,name,l,w1,w2,b1,b2,b3,b4,b5,b6,b7,b8) \
|
||||
const type name = {l,w1,w2,{b1,b2,b3,b4,b5,b6,b7,b8}}
|
||||
|
||||
#endif /* !_MIDL_USE_GUIDDEF_ */
|
||||
|
||||
MIDL_DEFINE_GUID (IID, LIBID_DeckLinkAPI, 0xD864517A, 0xEDD5, 0x466D, 0x86,
|
||||
0x7D, 0xC8, 0x19, 0xF1, 0xC0, 0x52, 0xBB);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkVideoOutputCallback, 0x20AA5225, 0x1958,
|
||||
0x47CB, 0x82, 0x0B, 0x80, 0xA8, 0xD5, 0x21, 0xA6, 0xEE);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkInputCallback, 0xDD04E5EC, 0x7415,
|
||||
0x42AB, 0xAE, 0x4A, 0xE8, 0x0C, 0x4D, 0xFC, 0x04, 0x4A);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkMemoryAllocator, 0xB36EB6E7, 0x9D29,
|
||||
0x4AA8, 0x92, 0xEF, 0x84, 0x3B, 0x87, 0xA2, 0x89, 0xE8);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkAudioOutputCallback, 0x403C681B, 0x7F46,
|
||||
0x4A12, 0xB9, 0x93, 0x2B, 0xB1, 0x27, 0x08, 0x4E, 0xE6);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkIterator, 0x74E936FC, 0xCC28, 0x4A67,
|
||||
0x81, 0xA0, 0x1E, 0x94, 0xE5, 0x2D, 0x4E, 0x69);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkAPIInformation, 0x7BEA3C68, 0x730D,
|
||||
0x4322, 0xAF, 0x34, 0x8A, 0x71, 0x52, 0xB5, 0x32, 0xA4);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkDisplayModeIterator, 0x9C88499F, 0xF601,
|
||||
0x4021, 0xB8, 0x0B, 0x03, 0x2E, 0x4E, 0xB4, 0x1C, 0x35);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkDisplayMode, 0x3EB2C1AB, 0x0A3D, 0x4523,
|
||||
0xA3, 0xAD, 0xF4, 0x0D, 0x7F, 0xB1, 0x4E, 0x78);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLink, 0x62BFF75D, 0x6569, 0x4E55, 0x8D,
|
||||
0x4D, 0x66, 0xAA, 0x03, 0x82, 0x9A, 0xBC);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkOutput, 0xA3EF0963, 0x0862, 0x44ED,
|
||||
0x92, 0xA9, 0xEE, 0x89, 0xAB, 0xF4, 0x31, 0xC7);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkInput, 0x6D40EF78, 0x28B9, 0x4E21, 0x99,
|
||||
0x0D, 0x95, 0xBB, 0x77, 0x50, 0xA0, 0x4F);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkTimecode, 0xBC6CFBD3, 0x8317, 0x4325,
|
||||
0xAC, 0x1C, 0x12, 0x16, 0x39, 0x1E, 0x93, 0x40);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkVideoFrame, 0x3F716FE0, 0xF023, 0x4111,
|
||||
0xBE, 0x5D, 0xEF, 0x44, 0x14, 0xC0, 0x5B, 0x17);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkMutableVideoFrame, 0x69E2639F, 0x40DA,
|
||||
0x4E19, 0xB6, 0xF2, 0x20, 0xAC, 0xE8, 0x15, 0xC3, 0x90);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkVideoFrame3DExtensions, 0xDA0F7E4A,
|
||||
0xEDC7, 0x48A8, 0x9C, 0xDD, 0x2D, 0xB5, 0x1C, 0x72, 0x9C, 0xD7);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkVideoInputFrame, 0x05CFE374, 0x537C,
|
||||
0x4094, 0x9A, 0x57, 0x68, 0x05, 0x25, 0x11, 0x8F, 0x44);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkVideoFrameAncillary, 0x732E723C, 0xD1A4,
|
||||
0x4E29, 0x9E, 0x8E, 0x4A, 0x88, 0x79, 0x7A, 0x00, 0x04);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkAudioInputPacket, 0xE43D5870, 0x2894,
|
||||
0x11DE, 0x8C, 0x30, 0x08, 0x00, 0x20, 0x0C, 0x9A, 0x66);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkScreenPreviewCallback, 0xB1D3F49A,
|
||||
0x85FE, 0x4C5D, 0x95, 0xC8, 0x0B, 0x5D, 0x5D, 0xCC, 0xD4, 0x38);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkGLScreenPreviewHelper, 0x504E2209,
|
||||
0xCAC7, 0x4C1A, 0x9F, 0xB4, 0xC5, 0xBB, 0x62, 0x74, 0xD2, 0x2F);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkConfiguration, 0xC679A35B, 0x610C,
|
||||
0x4D09, 0xB7, 0x48, 0x1D, 0x04, 0x78, 0x10, 0x0F, 0xC0);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkAttributes, 0xABC11843, 0xD966, 0x44CB,
|
||||
0x96, 0xE2, 0xA1, 0xCB, 0x5D, 0x31, 0x35, 0xC4);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkKeyer, 0x89AFCAF5, 0x65F8, 0x421E, 0x98,
|
||||
0xF7, 0x96, 0xFE, 0x5F, 0x5B, 0xFB, 0xA3);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkVideoConversion, 0x3BBCB8A2, 0xDA2C,
|
||||
0x42D9, 0xB5, 0xD8, 0x88, 0x08, 0x36, 0x44, 0xE9, 0x9A);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkDeckControlStatusCallback, 0xE5F693C1,
|
||||
0x4283, 0x4716, 0xB1, 0x8F, 0xC1, 0x43, 0x15, 0x21, 0x95, 0x5B);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkDeckControl, 0x522A9E39, 0x0F3C, 0x4742,
|
||||
0x94, 0xEE, 0xD8, 0x0D, 0xE3, 0x35, 0xDA, 0x1D);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (CLSID, CLSID_CDeckLinkIterator, 0xD9EDA3B3, 0x2887,
|
||||
0x41FA, 0xB7, 0x24, 0x01, 0x7C, 0xF1, 0xEB, 0x1D, 0x37);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (CLSID, CLSID_CDeckLinkAPIInformation, 0x263CA19F, 0xED09,
|
||||
0x482E, 0x9F, 0x9D, 0x84, 0x00, 0x57, 0x83, 0xA2, 0x37);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (CLSID, CLSID_CDeckLinkGLScreenPreviewHelper, 0xF63E77C7,
|
||||
0xB655, 0x4A4A, 0x9A, 0xD0, 0x3C, 0xA8, 0x5D, 0x39, 0x43, 0x43);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (CLSID, CLSID_CDeckLinkVideoConversion, 0x7DBBBB11, 0x5B7B,
|
||||
0x467D, 0xAE, 0xA4, 0xCE, 0xA4, 0x68, 0xFD, 0x36, 0x8C);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkDeckControl_v7_9, 0xA4D81043, 0x0619,
|
||||
0x42B7, 0x8E, 0xD6, 0x60, 0x2D, 0x29, 0x04, 0x1D, 0xF7);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkDisplayModeIterator_v7_6, 0x455D741F,
|
||||
0x1779, 0x4800, 0x86, 0xF5, 0x0B, 0x5D, 0x13, 0xD7, 0x97, 0x51);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkDisplayMode_v7_6, 0x87451E84, 0x2B7E,
|
||||
0x439E, 0xA6, 0x29, 0x43, 0x93, 0xEA, 0x4A, 0x85, 0x50);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkOutput_v7_6, 0x29228142, 0xEB8C, 0x4141,
|
||||
0xA6, 0x21, 0xF7, 0x40, 0x26, 0x45, 0x09, 0x55);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkInput_v7_6, 0x300C135A, 0x9F43, 0x48E2,
|
||||
0x99, 0x06, 0x6D, 0x79, 0x11, 0xD9, 0x3C, 0xF1);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkTimecode_v7_6, 0xEFB9BCA6, 0xA521,
|
||||
0x44F7, 0xBD, 0x69, 0x23, 0x32, 0xF2, 0x4D, 0x9E, 0xE6);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkVideoFrame_v7_6, 0xA8D8238E, 0x6B18,
|
||||
0x4196, 0x99, 0xE1, 0x5A, 0xF7, 0x17, 0xB8, 0x3D, 0x32);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkMutableVideoFrame_v7_6, 0x46FCEE00,
|
||||
0xB4E6, 0x43D0, 0x91, 0xC0, 0x02, 0x3A, 0x7F, 0xCE, 0xB3, 0x4F);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkVideoInputFrame_v7_6, 0x9A74FA41,
|
||||
0xAE9F, 0x47AC, 0x8C, 0xF4, 0x01, 0xF4, 0x2D, 0xD5, 0x99, 0x65);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkScreenPreviewCallback_v7_6, 0x373F499D,
|
||||
0x4B4D, 0x4518, 0xAD, 0x22, 0x63, 0x54, 0xE5, 0xA5, 0x82, 0x5E);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkGLScreenPreviewHelper_v7_6, 0xBA575CD9,
|
||||
0xA15E, 0x497B, 0xB2, 0xC2, 0xF9, 0xAF, 0xE7, 0xBE, 0x4E, 0xBA);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkVideoConversion_v7_6, 0x3EB504C9,
|
||||
0xF97D, 0x40FE, 0xA1, 0x58, 0xD4, 0x07, 0xD4, 0x8C, 0xB5, 0x3B);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkConfiguration_v7_6, 0xB8EAD569, 0xB764,
|
||||
0x47F0, 0xA7, 0x3F, 0xAE, 0x40, 0xDF, 0x6C, 0xBF, 0x10);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkVideoOutputCallback_v7_6, 0xE763A626,
|
||||
0x4A3C, 0x49D1, 0xBF, 0x13, 0xE7, 0xAD, 0x36, 0x92, 0xAE, 0x52);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkInputCallback_v7_6, 0x31D28EE7, 0x88B6,
|
||||
0x4CB1, 0x89, 0x7A, 0xCD, 0xBF, 0x79, 0xA2, 0x64, 0x14);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (CLSID, CLSID_CDeckLinkGLScreenPreviewHelper_v7_6,
|
||||
0xD398CEE7, 0x4434, 0x4CA3, 0x9B, 0xA6, 0x5A, 0xE3, 0x45, 0x56, 0xB9,
|
||||
0x05);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (CLSID, CLSID_CDeckLinkVideoConversion_v7_6, 0xFFA84F77,
|
||||
0x73BE, 0x4FB7, 0xB0, 0x3E, 0xB5, 0xE4, 0x4B, 0x9F, 0x75, 0x9B);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkInputCallback_v7_3, 0xFD6F311D, 0x4D00,
|
||||
0x444B, 0x9E, 0xD4, 0x1F, 0x25, 0xB5, 0x73, 0x0A, 0xD0);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkOutput_v7_3, 0x271C65E3, 0xC323, 0x4344,
|
||||
0xA3, 0x0F, 0xD9, 0x08, 0xBC, 0xB2, 0x0A, 0xA3);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkInput_v7_3, 0x4973F012, 0x9925, 0x458C,
|
||||
0x87, 0x1C, 0x18, 0x77, 0x4C, 0xDB, 0xBE, 0xCB);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkVideoInputFrame_v7_3, 0xCF317790,
|
||||
0x2894, 0x11DE, 0x8C, 0x30, 0x08, 0x00, 0x20, 0x0C, 0x9A, 0x66);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkDisplayModeIterator_v7_1, 0xB28131B6,
|
||||
0x59AC, 0x4857, 0xB5, 0xAC, 0xCD, 0x75, 0xD5, 0x88, 0x3E, 0x2F);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkDisplayMode_v7_1, 0xAF0CD6D5, 0x8376,
|
||||
0x435E, 0x84, 0x33, 0x54, 0xF9, 0xDD, 0x53, 0x0A, 0xC3);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkVideoFrame_v7_1, 0x333F3A10, 0x8C2D,
|
||||
0x43CF, 0xB7, 0x9D, 0x46, 0x56, 0x0F, 0xEE, 0xA1, 0xCE);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkVideoInputFrame_v7_1, 0xC8B41D95,
|
||||
0x8848, 0x40EE, 0x9B, 0x37, 0x6E, 0x34, 0x17, 0xFB, 0x11, 0x4B);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkAudioInputPacket_v7_1, 0xC86DE4F6,
|
||||
0xA29F, 0x42E3, 0xAB, 0x3A, 0x13, 0x63, 0xE2, 0x9F, 0x07, 0x88);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkVideoOutputCallback_v7_1, 0xEBD01AFA,
|
||||
0xE4B0, 0x49C6, 0xA0, 0x1D, 0xED, 0xB9, 0xD1, 0xB5, 0x5F, 0xD9);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkInputCallback_v7_1, 0x7F94F328, 0x5ED4,
|
||||
0x4E9F, 0x97, 0x29, 0x76, 0xA8, 0x6B, 0xDC, 0x99, 0xCC);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkOutput_v7_1, 0xAE5B3E9B, 0x4E1E, 0x4535,
|
||||
0xB6, 0xE8, 0x48, 0x0F, 0xF5, 0x2F, 0x6C, 0xE5);
|
||||
|
||||
|
||||
MIDL_DEFINE_GUID (IID, IID_IDeckLinkInput_v7_1, 0x2B54EDEF, 0x5B32, 0x429F,
|
||||
0xBA, 0x11, 0xBB, 0x99, 0x05, 0x96, 0xEA, 0xCD);
|
||||
|
||||
#undef MIDL_DEFINE_GUID
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
|
@ -145,11 +145,6 @@ VALGRIND_TO_FIX = \
|
|||
VALGRIND_TESTS_DISABLE = \
|
||||
$(VALGRIND_TO_FIX)
|
||||
|
||||
if BUILD_EXPERIMENTAL
|
||||
EXPERIMENTAL_CHECKS=elements/camerabin2 \
|
||||
elements/viewfinderbin
|
||||
endif
|
||||
|
||||
# these tests don't even pass
|
||||
# neon: too flaky (almost always fails 'the first time')
|
||||
noinst_PROGRAMS = \
|
||||
|
@ -171,6 +166,7 @@ check_PROGRAMS = \
|
|||
elements/asfmux \
|
||||
elements/baseaudiovisualizer \
|
||||
elements/camerabin \
|
||||
elements/camerabin2 \
|
||||
elements/dataurisrc \
|
||||
elements/legacyresample \
|
||||
$(check_jifmux) \
|
||||
|
@ -189,6 +185,7 @@ check_PROGRAMS = \
|
|||
elements/rtpmux \
|
||||
$(check_schro) \
|
||||
$(check_vp8) \
|
||||
elements/viewfinderbin \
|
||||
$(check_zbar) \
|
||||
$(check_orc) \
|
||||
$(EXPERIMENTAL_CHECKS)
|
||||
|
|
|
@ -174,6 +174,9 @@ static GstTagList *tags_found;
|
|||
static gboolean
|
||||
validity_bus_cb (GstBus * bus, GstMessage * message, gpointer data);
|
||||
|
||||
static GstMessage *wait_for_element_message (GstElement * camera,
|
||||
const gchar * name, GstClockTime timeout);
|
||||
|
||||
static void
|
||||
validate_taglist_foreach (const GstTagList * list, const gchar * tag,
|
||||
gpointer user_data)
|
||||
|
@ -279,8 +282,14 @@ capture_bus_cb (GstBus * bus, GstMessage * message, gpointer data)
|
|||
}
|
||||
|
||||
static void
|
||||
check_preview_image (void)
|
||||
check_preview_image (GstElement * camera)
|
||||
{
|
||||
if (!preview_buffer && camera) {
|
||||
GstMessage *msg = wait_for_element_message (camera,
|
||||
GST_BASE_CAMERA_SRC_PREVIEW_MESSAGE_NAME, GST_CLOCK_TIME_NONE);
|
||||
fail_unless (msg != NULL);
|
||||
gst_message_unref (msg);
|
||||
}
|
||||
fail_unless (preview_buffer != NULL);
|
||||
if (preview_caps) {
|
||||
fail_unless (GST_BUFFER_CAPS (preview_buffer) != NULL);
|
||||
|
@ -324,6 +333,7 @@ setup_wrappercamerabinsrc_videotestsrc (void)
|
|||
GstElement *fakevideosink;
|
||||
GstElement *src;
|
||||
GstElement *testsrc;
|
||||
GstElement *audiosrc;
|
||||
|
||||
GST_INFO ("init");
|
||||
|
||||
|
@ -336,16 +346,19 @@ setup_wrappercamerabinsrc_videotestsrc (void)
|
|||
fakevideosink = gst_element_factory_make ("fakesink", NULL);
|
||||
src = gst_element_factory_make ("wrappercamerabinsrc", NULL);
|
||||
testsrc = gst_element_factory_make ("videotestsrc", NULL);
|
||||
audiosrc = gst_element_factory_make ("audiotestsrc", NULL);
|
||||
|
||||
preview_caps = gst_caps_new_simple ("video/x-raw-rgb", "width", G_TYPE_INT,
|
||||
320, "height", G_TYPE_INT, 240, NULL);
|
||||
|
||||
g_object_set (G_OBJECT (testsrc), "is-live", TRUE, "peer-alloc", FALSE, NULL);
|
||||
g_object_set (G_OBJECT (audiosrc), "is-live", TRUE, NULL);
|
||||
g_object_set (G_OBJECT (src), "video-source", testsrc, NULL);
|
||||
g_object_set (G_OBJECT (camera), "camera-source", src, "preview-caps",
|
||||
preview_caps, NULL);
|
||||
preview_caps, "audio-source", audiosrc, NULL);
|
||||
gst_object_unref (src);
|
||||
gst_object_unref (testsrc);
|
||||
gst_object_unref (audiosrc);
|
||||
|
||||
vfbin = gst_bin_get_by_name (GST_BIN (camera), "vf-bin");
|
||||
g_object_set (G_OBJECT (vfbin), "video-sink", fakevideosink, NULL);
|
||||
|
@ -530,9 +543,52 @@ filter_buffer_count (GstPad * pad, GstMiniObject * obj, gpointer data)
|
|||
return TRUE;
|
||||
}
|
||||
|
||||
static GstMessage *
|
||||
wait_for_element_message (GstElement * camera, const gchar * name,
|
||||
GstClockTime timeout)
|
||||
{
|
||||
GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (camera));
|
||||
GstMessage *msg;
|
||||
|
||||
while (1) {
|
||||
msg = gst_bus_timed_pop_filtered (bus, timeout, GST_MESSAGE_ERROR |
|
||||
GST_MESSAGE_EOS | GST_MESSAGE_ELEMENT);
|
||||
|
||||
if (msg) {
|
||||
if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ELEMENT) {
|
||||
const GstStructure *st = gst_message_get_structure (msg);
|
||||
if (gst_structure_has_name (st,
|
||||
GST_BASE_CAMERA_SRC_PREVIEW_MESSAGE_NAME)) {
|
||||
GstBuffer *buf;
|
||||
const GValue *value;
|
||||
|
||||
value = gst_structure_get_value (st, "buffer");
|
||||
fail_unless (value != NULL);
|
||||
buf = gst_value_get_buffer (value);
|
||||
|
||||
if (preview_buffer)
|
||||
gst_buffer_unref (preview_buffer);
|
||||
preview_buffer = gst_buffer_ref (buf);
|
||||
}
|
||||
|
||||
if (gst_structure_has_name (st, name))
|
||||
break;
|
||||
} else {
|
||||
gst_message_unref (msg);
|
||||
msg = NULL;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
gst_object_unref (bus);
|
||||
return msg;
|
||||
}
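wait_for_element_message() above drains the pipeline bus, caching any preview-image element message it passes and returning once a message with the requested name arrives. For context, a hedged sketch of how an element would post a message this helper can consume (it mirrors the "buffer" field the test reads back; it is not quoted from the camerabin2 sources):

/* Hedged sketch: post an element message carrying a preview buffer
 * (GStreamer 0.10 API).  The message name constant comes from the
 * sources above; the helper function itself is illustrative. */
static void
post_preview_message (GstElement * element, GstBuffer * preview)
{
  GstStructure *s;
  GstMessage *msg;

  s = gst_structure_new (GST_BASE_CAMERA_SRC_PREVIEW_MESSAGE_NAME,
      "buffer", GST_TYPE_BUFFER, preview, NULL);
  msg = gst_message_new_element (GST_OBJECT (element), s);
  gst_element_post_message (element, msg);
}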
|
||||
|
||||
GST_START_TEST (test_single_image_capture)
|
||||
{
|
||||
gboolean idle;
|
||||
GstMessage *msg;
|
||||
if (!camera)
|
||||
return;
|
||||
|
||||
|
@ -553,11 +609,12 @@ GST_START_TEST (test_single_image_capture)
|
|||
fail_unless (idle);
|
||||
g_signal_emit_by_name (camera, "start-capture", NULL);
|
||||
|
||||
g_timeout_add_seconds (3, (GSourceFunc) g_main_loop_quit, main_loop);
|
||||
g_main_loop_run (main_loop);
|
||||
msg = wait_for_element_message (camera, "image-done", GST_CLOCK_TIME_NONE);
|
||||
fail_unless (msg != NULL);
|
||||
gst_message_unref (msg);
|
||||
|
||||
/* check that we got a preview image */
|
||||
check_preview_image ();
|
||||
check_preview_image (camera);
|
||||
|
||||
g_object_get (camera, "idle", &idle, NULL);
|
||||
fail_unless (idle);
|
||||
|
@ -595,6 +652,7 @@ GST_START_TEST (test_multiple_image_captures)
|
|||
GST_INFO ("starting capture");
|
||||
|
||||
for (i = 0; i < 3; i++) {
|
||||
GstMessage *msg;
|
||||
GstCaps *caps;
|
||||
|
||||
caps = gst_caps_new_simple ("video/x-raw-rgb", "width", G_TYPE_INT,
|
||||
|
@ -605,13 +663,13 @@ GST_START_TEST (test_multiple_image_captures)
|
|||
|
||||
g_signal_emit_by_name (camera, "start-capture", NULL);
|
||||
|
||||
g_timeout_add_seconds (3, (GSourceFunc) g_main_loop_quit, main_loop);
|
||||
g_main_loop_run (main_loop);
|
||||
msg = wait_for_element_message (camera, "image-done", GST_CLOCK_TIME_NONE);
|
||||
fail_unless (msg != NULL);
|
||||
gst_message_unref (msg);
|
||||
|
||||
check_preview_image ();
|
||||
check_preview_image (camera);
|
||||
}
|
||||
|
||||
g_usleep (G_USEC_PER_SEC * 3);
|
||||
g_object_get (camera, "idle", &idle, NULL);
|
||||
fail_unless (idle);
|
||||
gst_element_set_state (GST_ELEMENT (camera), GST_STATE_NULL);
|
||||
|
@ -625,6 +683,7 @@ GST_END_TEST;
|
|||
|
||||
GST_START_TEST (test_single_video_recording)
|
||||
{
|
||||
GstMessage *msg;
|
||||
gboolean idle;
|
||||
if (!camera)
|
||||
return;
|
||||
|
@ -657,9 +716,11 @@ GST_START_TEST (test_single_video_recording)
|
|||
|
||||
g_signal_emit_by_name (camera, "stop-capture", NULL);
|
||||
|
||||
check_preview_image ();
|
||||
check_preview_image (camera);
|
||||
|
||||
g_usleep (G_USEC_PER_SEC * 3);
|
||||
msg = wait_for_element_message (camera, "video-done", GST_CLOCK_TIME_NONE);
|
||||
fail_unless (msg != NULL);
|
||||
gst_message_unref (msg);
|
||||
|
||||
g_object_get (camera, "idle", &idle, NULL);
|
||||
fail_unless (idle);
|
||||
|
@ -697,6 +758,7 @@ GST_START_TEST (test_multiple_video_recordings)
|
|||
g_object_get (camera, "idle", &idle, NULL);
|
||||
fail_unless (idle);
|
||||
for (i = 0; i < 3; i++) {
|
||||
GstMessage *msg;
|
||||
GstCaps *caps;
|
||||
|
||||
caps = gst_caps_new_simple ("video/x-raw-rgb", "width", G_TYPE_INT,
|
||||
|
@ -718,10 +780,12 @@ GST_START_TEST (test_multiple_video_recordings)
|
|||
g_main_loop_run (main_loop);
|
||||
g_signal_emit_by_name (camera, "stop-capture", NULL);
|
||||
|
||||
check_preview_image ();
|
||||
msg = wait_for_element_message (camera, "video-done", GST_CLOCK_TIME_NONE);
|
||||
fail_unless (msg != NULL);
|
||||
gst_message_unref (msg);
|
||||
|
||||
check_preview_image (camera);
|
||||
|
||||
g_timeout_add_seconds (3, (GSourceFunc) g_main_loop_quit, main_loop);
|
||||
g_main_loop_run (main_loop);
|
||||
g_object_get (camera, "idle", &idle, NULL);
|
||||
fail_unless (idle);
|
||||
}
|
||||
|
@ -753,6 +817,7 @@ GST_START_TEST (test_image_video_cycle)
|
|||
|
||||
GST_INFO ("starting capture");
|
||||
for (i = 0; i < 2; i++) {
|
||||
GstMessage *msg;
|
||||
g_object_get (camera, "idle", &idle, NULL);
|
||||
fail_unless (idle);
|
||||
|
||||
|
@ -761,10 +826,12 @@ GST_START_TEST (test_image_video_cycle)
|
|||
g_object_set (camera, "location", make_test_file_name (IMAGE_FILENAME, i),
|
||||
NULL);
|
||||
g_signal_emit_by_name (camera, "start-capture", NULL);
|
||||
g_timeout_add_seconds (3, (GSourceFunc) g_main_loop_quit, main_loop);
|
||||
g_main_loop_run (main_loop);
|
||||
|
||||
check_preview_image ();
|
||||
msg = wait_for_element_message (camera, "image-done", GST_CLOCK_TIME_NONE);
|
||||
fail_unless (msg != NULL);
|
||||
gst_message_unref (msg);
|
||||
|
||||
check_preview_image (camera);
|
||||
|
||||
/* now go to video */
|
||||
g_object_set (camera, "mode", 2, NULL);
|
||||
|
@ -776,7 +843,11 @@ GST_START_TEST (test_image_video_cycle)
|
|||
g_main_loop_run (main_loop);
|
||||
g_signal_emit_by_name (camera, "stop-capture", NULL);
|
||||
|
||||
check_preview_image ();
|
||||
msg = wait_for_element_message (camera, "video-done", GST_CLOCK_TIME_NONE);
|
||||
fail_unless (msg != NULL);
|
||||
gst_message_unref (msg);
|
||||
|
||||
check_preview_image (camera);
|
||||
|
||||
/* wait for capture to finish */
|
||||
g_usleep (G_USEC_PER_SEC);
|
||||
|
@ -817,6 +888,7 @@ GST_START_TEST (test_image_capture_previews)
|
|||
GST_INFO ("starting capture");
|
||||
|
||||
for (i = 0; i < 3; i++) {
|
||||
GstMessage *msg;
|
||||
GstCaps *caps;
|
||||
|
||||
caps = gst_caps_new_simple ("video/x-raw-rgb", "width", G_TYPE_INT,
|
||||
|
@ -828,10 +900,11 @@ GST_START_TEST (test_image_capture_previews)
|
|||
|
||||
g_signal_emit_by_name (camera, "start-capture", NULL);
|
||||
|
||||
g_timeout_add_seconds (3, (GSourceFunc) g_main_loop_quit, main_loop);
|
||||
g_main_loop_run (main_loop);
|
||||
msg = wait_for_element_message (camera, "image-done", GST_CLOCK_TIME_NONE);
|
||||
fail_unless (msg != NULL);
|
||||
gst_message_unref (msg);
|
||||
|
||||
check_preview_image ();
|
||||
check_preview_image (camera);
|
||||
|
||||
if (preview_buffer)
|
||||
gst_buffer_unref (preview_buffer);
|
||||
|
@ -896,13 +969,15 @@ GST_START_TEST (test_image_capture_with_tags)
|
|||
GST_INFO ("starting capture");
|
||||
|
||||
for (i = 0; i < 3; i++) {
|
||||
GstMessage *msg;
|
||||
gst_tag_setter_merge_tags (GST_TAG_SETTER (camera), taglists[i],
|
||||
GST_TAG_MERGE_REPLACE);
|
||||
|
||||
g_signal_emit_by_name (camera, "start-capture", NULL);
|
||||
|
||||
g_timeout_add_seconds (3, (GSourceFunc) g_main_loop_quit, main_loop);
|
||||
g_main_loop_run (main_loop);
|
||||
msg = wait_for_element_message (camera, "image-done", GST_CLOCK_TIME_NONE);
|
||||
fail_unless (msg != NULL);
|
||||
gst_message_unref (msg);
|
||||
}
|
||||
|
||||
gst_element_set_state (GST_ELEMENT (camera), GST_STATE_NULL);
|
||||
|
@ -966,6 +1041,8 @@ GST_START_TEST (test_video_capture_with_tags)
|
|||
GST_INFO ("starting capture");
|
||||
|
||||
for (i = 0; i < 3; i++) {
|
||||
GstMessage *msg;
|
||||
|
||||
gst_tag_setter_merge_tags (GST_TAG_SETTER (camera), taglists[i],
|
||||
GST_TAG_MERGE_REPLACE);
|
||||
|
||||
|
@ -975,7 +1052,10 @@ GST_START_TEST (test_video_capture_with_tags)
|
|||
g_main_loop_run (main_loop);
|
||||
|
||||
g_signal_emit_by_name (camera, "stop-capture", NULL);
|
||||
g_usleep (G_USEC_PER_SEC * 3);
|
||||
|
||||
msg = wait_for_element_message (camera, "video-done", GST_CLOCK_TIME_NONE);
|
||||
fail_unless (msg != NULL);
|
||||
gst_message_unref (msg);
|
||||
}
|
||||
|
||||
gst_element_set_state (GST_ELEMENT (camera), GST_STATE_NULL);
|
||||
|
@ -1037,6 +1117,7 @@ GST_END_TEST;
|
|||
|
||||
GST_START_TEST (test_idle_property)
|
||||
{
|
||||
GstMessage *msg;
|
||||
gboolean idle;
|
||||
if (!camera)
|
||||
return;
|
||||
|
@ -1073,9 +1154,12 @@ GST_START_TEST (test_idle_property)
|
|||
|
||||
g_signal_emit_by_name (camera, "stop-capture", NULL);
|
||||
|
||||
check_preview_image ();
|
||||
msg = wait_for_element_message (camera, "video-done", GST_CLOCK_TIME_NONE);
|
||||
fail_unless (msg != NULL);
|
||||
gst_message_unref (msg);
|
||||
|
||||
check_preview_image (camera);
|
||||
|
||||
g_usleep (3 * G_USEC_PER_SEC);
|
||||
g_object_get (camera, "idle", &idle, NULL);
|
||||
fail_unless (idle);
|
||||
|
||||
|
@ -1144,7 +1228,7 @@ GST_START_TEST (test_image_custom_filter)
|
|||
g_main_loop_run (main_loop);
|
||||
|
||||
/* check that we got a preview image */
|
||||
check_preview_image ();
|
||||
check_preview_image (camera);
|
||||
|
||||
gst_element_set_state (GST_ELEMENT (camera), GST_STATE_NULL);
|
||||
check_file_validity (IMAGE_FILENAME, 0, NULL, 0, 0, NO_AUDIO);
|
||||
|
@ -1225,7 +1309,7 @@ GST_START_TEST (test_video_custom_filter)
|
|||
g_signal_emit_by_name (camera, "stop-capture", NULL);
|
||||
|
||||
/* check that we got a preview image */
|
||||
check_preview_image ();
|
||||
check_preview_image (camera);
|
||||
|
||||
gst_element_set_state (GST_ELEMENT (camera), GST_STATE_NULL);
|
||||
check_file_validity (VIDEO_FILENAME, 0, NULL, 0, 0, WITH_AUDIO);
|
||||
|
@ -1283,6 +1367,7 @@ GST_START_TEST (test_image_location_switching)
|
|||
glong notify_id;
|
||||
GstCaps *caps;
|
||||
GstElement *src;
|
||||
GstMessage *msg;
|
||||
|
||||
if (!camera)
|
||||
return;
|
||||
|
@ -1319,7 +1404,10 @@ GST_START_TEST (test_image_location_switching)
|
|||
g_idle_add (image_location_switch_do_capture, filenames);
|
||||
g_main_loop_run (main_loop);
|
||||
|
||||
g_usleep (G_USEC_PER_SEC * 3);
|
||||
msg = wait_for_element_message (camera, "image-done", GST_CLOCK_TIME_NONE);
|
||||
fail_unless (msg != NULL);
|
||||
gst_message_unref (msg);
|
||||
|
||||
gst_element_set_state (GST_ELEMENT (camera), GST_STATE_NULL);
|
||||
|
||||
for (i = 0; i < LOCATION_SWITCHING_FILENAMES_COUNT; i++) {
|
||||
|
|
|
@ -10,12 +10,6 @@ else
|
|||
DIRECTFB_DIR=
|
||||
endif
|
||||
|
||||
if BUILD_EXPERIMENTAL
|
||||
CAMERABIN2=camerabin2
|
||||
else
|
||||
CAMERABIN2=
|
||||
endif
|
||||
|
||||
OPENCV_EXAMPLES=opencv
|
||||
|
||||
SUBDIRS= $(DIRECTFB_DIR) $(GTK_EXAMPLES) $(OPENCV_EXAMPLES)
|
||||
|
|