Merge branch 'master' into 0.11

Conflicts:
	ext/ogg/gstoggdemux.c
	ext/pango/gsttextoverlay.c
	gst-libs/gst/audio/gstaudioencoder.c
	gst-libs/gst/audio/gstbaseaudiosrc.c
	gst/playback/gstsubtitleoverlay.c
	gst/videorate/gstvideorate.c
Edward Hervey 2011-09-23 18:27:11 +02:00
commit 17bfba09f1
25 changed files with 1917 additions and 372 deletions


@ -255,40 +255,20 @@ for baseline profile, and software fallback for main/high profile; or a DSP
codec only supporting certain resolutions, with a software fallback for
unusual resolutions). So if decodebin just plugged the highest-ranking
decoder, that decoder might not be able to handle the actual stream later
on, which would yield in an error (this is a data flow error then which would
on, which would yield an error (this is a data flow error then which would
be hard to intercept and avoid in decodebin). In other words, we can't solve
this issue by plugging a decoder right away with the parser.
So decodebin need to communicate to the parser the set of available decoder
So decodebin needs to communicate to the parser the set of available decoder
caps (which would contain the relevant capabilities/restrictions such as
supported profiles, resolutions, etc.), after the usual "autoplug-*" signal
filtering/sorting of course.
This could be done in multiple ways, e.g.
This is done by plugging a capsfilter element right after the parser, and
constructing a set of filter caps from the list of available decoders (one
appends at the end just the name(s) of the caps structures from the parser
pad template caps to function as an 'ANY other' caps equivalent). This lets
the parser negotiate to a supported stream format in the same way as with
the static pipeline mentioned above, but of course incurs some overhead
through the additional capsfilter element.
- plug a capsfilter element right after the parser, and construct
a set of filter caps from the list of available decoders (one
could append at the end just the name(s) of the caps structures
from the parser pad template caps to function as an 'ANY other'
caps equivalent). This would let the parser negotiate to a
supported stream format in the same way as with the static
pipeline mentioned above, but of course incur some overhead
through the additional capsfilter element.
- one could add a filter-caps equivalent property to the parsers
(and/or GstBaseParse class) (e.g. "prefered-caps" or so).
- one could add some kind of "fixate-caps" or "fixate-format"
signal to such parsers
Alternatively, one could simply make all decoders incorporate parsers, so
that always all formats are supported. This is problematic for other reasons
though (e.g. we would not be able to detect the profile in all cases then
before plugging a decoder, which would make it hard to just play the audio
part of a stream and not the video if a suitable decoder was missing, for
example).
Additional considerations: the same problem exists with sinks that support
non-raw formats. Consider, for example, an audio sink that accepts DTS audio,
but only the 14-bit variant, not the 16-bit variant (or only native endianness).
Ideally dcaparse would convert into the required stream format here.
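
To make the capsfilter approach concrete, here is a minimal hypothetical sketch (not code from this commit; the helper name, the decoder factory list and the "video/x-h264" structure name are assumptions for illustration) of how filter caps could be built from the available decoder factories and applied right after the parser with 0.10/0.11-era API:

/* Hypothetical sketch: build filter caps from a list of decoder factories
 * and plug a capsfilter after the parser. */
static GstElement *
plug_capsfilter_after_parser (GstBin * bin, GstElement * parser,
    GList * decoder_factories)
{
  GstCaps *filter_caps = gst_caps_new_empty ();
  GstElement *filter;
  GList *l;

  /* collect the sink caps of every decoder that survived the usual
   * "autoplug-*" filtering/sorting */
  for (l = decoder_factories; l; l = l->next) {
    GstElementFactory *factory = l->data;
    const GList *t;

    for (t = gst_element_factory_get_static_pad_templates (factory); t;
        t = t->next) {
      GstStaticPadTemplate *templ = t->data;

      if (templ->direction == GST_PAD_SINK)
        gst_caps_append (filter_caps,
            gst_static_pad_template_get_caps (templ));
    }
  }

  /* 'ANY other' equivalent: append just the structure name(s) from the
   * parser's source pad template caps, without any fields */
  gst_caps_append_structure (filter_caps,
      gst_structure_new ("video/x-h264", NULL));

  filter = gst_element_factory_make ("capsfilter", NULL);
  g_object_set (filter, "caps", filter_caps, NULL);
  gst_caps_unref (filter_caps);

  gst_bin_add (bin, filter);
  gst_element_link (parser, filter);

  /* autoplugging then continues from the capsfilter's source pad */
  return filter;
}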

File diff suppressed because it is too large


@ -113,6 +113,10 @@ struct _GstOggPad
gboolean is_eos;
gboolean added;
/* push mode seeking */
GstClockTime push_kf_time;
GstClockTime push_sync_time;
};
struct _GstOggPadClass
@ -161,6 +165,29 @@ struct _GstOggDemux
gint64 basetime;
gint64 prestime;
/* push mode seeking support */
GMutex *push_lock; /* we need the lock to protect the push mode variables */
gint64 push_byte_offset; /* where we are in the stream, in bytes */
gint64 push_byte_length; /* length in bytes of the stream, -1 if unknown */
GstClockTime push_time_length; /* length in time of the stream */
GstClockTime push_start_time; /* start time of the stream */
GstClockTime push_time_offset; /* where we are in the stream, in time */
enum { PUSH_PLAYING, PUSH_DURATION, PUSH_BISECT1, PUSH_LINEAR1, PUSH_BISECT2, PUSH_LINEAR2 } push_state;
GstClockTime push_seek_time_original_target;
GstClockTime push_seek_time_target;
gint64 push_last_seek_offset;
GstClockTime push_last_seek_time;
gint64 push_offset0, push_offset1; /* bisection search offset bounds */
GstClockTime push_time0, push_time1; /* bisection search time bounds */
double push_seek_rate;
GstSeekFlags push_seek_flags;
GstEvent *push_mode_seek_delayed_event;
gboolean push_disable_seeking;
gint push_bisection_steps[2];
/* ogg stuff */
ogg_sync_state sync;
};


@ -1153,13 +1153,23 @@ gst_ogg_map_add_fisbone (GstOggStream * pad, GstOggStream * skel_pad,
pad->have_fisbone = TRUE;
/* we just overwrite whatever was set before by the format-specific setup */
pad->granulerate_n = GST_READ_UINT64_LE (data);
pad->granulerate_d = GST_READ_UINT64_LE (data + 8);
/* We don't overwrite whatever was set before by the format-specific
setup: skeleton contains wrong information sometimes, and the codec
headers are authoritative.
So we only gather information that was not already filled out by
the mapper setup. This should hopefully allow handling unknown
streams a bit better, while not trashing correct setup from bad
skeleton data. */
if (pad->granulerate_n == 0 || pad->granulerate_d == 0) {
pad->granulerate_n = GST_READ_UINT64_LE (data);
pad->granulerate_d = GST_READ_UINT64_LE (data + 8);
}
if (pad->granuleshift < 0) {
pad->granuleshift = GST_READ_UINT8 (data + 28);
}
start_granule = GST_READ_UINT64_LE (data + 16);
pad->preroll = GST_READ_UINT32_LE (data + 24);
pad->granuleshift = GST_READ_UINT8 (data + 28);
start_time = granulepos_to_granule_default (pad, start_granule);


@ -999,6 +999,8 @@ gst_base_text_overlay_src_query (GstPad * pad, GstQuery * query)
GstBaseTextOverlay *overlay = NULL;
overlay = GST_BASE_TEXT_OVERLAY (gst_pad_get_parent (pad));
if (G_UNLIKELY (!overlay))
return FALSE;
ret = gst_pad_peer_query (overlay->video_sinkpad, query);
@ -1014,6 +1016,10 @@ gst_base_text_overlay_src_event (GstPad * pad, GstEvent * event)
GstBaseTextOverlay *overlay = NULL;
overlay = GST_BASE_TEXT_OVERLAY (gst_pad_get_parent (pad));
if (G_UNLIKELY (!overlay)) {
gst_event_unref (event);
return FALSE;
}
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_SEEK:{
@ -1076,6 +1082,8 @@ gst_base_text_overlay_getcaps (GstPad * pad, GstCaps * filter)
GstCaps *caps;
overlay = GST_BASE_TEXT_OVERLAY (gst_pad_get_parent (pad));
if (G_UNLIKELY (!overlay))
return gst_caps_copy (gst_pad_get_pad_template_caps (pad));
if (pad == overlay->srcpad)
otherpad = overlay->video_sinkpad;
@ -2102,6 +2110,8 @@ gst_base_text_overlay_text_pad_link (GstPad * pad, GstPad * peer)
GstBaseTextOverlay *overlay;
overlay = GST_BASE_TEXT_OVERLAY (gst_pad_get_parent (pad));
if (G_UNLIKELY (!overlay))
return GST_PAD_LINK_REFUSED;
GST_DEBUG_OBJECT (overlay, "Text pad linked");
@ -2134,6 +2144,10 @@ gst_base_text_overlay_text_event (GstPad * pad, GstEvent * event)
GstBaseTextOverlay *overlay = NULL;
overlay = GST_BASE_TEXT_OVERLAY (gst_pad_get_parent (pad));
if (G_UNLIKELY (!overlay)) {
gst_event_unref (event);
return FALSE;
}
GST_LOG_OBJECT (pad, "received event %s", GST_EVENT_TYPE_NAME (event));
@ -2224,6 +2238,10 @@ gst_base_text_overlay_video_event (GstPad * pad, GstEvent * event)
GstBaseTextOverlay *overlay = NULL;
overlay = GST_BASE_TEXT_OVERLAY (gst_pad_get_parent (pad));
if (G_UNLIKELY (!overlay)) {
gst_event_unref (event);
return FALSE;
}
GST_DEBUG_OBJECT (pad, "received event %s", GST_EVENT_TYPE_NAME (event));


@ -360,17 +360,17 @@ gst_theora_enc_class_init (GstTheoraEncClass * klass)
THEORA_DEF_VP3_COMPATIBLE,
(GParamFlags) G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_DROP_FRAMES,
g_param_spec_boolean ("drop-frames", "VP3 Compatible",
g_param_spec_boolean ("drop-frames", "Drop Frames",
"Allow or disallow frame dropping",
THEORA_DEF_DROP_FRAMES,
(GParamFlags) G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_CAP_OVERFLOW,
g_param_spec_boolean ("cap-overflow", "VP3 Compatible",
g_param_spec_boolean ("cap-overflow", "Cap Overflow",
"Enable capping of bit reservoir overflows",
THEORA_DEF_CAP_OVERFLOW,
(GParamFlags) G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
g_object_class_install_property (gobject_class, PROP_CAP_UNDERFLOW,
g_param_spec_boolean ("cap-underflow", "VP3 Compatible",
g_param_spec_boolean ("cap-underflow", "Cap Underflow",
"Enable capping of bit reservoir underflows",
THEORA_DEF_CAP_UNDERFLOW,
(GParamFlags) G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));


@ -290,6 +290,14 @@ struct _GstAudioInfo {
#define GST_AUDIO_INFO_DEPTH(i) (GST_AUDIO_FORMAT_INFO_DEPTH((i)->finfo))
#define GST_AUDIO_INFO_BPS(info) (GST_AUDIO_INFO_DEPTH(info) >> 3)
#define GST_AUDIO_INFO_IS_INTEGER(i) (GST_AUDIO_FORMAT_INFO_IS_INTEGER((i)->finfo))
#define GST_AUDIO_INFO_IS_FLOAT(i) (GST_AUDIO_FORMAT_INFO_IS_FLOAT((i)->finfo))
#define GST_AUDIO_INFO_IS_SIGNED(i) (GST_AUDIO_FORMAT_INFO_IS_SIGNED((i)->finfo))
#define GST_AUDIO_INFO_ENDIANNESS(i) (GST_AUDIO_FORMAT_INFO_ENDIANNES((i)->finfo))
#define GST_AUDIO_INFO_IS_LITTLE_ENDIAN(i) (GST_AUDIO_FORMAT_INFO_IS_LITTLE_ENDIAN((i)->finfo))
#define GST_AUDIO_INFO_IS_BIG_ENDIAN(i) (GST_AUDIO_FORMAT_INFO_IS_BIG_ENDIAN((i)->finfo))
#define GST_AUDIO_INFO_FLAGS(info) ((info)->flags)
#define GST_AUDIO_INFO_HAS_DEFAULT_POSITIONS(info) ((info)->flags & GST_AUDIO_FLAG_DEFAULT_POSITIONS)
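A quick, hypothetical usage sketch for these accessors (the caps string and the use of gst_caps_from_string()/gst_audio_info_from_caps() here are illustrative assumptions, not part of this diff):

GstAudioInfo info;
GstCaps *caps;

/* assumed example caps; real code would use caps negotiated on a pad */
caps = gst_caps_from_string ("audio/x-raw, format=(string)S16LE, "
    "rate=(int)44100, channels=(int)2");

if (gst_audio_info_from_caps (&info, caps)) {
  if (GST_AUDIO_INFO_IS_INTEGER (&info) &&
      GST_AUDIO_INFO_IS_LITTLE_ENDIAN (&info))
    g_print ("signed: %d, %d bytes per sample\n",
        GST_AUDIO_INFO_IS_SIGNED (&info), GST_AUDIO_INFO_BPS (&info));
}
gst_caps_unref (caps);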


@ -54,7 +54,7 @@
* </listitem>
* As of configuration stage, and throughout processing, GstAudioDecoder
* provides various (context) parameters, e.g. describing the format of
* output audio data (valid when output caps have been caps) or current parsing state.
* output audio data (valid when output caps have been set) or current parsing state.
* Conversely, subclass can and should configure context to inform
* base class of its expectation w.r.t. buffer handling.
* <listitem>


@ -941,6 +941,8 @@ audio_info_is_equal (GstAudioInfo * from, GstAudioInfo * to)
{
if (from == to)
return TRUE;
if (from->finfo == NULL || to->finfo == NULL)
return FALSE;
if (GST_AUDIO_INFO_FORMAT (from) != GST_AUDIO_INFO_FORMAT (to))
return FALSE;
if (GST_AUDIO_INFO_RATE (from) != GST_AUDIO_INFO_RATE (to))
@ -985,7 +987,7 @@ gst_audio_encoder_sink_setcaps (GstAudioEncoder * enc, GstCaps * caps)
if (!gst_audio_info_from_caps (&state, caps))
goto refuse_caps;
changed = audio_info_is_equal (&state, &ctx->info);
changed = !audio_info_is_equal (&state, &ctx->info);
if (changed) {
GstClockTime old_min_latency;
@ -1086,6 +1088,18 @@ gst_audio_encoder_proxy_getcaps (GstAudioEncoder * enc, GstCaps * caps)
gst_structure_set_value (s, "rate", val);
if ((val = gst_structure_get_value (allowed_s, "channels")))
gst_structure_set_value (s, "channels", val);
/* following might also make sense for some encoded formats,
* e.g. wavpack */
if ((val = gst_structure_get_value (allowed_s, "width")))
gst_structure_set_value (s, "width", val);
if ((val = gst_structure_get_value (allowed_s, "depth")))
gst_structure_set_value (s, "depth", val);
if ((val = gst_structure_get_value (allowed_s, "endianness")))
gst_structure_set_value (s, "endianness", val);
if ((val = gst_structure_get_value (allowed_s, "signed")))
gst_structure_set_value (s, "signed", val);
if ((val = gst_structure_get_value (allowed_s, "channel-positions")))
gst_structure_set_value (s, "channel-positions", val);
gst_caps_merge_structure (filter_caps, s);
}


@ -127,7 +127,11 @@ gst_color_balance_class_init (GstColorBalanceClass * klass)
const GList *
gst_color_balance_list_channels (GstColorBalance * balance)
{
GstColorBalanceClass *klass = GST_COLOR_BALANCE_GET_CLASS (balance);
GstColorBalanceClass *klass;
g_return_val_if_fail (GST_IS_COLOR_BALANCE (balance), NULL);
klass = GST_COLOR_BALANCE_GET_CLASS (balance);
if (klass->list_channels) {
return klass->list_channels (balance);
@ -178,7 +182,11 @@ gint
gst_color_balance_get_value (GstColorBalance * balance,
GstColorBalanceChannel * channel)
{
GstColorBalanceClass *klass = GST_COLOR_BALANCE_GET_CLASS (balance);
GstColorBalanceClass *klass;
g_return_val_if_fail (GST_IS_COLOR_BALANCE (balance), 0);
klass = GST_COLOR_BALANCE_GET_CLASS (balance);
if (klass->get_value) {
return klass->get_value (balance, channel);
@ -200,7 +208,12 @@ gst_color_balance_get_value (GstColorBalance * balance,
GstColorBalanceType
gst_color_balance_get_balance_type (GstColorBalance * balance)
{
GstColorBalanceClass *klass = GST_COLOR_BALANCE_GET_CLASS (balance);
GstColorBalanceClass *klass;
g_return_val_if_fail (GST_IS_COLOR_BALANCE (balance),
GST_COLOR_BALANCE_SOFTWARE);
klass = GST_COLOR_BALANCE_GET_CLASS (balance);
return klass->balance_type;
}
@ -220,6 +233,9 @@ void
gst_color_balance_value_changed (GstColorBalance * balance,
GstColorBalanceChannel * channel, gint value)
{
g_return_if_fail (GST_IS_COLOR_BALANCE (balance));
g_signal_emit (G_OBJECT (balance),
gst_color_balance_signals[VALUE_CHANGED], 0, channel, value);


@ -258,7 +258,7 @@ struct _GstDiscoverer {
struct _GstDiscovererClass {
GObjectClass parentclass;
/*< signals >*/
/* signals */
void (*finished) (GstDiscoverer *discoverer);
void (*starting) (GstDiscoverer *discoverer);
void (*discovered) (GstDiscoverer *discoverer,


@ -177,6 +177,9 @@ static struct rtsp_header rtsp_headers[] = {
{"X-Server-IP-Address", FALSE},
{"X-Sessioncookie", FALSE},
/* Since 0.10.36 */
{"RTCP-Interval", FALSE},
{NULL, FALSE}
};


@ -333,6 +333,9 @@ typedef enum {
GST_RTSP_HDR_X_SERVER_IP_ADDRESS, /* X-Server-IP-Address */
GST_RTSP_HDR_X_SESSIONCOOKIE, /* X-Sessioncookie */
/* Since 0.10.36 */
GST_RTSP_HDR_RTCP_INTERVAL, /* RTCP-Interval */
GST_RTSP_HDR_LAST
} GstRTSPHeaderField;


@ -445,7 +445,8 @@ GType gst_tag_image_type_get_type (void);
/**
* GST_TAG_ID3V2_HEADER_SIZE:
*
* ID3V2 header size considered minimum input for some functions.
* ID3V2 header size considered minimum input for some functions such as
* gst_tag_list_from_id3v2_tag() and gst_tag_get_id3v2_tag_size() for example.
*
* Since: 0.10.36
*/


@ -1174,8 +1174,7 @@ gst_adder_collected (GstCollectPads * pads, gpointer user_data)
if (event) {
if (!gst_pad_push_event (adder->srcpad, event)) {
GST_WARNING_OBJECT (adder->srcpad, "Sending event %p (%s) failed.",
event, GST_EVENT_TYPE_NAME (event));
GST_WARNING_OBJECT (adder->srcpad, "Sending event failed");
}
} else {
GST_WARNING_OBJECT (adder->srcpad, "Creating new segment event for "


@ -426,7 +426,6 @@ static void gst_decode_group_free (GstDecodeGroup * group);
static GstDecodeGroup *gst_decode_group_new (GstDecodeBin * dbin,
GstDecodeChain * chain);
static gboolean gst_decode_chain_is_complete (GstDecodeChain * chain);
static gboolean gst_decode_chain_handle_eos (GstDecodeChain * chain);
static gboolean gst_decode_chain_expose (GstDecodeChain * chain,
GList ** endpads, gboolean * missing_plugin);
static gboolean gst_decode_chain_is_drained (GstDecodeChain * chain);
@ -1340,6 +1339,7 @@ analyze_new_pad (GstDecodeBin * dbin, GstElement * src, GstPad * pad,
GstElementFactory *factory;
const gchar *classification;
gboolean is_parser_converter = FALSE;
gboolean res;
GST_DEBUG_OBJECT (dbin, "Pad %s:%s caps:%" GST_PTR_FORMAT,
GST_DEBUG_PAD_NAME (pad), caps);
@ -1384,10 +1384,17 @@ analyze_new_pad (GstDecodeBin * dbin, GstElement * src, GstPad * pad,
dpad = gst_decode_pad_new (dbin, pad, chain);
/* 1. Emit 'autoplug-continue' the result will tell us if this pads needs
* further autoplugging. */
g_signal_emit (G_OBJECT (dbin),
gst_decode_bin_signals[SIGNAL_AUTOPLUG_CONTINUE], 0, dpad, caps,
&apcontinue);
* further autoplugging. Only do this for fixed caps, for unfixed caps
* we will later come here again from the notify::caps handler. The
* problem with unfixed caps is that we cannot reliably tell if the output
* is e.g. accepted by a sink because only parts of the possible final
* caps might be accepted by the sink. */
if (gst_caps_is_fixed (caps))
g_signal_emit (G_OBJECT (dbin),
gst_decode_bin_signals[SIGNAL_AUTOPLUG_CONTINUE], 0, dpad, caps,
&apcontinue);
else
apcontinue = TRUE;
/* 1.a if autoplug-continue is FALSE or caps is a raw format, goto pad_is_final */
if ((!apcontinue) || are_final_caps (dbin, caps))
@ -1448,7 +1455,7 @@ analyze_new_pad (GstDecodeBin * dbin, GstElement * src, GstPad * pad,
/* At this point we have a potential decoder, but we might not need it
* if it doesn't match the output caps */
if (!dbin->expose_allstreams) {
if (!dbin->expose_allstreams && gst_caps_is_fixed (caps)) {
guint i;
const GList *tmps;
gboolean dontuse = FALSE;
@ -1566,7 +1573,7 @@ analyze_new_pad (GstDecodeBin * dbin, GstElement * src, GstPad * pad,
/* 1.h else continue autoplugging something from the list. */
GST_LOG_OBJECT (pad, "Let's continue discovery on this pad");
connect_pad (dbin, src, dpad, pad, caps, factories, chain);
res = connect_pad (dbin, src, dpad, pad, caps, factories, chain);
/* Need to unref the capsfilter srcpad here if
* we inserted a capsfilter */
@ -1576,6 +1583,9 @@ analyze_new_pad (GstDecodeBin * dbin, GstElement * src, GstPad * pad,
gst_object_unref (dpad);
g_value_array_free (factories);
if (!res)
goto unknown_type;
return;
expose_pad:
@ -3033,95 +3043,152 @@ out:
return complete;
}
static gboolean
drain_and_switch_chains (GstDecodeChain * chain, GstDecodePad * drainpad,
gboolean * last_group, gboolean * drained, gboolean * switched);
/* drain_and_switch_chains/groups:
*
* CALL WITH CHAIN LOCK (or group parent) TAKEN !
*
* Goes down the chains/groups until it finds the chain
* to which the drainpad belongs.
*
* It marks that pad/chain as drained and then will figure
* out which group to switch to or not.
*
* last_group will be set to TRUE if the group to which the
* pad belongs is the last one.
*
* drained will be set to TRUE if the chain/group is drained.
*
* Returns: TRUE if the chain contained the target pad */
static gboolean
drain_and_switch_group (GstDecodeGroup * group, GstDecodePad * drainpad,
gboolean * last_group, gboolean * drained, gboolean * switched)
{
gboolean handled = FALSE;
gboolean alldrained = TRUE;
GList *tmp;
GST_DEBUG ("Checking group %p (target pad %s:%s)",
group, GST_DEBUG_PAD_NAME (drainpad));
/* Definitely can't be in drained groups */
if (G_UNLIKELY (group->drained)) {
goto beach;
}
/* Figure out if all our chains are drained with the
* new information */
for (tmp = group->children; tmp; tmp = tmp->next) {
GstDecodeChain *chain = (GstDecodeChain *) tmp->data;
gboolean subdrained = FALSE;
handled |=
drain_and_switch_chains (chain, drainpad, last_group, &subdrained,
switched);
if (!subdrained)
alldrained = FALSE;
}
beach:
GST_DEBUG ("group %p (last_group:%d, drained:%d, switched:%d, handled:%d)",
group, *last_group, alldrained, *switched, handled);
*drained = alldrained;
return handled;
}
static gboolean
drain_and_switch_chains (GstDecodeChain * chain, GstDecodePad * drainpad,
gboolean * last_group, gboolean * drained, gboolean * switched)
{
gboolean handled = FALSE;
GstDecodeBin *dbin = chain->dbin;
GST_DEBUG ("Checking chain %p (target pad %s:%s)",
chain, GST_DEBUG_PAD_NAME (drainpad));
CHAIN_MUTEX_LOCK (chain);
if (chain->endpad) {
/* Check if we've reached the target endchain */
if (chain == drainpad->chain) {
GST_DEBUG ("Found the target chain");
drainpad->drained = TRUE;
handled = TRUE;
}
*drained = chain->endpad->drained;
goto beach;
}
/* We know there are groups to switch to */
if (chain->next_groups)
*last_group = FALSE;
/* Check the active group */
if (chain->active_group) {
gboolean subdrained = FALSE;
handled = drain_and_switch_group (chain->active_group, drainpad,
last_group, &subdrained, switched);
/* The group is drained, see if we can switch to another */
if (handled && subdrained && !*switched) {
if (chain->next_groups) {
/* Switch to next group */
GST_DEBUG_OBJECT (dbin, "Hiding current group %p", chain->active_group);
gst_decode_group_hide (chain->active_group);
chain->old_groups =
g_list_prepend (chain->old_groups, chain->active_group);
GST_DEBUG_OBJECT (dbin, "Switching to next group %p",
chain->next_groups->data);
chain->active_group = chain->next_groups->data;
chain->next_groups =
g_list_delete_link (chain->next_groups, chain->next_groups);
*switched = TRUE;
*drained = FALSE;
} else {
GST_DEBUG ("Group %p was the last in chain %p", chain->active_group,
chain);
*drained = TRUE;
/* We're drained ! */
}
}
}
beach:
CHAIN_MUTEX_UNLOCK (chain);
GST_DEBUG ("Chain %p (handled:%d, last_group:%d, drained:%d, switched:%d)",
chain, handled, *last_group, *drained, *switched);
return handled;
}
/* check if the group is drained, meaning all pads have seen an EOS
* event. */
static gboolean
gst_decode_pad_handle_eos (GstDecodePad * pad)
{
gboolean last_group = TRUE;
gboolean switched = FALSE;
gboolean drained = FALSE;
GstDecodeChain *chain = pad->chain;
GstDecodeBin *dbin = chain->dbin;
GST_LOG_OBJECT (pad->dbin, "chain : %p, pad %p", chain, pad);
pad->drained = TRUE;
return gst_decode_chain_handle_eos (chain);
}
GST_LOG_OBJECT (dbin, "pad %p", pad);
drain_and_switch_chains (dbin->decode_chain, pad, &last_group, &drained,
&switched);
/* gst_decode_chain_handle_eos:
*
* Checks if there are next groups in any parent chain
* to which we can switch or if everything is drained.
*
* If there are groups to switch to, hide the current active
* one and expose the new one.
*
* If a group isn't completely drained (i.e. we received EOS
* only on one of the streams) this function will return FALSE
* to indicate the EOS on the given chain should be dropped
* to avoid it from going downstream.
*
* MT-safe, don't call with chain lock!
*/
static gboolean
gst_decode_chain_handle_eos (GstDecodeChain * eos_chain)
{
GstDecodeBin *dbin = eos_chain->dbin;
GstDecodeGroup *group;
GstDecodeChain *chain = eos_chain;
gboolean drained;
gboolean forward_eos = TRUE;
g_return_val_if_fail (eos_chain->endpad, TRUE);
CHAIN_MUTEX_LOCK (chain);
while ((group = chain->parent)) {
CHAIN_MUTEX_UNLOCK (chain);
chain = group->parent;
CHAIN_MUTEX_LOCK (chain);
if (gst_decode_group_is_drained (group)) {
continue;
}
break;
}
drained = chain->active_group ?
gst_decode_group_is_drained (chain->active_group) : TRUE;
/* Now either group == NULL and chain == dbin->decode_chain
* or chain is the lowest chain that has a non-drained group */
if (chain->active_group && drained && chain->next_groups) {
/* There's an active group which is drained and we have another
* one to switch to. */
GST_DEBUG_OBJECT (dbin, "Hiding current group %p", chain->active_group);
gst_decode_group_hide (chain->active_group);
chain->old_groups = g_list_prepend (chain->old_groups, chain->active_group);
GST_DEBUG_OBJECT (dbin, "Switching to next group %p",
chain->next_groups->data);
chain->active_group = chain->next_groups->data;
chain->next_groups =
g_list_delete_link (chain->next_groups, chain->next_groups);
CHAIN_MUTEX_UNLOCK (chain);
if (switched) {
/* If we resulted in a group switch, expose what's needed */
EXPOSE_LOCK (dbin);
if (gst_decode_chain_is_complete (dbin->decode_chain))
gst_decode_bin_expose (dbin);
EXPOSE_UNLOCK (dbin);
} else if (!chain->active_group || drained) {
/* The group is drained and there isn't a future one */
g_assert (chain == dbin->decode_chain);
CHAIN_MUTEX_UNLOCK (chain);
GST_LOG_OBJECT (dbin, "all groups drained, fire signal");
g_signal_emit (G_OBJECT (dbin), gst_decode_bin_signals[SIGNAL_DRAINED], 0,
NULL);
} else {
CHAIN_MUTEX_UNLOCK (chain);
GST_DEBUG_OBJECT (dbin,
"Current active group in chain %p is not drained yet", chain);
/* Instruct caller to drop EOS event if we have future groups */
if (chain->next_groups)
forward_eos = FALSE;
}
return forward_eos;
return last_group;
}
/* gst_decode_group_is_drained:


@ -1272,6 +1272,34 @@ gst_play_bin_finalize (GObject * object)
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static gboolean
gst_playbin_uri_is_valid (GstPlayBin * playbin, const gchar * uri)
{
const gchar *c;
GST_LOG_OBJECT (playbin, "checking uri '%s'", uri);
/* this just checks the protocol */
if (!gst_uri_is_valid (uri))
return FALSE;
for (c = uri; *c != '\0'; ++c) {
if (!g_ascii_isprint (*c))
goto invalid;
if (*c == ' ')
goto invalid;
}
return TRUE;
invalid:
{
GST_WARNING_OBJECT (playbin, "uri '%s' not valid, character #%u",
uri, (guint) ((guintptr) c - (guintptr) uri));
return FALSE;
}
}
static void
gst_play_bin_set_uri (GstPlayBin * playbin, const gchar * uri)
{
@ -1282,6 +1310,17 @@ gst_play_bin_set_uri (GstPlayBin * playbin, const gchar * uri)
return;
}
if (!gst_playbin_uri_is_valid (playbin, uri)) {
if (g_str_has_prefix (uri, "file:")) {
GST_ERROR_OBJECT (playbin, "malformed file URI '%s' - make sure to "
"escape spaces and non-ASCII characters properly and specify an "
"absolute path. Use gst_filename_to_uri() to convert filenames "
"to URIs", uri);
} else {
GST_ERROR_OBJECT (playbin, "malformed URI '%s'", uri);
}
}
GST_PLAY_BIN_LOCK (playbin);
group = playbin->next_group;
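
The warning above points at gst_filename_to_uri(); a small, illustrative snippet (the filename and the playbin handle are assumptions) showing how an application would produce a properly escaped URI instead of passing a raw path:

GError *err = NULL;
gchar *uri;

/* escapes spaces and non-ASCII characters and yields an absolute URI,
 * e.g. file:///home/user/My%20Music/track%2001.ogg */
uri = gst_filename_to_uri ("/home/user/My Music/track 01.ogg", &err);
if (uri) {
  g_object_set (playbin, "uri", uri, NULL);
  g_free (uri);
} else {
  g_printerr ("could not build URI: %s\n", err->message);
  g_error_free (err);
}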


@ -2113,8 +2113,7 @@ gst_play_sink_reconfigure (GstPlaySink * playsink)
GST_OBJECT_UNLOCK (playsink);
/* figure out which components we need */
if (flags & GST_PLAY_FLAG_TEXT && playsink->video_pad_raw
&& playsink->text_pad) {
if (flags & GST_PLAY_FLAG_TEXT && playsink->text_pad) {
/* we have subtitles and we are requested to show it */
need_text = TRUE;
}


@ -227,6 +227,31 @@ static const gchar *_sub_pad_names[] = { "subpicture", "subpicture_sink",
"subtitle_sink", "subtitle"
};
static inline gboolean
_is_raw_video (GstStructure * s)
{
const gchar *name;
name = gst_structure_get_name (s);
if (g_str_has_prefix (name, "video/x-raw"))
return TRUE;
return FALSE;
}
static gboolean
_is_raw_video_pad (GstPad * pad)
{
GstCaps *caps = gst_pad_get_current_caps (pad);
gboolean raw;
raw = _is_raw_video (gst_caps_get_structure (caps, 0));
gst_caps_unref (caps);
return raw;
}
static GstCaps *
_get_sub_caps (GstElementFactory * factory)
{
@ -1075,6 +1100,7 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
} else {
const gchar *name =
gst_plugin_feature_get_name (GST_PLUGIN_FEATURE_CAST (factory));
gboolean is_raw_video = _is_raw_video_pad (self->video_sinkpad);
if (strcmp (name, "textoverlay") == 0) {
/* Set some textoverlay specific properties */
@ -1094,66 +1120,123 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
g_object_set (self->renderer, "font-desc", self->font_desc, NULL);
}
/* First link everything internally */
if (G_UNLIKELY (!_create_element (self, &self->post_colorspace,
COLORSPACE, NULL, "post-colorspace", FALSE))) {
continue;
}
if (is_raw_video) {
/* First check that renderer also supports raw video */
sink = _get_video_pad (element);
if (G_UNLIKELY (!sink)) {
GST_WARNING_OBJECT (self, "Can't get video sink from renderer");
continue;
}
src = gst_element_get_static_pad (element, "src");
if (G_UNLIKELY (!src)) {
GST_WARNING_OBJECT (self, "Can't get src pad from renderer");
continue;
}
if (G_UNLIKELY (!_is_raw_video_pad (sink))) {
GST_DEBUG_OBJECT (self, "Renderer doesn't support raw video");
gst_object_unref (sink);
continue;
}
gst_object_unref (sink);
sink = gst_element_get_static_pad (self->post_colorspace, "sink");
if (G_UNLIKELY (!sink)) {
GST_WARNING_OBJECT (self, "Can't get sink pad from " COLORSPACE);
gst_object_unref (src);
continue;
}
/* First link everything internally */
if (G_UNLIKELY (!_create_element (self, &self->post_colorspace,
COLORSPACE, NULL, "post-colorspace", FALSE))) {
continue;
}
src = gst_element_get_static_pad (element, "src");
if (G_UNLIKELY (!src)) {
GST_WARNING_OBJECT (self, "Can't get src pad from renderer");
continue;
}
if (G_UNLIKELY (gst_pad_link (src, sink) != GST_PAD_LINK_OK)) {
GST_WARNING_OBJECT (self, "Can't link renderer with " COLORSPACE);
sink = gst_element_get_static_pad (self->post_colorspace, "sink");
if (G_UNLIKELY (!sink)) {
GST_WARNING_OBJECT (self, "Can't get sink pad from " COLORSPACE);
gst_object_unref (src);
continue;
}
if (G_UNLIKELY (gst_pad_link (src, sink) != GST_PAD_LINK_OK)) {
GST_WARNING_OBJECT (self, "Can't link renderer with " COLORSPACE);
gst_object_unref (src);
gst_object_unref (sink);
continue;
}
gst_object_unref (src);
gst_object_unref (sink);
continue;
}
gst_object_unref (src);
gst_object_unref (sink);
if (G_UNLIKELY (!_create_element (self, &self->pre_colorspace,
COLORSPACE, NULL, "pre-colorspace", FALSE))) {
continue;
}
if (G_UNLIKELY (!_create_element (self, &self->pre_colorspace,
COLORSPACE, NULL, "pre-colorspace", FALSE))) {
continue;
}
sink = _get_video_pad (element);
if (G_UNLIKELY (!sink)) {
GST_WARNING_OBJECT (self, "Can't get video sink from renderer");
continue;
}
sink = _get_video_pad (element);
if (G_UNLIKELY (!sink)) {
GST_WARNING_OBJECT (self, "Can't get video sink from renderer");
continue;
}
src = gst_element_get_static_pad (self->pre_colorspace, "src");
if (G_UNLIKELY (!src)) {
GST_WARNING_OBJECT (self, "Can't get srcpad from " COLORSPACE);
gst_object_unref (sink);
continue;
}
src = gst_element_get_static_pad (self->pre_colorspace, "src");
if (G_UNLIKELY (!src)) {
GST_WARNING_OBJECT (self, "Can't get srcpad from " COLORSPACE);
gst_object_unref (sink);
continue;
}
if (G_UNLIKELY (gst_pad_link (src, sink) != GST_PAD_LINK_OK)) {
GST_WARNING_OBJECT (self, "Can't link " COLORSPACE " to renderer");
if (G_UNLIKELY (gst_pad_link (src, sink) != GST_PAD_LINK_OK)) {
GST_WARNING_OBJECT (self, "Can't link " COLORSPACE " to renderer");
gst_object_unref (src);
gst_object_unref (sink);
continue;
}
gst_object_unref (src);
gst_object_unref (sink);
continue;
}
gst_object_unref (src);
gst_object_unref (sink);
/* Set src ghostpad target */
src = gst_element_get_static_pad (self->post_colorspace, "src");
if (G_UNLIKELY (!src)) {
GST_WARNING_OBJECT (self, "Can't get src pad from " COLORSPACE);
continue;
/* Set src ghostpad target */
src = gst_element_get_static_pad (self->post_colorspace, "src");
if (G_UNLIKELY (!src)) {
GST_WARNING_OBJECT (self, "Can't get src pad from " COLORSPACE);
continue;
}
} else { /* No raw video pad */
GstCaps *allowed_caps, *video_caps = NULL;
GstPad *video_peer;
gboolean can_intersect = FALSE;
video_peer = gst_pad_get_peer (self->video_sinkpad);
if (video_peer) {
video_caps = gst_pad_get_current_caps (video_peer);
if (!video_caps) {
video_caps = gst_pad_get_caps (video_peer, NULL);
}
gst_object_unref (video_peer);
}
sink = _get_video_pad (element);
if (G_UNLIKELY (!sink)) {
GST_WARNING_OBJECT (self, "Can't get video sink from renderer");
continue;
}
allowed_caps = gst_pad_get_caps (sink, NULL);
gst_object_unref (sink);
if (allowed_caps && video_caps)
can_intersect = gst_caps_can_intersect (allowed_caps, video_caps);
if (allowed_caps)
gst_caps_unref (allowed_caps);
if (video_caps)
gst_caps_unref (video_caps);
if (G_UNLIKELY (!can_intersect)) {
GST_WARNING_OBJECT (self, "Renderer with custom caps is not "
"compatible with video stream");
continue;
}
src = gst_element_get_static_pad (element, "src");
if (G_UNLIKELY (!src)) {
GST_WARNING_OBJECT (self, "Can't get src pad from renderer");
continue;
}
}
if (G_UNLIKELY (!gst_ghost_pad_set_target (GST_GHOST_PAD_CAST
@ -1199,10 +1282,19 @@ _pad_blocked_cb (GstPad * pad, GstProbeType type, gpointer type_data,
}
/* Set the sink ghostpad targets */
sink = gst_element_get_static_pad (self->pre_colorspace, "sink");
if (G_UNLIKELY (!sink)) {
GST_WARNING_OBJECT (self, "Can't get sink pad from " COLORSPACE);
continue;
if (self->pre_colorspace) {
sink = gst_element_get_static_pad (self->pre_colorspace, "sink");
if (G_UNLIKELY (!sink)) {
GST_WARNING_OBJECT (self, "Can't get sink pad from " COLORSPACE);
continue;
}
} else {
sink = _get_video_pad (element);
if (G_UNLIKELY (!sink)) {
GST_WARNING_OBJECT (self, "Can't get sink pad from %" GST_PTR_FORMAT,
element);
continue;
}
}
if (G_UNLIKELY (!gst_ghost_pad_set_target (GST_GHOST_PAD_CAST
@ -1606,6 +1698,7 @@ static gboolean
gst_subtitle_overlay_video_sink_setcaps (GstSubtitleOverlay * self,
GstCaps * caps)
{
GstPad *target;
gboolean ret = TRUE;
GstVideoInfo info;
@ -1614,9 +1707,21 @@ gst_subtitle_overlay_video_sink_setcaps (GstSubtitleOverlay * self,
if (!gst_video_info_from_caps (&info, caps)) {
GST_ERROR_OBJECT (self, "Failed to parse caps");
ret = FALSE;
GST_SUBTITLE_OVERLAY_UNLOCK (self);
goto out;
}
target = gst_ghost_pad_get_target (GST_GHOST_PAD_CAST (self->video_sinkpad));
GST_SUBTITLE_OVERLAY_LOCK (self);
if (!target || !gst_pad_accept_caps (target, caps)) {
GST_DEBUG_OBJECT (target, "Target did not accept caps -- reconfiguring");
block_subtitle (self);
block_video (self);
}
GST_SUBTITLE_OVERLAY_LOCK (self);
if (self->fps_n != info.fps_n || self->fps_d != info.fps_d) {
GST_DEBUG_OBJECT (self, "New video fps: %d/%d", info.fps_n, info.fps_d);
@ -1627,6 +1732,8 @@ gst_subtitle_overlay_video_sink_setcaps (GstSubtitleOverlay * self,
GST_SUBTITLE_OVERLAY_UNLOCK (self);
out:
if (target)
gst_object_unref (target);
return ret;
}


@ -1211,9 +1211,9 @@ gst_sub_parse_data_format_autodetect_regex_once (GstSubParseRegex regtype)
}
break;
case GST_SUB_PARSE_REGEX_SUBRIP:
result = (gpointer) g_regex_new ("^([ 0-9]){0,3}[0-9]\\s*(\x0d)?\x0a"
"[ 0-9][0-9]:[ 0-9][0-9]:[ 0-9][0-9][,.][ 0-9]{0,2}[0-9]"
" +--> +([ 0-9])?[0-9]:[ 0-9][0-9]:[ 0-9][0-9][,.][ 0-9]{0,2}[0-9]",
result = (gpointer) g_regex_new ("^ {0,3}[ 0-9]{1,4}\\s*(\x0d)?\x0a"
" ?[0-9]{1,2}: ?[0-9]{1,2}: ?[0-9]{1,2}[,.] {0,2}[0-9]{1,3}"
" +--> +[0-9]{1,2}: ?[0-9]{1,2}: ?[0-9]{1,2}[,.] {0,2}[0-9]{1,2}",
G_REGEX_RAW | G_REGEX_OPTIMIZE, 0, &gerr);
if (result == NULL) {
g_warning ("Compilation of subrip regex failed: %s", gerr->message);


@ -2887,6 +2887,13 @@ mod_type_find (GstTypeFind * tf, gpointer unused)
return;
}
}
/* AMF */
if ((data = gst_type_find_peek (tf, 0, 19)) != NULL) {
if (memcmp (data, "ASYLUM Music Format", 19) == 0) {
gst_type_find_suggest (tf, GST_TYPE_FIND_MAXIMUM, MOD_CAPS);
return;
}
}
}
/*** application/x-shockwave-flash ***/


@ -88,6 +88,7 @@ enum
#define DEFAULT_SKIP_TO_FIRST FALSE
#define DEFAULT_DROP_ONLY FALSE
#define DEFAULT_AVERAGE_PERIOD 0
#define DEFAULT_MAX_RATE G_MAXINT
enum
{
@ -100,7 +101,8 @@ enum
ARG_NEW_PREF,
ARG_SKIP_TO_FIRST,
ARG_DROP_ONLY,
ARG_AVERAGE_PERIOD
ARG_AVERAGE_PERIOD,
ARG_MAX_RATE
/* FILL ME */
};
@ -232,6 +234,20 @@ gst_video_rate_class_init (GstVideoRateClass * klass)
0, G_MAXINT64, DEFAULT_AVERAGE_PERIOD,
G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
/**
* GstVideoRate:max-rate:
*
* maximum framerate to pass through
*
* Since: 0.10.36
*/
g_object_class_install_property (object_class, ARG_MAX_RATE,
g_param_spec_int ("max-rate", "maximum framerate",
"Maximum framerate allowed to pass through "
"(in frames per second, implies drop-only)",
1, G_MAXINT, DEFAULT_MAX_RATE,
G_PARAM_READWRITE | G_PARAM_CONSTRUCT | G_PARAM_STATIC_STRINGS));
gst_element_class_set_details_simple (element_class,
"Video rate adjuster", "Filter/Effect/Video",
"Drops/duplicates/adjusts timestamps on video frames to make a perfect stream",
@ -241,28 +257,186 @@ gst_video_rate_class_init (GstVideoRateClass * klass)
gst_static_pad_template_get (&gst_video_rate_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_video_rate_src_template));
}
static void
gst_value_fraction_get_extremes (const GValue * v,
gint * min_num, gint * min_denom, gint * max_num, gint * max_denom)
{
if (GST_VALUE_HOLDS_FRACTION (v)) {
*min_num = *max_num = gst_value_get_fraction_numerator (v);
*min_denom = *max_denom = gst_value_get_fraction_denominator (v);
} else if (GST_VALUE_HOLDS_FRACTION_RANGE (v)) {
const GValue *min, *max;
min = gst_value_get_fraction_range_min (v);
*min_num = gst_value_get_fraction_numerator (min);
*min_denom = gst_value_get_fraction_denominator (min);
max = gst_value_get_fraction_range_max (v);
*max_num = gst_value_get_fraction_numerator (max);
*max_denom = gst_value_get_fraction_denominator (max);
} else if (GST_VALUE_HOLDS_LIST (v)) {
gint min_n = G_MAXINT, min_d = 1, max_n = 0, max_d = 1;
int i, n;
*min_num = G_MAXINT;
*min_denom = 1;
*max_num = 0;
*max_denom = 1;
n = gst_value_list_get_size (v);
g_assert (n > 0);
for (i = 0; i < n; i++) {
const GValue *t = gst_value_list_get_value (v, i);
gst_value_fraction_get_extremes (t, &min_n, &min_d, &max_n, &max_d);
if (gst_util_fraction_compare (min_n, min_d, *min_num, *min_denom) < 0) {
*min_num = min_n;
*min_denom = min_d;
}
if (gst_util_fraction_compare (max_n, max_d, *max_num, *max_denom) > 0) {
*max_num = max_n;
*max_denom = max_d;
}
}
} else {
g_warning ("Unknown type for framerate");
*min_num = 0;
*min_denom = 1;
*max_num = G_MAXINT;
*max_denom = 1;
}
}
/* Clamp the framerate in a caps structure to be a smaller range than
* [1...max_rate], otherwise return false */
static gboolean
gst_video_max_rate_clamp_structure (GstStructure * s, gint maxrate,
gint * min_num, gint * min_denom, gint * max_num, gint * max_denom)
{
gboolean ret = FALSE;
if (!gst_structure_has_field (s, "framerate")) {
/* No framerate field implies any framerate, clamping would result in
* [1..max_rate] so not a real subset */
goto out;
} else {
const GValue *v;
GValue intersection = { 0, };
GValue clamp = { 0, };
gint tmp_num, tmp_denom;
g_value_init (&clamp, GST_TYPE_FRACTION_RANGE);
gst_value_set_fraction_range_full (&clamp, 0, 1, maxrate, 1);
v = gst_structure_get_value (s, "framerate");
ret = gst_value_intersect (&intersection, v, &clamp);
g_value_unset (&clamp);
if (!ret)
goto out;
gst_value_fraction_get_extremes (&intersection,
min_num, min_denom, max_num, max_denom);
gst_value_fraction_get_extremes (v,
&tmp_num, &tmp_denom, max_num, max_denom);
if (gst_util_fraction_compare (*max_num, *max_denom, maxrate, 1) > 0) {
*max_num = maxrate;
*max_denom = 1;
}
gst_structure_take_value (s, "framerate", &intersection);
}
out:
return ret;
}
static GstCaps *
gst_video_rate_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps, GstCaps * filter)
{
GstVideoRate *videorate = GST_VIDEO_RATE (trans);
GstCaps *ret;
GstStructure *s;
GstStructure *s, *s2;
GstStructure *s3 = NULL;
int maxrate = g_atomic_int_get (&videorate->max_rate);
/* Should always be called with simple caps */
g_return_val_if_fail (GST_CAPS_IS_SIMPLE (caps), NULL);
ret = gst_caps_copy (caps);
s = gst_structure_copy (gst_caps_get_structure (caps, 0));
s = gst_caps_get_structure (ret, 0);
s2 = gst_structure_copy (s);
/* set the framerate as a range */
gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
G_MAXINT, 1, NULL);
if (videorate->drop_only) {
gint min_num = 0, min_denom = 1;
gint max_num = G_MAXINT, max_denom = 1;
gst_caps_append_structure (ret, s);
/* Clamp the caps to our maximum rate as the first caps if possible */
if (!gst_video_max_rate_clamp_structure (s, maxrate,
&min_num, &min_denom, &max_num, &max_denom)) {
min_num = 0;
min_denom = 1;
max_num = maxrate;
max_denom = 1;
/* clamp wouldn't be a real subset of 1..maxrate, in this case the sink
* caps should become [1..maxrate], [1..maxint] and the src caps just
* [1..maxrate]. In case there was a caps incompatibility things will
* explode later as appropriate :)
*
* In case [X..maxrate] == [X..maxint], skip as we'll set it later
*/
if (direction == GST_PAD_SRC && maxrate != G_MAXINT)
gst_structure_set (s, "framerate", GST_TYPE_FRACTION_RANGE,
min_num, min_denom, maxrate, 1, NULL);
else
gst_caps_remove_structure (ret, 0);
}
if (direction == GST_PAD_SRC) {
/* We can accept anything as long as it's at least the minimal framerate
* the sink needs */
gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE,
min_num, min_denom, G_MAXINT, 1, NULL);
/* Also allow unknown framerate, if it isn't already */
if (min_num != 0 || min_denom != 1) {
s3 = gst_structure_copy (s);
gst_structure_set (s3, "framerate", GST_TYPE_FRACTION, 0, 1, NULL);
}
} else if (max_num != 0 || max_denom != 1) {
/* We can provide everything up to the maximum framerate at the src */
gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE,
0, 1, max_num, max_denom, NULL);
}
} else if (direction == GST_PAD_SINK) {
gint min_num = 0, min_denom = 1;
gint max_num = G_MAXINT, max_denom = 1;
if (!gst_video_max_rate_clamp_structure (s, maxrate,
&min_num, &min_denom, &max_num, &max_denom))
gst_caps_remove_structure (ret, 0);
gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
maxrate, 1, NULL);
} else {
/* set the framerate as a range */
gst_structure_set (s2, "framerate", GST_TYPE_FRACTION_RANGE, 0, 1,
G_MAXINT, 1, NULL);
}
gst_caps_merge_structure (ret, s2);
if (s3 != NULL)
gst_caps_merge_structure (ret, s3);
return ret;
}
@ -375,6 +549,7 @@ gst_video_rate_init (GstVideoRate * videorate)
videorate->drop_only = DEFAULT_DROP_ONLY;
videorate->average_period = DEFAULT_AVERAGE_PERIOD;
videorate->average_period_set = DEFAULT_AVERAGE_PERIOD;
videorate->max_rate = DEFAULT_MAX_RATE;
videorate->from_rate_numerator = 0;
videorate->from_rate_denominator = 0;
@ -605,7 +780,6 @@ format_error:
{
GST_WARNING_OBJECT (videorate,
"Got segment but doesn't have GST_FORMAT_TIME value");
gst_event_unref (event);
return FALSE;
}
}
@ -960,15 +1134,25 @@ gst_video_rate_set_property (GObject * object,
break;
case ARG_DROP_ONLY:
videorate->drop_only = g_value_get_boolean (value);
goto reconfigure;
break;
case ARG_AVERAGE_PERIOD:
videorate->average_period_set = g_value_get_uint64 (value);
break;
case ARG_MAX_RATE:
g_atomic_int_set (&videorate->max_rate, g_value_get_int (value));
goto reconfigure;
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
GST_OBJECT_UNLOCK (videorate);
return;
reconfigure:
GST_OBJECT_UNLOCK (videorate);
gst_base_transform_reconfigure (GST_BASE_TRANSFORM (videorate));
}
static void
@ -1006,6 +1190,9 @@ gst_video_rate_get_property (GObject * object,
case ARG_AVERAGE_PERIOD:
g_value_set_uint64 (value, videorate->average_period_set);
break;
case ARG_MAX_RATE:
g_value_set_int (value, g_atomic_int_get (&videorate->max_rate));
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
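
A tiny, hypothetical usage sketch for the new property (element creation and the chosen rate are illustrative assumptions):

GstElement *videorate;

videorate = gst_element_factory_make ("videorate", NULL);
/* never output more than 15 frames per second; implies drop-only behaviour */
g_object_set (videorate, "max-rate", 15, NULL);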


@ -76,6 +76,8 @@ struct _GstVideoRate
gboolean skip_to_first;
gboolean drop_only;
guint64 average_period_set;
volatile int max_rate;
};
struct _GstVideoRateClass


@ -27,7 +27,6 @@
#include <gst/check/gstcheck.h>
#include <gst/base/gstbaseparse.h>
#include <gst/base/gstbasetransform.h>
#include <unistd.h>
static const gchar dummytext[] =
@ -370,15 +369,15 @@ static GType gst_fake_h264_decoder_get_type (void);
#undef parent_class
#define parent_class fake_h264_parser_parent_class
typedef struct _GstFakeH264Parser GstFakeH264Parser;
typedef GstBaseTransformClass GstFakeH264ParserClass;
typedef GstElementClass GstFakeH264ParserClass;
struct _GstFakeH264Parser
{
GstBaseTransform parent;
GstElement parent;
};
GST_BOILERPLATE (GstFakeH264Parser, gst_fake_h264_parser, GstBaseTransform,
GST_TYPE_BASE_TRANSFORM);
GST_BOILERPLATE (GstFakeH264Parser, gst_fake_h264_parser, GstElement,
GST_TYPE_ELEMENT);
static void
gst_fake_h264_parser_base_init (gpointer klass)
@ -400,89 +399,87 @@ gst_fake_h264_parser_base_init (gpointer klass)
"FakeH264Parser", "Codec/Parser/Converter/Video", "yep", "me");
}
static GstFlowReturn
gst_fake_h264_parser_transform (GstBaseTransform * trans, GstBuffer * inbuf,
GstBuffer * outbuf)
{
return GST_FLOW_OK;
}
static GstCaps *
gst_fake_h264_parser_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps)
{
if (direction == GST_PAD_SRC)
return gst_caps_from_string ("video/x-h264");
else
return gst_caps_from_string ("video/x-h264, "
"stream-format=(string) { avc, byte-stream }");
}
static gboolean
gst_fake_h264_parser_get_unit_size (GstBaseTransform * trans, GstCaps * caps,
guint * size)
{
*size = 1;
return TRUE;
}
static gboolean
gst_fake_h264_parser_set_caps (GstBaseTransform * trans, GstCaps * incaps,
GstCaps * outcaps)
{
GstStructure *s;
const gchar *stream_format;
s = gst_caps_get_structure (incaps, 0);
fail_unless (gst_structure_has_name (s, "video/x-h264"));
s = gst_caps_get_structure (outcaps, 0);
fail_unless (gst_structure_has_name (s, "video/x-h264"));
stream_format = gst_structure_get_string (s, "stream-format");
fail_unless_equals_string ("byte-stream", stream_format);
return TRUE;
}
static GstFlowReturn
gst_fake_h264_parser_prepare_output_buffer (GstBaseTransform * trans,
GstBuffer * inbuf, gint size, GstCaps * caps, GstBuffer ** outbuf)
{
*outbuf = gst_buffer_ref (inbuf);
return GST_FLOW_OK;
}
static void
gst_fake_h264_parser_class_init (GstFakeH264ParserClass * klass)
{
GstBaseTransformClass *basetrans_class = (GstBaseTransformClass *) klass;
}
basetrans_class->transform = gst_fake_h264_parser_transform;
basetrans_class->transform_caps = gst_fake_h264_parser_transform_caps;
basetrans_class->get_unit_size = gst_fake_h264_parser_get_unit_size;
basetrans_class->set_caps = gst_fake_h264_parser_set_caps;
basetrans_class->prepare_output_buffer =
gst_fake_h264_parser_prepare_output_buffer;
static gboolean
gst_fake_h264_parser_sink_setcaps (GstPad * pad, GstCaps * caps)
{
GstElement *self = GST_ELEMENT (gst_pad_get_parent (pad));
GstPad *otherpad = gst_element_get_static_pad (self, "src");
GstCaps *accepted_caps;
GstStructure *s;
const gchar *stream_format;
accepted_caps = gst_pad_get_allowed_caps (otherpad);
accepted_caps = gst_caps_make_writable (accepted_caps);
gst_caps_truncate (accepted_caps);
s = gst_caps_get_structure (accepted_caps, 0);
stream_format = gst_structure_get_string (s, "stream-format");
if (!stream_format)
gst_structure_set (s, "stream-format", G_TYPE_STRING, "avc", NULL);
gst_pad_set_caps (otherpad, accepted_caps);
gst_caps_unref (accepted_caps);
gst_object_unref (otherpad);
gst_object_unref (self);
return TRUE;
}
static GstFlowReturn
gst_fake_h264_parser_sink_chain (GstPad * pad, GstBuffer * buf)
{
GstElement *self = GST_ELEMENT (gst_pad_get_parent (pad));
GstPad *otherpad = gst_element_get_static_pad (self, "src");
GstFlowReturn ret = GST_FLOW_OK;
buf = gst_buffer_make_metadata_writable (buf);
gst_buffer_set_caps (buf, GST_PAD_CAPS (otherpad));
ret = gst_pad_push (otherpad, buf);
gst_object_unref (otherpad);
gst_object_unref (self);
return ret;
}
static void
gst_fake_h264_parser_init (GstFakeH264Parser * self,
GstFakeH264ParserClass * klass)
{
GstPad *pad;
pad =
gst_pad_new_from_template (gst_element_class_get_pad_template
(GST_ELEMENT_GET_CLASS (self), "sink"), "sink");
gst_pad_set_setcaps_function (pad, gst_fake_h264_parser_sink_setcaps);
gst_pad_set_chain_function (pad, gst_fake_h264_parser_sink_chain);
gst_element_add_pad (GST_ELEMENT (self), pad);
pad =
gst_pad_new_from_template (gst_element_class_get_pad_template
(GST_ELEMENT_GET_CLASS (self), "src"), "src");
gst_element_add_pad (GST_ELEMENT (self), pad);
}
#undef parent_class
#define parent_class fake_h264_decoder_parent_class
typedef struct _GstFakeH264Decoder GstFakeH264Decoder;
typedef GstBaseTransformClass GstFakeH264DecoderClass;
typedef GstElementClass GstFakeH264DecoderClass;
struct _GstFakeH264Decoder
{
GstBaseTransform parent;
GstElement parent;
};
GST_BOILERPLATE (GstFakeH264Decoder, gst_fake_h264_decoder, GstBaseTransform,
GST_TYPE_BASE_TRANSFORM);
GST_BOILERPLATE (GstFakeH264Decoder, gst_fake_h264_decoder, GstElement,
GST_TYPE_ELEMENT);
static void
gst_fake_h264_decoder_base_init (gpointer klass)
@ -503,75 +500,62 @@ gst_fake_h264_decoder_base_init (gpointer klass)
"FakeH264Decoder", "Codec/Decoder/Video", "yep", "me");
}
static GstFlowReturn
gst_fake_h264_decoder_transform (GstBaseTransform * trans, GstBuffer * inbuf,
GstBuffer * outbuf)
{
return GST_FLOW_OK;
}
static GstCaps *
gst_fake_h264_decoder_transform_caps (GstBaseTransform * trans,
GstPadDirection direction, GstCaps * caps)
{
if (direction == GST_PAD_SRC)
return gst_caps_from_string ("video/x-h264, "
"stream-format=(string) byte-stream");
else
return gst_caps_from_string ("video/x-raw-yuv");
}
static gboolean
gst_fake_h264_decoder_get_unit_size (GstBaseTransform * trans, GstCaps * caps,
guint * size)
{
*size = 1;
return TRUE;
}
static gboolean
gst_fake_h264_decoder_set_caps (GstBaseTransform * trans, GstCaps * incaps,
GstCaps * outcaps)
{
GstStructure *s;
const gchar *stream_format;
s = gst_caps_get_structure (incaps, 0);
fail_unless (gst_structure_has_name (s, "video/x-h264"));
stream_format = gst_structure_get_string (s, "stream-format");
fail_unless_equals_string ("byte-stream", stream_format);
s = gst_caps_get_structure (outcaps, 0);
fail_unless (gst_structure_has_name (s, "video/x-raw-yuv"));
return TRUE;
}
static GstFlowReturn
gst_fake_h264_decoder_prepare_output_buffer (GstBaseTransform * trans,
GstBuffer * inbuf, gint size, GstCaps * caps, GstBuffer ** outbuf)
{
*outbuf = gst_buffer_ref (inbuf);
return GST_FLOW_OK;
}
static void
gst_fake_h264_decoder_class_init (GstFakeH264DecoderClass * klass)
{
GstBaseTransformClass *basetrans_class = (GstBaseTransformClass *) klass;
}
basetrans_class->transform = gst_fake_h264_decoder_transform;
basetrans_class->transform_caps = gst_fake_h264_decoder_transform_caps;
basetrans_class->get_unit_size = gst_fake_h264_decoder_get_unit_size;
basetrans_class->set_caps = gst_fake_h264_decoder_set_caps;
basetrans_class->prepare_output_buffer =
gst_fake_h264_decoder_prepare_output_buffer;
static gboolean
gst_fake_h264_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
{
GstElement *self = GST_ELEMENT (gst_pad_get_parent (pad));
GstPad *otherpad = gst_element_get_static_pad (self, "src");
caps = gst_caps_new_simple ("video/x-raw-yuv", NULL);
gst_pad_set_caps (otherpad, caps);
gst_caps_unref (caps);
gst_object_unref (otherpad);
gst_object_unref (self);
return TRUE;
}
static GstFlowReturn
gst_fake_h264_decoder_sink_chain (GstPad * pad, GstBuffer * buf)
{
GstElement *self = GST_ELEMENT (gst_pad_get_parent (pad));
GstPad *otherpad = gst_element_get_static_pad (self, "src");
GstFlowReturn ret = GST_FLOW_OK;
buf = gst_buffer_make_metadata_writable (buf);
gst_buffer_set_caps (buf, GST_PAD_CAPS (otherpad));
ret = gst_pad_push (otherpad, buf);
gst_object_unref (otherpad);
gst_object_unref (self);
return ret;
}
static void
gst_fake_h264_decoder_init (GstFakeH264Decoder * self,
GstFakeH264DecoderClass * klass)
{
GstPad *pad;
pad =
gst_pad_new_from_template (gst_element_class_get_pad_template
(GST_ELEMENT_GET_CLASS (self), "sink"), "sink");
gst_pad_set_setcaps_function (pad, gst_fake_h264_decoder_sink_setcaps);
gst_pad_set_chain_function (pad, gst_fake_h264_decoder_sink_chain);
gst_element_add_pad (GST_ELEMENT (self), pad);
pad =
gst_pad_new_from_template (gst_element_class_get_pad_template
(GST_ELEMENT_GET_CLASS (self), "src"), "src");
gst_element_add_pad (GST_ELEMENT (self), pad);
}
static void


@ -794,6 +794,276 @@ GST_START_TEST (test_selected_caps)
GST_END_TEST;
/* Caps negotiation tests */
typedef struct
{
const gchar *caps;
gboolean drop_only;
int max_rate;
/* Result of the videomaxrate caps after transforming */
const gchar *expected_sink_caps;
const gchar *expected_src_caps;
} TestInfo;
static TestInfo caps_negotiation_tests[] = {
{
.caps = "video/x-raw-yuv",
.drop_only = FALSE,
.expected_sink_caps = "video/x-raw-yuv",
.expected_src_caps = "video/x-raw-yuv, framerate=(fraction)[0/1, MAX]"},
{
.caps = "video/x-raw-yuv",
.drop_only = FALSE,
.max_rate = 15,
.expected_sink_caps = "video/x-raw-yuv",
.expected_src_caps = "video/x-raw-yuv, framerate=(fraction)[0/1, 15]"},
{
.caps = "video/x-raw-yuv",
.drop_only = TRUE,
.expected_sink_caps = "video/x-raw-yuv, framerate=(fraction)[0/1, MAX]",
.expected_src_caps = "video/x-raw-yuv, framerate=(fraction)[0/1, MAX]"},
{
.caps = "video/x-raw-yuv",
.drop_only = TRUE,
.max_rate = 15,
.expected_sink_caps =
"video/x-raw-yuv, framerate=(fraction)[0/1, 15];"
"video/x-raw-yuv, framerate=(fraction)[0/1, MAX]",
.expected_src_caps = "video/x-raw-yuv, framerate=(fraction)[0/1, 15]"},
{
.caps = "video/x-raw-yuv, framerate=(fraction)[0/1, MAX]",
.drop_only = FALSE,
.expected_sink_caps = "video/x-raw-yuv, framerate=(fraction)[0/1, MAX]",
.expected_src_caps = "video/x-raw-yuv, framerate=(fraction)[0/1, MAX]"},
{
.caps = "video/x-raw-yuv, framerate=(fraction)[0/1, MAX]",
.drop_only = FALSE,
.max_rate = 15,
.expected_sink_caps = "video/x-raw-yuv, framerate=(fraction)[0/1, MAX]",
.expected_src_caps = "video/x-raw-yuv, framerate=(fraction)[0/1, 15]"},
{
.caps = "video/x-raw-yuv, framerate=(fraction)[0/1, MAX]",
.drop_only = TRUE,
.expected_sink_caps = "video/x-raw-yuv, framerate=(fraction)[0/1, MAX]",
.expected_src_caps = "video/x-raw-yuv, framerate=(fraction)[0/1, MAX]"},
{
.caps = "video/x-raw-yuv, framerate=(fraction)[0/1, MAX]",
.drop_only = TRUE,
.max_rate = 15,
.expected_sink_caps =
"video/x-raw-yuv, framerate=(fraction)[0/1, 15];"
"video/x-raw-yuv, framerate=(fraction)[0/1, MAX]",
.expected_src_caps = "video/x-raw-yuv, framerate=(fraction)[0/1, 15]"},
{
.caps = "video/x-raw-yuv, framerate=15/1",
.drop_only = FALSE,
.expected_sink_caps =
"video/x-raw-yuv, framerate=(fraction)15/1;"
"video/x-raw-yuv, framerate=(fraction)[0/1, MAX]",
.expected_src_caps =
"video/x-raw-yuv, framerate=(fraction)15/1;"
"video/x-raw-yuv, framerate=(fraction)[0/1, MAX]"},
{
.caps = "video/x-raw-yuv, framerate=15/1",
.drop_only = FALSE,
.max_rate = 20,
.expected_sink_caps =
"video/x-raw-yuv, framerate=(fraction)15/1;"
"video/x-raw-yuv, framerate=(fraction)[0/1, MAX]",
.expected_src_caps =
"video/x-raw-yuv, framerate=(fraction)15/1;"
"video/x-raw-yuv, framerate=(fraction)[0/1, 20/1]"},
{
.caps = "video/x-raw-yuv, framerate=15/1",
.drop_only = TRUE,
.expected_sink_caps =
"video/x-raw-yuv, framerate=(fraction)15/1;"
"video/x-raw-yuv, framerate=(fraction)[15/1, MAX];"
"video/x-raw-yuv, framerate=(fraction)0/1",
.expected_src_caps =
"video/x-raw-yuv, framerate=(fraction)15/1;"
"video/x-raw-yuv, framerate=(fraction)[0/1, 15/1]"},
{
.caps = "video/x-raw-yuv, framerate=15/1",
.drop_only = TRUE,
.max_rate = 20,
.expected_sink_caps =
"video/x-raw-yuv, framerate=(fraction)15/1;"
"video/x-raw-yuv, framerate=(fraction)[15/1, MAX];"
"video/x-raw-yuv, framerate=(fraction)0/1",
.expected_src_caps =
"video/x-raw-yuv, framerate=(fraction)15/1;"
"video/x-raw-yuv, framerate=(fraction)[0/1, 15/1];"},
{
.caps = "video/x-raw-yuv, framerate=[15/1, 30/1]",
.drop_only = FALSE,
.expected_sink_caps =
"video/x-raw-yuv, framerate=(fraction)[15/1, 30/1];"
"video/x-raw-yuv, framerate=(fraction)[0/1, MAX];",
.expected_src_caps =
"video/x-raw-yuv, framerate=(fraction)[15/1, 30/1];"
"video/x-raw-yuv, framerate=(fraction)[0/1, MAX];"},
{
.caps = "video/x-raw-yuv, framerate=[15/1, 30/1]",
.drop_only = FALSE,
.max_rate = 20,
.expected_sink_caps =
"video/x-raw-yuv, framerate=(fraction)[15/1, 30/1];"
"video/x-raw-yuv, framerate=(fraction)[0/1, MAX];",
.expected_src_caps =
"video/x-raw-yuv, framerate=(fraction)[15/1, 20/1];"
"video/x-raw-yuv, framerate=(fraction)[0/1, 20/1];"},
{
.caps = "video/x-raw-yuv, framerate=[15/1, 30/1]",
.drop_only = TRUE,
.expected_sink_caps =
"video/x-raw-yuv, framerate=(fraction)[15/1, 30/1];"
"video/x-raw-yuv, framerate=(fraction)[15/1, MAX];"
"video/x-raw-yuv, framerate=(fraction)0/1",
.expected_src_caps =
"video/x-raw-yuv, framerate=(fraction)[15/1, 30/1];"
"video/x-raw-yuv, framerate=(fraction)[0/1, 30/1]"},
{
.caps = "video/x-raw-yuv, framerate=[15/1, 30/1]",
.drop_only = TRUE,
.max_rate = 20,
.expected_sink_caps =
"video/x-raw-yuv, framerate=(fraction)[15/1, 20/1];"
"video/x-raw-yuv, framerate=(fraction)[15/1, 30/1];"
"video/x-raw-yuv, framerate=(fraction)[15/1, MAX];"
"video/x-raw-yuv, framerate=(fraction)0/1",
.expected_src_caps =
"video/x-raw-yuv, framerate=(fraction)[15/1, 20/1];"
"video/x-raw-yuv, framerate=(fraction)[0/1, 20/1]"},
{
.caps = "video/x-raw-yuv, framerate={15/1, 30/1}",
.drop_only = FALSE,
.expected_sink_caps =
"video/x-raw-yuv, framerate=(fraction){15/1, 30/1};"
"video/x-raw-yuv, framerate=(fraction)[0/1, MAX];",
.expected_src_caps =
"video/x-raw-yuv, framerate=(fraction){15/1, 30/1};"
"video/x-raw-yuv, framerate=(fraction)[0/1, MAX]"},
{
.caps = "video/x-raw-yuv, framerate={15/1, 30/1}",
.drop_only = FALSE,
.max_rate = 20,
.expected_sink_caps =
"video/x-raw-yuv, framerate=(fraction){15/1, 30/1};"
"video/x-raw-yuv, framerate=(fraction)[0/1, MAX];",
.expected_src_caps =
"video/x-raw-yuv, framerate=(fraction)15/1;"
"video/x-raw-yuv, framerate=(fraction)[0/1, 20/1];"},
{
.caps = "video/x-raw-yuv, framerate={15/1, 30/1}",
.drop_only = TRUE,
.expected_sink_caps =
"video/x-raw-yuv, framerate=(fraction){15/1, 30/1};"
"video/x-raw-yuv, framerate=(fraction)[15/1, MAX];"
"video/x-raw-yuv, framerate=(fraction)0/1",
.expected_src_caps =
"video/x-raw-yuv, framerate=(fraction){15/1, 30/1};"
"video/x-raw-yuv, framerate=(fraction)[0/1, 30/1];"},
{
.caps = "video/x-raw-yuv, framerate={15/1, 30/1}",
.drop_only = TRUE,
.max_rate = 20,
.expected_sink_caps =
"video/x-raw-yuv, framerate=(fraction)15/1;"
"video/x-raw-yuv, framerate=(fraction){15/1, 30/1};"
"video/x-raw-yuv, framerate=(fraction)[15/1, MAX];"
"video/x-raw-yuv, framerate=(fraction)0/1",
.expected_src_caps =
"video/x-raw-yuv, framerate=(fraction)15/1;"
"video/x-raw-yuv, framerate=(fraction)[0/1, 20/1]"},
};
static GstCaps *
_getcaps_function (GstPad * pad)
{
GstCaps *caps = g_object_get_data (G_OBJECT (pad), "caps");
fail_unless (caps != NULL);
return gst_caps_copy (caps);
}
static void
check_caps_identical (GstCaps * a, GstCaps * b, const char *name)
{
int i;
if (gst_caps_get_size (a) != gst_caps_get_size (b))
goto fail;
for (i = 0; i < gst_caps_get_size (a); i++) {
GstStructure *sa, *sb;
sa = gst_caps_get_structure (a, i);
sb = gst_caps_get_structure (b, i);
if (!gst_structure_is_equal (sa, sb))
goto fail;
}
return;
fail:
fail ("%s caps (%s) is not equal to caps (%s)",
name, gst_caps_to_string (a), gst_caps_to_string (b));
}
static void
check_peer_caps (GstPad * pad, const char *expected, const char *name)
{
GstCaps *caps;
GstCaps *expected_caps;
caps = gst_pad_peer_get_caps (pad);
fail_unless (caps != NULL);
expected_caps = gst_caps_from_string (expected);
fail_unless (expected_caps != NULL);
check_caps_identical (caps, expected_caps, name);
gst_caps_unref (caps);
gst_caps_unref (expected_caps);
}
GST_START_TEST (test_caps_negotiation)
{
GstElement *videorate;
GstCaps *caps;
TestInfo *test = &caps_negotiation_tests[__i__];
videorate = setup_videorate_full (&srctemplate, &sinktemplate);
caps = gst_caps_from_string (test->caps);
g_object_set_data_full (G_OBJECT (mysrcpad), "caps",
gst_caps_ref (caps), (GDestroyNotify) gst_caps_unref);
g_object_set_data_full (G_OBJECT (mysinkpad), "caps",
gst_caps_ref (caps), (GDestroyNotify) gst_caps_unref);
gst_caps_unref (caps);
g_object_set (videorate, "drop-only", test->drop_only, NULL);
if (test->max_rate != 0)
g_object_set (videorate, "max-rate", test->max_rate, NULL);
gst_pad_set_getcaps_function (mysrcpad, _getcaps_function);
gst_pad_set_getcaps_function (mysinkpad, _getcaps_function);
check_peer_caps (mysrcpad, test->expected_sink_caps, "sink");
check_peer_caps (mysinkpad, test->expected_src_caps, "src");
gst_object_unref (videorate);
}
GST_END_TEST;
static Suite *
videorate_suite (void)
{
@ -810,6 +1080,8 @@ videorate_suite (void)
tcase_add_test (tc_chain, test_non_ok_flow);
tcase_add_test (tc_chain, test_upstream_caps_nego);
tcase_add_test (tc_chain, test_selected_caps);
tcase_add_loop_test (tc_chain, test_caps_negotiation,
0, G_N_ELEMENTS (caps_negotiation_tests));
return s;
}