Merge branch 'master' into 0.11-fdo

Wim Taymans 2011-03-28 20:13:59 +02:00
commit e1869fa267
21 changed files with 532 additions and 167 deletions


@ -80,7 +80,7 @@ tool_run "$libtoolize" "--copy --force"
tool_run "$aclocal" "-I m4 -I common/m4 $ACLOCAL_FLAGS"
tool_run "$autoheader"
# touch the stamp-h.in build stamp so we don't re-run autoheader in maintainer mode -- wingo
# touch the stamp-h.in build stamp so we don't re-run autoheader in maintainer mode
echo timestamp > stamp-h.in 2> /dev/null
tool_run "$autoconf"

common (2 changed lines)

@ -1 +1 @@
Subproject commit 6aec6b9716c184c60c4bc6a5916a2471cfa8c8cd
Subproject commit 1ccbe098d6379612fcef09f4000da23585af980a


@ -731,7 +731,6 @@ AG_GST_CHECK_FEATURE(GIO, [GIO library], gio, [
[The GIO library directory.])
], [
HAVE_GIO="no"
AC_MSG_RESULT(no)
])
AC_SUBST(GIO_CFLAGS)
AC_SUBST(GIO_LIBS)


@ -0,0 +1,88 @@
Interlaced Video
================
Video buffers have a number of states identifiable through a combination of caps
and buffer flags.
Possible states:
- Progressive
- Interlaced
  - Plain
    - One field
    - Two fields
    - Three fields - this should be a progressive buffer with a repeated
      'first' field that can be used for telecine pulldown
  - Telecine
    - One field
    - Two fields
      - Progressive
      - Interlaced (a.k.a. 'mixed'; the fields are from different frames)
    - Three fields - this should be a progressive buffer with a repeated
      'first' field that can be used for telecine pulldown
Note: it can be seen that the difference between the plain interlaced and
telecine states is that in the telecine state, buffers containing two fields may
be progressive.
Tools for identification:
- Caps
  - interlaced - boolean
  - interlacing-method - string - "unknown"/"telecine"
- Flags - GST_VIDEO_BUFFER_...
  - TFF
  - RFF
  - ONEFIELD
  - PROGRESSIVE
Identification of Buffer States
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Note that the flags are not necessarily interpreted in the same way for all
states, nor are they required or meaningful in all cases.
Progressive
...........
If the caps have no interlaced field, or have interlaced=false, then the buffer
is progressive. Note the possibility for progressive buffers in telecine streams
as well.
Plain Interlaced
................
If the caps have interlaced=true and either do not contain the
interlacing-method field or contain interlacing-method=unknown, then the buffer
is plain interlaced.
GST_VIDEO_BUFFER_TFF indicates whether the top or bottom field is to be
displayed first. The timestamp on the buffer corresponds to the first field.
(FIXME - is the duration of the buffer the duration of both fields; i.e. is
each field's duration half the buffer duration?)
GST_VIDEO_BUFFER_RFF indicates that the first field (indicated by the TFF flag)
should be repeated. This is generally only used for telecine purposes but as the
telecine state was added long after the interlaced state was added and defined,
this flag remains valid for plain interlaced buffers.
GST_VIDEO_BUFFER_ONEFIELD means that only the field indicated through the TFF
flag is to be used. The other field should be ignored.
Telecine
........
If the caps have interlaced=true and interlacing-method=telecine then the
buffers are in some form of telecine state.
The TFF, RFF and ONEFIELD flags have the same semantics as for the plain
interlaced state; however, the telecine state requires one additional flag to
be able to identify progressive buffers.
GST_VIDEO_BUFFER_PROGRESSIVE means that the buffer containing two fields is a
progressive frame. The implication is that if this flag is not set, the buffer
is an 'interlaced' or 'mixed' buffer that contains two fields that, when
combined with fields from adjacent buffers, allow reconstruction of progressive
frames.
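
As a rough sketch of how the rules above could be applied, a downstream element
might classify incoming buffers along these lines. This is only an illustration
against the 0.10 API: the helper name classify_buffer and the returned
description strings are invented for this example, and
GST_VIDEO_BUFFER_PROGRESSIVE is the flag proposed above.

  #include <string.h>
  #include <gst/gst.h>
  #include <gst/video/video.h>

  static const gchar *
  classify_buffer (GstBuffer * buf, GstCaps * caps)
  {
    GstStructure *s = gst_caps_get_structure (caps, 0);
    gboolean interlaced = FALSE;
    const gchar *method;

    /* no interlaced field, or interlaced=false: progressive content */
    if (!gst_structure_get_boolean (s, "interlaced", &interlaced) || !interlaced)
      return "progressive";

    /* interlacing-method=telecine selects the telecine state */
    method = gst_structure_get_string (s, "interlacing-method");
    if (method != NULL && strcmp (method, "telecine") == 0) {
      if (GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_PROGRESSIVE))
        return "telecine: progressive frame";
      if (GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_ONEFIELD))
        return "telecine: one field";
      return "telecine: mixed fields";
    }

    /* plain interlaced: TFF/RFF/ONEFIELD describe the field layout */
    if (GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_ONEFIELD))
      return "interlaced: one field";
    if (GST_BUFFER_FLAG_IS_SET (buf, GST_VIDEO_BUFFER_RFF))
      return "interlaced: two fields, first field repeated";
    return "interlaced: two fields";
  }

Such checks would typically be made per buffer in an element's chain function,
after parsing the negotiated caps.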


@ -48,7 +48,8 @@ SCAN_OPTIONS=
MKDB_OPTIONS=--sgml-mode
# Extra options to supply to gtkdoc-fixref.
FIXXREF_OPTIONS=--extra-dir=$(GLIB_PREFIX)/share/gtk-doc/html \
FIXXREF_OPTIONS=--extra-dir=$(top_builddir)/docs/libs/html \
--extra-dir=$(GLIB_PREFIX)/share/gtk-doc/html \
--extra-dir=$(GST_PREFIX)/share/gtk-doc/html \
--extra-dir=$(datadir)/gtk-doc/html


@ -416,7 +416,7 @@ static void
task_monitor_alsa (gpointer data)
{
struct pollfd *pfds;
unsigned int nfds, rnfds;
int nfds, rnfds;
unsigned short revents;
GstAlsaMixer *mixer = (GstAlsaMixer *) data;
gint ret;
@ -733,7 +733,7 @@ gst_alsa_mixer_set_mute (GstAlsaMixer * mixer, GstMixerTrack * track,
gst_alsa_mixer_track_update (alsa_track);
if (!!(mute) == !!(track->flags & GST_MIXER_TRACK_MUTE)) {
if (! !(mute) == ! !(track->flags & GST_MIXER_TRACK_MUTE)) {
g_static_rec_mutex_unlock (mixer->rec_mutex);
return;
}
@ -785,7 +785,7 @@ gst_alsa_mixer_set_record (GstAlsaMixer * mixer,
gst_alsa_mixer_track_update (alsa_track);
if (!!(record) == !!(track->flags & GST_MIXER_TRACK_RECORD)) {
if (! !(record) == ! !(track->flags & GST_MIXER_TRACK_RECORD)) {
g_static_rec_mutex_unlock (mixer->rec_mutex);
return;
}
@ -917,8 +917,8 @@ gst_alsa_mixer_update_track (GstAlsaMixer * mixer,
return;
}
old_mute = !!(GST_MIXER_TRACK_HAS_FLAG (track, GST_MIXER_TRACK_MUTE));
old_record = !!(GST_MIXER_TRACK_HAS_FLAG (track, GST_MIXER_TRACK_RECORD));
old_mute = ! !(GST_MIXER_TRACK_HAS_FLAG (track, GST_MIXER_TRACK_MUTE));
old_record = ! !(GST_MIXER_TRACK_HAS_FLAG (track, GST_MIXER_TRACK_RECORD));
old_volumes = g_new (gint, track->num_channels);
n_channels = track->num_channels;
memcpy (old_volumes, alsa_track->volumes,
@ -927,13 +927,13 @@ gst_alsa_mixer_update_track (GstAlsaMixer * mixer,
gst_alsa_mixer_track_update (alsa_track);
if (old_record !=
!!(GST_MIXER_TRACK_HAS_FLAG (track, GST_MIXER_TRACK_RECORD))) {
! !(GST_MIXER_TRACK_HAS_FLAG (track, GST_MIXER_TRACK_RECORD))) {
gst_mixer_record_toggled (mixer->interface, track,
!!GST_MIXER_TRACK_HAS_FLAG (track, GST_MIXER_TRACK_RECORD));
! !GST_MIXER_TRACK_HAS_FLAG (track, GST_MIXER_TRACK_RECORD));
}
if (old_mute != !!(GST_MIXER_TRACK_HAS_FLAG (track, GST_MIXER_TRACK_MUTE))) {
if (old_mute != ! !(GST_MIXER_TRACK_HAS_FLAG (track, GST_MIXER_TRACK_MUTE))) {
gst_mixer_mute_toggled (mixer->interface, track,
!!GST_MIXER_TRACK_HAS_FLAG (track, GST_MIXER_TRACK_MUTE));
! !GST_MIXER_TRACK_HAS_FLAG (track, GST_MIXER_TRACK_MUTE));
}
n_channels = MIN (n_channels, track->num_channels);


@ -573,11 +573,11 @@ gst_ogg_parse_chain (GstPad * pad, GstBuffer * buffer)
for (l = ogg->oggstreams; l != NULL; l = l->next) {
GstOggStream *stream = (GstOggStream *) l->data;
int j;
GList *j;
for (j = 1; j < g_list_length (stream->headers); j++) {
gst_ogg_parse_append_header (&array,
GST_BUFFER (g_list_nth_data (stream->headers, j)));
/* already appended the first header, now do headers 2-N */
for (j = stream->headers->next; j != NULL; j = j->next) {
gst_ogg_parse_append_header (&array, GST_BUFFER (j->data));
count++;
}
}
@ -604,6 +604,7 @@ gst_ogg_parse_chain (GstPad * pad, GstBuffer * buffer)
GstOggStream *stream = (GstOggStream *) l->data;
GstBuffer *buf = GST_BUFFER (stream->headers->data);
buf = gst_buffer_make_metadata_writable (buf);
gst_buffer_set_caps (buf, caps);
result = gst_pad_push (ogg->srcpad, buf);
@ -612,11 +613,13 @@ gst_ogg_parse_chain (GstPad * pad, GstBuffer * buffer)
}
for (l = ogg->oggstreams; l != NULL; l = l->next) {
GstOggStream *stream = (GstOggStream *) l->data;
int j;
GList *j;
for (j = 1; j < g_list_length (stream->headers); j++) {
GstBuffer *buf =
GST_BUFFER (g_list_nth_data (stream->headers, j));
/* pushed the first one for each stream already, now do 2-N */
for (j = stream->headers->next; j != NULL; j = j->next) {
GstBuffer *buf = GST_BUFFER (j->data);
buf = gst_buffer_make_metadata_writable (buf);
gst_buffer_set_caps (buf, caps);
result = gst_pad_push (ogg->srcpad, buf);
@ -647,7 +650,7 @@ gst_ogg_parse_chain (GstPad * pad, GstBuffer * buffer)
for (k = stream->unknown_pages; k != NULL; k = k->next) {
GstBuffer *buf;
buf = GST_BUFFER (k->data);
buf = gst_buffer_make_metadata_writable (GST_BUFFER (k->data));
gst_buffer_set_caps (buf, caps);
result = gst_pad_push (ogg->srcpad, buf);
if (result != GST_FLOW_OK)
@ -664,29 +667,28 @@ gst_ogg_parse_chain (GstPad * pad, GstBuffer * buffer)
stream->stored_buffers = g_list_append (stream->stored_buffers,
pagebuffer);
} else {
if (stream->stored_buffers) {
int j;
while (stream->stored_buffers) {
GstBuffer *buf = stream->stored_buffers->data;
for (j = 0; j < g_list_length (stream->stored_buffers); j++) {
GstBuffer *buf =
GST_BUFFER (g_list_nth_data (stream->stored_buffers, j));
gst_buffer_set_caps (buf, ogg->caps);
GST_BUFFER_TIMESTAMP (buf) = buffertimestamp;
if (!keyframe) {
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
} else {
keyframe = FALSE;
}
result = gst_pad_push (ogg->srcpad, buf);
if (result != GST_FLOW_OK)
return result;
buf = gst_buffer_make_metadata_writable (buf);
gst_buffer_set_caps (buf, ogg->caps);
GST_BUFFER_TIMESTAMP (buf) = buffertimestamp;
if (!keyframe) {
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
} else {
keyframe = FALSE;
}
g_list_free (stream->stored_buffers);
stream->stored_buffers = NULL;
result = gst_pad_push (ogg->srcpad, buf);
if (result != GST_FLOW_OK)
return result;
stream->stored_buffers =
g_list_delete_link (stream->stored_buffers,
stream->stored_buffers);
}
pagebuffer = gst_buffer_make_metadata_writable (pagebuffer);
gst_buffer_set_caps (pagebuffer, ogg->caps);
if (!keyframe) {
GST_BUFFER_FLAG_SET (pagebuffer, GST_BUFFER_FLAG_DELTA_UNIT);
@ -694,7 +696,7 @@ gst_ogg_parse_chain (GstPad * pad, GstBuffer * buffer)
keyframe = FALSE;
}
result = gst_pad_push (ogg->srcpad, GST_BUFFER (pagebuffer));
result = gst_pad_push (ogg->srcpad, pagebuffer);
if (result != GST_FLOW_OK)
return result;
}


@ -27,7 +27,9 @@ typedef struct _GstTextOverlayClass GstTextOverlayClass;
* GstTextOverlayVAlign:
* @GST_TEXT_OVERLAY_VALIGN_BASELINE: draw text on the baseline
* @GST_TEXT_OVERLAY_VALIGN_BOTTOM: draw text on the bottom
* @GST_TEXT_OVERLAY_VALIGN_TOP: draw test on top
* @GST_TEXT_OVERLAY_VALIGN_TOP: draw text on top
* @GST_TEXT_OVERLAY_VALIGN_POS: draw text according to the #GstTextOverlay:ypos property
* @GST_TEXT_OVERLAY_VALIGN_CENTER: draw text vertically centered
*
* Vertical alignment of the text.
*/
@ -44,14 +46,16 @@ typedef enum {
* @GST_TEXT_OVERLAY_HALIGN_LEFT: align text left
* @GST_TEXT_OVERLAY_HALIGN_CENTER: align text center
* @GST_TEXT_OVERLAY_HALIGN_RIGHT: align text right
* @GST_TEXT_OVERLAY_HALIGN_POS: position text according to the #GstTextOverlay:xpos property
*
* Horizontal alignment of the text.
*/
/* FIXME 0.11: remove GST_TEXT_OVERLAY_HALIGN_UNUSED */
typedef enum {
GST_TEXT_OVERLAY_HALIGN_LEFT,
GST_TEXT_OVERLAY_HALIGN_CENTER,
GST_TEXT_OVERLAY_HALIGN_RIGHT,
GST_TEXT_OVERLAY_HALIGN_TOP,
GST_TEXT_OVERLAY_HALIGN_UNUSED,
GST_TEXT_OVERLAY_HALIGN_POS
} GstTextOverlayHAlign;


@ -68,12 +68,12 @@
* gst_caps_unref (caps);
*
* caps = gst_caps_from_string("video/x-theora");
* sprof = gst_encoding_container_profile_add_profile(
* gst_encoding_container_profile_add_profile(prof,
* (GstEncodingProfile*) gst_encoding_video_profile_new(caps, NULL, NULL, 0));
* gst_caps_unref (caps);
*
* caps = gst_caps_from_string("audio/x-vorbis");
* sprof = gst_encoding_container_profile_add_profile(
* gst_encoding_container_profile_add_profile(prof,
* (GstEncodingProfile*) gst_encoding_audio_profile_new(caps, NULL, NULL, 0));
* gst_caps_unref (caps);
*


@ -1505,7 +1505,7 @@ gst_rtp_buffer_add_extension_twobytes_header (GstRTPBuffer * rtp,
gst_rtp_buffer_set_extension_data (rtp, (0x100 << 4) | (appbits & 0x0F),
wordlen);
} else {
wordlen = (size + 1) / 4 + (((size + 1) % 4) ? 1 : 0);
wordlen = (size + 2) / 4 + (((size + 2) % 4) ? 1 : 0);
gst_rtp_buffer_set_extension_data (rtp, (0x100 << 4) | (appbits & 0x0F),
wordlen);


@ -205,7 +205,6 @@ _xmp_tag_get_mapping (const gchar * gst_tag)
ret = (GPtrArray *) gst_xmp_schema_lookup (schema, GUINT_TO_POINTER (key));
}
return ret;
}
@ -822,6 +821,7 @@ _init_xmp_tag_map ()
_gst_xmp_add_schema ("xmp", schema);
/* tiff */
schema = gst_xmp_schema_new ();
_gst_xmp_schema_add_simple_mapping (schema,
GST_TAG_DEVICE_MANUFACTURER, "tiff:Make", GST_XMP_TAG_TYPE_SIMPLE, NULL,
NULL);


@ -544,36 +544,14 @@ gst_video_format_new_caps_interlaced (GstVideoFormat format,
return res;
}
/**
* gst_video_format_new_caps:
* @format: the #GstVideoFormat describing the raw video format
* @width: width of video
* @height: height of video
* @framerate_n: numerator of frame rate
* @framerate_d: denominator of frame rate
* @par_n: numerator of pixel aspect ratio
* @par_d: denominator of pixel aspect ratio
*
* Creates a new #GstCaps object based on the parameters provided.
*
* Since: 0.10.16
*
* Returns: a new #GstCaps object, or NULL if there was an error
*/
GstCaps *
gst_video_format_new_caps (GstVideoFormat format, int width,
int height, int framerate_n, int framerate_d, int par_n, int par_d)
static GstCaps *
gst_video_format_new_caps_raw (GstVideoFormat format)
{
g_return_val_if_fail (format != GST_VIDEO_FORMAT_UNKNOWN, NULL);
g_return_val_if_fail (width > 0 && height > 0, NULL);
if (gst_video_format_is_yuv (format)) {
return gst_caps_new_simple ("video/x-raw-yuv",
"format", GST_TYPE_FOURCC, gst_video_format_to_fourcc (format),
"width", G_TYPE_INT, width,
"height", G_TYPE_INT, height,
"framerate", GST_TYPE_FRACTION, framerate_n, framerate_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, par_n, par_d, NULL);
"format", GST_TYPE_FOURCC, gst_video_format_to_fourcc (format), NULL);
}
if (gst_video_format_is_rgb (format)) {
GstCaps *caps;
@ -651,14 +629,11 @@ gst_video_format_new_caps (GstVideoFormat format, int width,
mask = 0xff0000;
}
red_mask =
mask >> (8 * gst_video_format_get_component_offset (format, 0,
width, height));
mask >> (8 * gst_video_format_get_component_offset (format, 0, 0, 0));
green_mask =
mask >> (8 * gst_video_format_get_component_offset (format, 1,
width, height));
mask >> (8 * gst_video_format_get_component_offset (format, 1, 0, 0));
blue_mask =
mask >> (8 * gst_video_format_get_component_offset (format, 2,
width, height));
mask >> (8 * gst_video_format_get_component_offset (format, 2, 0, 0));
} else if (bpp == 16) {
switch (format) {
case GST_VIDEO_FORMAT_RGB16:
@ -690,12 +665,7 @@ gst_video_format_new_caps (GstVideoFormat format, int width,
}
caps = gst_caps_new_simple ("video/x-raw-rgb",
"bpp", G_TYPE_INT, bpp,
"depth", G_TYPE_INT, depth,
"width", G_TYPE_INT, width,
"height", G_TYPE_INT, height,
"framerate", GST_TYPE_FRACTION, framerate_n, framerate_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, par_n, par_d, NULL);
"bpp", G_TYPE_INT, bpp, "depth", G_TYPE_INT, depth, NULL);
if (bpp != 8) {
gst_caps_set_simple (caps,
@ -707,8 +677,7 @@ gst_video_format_new_caps (GstVideoFormat format, int width,
if (have_alpha) {
alpha_mask =
mask >> (8 * gst_video_format_get_component_offset (format, 3,
width, height));
mask >> (8 * gst_video_format_get_component_offset (format, 3, 0, 0));
gst_caps_set_simple (caps, "alpha_mask", G_TYPE_INT, alpha_mask, NULL);
}
return caps;
@ -740,21 +709,12 @@ gst_video_format_new_caps (GstVideoFormat format, int width,
if (bpp > 8) {
caps = gst_caps_new_simple ("video/x-raw-gray",
"bpp", G_TYPE_INT, bpp,
"depth", G_TYPE_INT, depth,
"width", G_TYPE_INT, width,
"height", G_TYPE_INT, height,
"framerate", GST_TYPE_FRACTION, framerate_n, framerate_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, par_n, par_d, NULL);
"bpp", G_TYPE_INT, bpp, "depth", G_TYPE_INT, depth, NULL);
} else {
caps = gst_caps_new_simple ("video/x-raw-gray",
"bpp", G_TYPE_INT, bpp,
"depth", G_TYPE_INT, depth,
"endianness", G_TYPE_INT, G_BIG_ENDIAN,
"width", G_TYPE_INT, width,
"height", G_TYPE_INT, height,
"framerate", GST_TYPE_FRACTION, framerate_n, framerate_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, par_n, par_d, NULL);
"endianness", G_TYPE_INT, G_BIG_ENDIAN, NULL);
}
return caps;
@ -763,6 +723,96 @@ gst_video_format_new_caps (GstVideoFormat format, int width,
return NULL;
}
/**
* gst_video_format_new_template_caps:
* @format: the #GstVideoFormat describing the raw video format
*
* Creates a new #GstCaps object based on the parameters provided.
* Size, frame rate, and pixel aspect ratio are set to the full
* range.
*
* Since: 0.10.33
*
* Returns: a new #GstCaps object, or NULL if there was an error
*/
GstCaps *
gst_video_format_new_template_caps (GstVideoFormat format)
{
GstCaps *caps;
GstStructure *structure;
g_return_val_if_fail (format != GST_VIDEO_FORMAT_UNKNOWN, NULL);
caps = gst_video_format_new_caps_raw (format);
if (caps) {
GValue value = { 0 };
GValue v = { 0 };
structure = gst_caps_get_structure (caps, 0);
gst_structure_set (structure,
"width", GST_TYPE_INT_RANGE, 1, G_MAXINT,
"height", GST_TYPE_INT_RANGE, 1, G_MAXINT,
"framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1,
"pixel-aspect-ratio", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
g_value_init (&value, GST_TYPE_LIST);
g_value_init (&v, G_TYPE_BOOLEAN);
g_value_set_boolean (&v, TRUE);
gst_value_list_append_value (&value, &v);
g_value_set_boolean (&v, FALSE);
gst_value_list_append_value (&value, &v);
gst_structure_set_value (structure, "interlaced", &value);
g_value_reset (&value);
g_value_reset (&v);
}
return caps;
}
/**
* gst_video_format_new_caps:
* @format: the #GstVideoFormat describing the raw video format
* @width: width of video
* @height: height of video
* @framerate_n: numerator of frame rate
* @framerate_d: denominator of frame rate
* @par_n: numerator of pixel aspect ratio
* @par_d: denominator of pixel aspect ratio
*
* Creates a new #GstCaps object based on the parameters provided.
*
* Since: 0.10.16
*
* Returns: a new #GstCaps object, or NULL if there was an error
*/
GstCaps *
gst_video_format_new_caps (GstVideoFormat format, int width,
int height, int framerate_n, int framerate_d, int par_n, int par_d)
{
GstCaps *caps;
GstStructure *structure;
g_return_val_if_fail (format != GST_VIDEO_FORMAT_UNKNOWN, NULL);
g_return_val_if_fail (width > 0 && height > 0, NULL);
caps = gst_video_format_new_caps_raw (format);
if (caps) {
structure = gst_caps_get_structure (caps, 0);
gst_structure_set (structure,
"width", G_TYPE_INT, width,
"height", G_TYPE_INT, height,
"framerate", GST_TYPE_FRACTION, framerate_n, framerate_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, par_n, par_d, NULL);
}
return caps;
}
/**
* gst_video_format_from_fourcc:
* @fourcc: a FOURCC value representing raw YUV video
@ -1680,7 +1730,8 @@ gst_video_format_get_component_offset (GstVideoFormat format,
{
g_return_val_if_fail (format != GST_VIDEO_FORMAT_UNKNOWN, 0);
g_return_val_if_fail (component >= 0 && component <= 3, 0);
g_return_val_if_fail (width > 0 && height > 0, 0);
g_return_val_if_fail ((!gst_video_format_is_yuv (format)) || (width > 0
&& height > 0), 0);
switch (format) {
case GST_VIDEO_FORMAT_I420:


@ -461,7 +461,8 @@ GstCaps * gst_video_format_new_caps (GstVideoFormat format,
int par_n, int par_d);
GstCaps * gst_video_format_new_caps_interlaced (GstVideoFormat format,
int width, int height, int framerate_n, int framerate_d,
int par_n, int par_d, gboolean interlaced);
int par_n, int par_d, gboolean interlaced);
GstCaps * gst_video_format_new_template_caps (GstVideoFormat format);
GstVideoFormat gst_video_format_from_fourcc (guint32 fourcc);
guint32 gst_video_format_to_fourcc (GstVideoFormat format);
gboolean gst_video_format_is_rgb (GstVideoFormat format);


@ -1139,11 +1139,17 @@ free_group (GstPlayBin * playbin, GstSourceGroup * group)
g_ptr_array_free (group->text_channels, TRUE);
g_mutex_free (group->lock);
if (group->audio_sink)
if (group->audio_sink) {
if (group->audio_sink != playbin->audio_sink)
gst_element_set_state (group->audio_sink, GST_STATE_NULL);
gst_object_unref (group->audio_sink);
}
group->audio_sink = NULL;
if (group->video_sink)
if (group->video_sink) {
if (group->video_sink != playbin->video_sink)
gst_element_set_state (group->video_sink, GST_STATE_NULL);
gst_object_unref (group->video_sink);
}
group->video_sink = NULL;
g_list_free (group->stream_changed_pending);
@ -1239,12 +1245,18 @@ gst_play_bin_finalize (GObject * object)
if (playbin->source)
gst_object_unref (playbin->source);
if (playbin->video_sink)
if (playbin->video_sink) {
gst_element_set_state (playbin->video_sink, GST_STATE_NULL);
gst_object_unref (playbin->video_sink);
if (playbin->audio_sink)
}
if (playbin->audio_sink) {
gst_element_set_state (playbin->audio_sink, GST_STATE_NULL);
gst_object_unref (playbin->audio_sink);
if (playbin->text_sink)
}
if (playbin->text_sink) {
gst_element_set_state (playbin->text_sink, GST_STATE_NULL);
gst_object_unref (playbin->text_sink);
}
if (playbin->elements)
gst_plugin_feature_list_free (playbin->elements);
@ -2756,30 +2768,22 @@ no_more_pads_cb (GstElement * decodebin, GstSourceGroup * group)
if (configure) {
/* if we have custom sinks, configure them now */
GST_SOURCE_GROUP_LOCK (group);
if (group->audio_sink) {
GST_INFO_OBJECT (playbin, "setting custom audio sink %" GST_PTR_FORMAT,
group->audio_sink);
gst_play_sink_set_sink (playbin->playsink, GST_PLAY_SINK_TYPE_AUDIO,
group->audio_sink);
} else {
GST_INFO_OBJECT (playbin, "setting default audio sink %" GST_PTR_FORMAT,
playbin->audio_sink);
gst_play_sink_set_sink (playbin->playsink, GST_PLAY_SINK_TYPE_AUDIO,
playbin->audio_sink);
}
if (group->video_sink) {
GST_INFO_OBJECT (playbin, "setting custom video sink %" GST_PTR_FORMAT,
group->video_sink);
gst_play_sink_set_sink (playbin->playsink, GST_PLAY_SINK_TYPE_VIDEO,
group->video_sink);
} else {
GST_INFO_OBJECT (playbin, "setting default video sink %" GST_PTR_FORMAT,
playbin->video_sink);
gst_play_sink_set_sink (playbin->playsink, GST_PLAY_SINK_TYPE_VIDEO,
playbin->video_sink);
}
GST_INFO_OBJECT (playbin, "setting custom audio sink %" GST_PTR_FORMAT,
group->audio_sink);
gst_play_sink_set_sink (playbin->playsink, GST_PLAY_SINK_TYPE_AUDIO,
group->audio_sink);
GST_INFO_OBJECT (playbin, "setting custom video sink %" GST_PTR_FORMAT,
group->video_sink);
gst_play_sink_set_sink (playbin->playsink, GST_PLAY_SINK_TYPE_VIDEO,
group->video_sink);
GST_INFO_OBJECT (playbin, "setting custom text sink %" GST_PTR_FORMAT,
playbin->text_sink);
gst_play_sink_set_sink (playbin->playsink, GST_PLAY_SINK_TYPE_TEXT,
playbin->text_sink);
GST_SOURCE_GROUP_UNLOCK (group);
GST_LOG_OBJECT (playbin, "reconfigure sink");
@ -2961,8 +2965,15 @@ autoplug_continue_cb (GstElement * element, GstPad * pad, GstCaps * caps,
if ((sink = group->playbin->text_sink))
sinkpad = gst_element_get_static_pad (sink, "sink");
if (sinkpad) {
GstCaps *sinkcaps = gst_pad_get_caps_reffed (sinkpad);
GstCaps *sinkcaps;
/* Ignore errors here, if a custom sink fails to go
* to READY things are wrong and will error out later
*/
if (GST_STATE (sink) < GST_STATE_READY)
gst_element_set_state (sink, GST_STATE_READY);
sinkcaps = gst_pad_get_caps_reffed (sinkpad);
if (!gst_caps_is_any (sinkcaps))
ret = !gst_pad_accept_caps (sinkpad, caps);
gst_caps_unref (sinkcaps);
@ -2972,14 +2983,29 @@ autoplug_continue_cb (GstElement * element, GstPad * pad, GstCaps * caps,
ret = !gst_caps_can_intersect (caps, subcaps);
gst_caps_unref (subcaps);
}
/* If autoplugging can stop don't do additional checks */
if (!ret)
goto done;
if ((sink = group->playbin->audio_sink)) {
/* If this is from the subtitle uridecodebin we don't need to
* check the audio and video sink */
if (group->suburidecodebin
&& gst_object_has_ancestor (GST_OBJECT_CAST (element),
GST_OBJECT_CAST (group->suburidecodebin)))
goto done;
if ((sink = group->audio_sink)) {
sinkpad = gst_element_get_static_pad (sink, "sink");
if (sinkpad) {
GstCaps *sinkcaps = gst_pad_get_caps_reffed (sinkpad);
GstCaps *sinkcaps;
/* Ignore errors here, if a custom sink fails to go
* to READY things are wrong and will error out later
*/
if (GST_STATE (sink) < GST_STATE_READY)
gst_element_set_state (sink, GST_STATE_READY);
sinkcaps = gst_pad_get_caps_reffed (sinkpad);
if (!gst_caps_is_any (sinkcaps))
ret = !gst_pad_accept_caps (sinkpad, caps);
gst_caps_unref (sinkcaps);
@ -2989,11 +3015,18 @@ autoplug_continue_cb (GstElement * element, GstPad * pad, GstCaps * caps,
if (!ret)
goto done;
if ((sink = group->playbin->video_sink)) {
if ((sink = group->video_sink)) {
sinkpad = gst_element_get_static_pad (sink, "sink");
if (sinkpad) {
GstCaps *sinkcaps = gst_pad_get_caps_reffed (sinkpad);
GstCaps *sinkcaps;
/* Ignore errors here, if a custom sink fails to go
* to READY things are wrong and will error out later
*/
if (GST_STATE (sink) < GST_STATE_READY)
gst_element_set_state (sink, GST_STATE_READY);
sinkcaps = gst_pad_get_caps_reffed (sinkpad);
if (!gst_caps_is_any (sinkcaps))
ret = !gst_pad_accept_caps (sinkpad, caps);
gst_caps_unref (sinkcaps);
@ -3012,6 +3045,33 @@ done:
return ret;
}
static gboolean
sink_accepts_caps (GstElement * sink, GstCaps * caps)
{
GstPad *sinkpad;
/* ... activate it ... We do this before adding it to the bin so that we
* don't accidentally make it post error messages that will stop
* everything. */
if (GST_STATE (sink) < GST_STATE_READY &&
gst_element_set_state (sink,
GST_STATE_READY) == GST_STATE_CHANGE_FAILURE) {
return FALSE;
}
if ((sinkpad = gst_element_get_static_pad (sink, "sink"))) {
/* Got the sink pad, now let's see if the element actually does accept the
* caps that we have */
if (!gst_pad_accept_caps (sinkpad, caps)) {
gst_object_unref (sinkpad);
return FALSE;
}
gst_object_unref (sinkpad);
}
return TRUE;
}
/* We are asked to select an element. See if the next element to check
* is a sink. If this is the case, we see if the sink works by setting it to
* READY. If the sink works, we return SELECT_EXPOSE to make decodebin
@ -3071,48 +3131,50 @@ autoplug_select_cb (GstElement * decodebin, GstPad * pad,
/* now see if we already have a sink element */
GST_SOURCE_GROUP_LOCK (group);
if (*sinkp) {
GST_DEBUG_OBJECT (playbin, "we already have a pending sink, expose pad");
/* for now, just assume that we can link the pad to this same sink. FIXME,
* check that we can link this new pad to this sink as well. */
GST_SOURCE_GROUP_UNLOCK (group);
return GST_AUTOPLUG_SELECT_EXPOSE;
GstElement *sink = gst_object_ref (*sinkp);
if (sink_accepts_caps (sink, caps)) {
GST_DEBUG_OBJECT (playbin,
"Existing sink '%s' accepts caps: %" GST_PTR_FORMAT,
GST_ELEMENT_NAME (sink), caps);
gst_object_unref (sink);
GST_SOURCE_GROUP_UNLOCK (group);
return GST_AUTOPLUG_SELECT_EXPOSE;
} else {
GST_DEBUG_OBJECT (playbin,
"Existing sink '%s' does not accept caps: %" GST_PTR_FORMAT,
GST_ELEMENT_NAME (sink), caps);
gst_object_unref (sink);
GST_SOURCE_GROUP_UNLOCK (group);
return GST_AUTOPLUG_SELECT_SKIP;
}
}
GST_DEBUG_OBJECT (playbin, "we have no pending sink, try to create one");
GST_SOURCE_GROUP_UNLOCK (group);
if ((element = gst_element_factory_create (factory, NULL)) == NULL) {
GST_WARNING_OBJECT (playbin, "Could not create an element from %s",
gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));
GST_SOURCE_GROUP_UNLOCK (group);
return GST_AUTOPLUG_SELECT_SKIP;
}
/* ... activate it ... We do this before adding it to the bin so that we
* don't accidentally make it post error messages that will stop
* everything. */
if ((gst_element_set_state (element,
GST_STATE_READY)) == GST_STATE_CHANGE_FAILURE) {
GST_WARNING_OBJECT (playbin, "Couldn't set %s to READY",
GST_ELEMENT_NAME (element));
/* Check if the selected sink actually supports the
* caps and can be set to READY*/
if (!sink_accepts_caps (element, caps)) {
gst_element_set_state (element, GST_STATE_NULL);
gst_object_unref (element);
GST_SOURCE_GROUP_UNLOCK (group);
return GST_AUTOPLUG_SELECT_SKIP;
}
/* remember the sink in the group now, the element is floating, we take
* ownership now */
GST_SOURCE_GROUP_LOCK (group);
if (*sinkp == NULL) {
/* store the sink in the group, we will configure it later when we
* reconfigure the sink */
GST_DEBUG_OBJECT (playbin, "remember sink");
gst_object_ref_sink (element);
*sinkp = element;
} else {
/* some other thread configured a sink while we were testing the sink, set
* the sink back to NULL and assume we can use the other sink */
GST_DEBUG_OBJECT (playbin, "another sink was found, expose pad");
gst_element_set_state (element, GST_STATE_NULL);
gst_object_unref (element);
}
* ownership now
*
* store the sink in the group, we will configure it later when we
* reconfigure the sink */
GST_DEBUG_OBJECT (playbin, "remember sink");
gst_object_ref_sink (element);
*sinkp = element;
GST_SOURCE_GROUP_UNLOCK (group);
/* tell decodebin to expose the pad because we are going to use this
@ -3175,6 +3237,12 @@ activate_group (GstPlayBin * playbin, GstSourceGroup * group, GstState target)
GST_SOURCE_GROUP_LOCK (group);
/* First set up the custom sources */
if (playbin->audio_sink)
group->audio_sink = gst_object_ref (playbin->audio_sink);
if (playbin->video_sink)
group->video_sink = gst_object_ref (playbin->video_sink);
g_list_free (group->stream_changed_pending);
group->stream_changed_pending = NULL;
if (!group->stream_changed_pending_lock)
@ -3326,6 +3394,22 @@ no_decodebin:
{
GstMessage *msg;
/* delete any custom sinks we might have */
if (group->audio_sink) {
/* If this is an automatically created sink set it to NULL */
if (group->audio_sink != playbin->audio_sink)
gst_element_set_state (group->audio_sink, GST_STATE_NULL);
gst_object_unref (group->audio_sink);
}
group->audio_sink = NULL;
if (group->video_sink) {
/* If this is an automatically created sink set it to NULL */
if (group->video_sink != playbin->video_sink)
gst_element_set_state (group->video_sink, GST_STATE_NULL);
gst_object_unref (group->video_sink);
}
group->video_sink = NULL;
GST_SOURCE_GROUP_UNLOCK (group);
msg =
gst_missing_element_message_new (GST_ELEMENT_CAST (playbin),
@ -3338,6 +3422,22 @@ no_decodebin:
}
uridecodebin_failure:
{
/* delete any custom sinks we might have */
if (group->audio_sink) {
/* If this is an automatically created sink set it to NULL */
if (group->audio_sink != playbin->audio_sink)
gst_element_set_state (group->audio_sink, GST_STATE_NULL);
gst_object_unref (group->audio_sink);
}
group->audio_sink = NULL;
if (group->video_sink) {
/* If this is an automatically created sink set it to NULL */
if (group->video_sink != playbin->video_sink)
gst_element_set_state (group->video_sink, GST_STATE_NULL);
gst_object_unref (group->video_sink);
}
group->video_sink = NULL;
GST_DEBUG_OBJECT (playbin, "failed state change of uridecodebin");
return FALSE;
}
@ -3395,11 +3495,19 @@ deactivate_group (GstPlayBin * playbin, GstSourceGroup * group)
}
}
/* delete any custom sinks we might have */
if (group->audio_sink)
if (group->audio_sink) {
/* If this is an automatically created sink set it to NULL */
if (group->audio_sink != playbin->audio_sink)
gst_element_set_state (group->audio_sink, GST_STATE_NULL);
gst_object_unref (group->audio_sink);
}
group->audio_sink = NULL;
if (group->video_sink)
if (group->video_sink) {
/* If this is an automatically created sink set it to NULL */
if (group->video_sink != playbin->video_sink)
gst_element_set_state (group->video_sink, GST_STATE_NULL);
gst_object_unref (group->video_sink);
}
group->video_sink = NULL;
if (group->uridecodebin) {
@ -3615,6 +3723,14 @@ gst_play_bin_change_state (GstElement * element, GstStateChange transition)
}
}
/* Set our sinks back to NULL, they might not be child of playbin */
if (playbin->audio_sink)
gst_element_set_state (playbin->audio_sink, GST_STATE_NULL);
if (playbin->video_sink)
gst_element_set_state (playbin->video_sink, GST_STATE_NULL);
if (playbin->text_sink)
gst_element_set_state (playbin->text_sink, GST_STATE_NULL);
/* make sure the groups don't perform a state change anymore until we
* enable them again */
groups_set_locked_state (playbin, TRUE);


@ -1215,6 +1215,8 @@ gen_video_chain (GstPlaySink * playsink, gboolean raw, gboolean async)
chain->sink = try_element (playsink, elem, TRUE);
}
}
if (chain->sink)
playsink->video_sink = gst_object_ref (chain->sink);
}
if (chain->sink == NULL)
goto no_sinks;
@ -1360,6 +1362,8 @@ link_failed:
(NULL), ("Failed to configure the video sink."));
/* checking sink made it READY */
gst_element_set_state (chain->sink, GST_STATE_NULL);
/* Remove chain from the bin to allow reuse later */
gst_bin_remove (bin, chain->sink);
free_chain ((GstPlayChain *) chain);
return NULL;
}
@ -1486,6 +1490,9 @@ gen_text_chain (GstPlaySink * playsink)
gst_play_sink_find_property_sinks (playsink, chain->sink,
"sync", G_TYPE_BOOLEAN)))
g_object_set (elem, "sync", TRUE, NULL);
if (!textsinkpad)
gst_bin_remove (bin, chain->sink);
} else {
GST_WARNING_OBJECT (playsink,
"can't find async property in custom text sink");
@ -1671,6 +1678,8 @@ gen_audio_chain (GstPlaySink * playsink, gboolean raw)
chain->sink = try_element (playsink, elem, TRUE);
}
}
if (chain->sink)
playsink->audio_sink = gst_object_ref (chain->sink);
}
if (chain->sink == NULL)
goto no_sinks;
@ -1681,7 +1690,7 @@ gen_audio_chain (GstPlaySink * playsink, gboolean raw)
gst_bin_add (bin, chain->sink);
/* we have to add a queue when we need to decouple for the video sink in
* visualisations */
* visualisations and for streamsynchronizer */
GST_DEBUG_OBJECT (playsink, "adding audio queue");
chain->queue = gst_element_factory_make ("queue", "aqueue");
if (chain->queue == NULL) {
@ -1885,6 +1894,8 @@ link_failed:
(NULL), ("Failed to configure the audio sink."));
/* checking sink made it READY */
gst_element_set_state (chain->sink, GST_STATE_NULL);
/* Remove chain from the bin to allow reuse later */
gst_bin_remove (bin, chain->sink);
free_chain ((GstPlayChain *) chain);
return NULL;
}
@ -2211,6 +2222,13 @@ gst_play_sink_reconfigure (GstPlaySink * playsink)
}
add_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE);
/* Remove the sink from the bin to keep its state
* and unparent it to allow reuse */
if (playsink->videochain->sink)
gst_bin_remove (GST_BIN_CAST (playsink->videochain->chain.bin),
playsink->videochain->sink);
activate_chain (GST_PLAY_CHAIN (playsink->videochain), FALSE);
free_chain ((GstPlayChain *) playsink->videochain);
playsink->videochain = NULL;
@ -2355,6 +2373,13 @@ gst_play_sink_reconfigure (GstPlaySink * playsink)
}
add_chain (GST_PLAY_CHAIN (playsink->audiochain), FALSE);
/* Remove the sink from the bin to keep its state
* and unparent it to allow reuse */
if (playsink->audiochain->sink)
gst_bin_remove (GST_BIN_CAST (playsink->audiochain->chain.bin),
playsink->audiochain->sink);
activate_chain (GST_PLAY_CHAIN (playsink->audiochain), FALSE);
disconnect_chain (playsink->audiochain, playsink);
playsink->audiochain->volume = NULL;
@ -3275,6 +3300,27 @@ gst_play_sink_change_state (GstElement * element, GstStateChange transition)
* so they may be re-used faster next time/url around.
* when really going to NULL, clean up everything completely. */
if (transition == GST_STATE_CHANGE_READY_TO_NULL) {
/* Unparent the sinks to allow reuse */
if (playsink->videochain && playsink->videochain->sink)
gst_bin_remove (GST_BIN_CAST (playsink->videochain->chain.bin),
playsink->videochain->sink);
if (playsink->audiochain && playsink->audiochain->sink)
gst_bin_remove (GST_BIN_CAST (playsink->audiochain->chain.bin),
playsink->audiochain->sink);
if (playsink->textchain && playsink->textchain->sink)
gst_bin_remove (GST_BIN_CAST (playsink->textchain->chain.bin),
playsink->textchain->sink);
if (playsink->audio_sink != NULL)
gst_element_set_state (playsink->audio_sink, GST_STATE_NULL);
if (playsink->video_sink != NULL)
gst_element_set_state (playsink->video_sink, GST_STATE_NULL);
if (playsink->visualisation != NULL)
gst_element_set_state (playsink->visualisation, GST_STATE_NULL);
if (playsink->text_sink != NULL)
gst_element_set_state (playsink->text_sink, GST_STATE_NULL);
free_chain ((GstPlayChain *) playsink->videodeinterlacechain);
playsink->videodeinterlacechain = NULL;
free_chain ((GstPlayChain *) playsink->videochain);


@ -1539,6 +1539,11 @@ make_decoder (GstURIDecodeBin * decoder)
if (!decodebin)
goto no_decodebin;
/* sanity check */
if (decodebin->numsinkpads == 0)
goto no_typefind;
/* connect signals to proxy */
g_signal_connect (decodebin, "unknown-type",
G_CALLBACK (proxy_unknown_type_signal), decoder);
@ -1609,6 +1614,15 @@ make_decoder (GstURIDecodeBin * decoder)
no_decodebin:
{
post_missing_plugin_error (GST_ELEMENT_CAST (decoder), "decodebin2");
GST_ELEMENT_ERROR (decoder, CORE, MISSING_PLUGIN, (NULL),
("No decodebin2 element, check your installation"));
return NULL;
}
no_typefind:
{
gst_object_unref (decodebin);
GST_ELEMENT_ERROR (decoder, CORE, MISSING_PLUGIN, (NULL),
("No typefind element, decodebin2 is unusable, check your installation"));
return NULL;
}
}
@ -1762,6 +1776,8 @@ setup_streaming (GstURIDecodeBin * decoder)
no_typefind:
{
post_missing_plugin_error (GST_ELEMENT_CAST (decoder), "typefind");
GST_ELEMENT_ERROR (decoder, CORE, MISSING_PLUGIN, (NULL),
("No typefind element, check your installation"));
return FALSE;
}
could_not_link:


@ -501,8 +501,8 @@ gst_video_scale_fixate_caps (GstBaseTransform * base, GstPadDirection direction,
NULL);
}
if (!from_par) {
g_value_init (&fpar, GST_TYPE_FRACTION_RANGE);
gst_value_set_fraction_range_full (&fpar, 1, G_MAXINT, G_MAXINT, 1);
g_value_init (&fpar, GST_TYPE_FRACTION);
gst_value_set_fraction (&fpar, 1, 1);
from_par = &fpar;
}
}


@ -290,8 +290,8 @@ libs_tag_CFLAGS = \
$(GST_BASE_CFLAGS) \
$(AM_CFLAGS)
libs_tag_LDADD = \
$(GST_BASE_LIBS) \
$(top_builddir)/gst-libs/gst/tag/libgsttag-@GST_MAJORMINOR@.la $(LDADD)
$(top_builddir)/gst-libs/gst/tag/libgsttag-@GST_MAJORMINOR@.la \
$(GST_BASE_LIBS) $(LDADD)
libs_pbutils_CFLAGS = \
$(GST_PLUGINS_BASE_CFLAGS) \


@ -3,6 +3,7 @@ audio
cddabasesrc
fft
gstlibscpp
libsabi
mixer
navigation
netbuffer


@ -491,6 +491,43 @@ GST_START_TEST (test_video_formats)
GST_END_TEST;
GST_START_TEST (test_video_formats_rgb)
{
gint width, height, framerate_n, framerate_d, par_n, par_d;
GstCaps *caps =
gst_video_format_new_caps (GST_VIDEO_FORMAT_RGB, 800, 600, 0, 1, 1, 1);
GstStructure *structure;
structure = gst_caps_get_structure (caps, 0);
fail_unless (gst_structure_get_int (structure, "width", &width));
fail_unless (gst_structure_get_int (structure, "height", &height));
fail_unless (gst_structure_get_fraction (structure, "framerate", &framerate_n,
&framerate_d));
fail_unless (gst_structure_get_fraction (structure, "pixel-aspect-ratio",
&par_n, &par_d));
fail_unless (width == 800);
fail_unless (height == 600);
fail_unless (framerate_n == 0);
fail_unless (framerate_d == 1);
fail_unless (par_n == 1);
fail_unless (par_d == 1);
gst_caps_unref (caps);
}
GST_END_TEST;
GST_START_TEST (test_video_template_caps)
{
GstCaps *caps = gst_video_format_new_template_caps (GST_VIDEO_FORMAT_RGB);
gst_caps_unref (caps);
}
GST_END_TEST;
GST_START_TEST (test_dar_calc)
{
guint display_ratio_n, display_ratio_d;
@ -738,6 +775,8 @@ video_suite (void)
suite_add_tcase (s, tc_chain);
tcase_add_test (tc_chain, test_video_formats);
tcase_add_test (tc_chain, test_video_formats_rgb);
tcase_add_test (tc_chain, test_video_template_caps);
tcase_add_test (tc_chain, test_dar_calc);
tcase_add_test (tc_chain, test_parse_caps_rgb);
tcase_add_test (tc_chain, test_events);


@ -21,6 +21,7 @@ EXPORTS
gst_video_format_is_yuv
gst_video_format_new_caps
gst_video_format_new_caps_interlaced
gst_video_format_new_template_caps
gst_video_format_parse_caps
gst_video_format_parse_caps_interlaced
gst_video_format_to_fourcc