Merge branch 'master' into 0.11

Conflicts:
	gst-libs/gst/app/gstappsrc.c
	gst-libs/gst/audio/multichannel.h
	gst-libs/gst/video/videooverlay.c
	gst/playback/gstplaysink.c
	gst/playback/gststreamsynchronizer.c
	tests/check/Makefile.am
	win32/common/libgstvideo.def
Author: Sebastian Dröge
Date:   2012-01-10 13:15:12 +01:00
Commit: dc8984d76c

32 changed files with 496 additions and 149 deletions

View file

@ -119,6 +119,7 @@ AG_GST_ARG_ENABLE_EXTERNAL
AG_GST_ARG_ENABLE_EXPERIMENTAL
dnl *** checks for platform ***
AG_GST_PLATFORM
dnl * hardware/architecture *

View file

@ -1836,6 +1836,7 @@ gst_tag_get_language_code
gst_tag_get_language_code_iso_639_1
gst_tag_get_language_code_iso_639_2B
gst_tag_get_language_code_iso_639_2T
gst_tag_check_language_code
</SECTION>
<SECTION>
@ -2121,6 +2122,7 @@ gst_video_overlay_rectangle_get_render_rectangle
gst_video_overlay_rectangle_get_seqnum
gst_video_overlay_rectangle_set_render_rectangle
gst_video_overlay_rectangle_copy
gst_video_overlay_rectangle_get_flags
<SUBSECTION Standard>
GST_TYPE_VIDEO_OVERLAY_COMPOSITION
GST_VIDEO_OVERLAY_COMPOSITION

View file

@ -46,7 +46,7 @@
#include "config.h"
#endif
#include <gstclockoverlay.h>
#include "gstclockoverlay.h"
#include <gst/video/video.h>
#include <time.h>

View file

@ -48,7 +48,7 @@
#include <gst/video/video.h>
#include <gsttimeoverlay.h>
#include "gsttimeoverlay.h"
#define gst_time_overlay_parent_class parent_class
G_DEFINE_TYPE (GstTimeOverlay, gst_time_overlay, GST_TYPE_BASE_TEXT_OVERLAY);

View file

@ -220,8 +220,10 @@ static void gst_app_src_set_latencies (GstAppSrc * appsrc,
gboolean do_min, guint64 min, gboolean do_max, guint64 max);
static gboolean gst_app_src_negotiate (GstBaseSrc * basesrc);
static GstFlowReturn gst_app_src_create (GstBaseSrc * bsrc,
guint64 offset, guint size, GstBuffer ** buf);
static GstCaps *gst_app_src_internal_get_caps (GstBaseSrc * bsrc,
GstCaps * filter);
static GstFlowReturn gst_app_src_create (GstBaseSrc * bsrc, guint64 offset,
guint size, GstBuffer ** buf);
static gboolean gst_app_src_start (GstBaseSrc * bsrc);
static gboolean gst_app_src_stop (GstBaseSrc * bsrc);
static gboolean gst_app_src_unlock (GstBaseSrc * bsrc);
@ -470,6 +472,7 @@ gst_app_src_class_init (GstAppSrcClass * klass)
gst_static_pad_template_get (&gst_app_src_template));
basesrc_class->negotiate = gst_app_src_negotiate;
basesrc_class->get_caps = gst_app_src_internal_get_caps;
basesrc_class->create = gst_app_src_create;
basesrc_class->start = gst_app_src_start;
basesrc_class->stop = gst_app_src_stop;
@ -551,6 +554,12 @@ gst_app_src_finalize (GObject * obj)
G_OBJECT_CLASS (parent_class)->finalize (obj);
}
static GstCaps *
gst_app_src_internal_get_caps (GstBaseSrc * bsrc, GstCaps * filter)
{
return gst_app_src_get_caps (GST_APP_SRC_CAST (bsrc), filter);
}
static void
gst_app_src_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
@ -614,7 +623,7 @@ gst_app_src_get_property (GObject * object, guint prop_id, GValue * value,
GstCaps *caps;
/* we're missing a _take_caps() function to transfer ownership */
caps = gst_app_src_get_caps (appsrc);
caps = gst_app_src_get_caps (appsrc, NULL);
gst_value_set_caps (value, caps);
if (caps)
gst_caps_unref (caps);
@ -1097,7 +1106,7 @@ gst_app_src_set_caps (GstAppSrc * appsrc, const GstCaps * caps)
* Since: 0.10.22
*/
GstCaps *
gst_app_src_get_caps (GstAppSrc * appsrc)
gst_app_src_get_caps (GstAppSrc * appsrc, GstCaps * filter)
{
GstCaps *caps;
GstAppSrcPrivate *priv;
@ -1109,6 +1118,14 @@ gst_app_src_get_caps (GstAppSrc * appsrc)
GST_OBJECT_LOCK (appsrc);
if ((caps = priv->caps))
gst_caps_ref (caps);
if (filter) {
GstCaps *intersection =
gst_caps_intersect_full (filter, caps, GST_CAPS_INTERSECT_FIRST);
gst_caps_unref (caps);
caps = intersection;
}
GST_DEBUG_OBJECT (appsrc, "getting caps of %" GST_PTR_FORMAT, caps);
GST_OBJECT_UNLOCK (appsrc);
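
For callers, the extra argument works like a caps filter: passing a non-NULL filter returns only the intersection with the configured caps, while passing NULL returns the configured caps unchanged (as the property getter above now does). A minimal usage sketch, assuming appsrc is an existing appsrc element and the caps string is only illustrative:

  GstCaps *filter, *caps;

  filter = gst_caps_from_string ("video/x-raw-yuv, format=(fourcc)I420");
  caps = gst_app_src_get_caps (GST_APP_SRC (appsrc), filter);
  gst_caps_unref (filter);
  if (caps) {
    /* inspect or negotiate with the filtered caps */
    gst_caps_unref (caps);
  }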

View file

@ -122,7 +122,7 @@ GType gst_app_src_get_type(void);
GType gst_app_stream_type_get_type (void);
void gst_app_src_set_caps (GstAppSrc *appsrc, const GstCaps *caps);
GstCaps* gst_app_src_get_caps (GstAppSrc *appsrc);
GstCaps* gst_app_src_get_caps (GstAppSrc *appsrc, GstCaps * filter);
void gst_app_src_set_size (GstAppSrc *appsrc, gint64 size);
gint64 gst_app_src_get_size (GstAppSrc *appsrc);

View file

@ -379,11 +379,13 @@ GstDiscovererInfo *
gst_discoverer_info_copy (GstDiscovererInfo * ptr)
{
GstDiscovererInfo *ret;
GHashTable *stream_map = g_hash_table_new (g_direct_hash, NULL);
GHashTable *stream_map;
GList *tmp;
g_return_val_if_fail (ptr != NULL, NULL);
stream_map = g_hash_table_new (g_direct_hash, NULL);
ret = gst_discoverer_info_new ();
ret->uri = g_strdup (ptr->uri);

View file

@ -442,19 +442,22 @@ _event_probe (GstPad * pad, GstPadProbeInfo * info, PrivateStream * ps)
return GST_PAD_PROBE_OK;
}
static GstStaticCaps subtitle_caps = GST_STATIC_CAPS ("text/plain; "
"text/x-pango-markup; subpicture/x-pgs; subpicture/x-dvb; "
"application/x-subtitle-unknown; application/x-ssa; application/x-ass; "
"subtitle/x-kate; application/x-kate; video/x-dvd-subpicture");
static gboolean
is_subtitle_caps (const GstCaps * caps)
{
static GstCaps *subs_caps = NULL;
GstCaps *subs_caps;
gboolean ret;
if (!subs_caps) {
subs_caps = gst_caps_from_string ("text/plain; text/x-pango-markup; "
"subpicture/x-pgs; subpicture/x-dvb; application/x-subtitle-unknown; "
"application/x-ssa; application/x-ass; subtitle/x-kate; "
"application/x-kate; video/x-dvd-subpicture; ");
}
subs_caps = gst_static_caps_get (&subtitle_caps);
ret = gst_caps_can_intersect (caps, subs_caps);
gst_caps_unref (subs_caps);
return gst_caps_can_intersect (caps, subs_caps);
return ret;
}
static void

View file

@ -109,7 +109,7 @@ gboolean
gst_rtcp_buffer_validate_data (guint8 * data, guint len)
{
guint16 header_mask;
guint16 header_len;
guint header_len;
guint8 version;
guint data_len;
gboolean padding;

View file

@ -457,7 +457,7 @@ wrong_family:
static gchar *
do_resolve (const gchar * host)
{
static gchar ip[INET6_ADDRSTRLEN];
gchar ip[INET6_ADDRSTRLEN];
struct addrinfo *aires, hints;
struct addrinfo *ai;
gint aierr;

View file

@ -228,10 +228,9 @@ typedef enum {
/**
* GstRTSPHeaderField:
*
* Enumeration of rtsp header fields.
* Enumeration of rtsp header fields
*/
typedef enum {
/*< protected >*/
GST_RTSP_HDR_INVALID,
/*
@ -342,10 +341,9 @@ typedef enum {
/**
* GstRTSPStatusCode:
*
* Enumeration of rtsp status codes.
* Enumeration of rtsp status codes
*/
typedef enum {
/*< protected >*/
GST_RTSP_STS_INVALID = 0,
GST_RTSP_STS_CONTINUE = 100,
GST_RTSP_STS_OK = 200,

View file

@ -30,8 +30,6 @@
* </refsect2>
*/
/* FIXME 0.11: maybe switch to ISO-639-2 everywhere incl. GST_TAG_LANGUAGE? */
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
@ -311,8 +309,8 @@ gst_tag_get_language_codes (void)
* gst_tag_get_language_name:
* @language_code: two or three-letter ISO-639 language code
*
* Returns the name of the language given an ISO-639 language code, such
* as often found in a GST_TAG_LANGUAGE tag. The name will be translated
* Returns the name of the language given an ISO-639 language code as
* found in a GST_TAG_LANGUAGE_CODE tag. The name will be translated
* according to the current locale (if the library was built against the
* iso-codes package, otherwise the English name will be returned).
*
@ -492,3 +490,25 @@ gst_tag_get_language_code_iso_639_2B (const gchar * lang_code)
return c;
}
/**
* gst_tag_check_language_code:
* @lang_code: ISO-639 language code (e.g. "deu" or "ger" or "de")
*
* Check if a given string contains a known ISO 639 language code.
*
* This is useful in situations where it's not clear whether a given
* string is a language code (which should be put into a #GST_TAG_LANGUAGE_CODE
* tag) or a free-form language name descriptor (which should be put into a
* #GST_TAG_LANGUAGE_NAME tag instead).
*
* Returns: TRUE if the two- or three-letter language code in @lang_code
* is a valid ISO-639 language code.
*
* Since: 0.10.37
*/
gboolean
gst_tag_check_language_code (const gchar * lang_code)
{
return (gst_tag_get_language_code_iso_639_1 (lang_code) != NULL);
}
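
An illustrative use of the new helper, following the scenario from the doc comment above; tags is assumed to be an existing GstTagList and lang_str the string to classify:

  if (gst_tag_check_language_code (lang_str))
    gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
        GST_TAG_LANGUAGE_CODE, lang_str, NULL);
  else
    /* a free-form name such as "Deutsch" rather than an ISO-639 code */
    gst_tag_list_add (tags, GST_TAG_MERGE_REPLACE,
        GST_TAG_LANGUAGE_NAME, lang_str, NULL);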

View file

@ -544,6 +544,8 @@ const gchar * gst_tag_get_language_code_iso_639_2B (const gchar * lang_code);
const gchar * gst_tag_get_language_code_iso_639_2T (const gchar * lang_code);
gboolean gst_tag_check_language_code (const gchar * lang_code);
/**
* gst_tag_get_language_code:
* @lang_code: ISO-639 language code (e.g. "deu" or "ger" or "de")

View file

@ -501,6 +501,7 @@ gst_tag_freeform_string_to_utf8 (const gchar * data, gint size,
/* fallback in case iconv implementation doesn't support windows-1252
* for some reason */
if (err->code == G_CONVERT_ERROR_NO_CONVERSION) {
g_free (utf8);
utf8 = g_convert (data, size, "UTF-8", "ISO-8859-1", &bytes_read,
NULL, NULL);
}

View file

@ -1155,6 +1155,34 @@ matrix_identity (guint8 * tmpline, guint width)
{
}
static void
matrix_prea_rgb_to_yuv (guint8 * tmpline, guint width)
{
int i;
int a, r, g, b;
int y, u, v;
for (i = 0; i < width; i++) {
a = tmpline[i * 4 + 0];
r = tmpline[i * 4 + 1];
g = tmpline[i * 4 + 2];
b = tmpline[i * 4 + 3];
if (a) {
r = (r * 255 + a / 2) / a;
g = (g * 255 + a / 2) / a;
b = (b * 255 + a / 2) / a;
}
y = (47 * r + 157 * g + 16 * b + 4096) >> 8;
u = (-26 * r - 87 * g + 112 * b + 32768) >> 8;
v = (112 * r - 102 * g - 10 * b + 32768) >> 8;
tmpline[i * 4 + 1] = CLAMP (y, 0, 255);
tmpline[i * 4 + 2] = CLAMP (u, 0, 255);
tmpline[i * 4 + 3] = CLAMP (v, 0, 255);
}
}
static void
matrix_rgb_to_yuv (guint8 * tmpline, guint width)
{
@ -1221,10 +1249,15 @@ lookup_getput (GetPutLine * getput, GstVideoFormat fmt)
return FALSE;
}
#define BLEND(ret, alpha, v0, v1) \
{ \
#define BLEND00(ret, alpha, v0, v1) \
G_STMT_START { \
ret = (v0 * alpha + v1 * (255 - alpha)) / 255; \
}
} G_STMT_END
#define BLEND10(ret, alpha, v0, v1) \
G_STMT_START { \
ret = v0 + (v1 * (255 - alpha)) / 255; \
} G_STMT_END
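
BLEND00 is the usual "over" operation for a straight-alpha source, while BLEND10 expects the source component to already be premultiplied by its alpha, so only the destination contribution still needs scaling. A small sanity-check sketch of that relationship, assuming 8-bit components:

static void
check_blend_equivalence (guint v_src, guint v_dst, guint alpha)
{
  guint v_src_pre = (v_src * alpha) / 255;      /* premultiplied source */
  guint out_straight = (v_src * alpha + v_dst * (255 - alpha)) / 255;   /* BLEND00 */
  guint out_premult = v_src_pre + (v_dst * (255 - alpha)) / 255;        /* BLEND10 */

  /* both variants agree up to one unit of integer rounding */
  g_assert (out_straight - out_premult <= 1);
}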
void
video_blend_scale_linear_RGBA (GstBlendVideoFormatInfo * src,
@ -1289,7 +1322,7 @@ video_blend_scale_linear_RGBA (GstBlendVideoFormatInfo * src,
/* Update src, our reference to the old src->pixels is lost */
video_blend_format_info_init (src, dest_pixels, dest_height, dest_width,
src->fmt);
src->fmt, src->premultiplied_alpha);
g_free (tmpbuf);
}
@ -1310,10 +1343,21 @@ video_blend (GstBlendVideoFormatInfo * dest,
guint i, j;
guint8 alpha;
GetPutLine getputdest, getputsrc;
gint src_stride;
guint8 *tmpdestline = NULL, *tmpsrcline = NULL;
gboolean src_premultiplied_alpha;
gint src_stride = src->width * 4;
guint8 *tmpdestline = g_malloc (sizeof (guint8) * (dest->width + 8) * 4);
guint8 *tmpsrcline = g_malloc (sizeof (guint8) * (dest->width + 8) * 4);
g_return_val_if_fail (dest, FALSE);
g_return_val_if_fail (src, FALSE);
/* we do not support writing to premultiplied alpha, though that should
just be a matter of adding blenders below (BLEND01 and BLEND11) */
g_return_val_if_fail (!dest->premultiplied_alpha, FALSE);
src_premultiplied_alpha = src->premultiplied_alpha;
src_stride = src->width * 4;
tmpdestline = g_malloc (sizeof (guint8) * (dest->width + 8) * 4);
tmpsrcline = g_malloc (sizeof (guint8) * (dest->width + 8) * 4);
ensure_debug_category ();
@ -1324,9 +1368,18 @@ video_blend (GstBlendVideoFormatInfo * dest,
if (!lookup_getput (&getputsrc, src->fmt))
goto failed;
if (gst_video_format_is_rgb (src->fmt) != gst_video_format_is_rgb (dest->fmt))
getputsrc.matrix = gst_video_format_is_rgb (src->fmt) ?
matrix_rgb_to_yuv : matrix_yuv_to_rgb;
if (gst_video_format_is_rgb (src->fmt) != gst_video_format_is_rgb (dest->fmt)) {
if (gst_video_format_is_rgb (src->fmt)) {
if (src_premultiplied_alpha) {
getputsrc.matrix = matrix_prea_rgb_to_yuv;
src_premultiplied_alpha = FALSE;
} else {
getputsrc.matrix = matrix_rgb_to_yuv;
}
} else {
getputsrc.matrix = matrix_yuv_to_rgb;
}
}
/* adjust src pointers for negative sizes */
if (x < 0) {
@ -1358,14 +1411,29 @@ video_blend (GstBlendVideoFormatInfo * dest,
/* Here dest and src are both either in AYUV or ARGB
* TODO: Make the orc version working properly*/
for (j = 0; j < src->width * 4; j += 4) {
alpha = tmpsrcline[j];
#define BLENDLOOP(blender) \
do { \
for (j = 0; j < src->width * 4; j += 4) { \
alpha = tmpsrcline[j]; \
\
blender (tmpdestline[j + 1], alpha, tmpsrcline[j + 1], tmpdestline[j + 1]); \
blender (tmpdestline[j + 2], alpha, tmpsrcline[j + 2], tmpdestline[j + 2]); \
blender (tmpdestline[j + 3], alpha, tmpsrcline[j + 3], tmpdestline[j + 3]); \
} \
} while(0)
BLEND (tmpdestline[j + 1], alpha, tmpsrcline[j + 1], tmpdestline[j + 1]);
BLEND (tmpdestline[j + 2], alpha, tmpsrcline[j + 2], tmpdestline[j + 2]);
BLEND (tmpdestline[j + 3], alpha, tmpsrcline[j + 3], tmpdestline[j + 3]);
if (src_premultiplied_alpha && dest->premultiplied_alpha) {
/* BLENDLOOP (BLEND11); */
} else if (!src_premultiplied_alpha && dest->premultiplied_alpha) {
/* BLENDLOOP (BLEND01); */
} else if (src_premultiplied_alpha && !dest->premultiplied_alpha) {
BLENDLOOP (BLEND10);
} else {
BLENDLOOP (BLEND00);
}
#undef BLENDLOOP
/* FIXME
* #if G_BYTE_ORDER == LITTLE_ENDIAN
* orc_blend_little (tmpdestline, tmpsrcline, dest->width);
@ -1403,7 +1471,8 @@ failed:
*/
void
video_blend_format_info_init (GstBlendVideoFormatInfo * info,
guint8 * pixels, guint height, guint width, GstVideoFormat fmt)
guint8 * pixels, guint height, guint width, GstVideoFormat fmt,
gboolean premultiplied_alpha)
{
guint nb_component = gst_video_format_has_alpha (fmt) ? 4 : 3;
@ -1417,6 +1486,7 @@ video_blend_format_info_init (GstBlendVideoFormatInfo * info,
info->height = height;
info->pixels = pixels;
info->fmt = fmt;
info->premultiplied_alpha = premultiplied_alpha;
info->size = gst_video_format_get_size (fmt, height, width);
fill_planes (info);

View file

@ -53,6 +53,8 @@ struct _GstBlendVideoFormatInfo
guint8 * pixels;
gsize size;
gboolean premultiplied_alpha;
/* YUV components: Y=0, U=1, V=2, A=3
* RGB components: R=0, G=1, B=2, A=3 */
gint offset[MAX_VIDEO_PLANES];
@ -61,7 +63,8 @@ struct _GstBlendVideoFormatInfo
void video_blend_format_info_init (GstBlendVideoFormatInfo * info,
guint8 *pixels, guint height,
guint width, GstVideoFormat fmt);
guint width, GstVideoFormat fmt,
gboolean premultiplied_alpha);
void video_blend_scale_linear_RGBA (GstBlendVideoFormatInfo * src,
gint dest_height, gint dest_width);

View file

@ -108,6 +108,9 @@ struct _GstVideoOverlayRectangle
/* The format of the data in pixels */
GstVideoFormat format;
/* The flags associated to this rectangle */
GstVideoOverlayFormatFlags flags;
/* Refcounted blob of memory, no caps or timestamps */
GstBuffer *pixels;
@ -501,7 +504,7 @@ gst_video_overlay_composition_blend (GstVideoOverlayComposition * comp,
}
video_blend_format_info_init (&video_info, GST_BUFFER_DATA (video_buf),
h, w, fmt);
h, w, fmt, FALSE);
num = comp->num_rectangles;
GST_LOG ("Blending composition %p with %u rectangles onto video buffer %p "
@ -518,7 +521,8 @@ gst_video_overlay_composition_blend (GstVideoOverlayComposition * comp,
video_blend_format_info_init (&rectangle_info,
GST_BUFFER_DATA (rect->pixels), rect->height, rect->width,
rect->format);
rect->format,
! !(rect->flags & GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA));
needs_scaling = gst_video_overlay_rectangle_needs_scaling (rect);
if (needs_scaling) {
@ -706,6 +710,22 @@ gst_video_overlay_rectangle_instance_init (GstMiniObject * mini_obj)
#endif
}
static inline gboolean
gst_video_overlay_rectangle_check_flags (GstVideoOverlayFormatFlags flags)
{
/* Check flags only contains flags we know about */
return (flags & ~(GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA)) == 0;
}
static gboolean
gst_video_overlay_rectangle_is_same_alpha_type (GstVideoOverlayFormatFlags
flags1, GstVideoOverlayFormatFlags flags2)
{
return ((flags1 ^ flags2) & GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA)
== 0;
}
/**
* gst_video_overlay_rectangle_new_argb:
* @pixels: (transfer none): a #GstBuffer pointing to the pixel memory
@ -718,7 +738,7 @@ gst_video_overlay_rectangle_instance_init (GstMiniObject * mini_obj)
* overlay rectangle should be rendered to
* @render_width: the render width of this rectangle on the video
* @render_height: the render height of this rectangle on the video
* @flags: flags (currently unused)
* @flags: flags
*
* Creates a new video overlay rectangle with ARGB pixel data. The layout
* of the components in memory is B-G-R-A on little-endian platforms
@ -726,9 +746,9 @@ gst_video_overlay_rectangle_instance_init (GstMiniObject * mini_obj)
* platforms (corresponding to #GST_VIDEO_FORMAT_ARGB). In other words,
* pixels are treated as 32-bit words and the lowest 8 bits then contain
* the blue component value and the highest 8 bits contain the alpha
* component value. The RGB values are non-premultiplied. This is the
* format that is used by most hardware, and also many rendering libraries
* such as Cairo, for example.
* component value. Unless specified in the flags, the RGB values are
* non-premultiplied. This is the format that is used by most hardware,
* and also many rendering libraries such as Cairo, for example.
*
* Returns: (transfer full): a new #GstVideoOverlayRectangle. Unref with
* gst_video_overlay_rectangle_unref() when no longer needed.
@ -748,7 +768,7 @@ gst_video_overlay_rectangle_new_argb (GstBuffer * pixels,
g_return_val_if_fail (stride >= (4 * width), NULL);
g_return_val_if_fail (height > 0 && width > 0, NULL);
g_return_val_if_fail (render_height > 0 && render_width > 0, NULL);
g_return_val_if_fail (flags == 0, NULL);
g_return_val_if_fail (gst_video_overlay_rectangle_check_flags (flags), NULL);
rect = (GstVideoOverlayRectangle *)
gst_mini_object_new (GST_TYPE_VIDEO_OVERLAY_RECTANGLE);
@ -758,6 +778,7 @@ gst_video_overlay_rectangle_new_argb (GstBuffer * pixels,
#else
rect->format = GST_VIDEO_FORMAT_ARGB;
#endif
rect->pixels = gst_buffer_ref (pixels);
rect->width = width;
@ -769,11 +790,13 @@ gst_video_overlay_rectangle_new_argb (GstBuffer * pixels,
rect->render_width = render_width;
rect->render_height = render_height;
rect->flags = flags;
rect->seq_num = gst_video_overlay_get_seqnum ();
GST_LOG ("new rectangle %p: %ux%u => %ux%u @ %u,%u, seq_num %u, format %u, "
"pixels %p", rect, width, height, render_width, render_height, render_x,
render_y, rect->seq_num, rect->format, pixels);
"flags %x, pixels %p", rect, width, height, render_width, render_height,
render_x, render_y, rect->seq_num, rect->format, rect->flags, pixels);
return rect;
}
@ -846,37 +869,61 @@ gst_video_overlay_rectangle_set_render_rectangle (GstVideoOverlayRectangle *
rectangle->render_height = render_height;
}
/**
* gst_video_overlay_rectangle_get_pixels_argb:
* @rectangle: a #GstVideoOverlayRectangle
* @stride: (out) (allow-none): address of guint variable where to store the
* row stride of the ARGB pixel data in the buffer
* @flags: flags (unused)
*
* Returns: (transfer none): a #GstBuffer holding the ARGB pixel data with
* row stride @stride and width and height of the render dimensions as per
* gst_video_overlay_rectangle_get_render_rectangle(). This function does
* not return a reference, the caller should obtain a reference of her own
* with gst_buffer_ref() if needed.
*
* Since: 0.10.36
*/
GstBuffer *
gst_video_overlay_rectangle_get_pixels_argb (GstVideoOverlayRectangle *
rectangle, guint * stride, GstVideoOverlayFormatFlags flags)
static void
gst_video_overlay_rectangle_premultiply (GstBlendVideoFormatInfo * info)
{
int i, j;
for (j = 0; j < info->height; ++j) {
guint8 *line = info->pixels + info->stride[0] * j;
for (i = 0; i < info->width; ++i) {
int a = line[0];
line[1] = line[1] * a / 255;
line[2] = line[2] * a / 255;
line[3] = line[3] * a / 255;
line += 4;
}
}
}
static void
gst_video_overlay_rectangle_unpremultiply (GstBlendVideoFormatInfo * info)
{
int i, j;
for (j = 0; j < info->height; ++j) {
guint8 *line = info->pixels + info->stride[0] * j;
for (i = 0; i < info->width; ++i) {
int a = line[0];
if (a) {
line[1] = MIN ((line[1] * 255 + a / 2) / a, 255);
line[2] = MIN ((line[2] * 255 + a / 2) / a, 255);
line[3] = MIN ((line[3] * 255 + a / 2) / a, 255);
}
line += 4;
}
}
}
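
The two helpers above are not exact inverses because of integer rounding. As a worked example with alpha 128: a component value of 200 premultiplies to 200 * 128 / 255 = 100, and unpremultiplying gives (100 * 255 + 64) / 128 = 199, one step short of the original; the MIN() clamp only guards against results exceeding 255 after rounding up.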
static GstBuffer *
gst_video_overlay_rectangle_get_pixels_argb_internal (GstVideoOverlayRectangle *
rectangle, guint * stride, GstVideoOverlayFormatFlags flags,
gboolean unscaled)
{
GstVideoOverlayRectangle *scaled_rect = NULL;
GstBlendVideoFormatInfo info;
GstBuffer *buf;
GList *l;
guint wanted_width = unscaled ? rectangle->width : rectangle->render_width;
guint wanted_height = unscaled ? rectangle->height : rectangle->render_height;
g_return_val_if_fail (GST_IS_VIDEO_OVERLAY_RECTANGLE (rectangle), NULL);
g_return_val_if_fail (flags == 0, NULL);
g_return_val_if_fail (stride != NULL, NULL);
g_return_val_if_fail (gst_video_overlay_rectangle_check_flags (flags), NULL);
/* This assumes we don't need to adjust the format */
if (rectangle->render_width == rectangle->width &&
rectangle->render_height == rectangle->height) {
if (wanted_width == rectangle->width &&
wanted_height == rectangle->height &&
gst_video_overlay_rectangle_is_same_alpha_type (rectangle->flags,
flags)) {
*stride = rectangle->stride;
return rectangle->pixels;
}
@ -886,8 +933,10 @@ gst_video_overlay_rectangle_get_pixels_argb (GstVideoOverlayRectangle *
for (l = rectangle->scaled_rectangles; l != NULL; l = l->next) {
GstVideoOverlayRectangle *r = l->data;
if (r->width == rectangle->render_width &&
r->height == rectangle->render_height) {
if (r->width == wanted_width &&
r->height == wanted_height &&
gst_video_overlay_rectangle_is_same_alpha_type (rectangle->flags,
flags)) {
/* we'll keep these rectangles around until finalize, so it's ok not
* to take our own ref here */
scaled_rect = r;
@ -901,10 +950,21 @@ gst_video_overlay_rectangle_get_pixels_argb (GstVideoOverlayRectangle *
/* not cached yet, do the scaling and put the result into our cache */
video_blend_format_info_init (&info, GST_BUFFER_DATA (rectangle->pixels),
rectangle->height, rectangle->width, rectangle->format);
rectangle->height, rectangle->width, rectangle->format,
! !(rectangle->flags &
GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA));
video_blend_scale_linear_RGBA (&info, rectangle->render_height,
rectangle->render_width);
if (wanted_width != rectangle->width || wanted_height != rectangle->height) {
video_blend_scale_linear_RGBA (&info, wanted_height, wanted_width);
}
if (!gst_video_overlay_rectangle_is_same_alpha_type (rectangle->flags, flags)) {
if (rectangle->flags & GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA) {
gst_video_overlay_rectangle_unpremultiply (&info);
} else {
gst_video_overlay_rectangle_premultiply (&info);
}
}
buf = gst_buffer_new ();
GST_BUFFER_DATA (buf) = info.pixels;
@ -912,8 +972,8 @@ gst_video_overlay_rectangle_get_pixels_argb (GstVideoOverlayRectangle *
GST_BUFFER_SIZE (buf) = info.size;
scaled_rect = gst_video_overlay_rectangle_new_argb (buf,
rectangle->render_width, rectangle->render_height, info.stride[0],
0, 0, rectangle->render_width, rectangle->render_height, 0);
wanted_width, wanted_height, info.stride[0],
0, 0, wanted_width, wanted_height, rectangle->flags);
gst_buffer_unref (buf);
@ -928,6 +988,29 @@ done:
return scaled_rect->pixels;
}
/**
* gst_video_overlay_rectangle_get_pixels_argb:
* @rectangle: a #GstVideoOverlayRectangle
* @stride: (out) (allow-none): address of guint variable where to store the
* row stride of the ARGB pixel data in the buffer
* @flags: flags
*
* Returns: (transfer none): a #GstBuffer holding the ARGB pixel data with
* row stride @stride and width and height of the render dimensions as per
* gst_video_overlay_rectangle_get_render_rectangle(). This function does
* not return a reference, the caller should obtain a reference of her own
* with gst_buffer_ref() if needed.
*
* Since: 0.10.36
*/
GstBuffer *
gst_video_overlay_rectangle_get_pixels_argb (GstVideoOverlayRectangle *
rectangle, guint * stride, GstVideoOverlayFormatFlags flags)
{
return gst_video_overlay_rectangle_get_pixels_argb_internal (rectangle,
stride, flags, FALSE);
}
/**
* gst_video_overlay_rectangle_get_pixels_unscaled_argb:
* @rectangle: a #GstVideoOverlayRectangle
@ -937,7 +1020,7 @@ done:
* rectangle in pixels
* @stride: (out): address of guint variable where to store the row
* stride of the ARGB pixel data in the buffer
* @flags: flags for future use (unused)
* @flags: flags
*
* Retrieves the pixel data as it is. This is useful if the caller can
* do the scaling itself when handling the overlaying. The rectangle will
@ -959,13 +1042,34 @@ gst_video_overlay_rectangle_get_pixels_unscaled_argb (GstVideoOverlayRectangle *
g_return_val_if_fail (width != NULL, NULL);
g_return_val_if_fail (height != NULL, NULL);
g_return_val_if_fail (stride != NULL, NULL);
g_return_val_if_fail (flags == 0, NULL);
*width = rectangle->width;
*height = rectangle->height;
*stride = rectangle->stride;
return gst_video_overlay_rectangle_get_pixels_argb_internal (rectangle,
stride, flags, TRUE);
}
return rectangle->pixels;
/**
* gst_video_overlay_rectangle_get_flags:
* @rectangle: a #GstVideoOverlayRectangle
*
* Retrieves the flags associated with a #GstVideoOverlayRectangle.
* This is useful if the caller can handle both premultiplied alpha and
* non premultiplied alpha, for example. By knowing whether the rectangle
* uses premultiplied alpha or not, it can request the pixel data in the format
* it is stored in, to avoid unnecessary conversion.
*
* Returns: the #GstVideoOverlayFormatFlags associated with the rectangle.
*
* Since: 0.10.37
*/
GstVideoOverlayFormatFlags
gst_video_overlay_rectangle_get_flags (GstVideoOverlayRectangle * rectangle)
{
g_return_val_if_fail (GST_IS_VIDEO_OVERLAY_RECTANGLE (rectangle),
GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE);
return rectangle->flags;
}
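
As the doc comment suggests, a renderer that can handle both alpha types would query the flags first and then request the pixel data in the rectangle's native format, so no conversion pass is triggered. A minimal sketch, with rect assumed to be an existing GstVideoOverlayRectangle:

  GstVideoOverlayFormatFlags flags;
  GstBuffer *pixels;
  guint stride;

  flags = gst_video_overlay_rectangle_get_flags (rect);
  /* asking for the stored alpha type avoids the (un)premultiply step */
  pixels = gst_video_overlay_rectangle_get_pixels_argb (rect, &stride, flags);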
/**
@ -992,7 +1096,7 @@ gst_video_overlay_rectangle_copy (GstVideoOverlayRectangle * rectangle)
copy = gst_video_overlay_rectangle_new_argb (rectangle->pixels,
rectangle->width, rectangle->height, rectangle->stride,
rectangle->x, rectangle->y,
rectangle->render_width, rectangle->render_height, 0);
rectangle->render_width, rectangle->render_height, rectangle->flags);
return copy;
}

View file

@ -95,13 +95,15 @@ gst_video_overlay_rectangle_unref (GstVideoOverlayRectangle * comp)
/**
* GstVideoOverlayFormatFlags:
* @GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE: no flags
* @GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA: RGB are premultiplied by A/255. Since: 0.10.37
*
* Overlay format flags.
*
* Since: 0.10.36
*/
typedef enum {
GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE = 0
GST_VIDEO_OVERLAY_FORMAT_FLAG_NONE = 0,
GST_VIDEO_OVERLAY_FORMAT_FLAG_PREMULTIPLIED_ALPHA = 1
} GstVideoOverlayFormatFlags;
GType gst_video_overlay_rectangle_get_type (void);
@ -138,6 +140,8 @@ GstBuffer * gst_video_overlay_rectangle_get_pixels_unscaled_arg
guint * stride,
GstVideoOverlayFormatFlags flags);
GstVideoOverlayFormatFlags gst_video_overlay_rectangle_get_flags (GstVideoOverlayRectangle * rectangle);
/**
* GstVideoOverlayComposition:
*

View file

@ -145,8 +145,11 @@
* #ifdef GDK_WINDOWING_X11
* #include &lt;gdk/gdkx.h&gt; // for GDK_WINDOW_XID
* #endif
* #ifdef GDK_WINDOWING_WIN32
* #include &lt;gdk/gdkwin32.h&gt; // for GDK_WINDOW_HWND
* #endif
* ...
* static gulong video_window_xid = 0;
* static guintptr video_window_handle = 0;
* ...
* static GstBusSyncReply
* bus_sync_handler (GstBus * bus, GstMessage * message, gpointer user_data)
@ -155,14 +158,14 @@
* if (!gst_is_video_overlay_prepare_window_handle_message (message))
* return GST_BUS_PASS;
*
* if (video_window_xid != 0) {
* GstVideoOverlay *xoverlay;
* if (video_window_handle != 0) {
* GstXOverlay *xoverlay;
*
* // GST_MESSAGE_SRC (message) will be the video sink element
* xoverlay = GST_VIDEO_OVERLAY (GST_MESSAGE_SRC (message));
* gst_video_overlay_set_window_handle (xoverlay, video_window_xid);
* xoverlay = GST_X_OVERLAY (GST_MESSAGE_SRC (message));
* gst_x_overlay_set_window_handle (xoverlay, video_window_handle);
* } else {
* g_warning ("Should have obtained video_window_xid by now!");
* g_warning ("Should have obtained video_window_handle by now!");
* }
*
* gst_message_unref (message);
@ -173,6 +176,8 @@
* video_widget_realize_cb (GtkWidget * widget, gpointer data)
* {
* #if GTK_CHECK_VERSION(2,18,0)
* // Tell Gtk+/Gdk to create a native window for this widget instead of
* // drawing onto the parent widget.
* // This is here just for pedagogical purposes, GDK_WINDOW_XID will call
* // it as well in newer Gtk versions
* if (!gdk_window_ensure_native (widget->window))
@ -180,7 +185,16 @@
* #endif
*
* #ifdef GDK_WINDOWING_X11
* video_window_xid = GDK_WINDOW_XID (gtk_widget_get_window (video_window));
* {
* gulong xid = GDK_WINDOW_XID (gtk_widget_get_window (video_window));
* video_window_handle = xid;
* }
* #endif
* #ifdef GDK_WINDOWING_WIN32
* {
* HWND wnd = GDK_WINDOW_HWND (gtk_widget_get_window (video_window));
* video_window_handle = (guintptr) wnd;
* }
* #endif
* }
* ...
@ -206,12 +220,12 @@
* gtk_widget_show_all (app_window);
*
* // realize window now so that the video window gets created and we can
* // obtain its XID before the pipeline is started up and the videosink
* // asks for the XID of the window to render onto
* // obtain its XID/HWND before the pipeline is started up and the videosink
* // asks for the XID/HWND of the window to render onto
* gtk_widget_realize (video_window);
*
* // we should have the XID now
* g_assert (video_window_xid != 0);
* // we should have the XID/HWND now
* g_assert (video_window_handle != 0);
* ...
* // set up sync handler for setting the xid once the pipeline is started
* bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));

View file

@ -1128,6 +1128,8 @@ gst_audio_test_src_fill (GstBaseSrc * basesrc, guint64 offset,
if (eclass->send_event)
eclass->send_event (GST_ELEMENT_CAST (basesrc),
gst_event_new_tag (taglist));
else
gst_tag_list_free (taglist);
src->tags_pushed = TRUE;
}

View file

@ -658,9 +658,10 @@ gst_smart_encoder_find_elements (GstSmartEncoder * smart_encoder)
gst_caps_unref (tmpl);
if (gst_caps_is_empty (res))
if (gst_caps_is_empty (res)) {
gst_caps_unref (res);
ret = GST_STATE_CHANGE_FAILURE;
else
} else
smart_encoder->available_caps = res;
GST_DEBUG_OBJECT (smart_encoder, "Done, available_caps:%" GST_PTR_FORMAT,

View file

@ -56,6 +56,7 @@ GType gst_autoplug_select_result_get_type (void);
* @GST_PLAY_FLAG_DOWNLOAD: enable progressive download buffering for selected
* formats.
* @GST_PLAY_FLAG_BUFFERING: enable buffering of the demuxed or parsed data.
* @GST_PLAY_FLAG_DEINTERLACE: deinterlace raw video (if native not forced).
*
* Extra flags to configure the behaviour of the sinks.
*/

View file

@ -95,11 +95,16 @@ gst_stream_get_other_pad (GstStream * stream, GstPad * pad)
static GstPad *
gst_stream_get_other_pad_from_pad (GstPad * pad)
{
GstStreamSynchronizer *self =
GST_STREAM_SYNCHRONIZER (gst_pad_get_parent (pad));
GstObject *parent = gst_pad_get_parent (pad);
GstStreamSynchronizer *self;
GstStream *stream;
GstPad *opad = NULL;
/* released pad does not have parent anymore */
if (!G_LIKELY (parent))
goto exit;
self = GST_STREAM_SYNCHRONIZER (parent);
GST_STREAM_SYNCHRONIZER_LOCK (self);
stream = gst_pad_get_element_private (pad);
if (!stream)
@ -111,6 +116,7 @@ out:
GST_STREAM_SYNCHRONIZER_UNLOCK (self);
gst_object_unref (self);
exit:
if (!opad)
GST_WARNING_OBJECT (pad, "Trying to get other pad after releasing");
@ -501,6 +507,8 @@ skip_adjustments:
}
done:
gst_object_unref (self);
return ret;
}

View file

@ -17,7 +17,7 @@ libgstvideotestsrc_la_LIBTOOLFLAGS = --tag=disable-static
noinst_HEADERS = gstvideotestsrc.h videotestsrc.h
noinst_PROGRAMS = generate_sine_table
EXTRA_PROGRAMS = generate_sine_table
generate_sine_table_SOURCES = generate_sine_table.c
generate_sine_table_CFLAGS = $(GST_CFLAGS)

View file

@ -653,7 +653,7 @@ gst_video_test_src_smpte (GstVideoTestSrc * v, GstVideoFrame * frame)
format->paint_setup (p, frame);
y1 = 2 * h / 3;
y2 = h * 0.75;
y2 = 3 * h / 4;
/* color bars */
for (j = 0; j < y1; j++) {

View file

@ -156,6 +156,7 @@ check_PROGRAMS = \
# videoconvert takes too long, so disabled for now
VALGRIND_TO_FIX = \
elements/videoconvert \
libs/discoverer \
libs/video
# these tests don't even pass

View file

@ -52,7 +52,8 @@ GST_START_TEST (test_disco_sync)
GstDiscovererResult result;
gchar *uri;
dc = gst_discoverer_new (GST_SECOND, &err);
/* high timeout, in case we're running under valgrind */
dc = gst_discoverer_new (5 * GST_SECOND, &err);
fail_unless (dc != NULL);
fail_unless (err == NULL);
@ -78,6 +79,68 @@ GST_START_TEST (test_disco_sync)
GST_END_TEST;
static void
test_disco_sync_reuse (const gchar * test_fn, guint num, GstClockTime timeout)
{
GError *err = NULL;
GstDiscoverer *dc;
GstDiscovererInfo *info;
GstDiscovererResult result;
gchar *uri, *path;
int i;
dc = gst_discoverer_new (timeout, &err);
fail_unless (dc != NULL);
fail_unless (err == NULL);
/* GST_TEST_FILE comes from makefile CFLAGS */
path = g_build_filename (GST_TEST_FILES_PATH, test_fn, NULL);
uri = gst_filename_to_uri (path, &err);
g_free (path);
fail_unless (err == NULL);
for (i = 0; i < num; ++i) {
GST_INFO ("[%02d] discovering uri '%s'", i, uri);
info = gst_discoverer_discover_uri (dc, uri, &err);
if (info) {
result = gst_discoverer_info_get_result (info);
GST_INFO ("result: %d", result);
gst_discoverer_info_unref (info);
}
/* in case we don't have some of the elements needed */
if (err) {
g_error_free (err);
err = NULL;
}
}
g_free (uri);
g_object_unref (dc);
}
GST_START_TEST (test_disco_sync_reuse_ogg)
{
test_disco_sync_reuse ("theora-vorbis.ogg", 2, 10 * GST_SECOND);
}
GST_END_TEST;
GST_START_TEST (test_disco_sync_reuse_mp3)
{
/* this will cause errors because -base doesn't do mp3 parsing or decoding */
test_disco_sync_reuse ("test.mp3", 3, 10 * GST_SECOND);
}
GST_END_TEST;
GST_START_TEST (test_disco_sync_reuse_timeout)
{
/* set minimum timeout to test that, esp. leakage under valgrind */
/* FIXME: should really be even shorter */
test_disco_sync_reuse ("theora-vorbis.ogg", 2, GST_SECOND);
}
GST_END_TEST;
static Suite *
discoverer_suite (void)
@ -88,6 +151,9 @@ discoverer_suite (void)
suite_add_tcase (s, tc_chain);
tcase_add_test (tc_chain, test_disco_init);
tcase_add_test (tc_chain, test_disco_sync);
tcase_add_test (tc_chain, test_disco_sync_reuse_ogg);
tcase_add_test (tc_chain, test_disco_sync_reuse_mp3);
tcase_add_test (tc_chain, test_disco_sync_reuse_timeout);
return s;
}

View file

@ -759,6 +759,16 @@ GST_START_TEST (test_language_utils)
ASSERT_STRINGS_EQUAL (gst_tag_get_language_code_iso_639_2B ("de"), "ger");
ASSERT_STRINGS_EQUAL (gst_tag_get_language_code_iso_639_2B ("deu"), "ger");
ASSERT_STRINGS_EQUAL (gst_tag_get_language_code_iso_639_2B ("ger"), "ger");
fail_unless (gst_tag_check_language_code ("de"));
fail_unless (gst_tag_check_language_code ("deu"));
fail_unless (gst_tag_check_language_code ("ger"));
fail_if (gst_tag_check_language_code ("xxx"));
fail_if (gst_tag_check_language_code ("und"));
fail_if (gst_tag_check_language_code ("un"));
fail_if (gst_tag_check_language_code (""));
fail_if (gst_tag_check_language_code ("\377"));
fail_if (gst_tag_check_language_code ("deutsch"));
}
GST_END_TEST;

View file

@ -3,4 +3,5 @@ EXTRA_DIST = \
hls.m3u8 \
license-uris \
partialframe.mjpeg \
test.mp3
test.mp3 \
theora-vorbis.ogg

Binary file not shown.

View file

@ -37,8 +37,19 @@ typedef struct
char **argv;
} PrivStruct;
#define my_g_string_append_printf(str, format, ...) \
g_string_append_printf (str, "%*s" format, 2*depth, " ", ##__VA_ARGS__)
static void
my_g_string_append_printf (GString * str, int depth, const gchar * format, ...)
{
va_list args;
while (depth-- > 0) {
g_string_append (str, " ");
}
va_start (args, format);
g_string_append_vprintf (str, format, args);
va_end (args);
}
static gchar *
gst_stream_audio_information_to_string (GstDiscovererStreamInfo * info,
@ -56,48 +67,49 @@ gst_stream_audio_information_to_string (GstDiscovererStreamInfo * info,
s = g_string_sized_new (len);
my_g_string_append_printf (s, "Codec:\n");
my_g_string_append_printf (s, depth, "Codec:\n");
caps = gst_discoverer_stream_info_get_caps (info);
tmp = gst_caps_to_string (caps);
gst_caps_unref (caps);
my_g_string_append_printf (s, " %s\n", tmp);
my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
my_g_string_append_printf (s, "Additional info:\n");
my_g_string_append_printf (s, depth, "Additional info:\n");
if (gst_discoverer_stream_info_get_misc (info)) {
tmp = gst_structure_to_string (gst_discoverer_stream_info_get_misc (info));
my_g_string_append_printf (s, " %s\n", tmp);
my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
} else {
my_g_string_append_printf (s, " None\n");
my_g_string_append_printf (s, depth, " None\n");
}
audio_info = (GstDiscovererAudioInfo *) info;
ctmp = gst_discoverer_audio_info_get_language (audio_info);
my_g_string_append_printf (s, "Language: %s\n", ctmp ? ctmp : "<unknown>");
my_g_string_append_printf (s, "Channels: %u\n",
my_g_string_append_printf (s, depth, "Language: %s\n",
ctmp ? ctmp : "<unknown>");
my_g_string_append_printf (s, depth, "Channels: %u\n",
gst_discoverer_audio_info_get_channels (audio_info));
my_g_string_append_printf (s, "Sample rate: %u\n",
my_g_string_append_printf (s, depth, "Sample rate: %u\n",
gst_discoverer_audio_info_get_sample_rate (audio_info));
my_g_string_append_printf (s, "Depth: %u\n",
my_g_string_append_printf (s, depth, "Depth: %u\n",
gst_discoverer_audio_info_get_depth (audio_info));
my_g_string_append_printf (s, "Bitrate: %u\n",
my_g_string_append_printf (s, depth, "Bitrate: %u\n",
gst_discoverer_audio_info_get_bitrate (audio_info));
my_g_string_append_printf (s, "Max bitrate: %u\n",
my_g_string_append_printf (s, depth, "Max bitrate: %u\n",
gst_discoverer_audio_info_get_max_bitrate (audio_info));
my_g_string_append_printf (s, "Tags:\n");
my_g_string_append_printf (s, depth, "Tags:\n");
tags = gst_discoverer_stream_info_get_tags (info);
if (tags) {
tmp = gst_structure_to_string ((GstStructure *) tags);
my_g_string_append_printf (s, " %s\n", tmp);
my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
} else {
my_g_string_append_printf (s, " None\n");
my_g_string_append_printf (s, depth, " None\n");
}
if (verbose)
my_g_string_append_printf (s, "\n");
my_g_string_append_printf (s, depth, "\n");
return g_string_free (s, FALSE);
}
@ -118,58 +130,58 @@ gst_stream_video_information_to_string (GstDiscovererStreamInfo * info,
s = g_string_sized_new (len);
my_g_string_append_printf (s, "Codec:\n");
my_g_string_append_printf (s, depth, "Codec:\n");
caps = gst_discoverer_stream_info_get_caps (info);
tmp = gst_caps_to_string (caps);
gst_caps_unref (caps);
my_g_string_append_printf (s, " %s\n", tmp);
my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
my_g_string_append_printf (s, "Additional info:\n");
my_g_string_append_printf (s, depth, "Additional info:\n");
misc = gst_discoverer_stream_info_get_misc (info);
if (misc) {
tmp = gst_structure_to_string (misc);
my_g_string_append_printf (s, " %s\n", tmp);
my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
} else {
my_g_string_append_printf (s, " None\n");
my_g_string_append_printf (s, depth, " None\n");
}
video_info = (GstDiscovererVideoInfo *) info;
my_g_string_append_printf (s, "Width: %u\n",
my_g_string_append_printf (s, depth, "Width: %u\n",
gst_discoverer_video_info_get_width (video_info));
my_g_string_append_printf (s, "Height: %u\n",
my_g_string_append_printf (s, depth, "Height: %u\n",
gst_discoverer_video_info_get_height (video_info));
my_g_string_append_printf (s, "Depth: %u\n",
my_g_string_append_printf (s, depth, "Depth: %u\n",
gst_discoverer_video_info_get_depth (video_info));
my_g_string_append_printf (s, "Frame rate: %u/%u\n",
my_g_string_append_printf (s, depth, "Frame rate: %u/%u\n",
gst_discoverer_video_info_get_framerate_num (video_info),
gst_discoverer_video_info_get_framerate_denom (video_info));
my_g_string_append_printf (s, "Pixel aspect ratio: %u/%u\n",
my_g_string_append_printf (s, depth, "Pixel aspect ratio: %u/%u\n",
gst_discoverer_video_info_get_par_num (video_info),
gst_discoverer_video_info_get_par_denom (video_info));
my_g_string_append_printf (s, "Interlaced: %s\n",
my_g_string_append_printf (s, depth, "Interlaced: %s\n",
gst_discoverer_video_info_is_interlaced (video_info) ? "true" : "false");
my_g_string_append_printf (s, "Bitrate: %u\n",
my_g_string_append_printf (s, depth, "Bitrate: %u\n",
gst_discoverer_video_info_get_bitrate (video_info));
my_g_string_append_printf (s, "Max bitrate: %u\n",
my_g_string_append_printf (s, depth, "Max bitrate: %u\n",
gst_discoverer_video_info_get_max_bitrate (video_info));
my_g_string_append_printf (s, "Tags:\n");
my_g_string_append_printf (s, depth, "Tags:\n");
tags = gst_discoverer_stream_info_get_tags (info);
if (tags) {
tmp = gst_structure_to_string ((GstStructure *) tags);
my_g_string_append_printf (s, " %s\n", tmp);
my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
} else {
my_g_string_append_printf (s, " None\n");
my_g_string_append_printf (s, depth, " None\n");
}
if (verbose)
my_g_string_append_printf (s, "\n");
my_g_string_append_printf (s, depth, "\n");
return g_string_free (s, FALSE);
}
@ -190,37 +202,38 @@ gst_stream_subtitle_information_to_string (GstDiscovererStreamInfo * info,
s = g_string_sized_new (len);
my_g_string_append_printf (s, "Codec:\n");
my_g_string_append_printf (s, depth, "Codec:\n");
caps = gst_discoverer_stream_info_get_caps (info);
tmp = gst_caps_to_string (caps);
gst_caps_unref (caps);
my_g_string_append_printf (s, " %s\n", tmp);
my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
my_g_string_append_printf (s, "Additional info:\n");
my_g_string_append_printf (s, depth, "Additional info:\n");
if (gst_discoverer_stream_info_get_misc (info)) {
tmp = gst_structure_to_string (gst_discoverer_stream_info_get_misc (info));
my_g_string_append_printf (s, " %s\n", tmp);
my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
} else {
my_g_string_append_printf (s, " None\n");
my_g_string_append_printf (s, depth, " None\n");
}
subtitle_info = (GstDiscovererSubtitleInfo *) info;
ctmp = gst_discoverer_subtitle_info_get_language (subtitle_info);
my_g_string_append_printf (s, "Language: %s\n", ctmp ? ctmp : "<unknown>");
my_g_string_append_printf (s, depth, "Language: %s\n",
ctmp ? ctmp : "<unknown>");
my_g_string_append_printf (s, "Tags:\n");
my_g_string_append_printf (s, depth, "Tags:\n");
tags = gst_discoverer_stream_info_get_tags (info);
if (tags) {
tmp = gst_structure_to_string ((GstStructure *) tags);
my_g_string_append_printf (s, " %s\n", tmp);
my_g_string_append_printf (s, depth, " %s\n", tmp);
g_free (tmp);
} else {
my_g_string_append_printf (s, " None\n");
my_g_string_append_printf (s, depth, " None\n");
}
if (verbose)
my_g_string_append_printf (s, "\n");
my_g_string_append_printf (s, depth, "\n");
return g_string_free (s, FALSE);
}
@ -445,6 +458,8 @@ process_file (GstDiscoverer * dc, const gchar * filename)
g_print ("Analyzing %s\n", uri);
info = gst_discoverer_discover_uri (dc, uri, &err);
print_info (info, err);
if (err)
g_error_free (err);
gst_discoverer_info_unref (info);
if (st)
gst_structure_free (st);

View file

@ -1,4 +1,5 @@
EXPORTS
gst_tag_check_language_code
gst_tag_demux_get_type
gst_tag_demux_result_get_type
gst_tag_freeform_string_to_utf8