Mirror of https://gitlab.freedesktop.org/gstreamer/gstreamer.git
Merge branch '0.11' of ssh://git.freedesktop.org/git/gstreamer/gst-plugins-base into 0.11
Commit 84a02f9f13
4 changed files with 122 additions and 59 deletions
gsttheoradec.c:

@@ -50,7 +50,6 @@
 #define GST_CAT_DEFAULT theoradec_debug
 GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);

-#define THEORA_DEF_CROP TRUE
 #define THEORA_DEF_TELEMETRY_MV 0
 #define THEORA_DEF_TELEMETRY_MBMODE 0
 #define THEORA_DEF_TELEMETRY_QI 0
@@ -59,7 +58,6 @@ GST_DEBUG_CATEGORY_STATIC (GST_CAT_DEFAULT);
 enum
 {
   PROP_0,
-  PROP_CROP,
   PROP_TELEMETRY_MV,
   PROP_TELEMETRY_MBMODE,
   PROP_TELEMETRY_QI,
@@ -128,11 +126,6 @@ gst_theora_dec_class_init (GstTheoraDecClass * klass)
   gobject_class->set_property = theora_dec_set_property;
   gobject_class->get_property = theora_dec_get_property;

-  g_object_class_install_property (gobject_class, PROP_CROP,
-      g_param_spec_boolean ("crop", "Crop",
-          "Crop the image to the visible region", THEORA_DEF_CROP,
-          (GParamFlags) G_PARAM_READWRITE | G_PARAM_STATIC_STRINGS));
-
   if (gst_theora_dec_ctl_is_supported (TH_DECCTL_SET_TELEMETRY_MV)) {
     g_object_class_install_property (gobject_class, PROP_TELEMETRY_MV,
         g_param_spec_int ("visualize-motion-vectors",
@@ -208,7 +201,6 @@ gst_theora_dec_init (GstTheoraDec * dec)

   gst_element_add_pad (GST_ELEMENT (dec), dec->srcpad);

-  dec->crop = THEORA_DEF_CROP;
   dec->telemetry_mv = THEORA_DEF_TELEMETRY_MV;
   dec->telemetry_mbmode = THEORA_DEF_TELEMETRY_MBMODE;
   dec->telemetry_qi = THEORA_DEF_TELEMETRY_QI;
@@ -776,12 +768,16 @@ theora_handle_comment_packet (GstTheoraDec * dec, ogg_packet * packet)
 }

 static GstFlowReturn
-theora_negotiate_pool (GstTheoraDec * dec, GstCaps * caps, GstVideoInfo * info)
+theora_negotiate_pool (GstTheoraDec * dec)
 {
   GstQuery *query;
-  GstBufferPool *pool = NULL;
+  GstBufferPool *pool;
   guint size, min, max, prefix, alignment;
   GstStructure *config;
+  GstCaps *caps;
+
+  /* find the caps of the output buffer */
+  caps = gst_pad_get_current_caps (dec->srcpad);

   /* find a pool for the negotiated caps now */
   query = gst_query_new_allocation (caps, TRUE);
@@ -791,35 +787,45 @@ theora_negotiate_pool (GstTheoraDec * dec, GstCaps * caps, GstVideoInfo * info)
     /* we got configuration from our peer, parse them */
     gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
         &alignment, &pool);
-    size = MAX (size, info->size);
   } else {
     GST_DEBUG_OBJECT (dec, "didn't get downstream ALLOCATION hints");
-    size = info->size;
+    size = 0;
     min = max = 0;
     prefix = 0;
     alignment = 0;
+    pool = NULL;
   }

   if (pool == NULL) {
     /* we did not get a pool, make one ourselves then */
-    pool = gst_buffer_pool_new ();
+    pool = gst_video_buffer_pool_new ();
   }

   if (dec->pool)
     gst_object_unref (dec->pool);
   dec->pool = pool;

+  /* check if downstream supports cropping */
+  dec->has_cropping =
+      gst_query_has_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE);
+
+  if (dec->has_cropping) {
+    /* we can crop, configure the pool with buffers of caps and size of the
+     * decoded picture size and then crop them with metadata */
+    size = MAX (size, GST_VIDEO_INFO_SIZE (&dec->vinfo));
+    gst_caps_unref (caps);
+    caps = gst_video_info_to_caps (&dec->vinfo);
+  }
+
   config = gst_buffer_pool_get_config (pool);
   gst_buffer_pool_config_set (config, caps, size, min, max, prefix, alignment);
+  gst_caps_unref (caps);
+
   /* just set the option, if the pool can support it we will transparently use
    * it through the video info API. We could also see if the pool support this
    * option and only activate it then. */
   gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);

-  /* check if downstream supports cropping */
-  dec->has_cropping =
-      gst_query_has_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE);
-
   GST_DEBUG_OBJECT (dec, "downstream cropping %d", dec->has_cropping);

   gst_buffer_pool_set_config (pool, config);
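Note on the has_cropping check above: it only becomes TRUE when the downstream peer declares GstVideoCropMeta support while answering the ALLOCATION query. A minimal sketch of that downstream side, using only query calls that appear elsewhere in this commit; the element and function names are hypothetical, and the signature follows the GstBaseTransform::propose_allocation vfunc as changed in gstvideofilter.c further down:

/* Hypothetical downstream element answering theoradec's ALLOCATION query. */
static gboolean
my_filter_propose_allocation (GstBaseTransform * trans,
    GstQuery * decide_query, GstQuery * query)
{
  /* advertise GstVideoCropMeta support; this is what makes the
   * gst_query_has_allocation_meta() check above return TRUE, so theoradec
   * can output full coded-size buffers and only mark the visible region */
  gst_query_add_allocation_meta (query, GST_VIDEO_CROP_META_API_TYPE);

  return TRUE;
}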
@@ -836,10 +842,10 @@ theora_handle_type_packet (GstTheoraDec * dec, ogg_packet * packet)
 {
   GstCaps *caps;
   GstVideoFormat format;
-  gint width, height;
   gint par_num, par_den;
   GstFlowReturn ret = GST_FLOW_OK;
   GList *walk;
+  GstVideoInfo info;

   GST_DEBUG_OBJECT (dec, "fps %d/%d, PAR %d/%d",
       dec->info.fps_numerator, dec->info.fps_denominator,
@@ -891,14 +897,6 @@ theora_handle_type_packet (GstTheoraDec * dec, ogg_packet * packet)
     goto invalid_format;
   }

-  if (dec->crop) {
-    width = dec->info.pic_width;
-    height = dec->info.pic_height;
-  } else {
-    /* no cropping, use the encoded dimensions */
-    width = dec->info.frame_width;
-    height = dec->info.frame_height;
-  }
   if (dec->info.pic_width != dec->info.frame_width ||
       dec->info.pic_height != dec->info.frame_height ||
       dec->info.pic_x != 0 || dec->info.pic_y != 0) {
@@ -929,7 +927,9 @@ theora_handle_type_packet (GstTheoraDec * dec, ogg_packet * packet)
     GST_WARNING_OBJECT (dec, "Could not enable BITS mode visualisation");
   }

-  gst_video_info_set_format (&dec->vinfo, format, width, height);
+  /* our info contains the dimensions for the coded picture before cropping */
+  gst_video_info_set_format (&dec->vinfo, format, dec->info.frame_width,
+      dec->info.frame_height);
   dec->vinfo.fps_n = dec->info.fps_numerator;
   dec->vinfo.fps_d = dec->info.fps_denominator;
   dec->vinfo.par_n = par_num;
@@ -953,13 +953,16 @@ theora_handle_type_packet (GstTheoraDec * dec, ogg_packet * packet)
       break;
   }

-  caps = gst_video_info_to_caps (&dec->vinfo);
+  /* for the output caps we always take the cropped dimensions */
+  info = dec->vinfo;
+  gst_video_info_set_format (&info, format, dec->info.pic_width,
+      dec->info.pic_height);
+  caps = gst_video_info_to_caps (&info);
   gst_pad_set_caps (dec->srcpad, caps);
   gst_caps_unref (caps);

-  /* negotiate a bufferpool */
-  if ((ret = theora_negotiate_pool (dec, caps, &dec->vinfo)) != GST_FLOW_OK)
-    goto no_bufferpool;
+  /* make sure we negotiate a bufferpool */
+  gst_pad_mark_reconfigure (dec->srcpad);

   dec->have_header = TRUE;

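The two GstVideoInfo sizes involved here differ whenever the coded frame is larger than the visible picture: Theora codes frames in multiples of 16 pixels, so, for example, a 1920x1080 stream is coded as 1920x1088 with a 1920x1080 picture region. A small illustrative sketch of that split, reusing only calls that appear in this patch; the concrete numbers and the I420 format are examples, not taken from the commit:

/* Illustration only: coded size vs. visible (cropped) size.
 * 1080 is not a multiple of 16, so the coded height becomes 1088. */
GstVideoInfo coded, visible;
GstCaps *outcaps;

/* what the buffer pool must be able to hold (dec->vinfo above) */
gst_video_info_set_format (&coded, GST_VIDEO_FORMAT_I420, 1920, 1088);

/* what the src caps advertise (the pic_width/pic_height path above) */
gst_video_info_set_format (&visible, GST_VIDEO_FORMAT_I420, 1920, 1080);
outcaps = gst_video_info_to_caps (&visible);

/* pool buffers are at least as large as a full coded frame */
g_assert (GST_VIDEO_INFO_SIZE (&coded) >= GST_VIDEO_INFO_SIZE (&visible));
gst_caps_unref (outcaps);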
@@ -983,10 +986,6 @@ invalid_format:
     GST_ERROR_OBJECT (dec, "Invalid pixel format %d", dec->info.pixel_fmt);
     return GST_FLOW_ERROR;
   }
-no_bufferpool:
-  {
-    return ret;
-  }
 }

 static GstFlowReturn
@@ -1113,13 +1112,8 @@ theora_handle_image (GstTheoraDec * dec, th_ycbcr_buffer buf, GstBuffer ** out)
   GstVideoCropMeta *crop;
   gint offset_x, offset_y;

-  if (gst_pad_check_reconfigure (dec->srcpad)) {
-    GstCaps *caps;
-
-    caps = gst_pad_get_current_caps (dec->srcpad);
-    theora_negotiate_pool (dec, caps, &dec->vinfo);
-    gst_caps_unref (caps);
-  }
+  if (gst_pad_check_reconfigure (dec->srcpad))
+    theora_negotiate_pool (dec);

   result = gst_buffer_pool_acquire_buffer (dec->pool, out, NULL);
   if (G_UNLIKELY (result != GST_FLOW_OK))
@@ -1128,7 +1122,7 @@ theora_handle_image (GstTheoraDec * dec, th_ycbcr_buffer buf, GstBuffer ** out)
   if (!gst_video_frame_map (&frame, &dec->vinfo, *out, GST_MAP_WRITE))
     goto invalid_frame;

-  if (dec->crop && !dec->has_cropping) {
+  if (!dec->has_cropping) {
     /* we need to crop the hard way */
     offset_x = dec->info.pic_x;
     offset_y = dec->info.pic_y;
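When dec->has_cropping is TRUE this hard-copy path is skipped and the visible rectangle is instead signalled on the buffer. That part of the function lies outside the lines shown in this hunk; a hedged sketch of what it looks like, assuming the gst_buffer_add_video_crop_meta() helper from the video library (the field names match GstVideoCropMeta, and *out is the output buffer from the function signature above):

/* Sketch of the metadata path (not part of this hunk): mark the visible
 * region on the full coded-size buffer instead of copying pixels. */
if (dec->has_cropping) {
  GstVideoCropMeta *crop = gst_buffer_add_video_crop_meta (*out);

  crop->x = dec->info.pic_x;
  crop->y = dec->info.pic_y;
  crop->width = dec->info.pic_width;
  crop->height = dec->info.pic_height;
}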
@@ -1650,9 +1644,6 @@ theora_dec_set_property (GObject * object, guint prop_id,
   GstTheoraDec *dec = GST_THEORA_DEC (object);

   switch (prop_id) {
-    case PROP_CROP:
-      dec->crop = g_value_get_boolean (value);
-      break;
     case PROP_TELEMETRY_MV:
       dec->telemetry_mv = g_value_get_int (value);
       break;
@@ -1678,9 +1669,6 @@ theora_dec_get_property (GObject * object, guint prop_id,
   GstTheoraDec *dec = GST_THEORA_DEC (object);

   switch (prop_id) {
-    case PROP_CROP:
-      g_value_set_boolean (value, dec->crop);
-      break;
     case PROP_TELEMETRY_MV:
       g_value_set_int (value, dec->telemetry_mv);
       break;
gsttheoradec.h:

@@ -82,8 +82,6 @@ struct _GstTheoraDec
   gint telemetry_qi;
   gint telemetry_bits;

-  gboolean crop;
-
   /* list of buffers that need timestamps */
   GList *queued;
   /* list of raw output buffers */
gstvideofilter.c:

@@ -53,7 +53,7 @@ G_DEFINE_ABSTRACT_TYPE (GstVideoFilter, gst_video_filter,
 /* Answer the allocation query downstream. */
 static gboolean
 gst_video_filter_propose_allocation (GstBaseTransform * trans,
-    gboolean passthrough, GstQuery * query)
+    GstQuery * decide_query, GstQuery * query)
 {
   GstVideoFilter *filter = GST_VIDEO_FILTER_CAST (trans);
   GstVideoInfo info;
@@ -63,9 +63,9 @@ gst_video_filter_propose_allocation (GstBaseTransform * trans,
   guint size;

   /* we're passthrough, let the parent implementation hande things */
-  if (passthrough)
+  if (decide_query == NULL)
     return GST_BASE_TRANSFORM_CLASS (parent_class)->propose_allocation (trans,
-        passthrough, query);
+        decide_query, query);

   gst_query_parse_allocation (query, &caps, &need_pool);

@@ -124,7 +124,8 @@ gst_video_filter_decide_allocation (GstBaseTransform * trans, GstQuery * query)
         GST_BUFFER_POOL_OPTION_VIDEO_META);
     gst_buffer_pool_set_config (pool, config);
   }
-  return TRUE;
+  return GST_BASE_TRANSFORM_CLASS (parent_class)->decide_allocation (trans,
+      query);
 }

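Both this decide_allocation and theora_negotiate_pool() above add GST_BUFFER_POOL_OPTION_VIDEO_META to the pool configuration unconditionally; the comment in theora_negotiate_pool() notes that one could instead enable it only when the pool supports the option. A sketch of that alternative, assuming a gst_buffer_pool_has_option()-style check (that call is not part of this commit):

/* Hypothetical variant: only request video metadata when the pool
 * advertises support for the option. */
if (gst_buffer_pool_has_option (pool, GST_BUFFER_POOL_OPTION_VIDEO_META)) {
  GstStructure *config = gst_buffer_pool_get_config (pool);

  gst_buffer_pool_config_add_option (config,
      GST_BUFFER_POOL_OPTION_VIDEO_META);
  gst_buffer_pool_set_config (pool, config);
}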
gstvideoconvert.c:

@@ -49,6 +49,12 @@ GST_DEBUG_CATEGORY (videoconvert_debug);
 #define GST_CAT_DEFAULT videoconvert_debug
 GST_DEBUG_CATEGORY_EXTERN (GST_CAT_PERFORMANCE);

+GType gst_video_convert_get_type (void);
+
+static GQuark _colorspace_quark;
+
+#define gst_video_convert_parent_class parent_class
+G_DEFINE_TYPE (GstVideoConvert, gst_video_convert, GST_TYPE_VIDEO_FILTER);

 enum
 {
@@ -72,8 +78,6 @@ GST_STATIC_PAD_TEMPLATE ("sink",
     GST_STATIC_CAPS (CSP_VIDEO_CAPS)
     );

-GType gst_video_convert_get_type (void);
-
 static void gst_video_convert_set_property (GObject * object,
     guint property_id, const GValue * value, GParamSpec * pspec);
 static void gst_video_convert_get_property (GObject * object,
@@ -305,8 +309,74 @@ invalid_palette:
   }
 }

-#define gst_video_convert_parent_class parent_class
-G_DEFINE_TYPE (GstVideoConvert, gst_video_convert, GST_TYPE_VIDEO_FILTER);
+static gboolean
+gst_video_convert_propose_allocation (GstBaseTransform * trans,
+    GstQuery * decide_query, GstQuery * query)
+{
+  gboolean ret;
+  guint i, n_metas;
+
+  /* let parent handle */
+  ret = GST_BASE_TRANSFORM_CLASS (parent_class)->propose_allocation (trans,
+      decide_query, query);
+  /* error or passthrough, we're done */
+  if (!ret || decide_query == NULL)
+    return ret;
+
+  /* non-passthrough, copy all metadata, decide_query does not contain the
+   * metadata anymore that depends on the buffer memory */
+  n_metas = gst_query_get_n_allocation_metas (decide_query);
+  for (i = 0; i < n_metas; i++) {
+    GType api;
+
+    api = gst_query_parse_nth_allocation_meta (decide_query, i);
+    gst_query_add_allocation_meta (query, api);
+  }
+  return ret;
+}
+
+typedef struct
+{
+  GstBaseTransform *trans;
+  GstBuffer *outbuf;
+  GQuark tag;
+} CopyMetaData;
+
+static gboolean
+foreach_metadata (GstBuffer * inbuf, GstMeta ** meta, gpointer user_data)
+{
+  CopyMetaData *data = user_data;
+  const GstMetaInfo *info = (*meta)->info;
+
+  if (info->transform_func) {
+    if (gst_meta_api_type_has_tag (info->api, data->tag)) {
+      /* metadata depends on colorspace. FIXME discard for now until we
+       * have some transform data for it. */
+    } else {
+      GstMetaTransformCopy copy_data = { 0, -1 };
+      /* simply copy then */
+      info->transform_func (data->outbuf, *meta, inbuf,
+          _gst_meta_transform_copy, &copy_data);
+    }
+  }
+  return TRUE;
+}
+
+static gboolean
+gst_video_convert_copy_metadata (GstBaseTransform * trans,
+    GstBuffer * inbuf, GstBuffer * outbuf)
+{
+  CopyMetaData data;
+
+  data.trans = trans;
+  data.outbuf = outbuf;
+  data.tag = _colorspace_quark;
+
+  gst_buffer_foreach_meta (inbuf, foreach_metadata, &data);
+
+  return GST_BASE_TRANSFORM_CLASS (parent_class)->copy_metadata (trans, inbuf,
+      outbuf);
+}

 static void
 gst_video_convert_finalize (GObject * obj)
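foreach_metadata() above drops any metadata whose API type carries the tag behind _colorspace_quark, since such metadata would be invalidated by the conversion. For that check to ever match, a meta implementation has to register its API type with a "colorspace" tag; a sketch of that registration side, assuming a gst_meta_api_type_register()-style call takes the tag list (the meta name is made up for illustration):

/* Hypothetical colorspace-dependent meta: registering its API type with a
 * "colorspace" tag is what makes gst_meta_api_type_has_tag() above match. */
static GType
my_colorimetry_meta_api_get_type (void)
{
  static volatile GType type = 0;
  static const gchar *tags[] = { "colorspace", NULL };

  if (g_once_init_enter (&type)) {
    GType t = gst_meta_api_type_register ("MyColorimetryMetaAPI", tags);
    g_once_init_leave (&type, t);
  }
  return type;
}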
@@ -347,6 +417,10 @@ gst_video_convert_class_init (GstVideoConvertClass * klass)
       GST_DEBUG_FUNCPTR (gst_video_convert_transform_caps);
   gstbasetransform_class->fixate_caps =
       GST_DEBUG_FUNCPTR (gst_video_convert_fixate_caps);
+  gstbasetransform_class->propose_allocation =
+      GST_DEBUG_FUNCPTR (gst_video_convert_propose_allocation);
+  gstbasetransform_class->copy_metadata =
+      GST_DEBUG_FUNCPTR (gst_video_convert_copy_metadata);

   gstbasetransform_class->passthrough_on_same_caps = TRUE;

@@ -428,6 +502,8 @@ plugin_init (GstPlugin * plugin)
   GST_DEBUG_CATEGORY_INIT (videoconvert_debug, "videoconvert", 0,
       "Colorspace Converter");

+  _colorspace_quark = g_quark_from_static_string ("colorspace");
+
   return gst_element_register (plugin, "videoconvert",
       GST_RANK_NONE, GST_TYPE_VIDEO_CONVERT);
 }