gstreamer/gst/videorate/gstvideorate.c
Josep Torre Valles 4de10dacb6 ext/gnomevfs/: Fix URI interface implementation return type.
Original commit message from CVS:
2006-10-10  Zaheer Abbas Merali  <zaheerabbas at merali dot org>

Patch by: Josep Torre Valles <josep@fluendo.com>

* ext/gnomevfs/gstgnomevfssink.c:
* ext/gnomevfs/gstgnomevfssrc.c:
Fix URI interface implementation return type.
* ext/pango/gsttextoverlay.c: (gst_text_overlay_set_property):
Fix what looks like a copy/paste issue when assigning values.
* gst-libs/gst/audio/gstaudiofiltertemplate.c:
(gst_audio_filter_template_get_type):
Cast to prevent Forte warnings.
* gst-libs/gst/cdda/gstcddabasesrc.c: (gst_cdda_base_src_create):
Fix URI interface implementation return type.
gst_pad_query_position requires a signed integer pointer as
3rd parameter, GstClockTime is unsigned.
* gst/audioconvert/audioconvert.c:
Fix integer overflow when treated as signed.
* gst/audioresample/resample.c: (resample_add_input_data):
Cast to prevent warnings on Forte.
* gst/ffmpegcolorspace/imgconvert.c: (build_rgb_palette):
Fix integer overflow when treated as signed.
* gst/ffmpegcolorspace/imgconvert_template.h:
Fix integer overflow when treated as signed. RGBA_OUT shifts bits.
* gst/playback/gstdecodebin.c: (queue_filled_cb),
(cleanup_decodebin):
Who initialises a guint to -1!
Cast function pointers to prevent warnings on Forte.
* gst/playback/gstplaybasebin.c: (queue_deadlock_check),
(queue_threshold_reached):
Cast function pointers correctly to prevent warnings on Forte.
* gst/playback/gststreaminfo.c: (gst_stream_info_dispose):
Cast function pointers correctly to prevent warnings on Forte.
* gst/subparse/gstssaparse.c: (gst_ssa_parse_setcaps):
Obvious change to unsigned, 0xEF > max signed char.
* gst/tcp/gstmultifdsink.c: (get_buffers_max), (count_burst_unit):
GstClockTime is unsigned, initialise correctly.
* gst/tcp/gsttcp.c: (gst_tcp_socket_write):
Cast so pointer arithmetic doesn't cause warnings on Forte.
* gst/videorate/gstvideorate.c:
Use correct return value.
* tests/examples/seek/scrubby.c:
GstClockTime is unsigned, initialise correctly.
2006-10-10 12:49:03 +00:00
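
Several of the fixes listed above come down to the same point: GstClockTime is an unsigned 64-bit type, so it must not be initialised or compared as if it were signed. A minimal sketch of the correct pattern (illustrative only, not the actual diff; the function name is made up):

#include <gst/gst.h>

static void
clock_time_init_example (void)
{
  /* GST_CLOCK_TIME_NONE is the canonical "unset" value, (GstClockTime) -1 */
  GstClockTime ts = GST_CLOCK_TIME_NONE;

  /* "ts < 0" can never be true for an unsigned type, so test validity instead */
  if (!GST_CLOCK_TIME_IS_VALID (ts))
    g_print ("timestamp not set yet\n");
}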


/* GStreamer
* Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/**
* SECTION:element-videorate
* @short_description: retimestamps and drops/duplicates video frames to
* match the source pad's framerate and create a perfect stream
*
* <refsect2>
* <para>
* This element takes an incoming stream of timestamped video frames.
* It will produce a perfect stream that matches the source pad's framerate.
*
* The correction is performed by dropping and duplicating frames, no fancy
* algorithm is used to interpolate frames (yet).
* </para>
* <para>
* By default the element will simply negotiate the same framerate on its
* source and sink pad.
* </para>
* <para>
* This operation is useful when linking to elements that require a perfect stream.
* Typical examples are formats that do not store timestamps for video frames,
* but only store a framerate, like Ogg and AVI.
* </para>
* <para>
* A conversion to a specific framerate can be forced by using filtered caps on
* the source pad.
* </para>
* <para>
* The properties "in", "out", "duplicate" and "drop" can be read to obtain
* information about the number of input frames, output frames, dropped frames
* (i.e. the number of unused input frames) and duplicated frames (i.e. the
* number of times an input frame was duplicated, besides being used normally).
*
* An input stream that needs no adjustments will thus never have dropped or
* duplicated frames.
*
* When the "silent" property is set to FALSE, a GObject property notification
* will be emitted whenever one of the "duplicate" or "drop" values changes.
* This can potentially cause performance degradation.
* Note that property notification will happen from the streaming thread, so
* applications should be prepared for this.
* </para>
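* <para>
* For example, an application that wants to keep track of dropped frames can
* enable notifications and connect to the "notify::drop" signal. This is only
* an illustrative sketch; the callback name and the videorate variable are
* placeholders:
* <programlisting>
* static void
* drop_notify_cb (GObject * obj, GParamSpec * pspec, gpointer user_data)
* {
*   guint64 drops;
*
*   g_object_get (obj, "drop", &amp;drops, NULL);
*   g_print ("dropped %" G_GUINT64_FORMAT " frames so far\n", drops);
* }
*
* g_object_set (videorate, "silent", FALSE, NULL);
* g_signal_connect (videorate, "notify::drop",
*     G_CALLBACK (drop_notify_cb), NULL);
* </programlisting>
* Remember that the callback runs in the streaming thread.
* </para>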
* <title>Example pipelines</title>
* <para>
* <programlisting>
* gst-launch -v filesrc location=videotestsrc.ogg ! oggdemux ! theoradec ! videorate ! video/x-raw-yuv,framerate=15/1 ! xvimagesink
* </programlisting>
* Decode an Ogg/Theora file and adjust the framerate to 15 fps before playing.
* To create the test Ogg/Theora file refer to the documentation of theoraenc.
* </para>
* <para>
* <programlisting>
* gst-launch -v v4lsrc ! videorate ! video/x-raw-yuv,framerate=25/2 ! theoraenc ! oggmux ! filesink location=v4l.ogg
* </programlisting>
* Capture video from a V4L device, and adjust the stream to 12.5 fps before
* encoding to Ogg/Theora.
* </para>
* </refsect2>
*
* Last reviewed on 2006-09-02 (0.10.11)
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstvideorate.h"
GST_DEBUG_CATEGORY_STATIC (video_rate_debug);
#define GST_CAT_DEFAULT video_rate_debug
/* elementfactory information */
static const GstElementDetails video_rate_details =
GST_ELEMENT_DETAILS ("Video rate adjuster",
"Filter/Effect/Video",
"Drops/duplicates/adjusts timestamps on video frames to make a perfect stream",
"Wim Taymans <wim@fluendo.com>");
/* GstVideoRate signals and args */
enum
{
/* FILL ME */
LAST_SIGNAL
};
#define DEFAULT_SILENT TRUE
#define DEFAULT_NEW_PREF 1.0
enum
{
ARG_0,
ARG_IN,
ARG_OUT,
ARG_DUP,
ARG_DROP,
ARG_SILENT,
ARG_NEW_PREF,
/* FILL ME */
};
static GstStaticPadTemplate gst_video_rate_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-raw-yuv; video/x-raw-rgb")
);
static GstStaticPadTemplate gst_video_rate_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-raw-yuv; video/x-raw-rgb")
);
static void gst_video_rate_base_init (gpointer g_class);
static void gst_video_rate_class_init (GstVideoRateClass * klass);
static void gst_video_rate_init (GstVideoRate * videorate);
static void gst_video_rate_swap_prev (GstVideoRate * videorate,
GstBuffer * buffer, gint64 time);
static gboolean gst_video_rate_event (GstPad * pad, GstEvent * event);
static GstFlowReturn gst_video_rate_chain (GstPad * pad, GstBuffer * buffer);
static void gst_video_rate_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_video_rate_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
static GstStateChangeReturn gst_video_rate_change_state (GstElement * element,
GstStateChange transition);
static GstElementClass *parent_class = NULL;
/*static guint gst_video_rate_signals[LAST_SIGNAL] = { 0 }; */
static GType
gst_video_rate_get_type (void)
{
static GType video_rate_type = 0;
if (!video_rate_type) {
static const GTypeInfo video_rate_info = {
sizeof (GstVideoRateClass),
gst_video_rate_base_init,
NULL,
(GClassInitFunc) gst_video_rate_class_init,
NULL,
NULL,
sizeof (GstVideoRate),
0,
(GInstanceInitFunc) gst_video_rate_init,
};
video_rate_type = g_type_register_static (GST_TYPE_ELEMENT,
"GstVideoRate", &video_rate_info, 0);
}
return video_rate_type;
}
static void
gst_video_rate_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_set_details (element_class, &video_rate_details);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_video_rate_sink_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_video_rate_src_template));
}
static void
gst_video_rate_class_init (GstVideoRateClass * klass)
{
GObjectClass *object_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
parent_class = g_type_class_peek_parent (klass);
object_class->set_property = gst_video_rate_set_property;
object_class->get_property = gst_video_rate_get_property;
g_object_class_install_property (object_class, ARG_IN,
g_param_spec_uint64 ("in", "In",
"Number of input frames", 0, G_MAXUINT64, 0, G_PARAM_READABLE));
g_object_class_install_property (object_class, ARG_OUT,
g_param_spec_uint64 ("out", "Out",
"Number of output frames", 0, G_MAXUINT64, 0, G_PARAM_READABLE));
g_object_class_install_property (object_class, ARG_DUP,
g_param_spec_uint64 ("duplicate", "Duplicate",
"Number of duplicated frames", 0, G_MAXUINT64, 0, G_PARAM_READABLE));
g_object_class_install_property (object_class, ARG_DROP,
g_param_spec_uint64 ("drop", "Drop",
"Number of dropped frames", 0, G_MAXUINT64, 0, G_PARAM_READABLE));
g_object_class_install_property (object_class, ARG_SILENT,
g_param_spec_boolean ("silent", "silent",
"Don't emit notify for dropped and duplicated frames",
DEFAULT_SILENT, G_PARAM_READWRITE));
g_object_class_install_property (object_class, ARG_NEW_PREF,
g_param_spec_double ("new_pref", "New Pref",
"Value indicating how much to prefer new frames (unused)",
0.0, 1.0, DEFAULT_NEW_PREF, G_PARAM_READWRITE));
element_class->change_state = gst_video_rate_change_state;
}
/* return the caps that can be used on out_pad given in_caps on in_pad */
static gboolean
gst_video_rate_transformcaps (GstPad * in_pad, GstCaps * in_caps,
GstPad * out_pad, GstCaps ** out_caps)
{
GstCaps *intersect;
const GstCaps *in_templ;
gint i;
in_templ = gst_pad_get_pad_template_caps (in_pad);
intersect = gst_caps_intersect (in_caps, in_templ);
/* all possible framerates are allowed */
for (i = 0; i < gst_caps_get_size (intersect); i++) {
GstStructure *structure;
structure = gst_caps_get_structure (intersect, i);
gst_structure_set (structure,
"framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
}
*out_caps = intersect;
return TRUE;
}
static GstCaps *
gst_video_rate_getcaps (GstPad * pad)
{
GstVideoRate *videorate;
GstPad *otherpad;
GstCaps *caps;
videorate = GST_VIDEO_RATE (GST_PAD_PARENT (pad));
otherpad = (pad == videorate->srcpad) ? videorate->sinkpad :
videorate->srcpad;
/* we can do what the peer can */
caps = gst_pad_peer_get_caps (otherpad);
if (caps) {
GstCaps *transform;
gst_video_rate_transformcaps (otherpad, caps, pad, &transform);
gst_caps_unref (caps);
caps = transform;
} else {
/* no peer, our padtemplate is enough then */
caps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
}
return caps;
}
static gboolean
gst_video_rate_setcaps (GstPad * pad, GstCaps * caps)
{
GstVideoRate *videorate;
GstStructure *structure;
gboolean ret = TRUE;
GstPad *otherpad, *opeer;
gint rate_numerator, rate_denominator;
videorate = GST_VIDEO_RATE (gst_pad_get_parent (pad));
structure = gst_caps_get_structure (caps, 0);
if (!gst_structure_get_fraction (structure, "framerate",
&rate_numerator, &rate_denominator))
goto no_framerate;
if (pad == videorate->srcpad) {
videorate->to_rate_numerator = rate_numerator;
videorate->to_rate_denominator = rate_denominator;
otherpad = videorate->sinkpad;
} else {
videorate->from_rate_numerator = rate_numerator;
videorate->from_rate_denominator = rate_denominator;
otherpad = videorate->srcpad;
}
/* now try to find something for the peer */
opeer = gst_pad_get_peer (otherpad);
if (opeer) {
if (gst_pad_accept_caps (opeer, caps)) {
/* the peer accepts the caps as they are */
gst_pad_set_caps (otherpad, caps);
ret = TRUE;
} else {
GstCaps *peercaps;
GstCaps *intersect;
GstCaps *transform = NULL;
ret = FALSE;
/* see how we can transform the input caps */
if (!gst_video_rate_transformcaps (pad, caps, otherpad, &transform))
goto no_transform;
/* see what the peer can do */
peercaps = gst_pad_get_caps (opeer);
GST_DEBUG ("icaps %" GST_PTR_FORMAT, peercaps);
GST_DEBUG ("transform %" GST_PTR_FORMAT, transform);
/* filter against our possibilities */
intersect = gst_caps_intersect (peercaps, transform);
gst_caps_unref (peercaps);
gst_caps_unref (transform);
GST_DEBUG ("intersect %" GST_PTR_FORMAT, intersect);
/* take first possibility */
caps = gst_caps_copy_nth (intersect, 0);
gst_caps_unref (intersect);
structure = gst_caps_get_structure (caps, 0);
/* and fixate */
gst_structure_fixate_field_nearest_fraction (structure, "framerate",
rate_numerator, rate_denominator);
gst_structure_get_fraction (structure, "framerate",
&rate_numerator, &rate_denominator);
if (otherpad == videorate->srcpad) {
videorate->to_rate_numerator = rate_numerator;
videorate->to_rate_denominator = rate_denominator;
} else {
videorate->from_rate_numerator = rate_numerator;
videorate->from_rate_denominator = rate_denominator;
}
gst_pad_set_caps (otherpad, caps);
ret = TRUE;
}
gst_object_unref (opeer);
}
done:
gst_object_unref (videorate);
return ret;
no_framerate:
{
GST_DEBUG_OBJECT (videorate, "no framerate specified");
goto done;
}
no_transform:
{
GST_DEBUG_OBJECT (videorate, "no framerate transform possible");
ret = FALSE;
goto done;
}
}
static void
gst_video_rate_reset (GstVideoRate * videorate)
{
GST_DEBUG ("resetting internal variables");
videorate->in = 0;
videorate->out = 0;
videorate->drop = 0;
videorate->dup = 0;
videorate->next_ts = G_GINT64_CONSTANT (0);
gst_video_rate_swap_prev (videorate, NULL, 0);
gst_segment_init (&videorate->segment, GST_FORMAT_TIME);
}
static void
gst_video_rate_init (GstVideoRate * videorate)
{
GST_DEBUG ("gst_video_rate_init");
videorate->sinkpad =
gst_pad_new_from_static_template (&gst_video_rate_sink_template, "sink");
gst_element_add_pad (GST_ELEMENT (videorate), videorate->sinkpad);
gst_pad_set_event_function (videorate->sinkpad, gst_video_rate_event);
gst_pad_set_chain_function (videorate->sinkpad, gst_video_rate_chain);
gst_pad_set_getcaps_function (videorate->sinkpad, gst_video_rate_getcaps);
gst_pad_set_setcaps_function (videorate->sinkpad, gst_video_rate_setcaps);
videorate->srcpad =
gst_pad_new_from_static_template (&gst_video_rate_src_template, "src");
gst_element_add_pad (GST_ELEMENT (videorate), videorate->srcpad);
gst_pad_set_getcaps_function (videorate->srcpad, gst_video_rate_getcaps);
gst_pad_set_setcaps_function (videorate->srcpad, gst_video_rate_setcaps);
gst_video_rate_reset (videorate);
videorate->silent = DEFAULT_SILENT;
videorate->new_pref = DEFAULT_NEW_PREF;
videorate->from_rate_numerator = 0;
videorate->from_rate_denominator = 0;
videorate->to_rate_numerator = 0;
videorate->to_rate_denominator = 0;
}
/* flush the oldest buffer */
static GstFlowReturn
gst_video_rate_flush_prev (GstVideoRate * videorate)
{
GstFlowReturn res;
GstBuffer *outbuf;
GstClockTime push_ts;
if (!videorate->prevbuf)
goto eos_before_buffers;
/* make sure we can write to the metadata */
outbuf = gst_buffer_make_metadata_writable
(gst_buffer_ref (videorate->prevbuf));
/* this is the timestamp we put on the buffer */
push_ts = videorate->next_ts;
videorate->out++;
if (videorate->to_rate_numerator) {
/* interpolate next expected timestamp in the segment */
videorate->next_ts = videorate->segment.accum + videorate->segment.start +
gst_util_uint64_scale (videorate->out,
videorate->to_rate_denominator * GST_SECOND,
videorate->to_rate_numerator);
GST_BUFFER_DURATION (outbuf) = videorate->next_ts - push_ts;
}
/* adapt for looping, bring back to time in current segment. */
GST_BUFFER_TIMESTAMP (outbuf) = push_ts - videorate->segment.accum;
gst_buffer_set_caps (outbuf, GST_PAD_CAPS (videorate->srcpad));
GST_LOG_OBJECT (videorate,
"old is best, dup, pushing buffer outgoing ts %" GST_TIME_FORMAT,
GST_TIME_ARGS (push_ts));
res = gst_pad_push (videorate->srcpad, outbuf);
return res;
/* WARNINGS */
eos_before_buffers:
{
GST_INFO_OBJECT (videorate, "got EOS before any buffer was received");
return GST_FLOW_OK;
}
}
static void
gst_video_rate_swap_prev (GstVideoRate * videorate, GstBuffer * buffer,
gint64 time)
{
GST_LOG_OBJECT (videorate, "swap_prev: storing buffer %p in prev", buffer);
if (videorate->prevbuf)
gst_buffer_unref (videorate->prevbuf);
videorate->prevbuf = buffer;
videorate->prev_ts = time;
}
static gboolean
gst_video_rate_event (GstPad * pad, GstEvent * event)
{
GstVideoRate *videorate;
gboolean ret;
videorate = GST_VIDEO_RATE (gst_pad_get_parent (pad));
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_NEWSEGMENT:
{
gint64 start, stop, time;
gdouble rate, arate;
gboolean update;
GstFormat format;
gst_event_parse_new_segment_full (event, &update, &rate, &arate, &format,
&start, &stop, &time);
if (format != GST_FORMAT_TIME)
goto format_error;
GST_DEBUG_OBJECT (videorate, "handle NEWSEGMENT");
/* We just want to update the accumulated stream_time */
gst_segment_set_newsegment_full (&videorate->segment, update, rate, arate,
format, start, stop, time);
GST_DEBUG_OBJECT (videorate, "updated segment: %" GST_SEGMENT_FORMAT,
&videorate->segment);
break;
}
case GST_EVENT_EOS:
/* flush last queued frame */
GST_DEBUG_OBJECT (videorate, "Got EOS");
gst_video_rate_flush_prev (videorate);
break;
case GST_EVENT_FLUSH_STOP:
/* also resets the segment */
GST_DEBUG_OBJECT (videorate, "Got FLUSH_STOP");
gst_video_rate_reset (videorate);
break;
default:
break;
}
ret = gst_pad_push_event (videorate->srcpad, event);
done:
gst_object_unref (videorate);
return ret;
/* ERRORS */
format_error:
{
GST_WARNING_OBJECT (videorate,
"Got segment but doesn't have GST_FORMAT_TIME value");
gst_event_unref (event);
ret = FALSE;
goto done;
}
}
static GstFlowReturn
gst_video_rate_chain (GstPad * pad, GstBuffer * buffer)
{
GstVideoRate *videorate;
GstFlowReturn res = GST_FLOW_OK;
GstClockTime intime, in_ts;
videorate = GST_VIDEO_RATE (gst_pad_get_parent (pad));
/* make sure the denominators are not 0 */
if (videorate->from_rate_denominator == 0 ||
videorate->to_rate_denominator == 0)
goto not_negotiated;
in_ts = GST_BUFFER_TIMESTAMP (buffer);
GST_DEBUG_OBJECT (videorate, "got buffer with timestamp %" GST_TIME_FORMAT,
GST_TIME_ARGS (in_ts));
/* the input time is the time in the segment + all previously accumulated
* segments */
intime = in_ts + videorate->segment.accum;
/* we need to have two buffers to compare */
if (videorate->prevbuf == NULL) {
gst_video_rate_swap_prev (videorate, buffer, intime);
videorate->in++;
/* new buffer, we expect to output a buffer that matches the first
* timestamp in the segment */
videorate->next_ts = videorate->segment.start;
} else {
GstClockTime prevtime;
gint count = 0;
gint64 diff1, diff2;
prevtime = videorate->prev_ts;
GST_LOG_OBJECT (videorate,
"BEGINNING prev buf %" GST_TIME_FORMAT " new buf %" GST_TIME_FORMAT
" outgoing ts %" GST_TIME_FORMAT, GST_TIME_ARGS (prevtime),
GST_TIME_ARGS (intime), GST_TIME_ARGS (videorate->next_ts));
videorate->in++;
/* drop new buffer if it's before previous one */
if (intime < prevtime) {
GST_DEBUG_OBJECT (videorate,
"The new buffer (%" GST_TIME_FORMAT
") is before the previous buffer (%"
GST_TIME_FORMAT "). Dropping new buffer.",
GST_TIME_ARGS (intime), GST_TIME_ARGS (prevtime));
videorate->drop++;
if (!videorate->silent)
g_object_notify (G_OBJECT (videorate), "drop");
gst_buffer_unref (buffer);
goto done;
}
/* got 2 buffers, see which one is the best */
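/* Push (and thus duplicate) prevbuf for as long as it is closer to the ideal
* next_ts than the newly arrived buffer; afterwards count == 0 means prevbuf
* was never used (a drop) and count > 1 means it was output more than once
* (duplicates). */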
do {
diff1 = prevtime - videorate->next_ts;
diff2 = intime - videorate->next_ts;
/* take absolute values, beware: abs and ABS don't work for gint64 */
if (diff1 < 0)
diff1 = -diff1;
if (diff2 < 0)
diff2 = -diff2;
GST_LOG_OBJECT (videorate,
"diff with prev %" GST_TIME_FORMAT " diff with new %"
GST_TIME_FORMAT " outgoing ts %" GST_TIME_FORMAT,
GST_TIME_ARGS (diff1), GST_TIME_ARGS (diff2),
GST_TIME_ARGS (videorate->next_ts));
/* output the first one when it's the best */
if (diff1 < diff2) {
count++;
/* on error the _flush function posted a warning already */
if ((res = gst_video_rate_flush_prev (videorate)) != GST_FLOW_OK)
goto done;
}
/* continue while the first one was the best */
}
while (diff1 < diff2);
/* if we output the first buffer more than once, we have dups */
if (count > 1) {
videorate->dup += count - 1;
if (!videorate->silent)
g_object_notify (G_OBJECT (videorate), "duplicate");
}
/* if we didn't output the first buffer, we have a drop */
else if (count == 0) {
videorate->drop++;
if (!videorate->silent)
g_object_notify (G_OBJECT (videorate), "drop");
GST_LOG_OBJECT (videorate,
"new is best, old never used, drop, outgoing ts %"
GST_TIME_FORMAT, GST_TIME_ARGS (videorate->next_ts));
}
GST_LOG_OBJECT (videorate,
"END, putting new in old, diff1 %" GST_TIME_FORMAT
", diff2 %" GST_TIME_FORMAT ", next_ts %" GST_TIME_FORMAT
", in %lld, out %lld, drop %lld, dup %lld", GST_TIME_ARGS (diff1),
GST_TIME_ARGS (diff2), GST_TIME_ARGS (videorate->next_ts),
videorate->in, videorate->out, videorate->drop, videorate->dup);
/* swap in new one when it's the best */
gst_video_rate_swap_prev (videorate, buffer, intime);
}
done:
gst_object_unref (videorate);
return res;
/* ERRORS */
not_negotiated:
{
GST_WARNING_OBJECT (videorate, "no framerate negotiated");
gst_buffer_unref (buffer);
res = GST_FLOW_NOT_NEGOTIATED;
goto done;
}
}
static void
gst_video_rate_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
{
GstVideoRate *videorate = GST_VIDEO_RATE (object);
switch (prop_id) {
case ARG_SILENT:
videorate->silent = g_value_get_boolean (value);
break;
case ARG_NEW_PREF:
videorate->new_pref = g_value_get_double (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
gst_video_rate_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
{
GstVideoRate *videorate = GST_VIDEO_RATE (object);
switch (prop_id) {
case ARG_IN:
g_value_set_uint64 (value, videorate->in);
break;
case ARG_OUT:
g_value_set_uint64 (value, videorate->out);
break;
case ARG_DUP:
g_value_set_uint64 (value, videorate->dup);
break;
case ARG_DROP:
g_value_set_uint64 (value, videorate->drop);
break;
case ARG_SILENT:
g_value_set_boolean (value, videorate->silent);
break;
case ARG_NEW_PREF:
g_value_set_double (value, videorate->new_pref);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static GstStateChangeReturn
gst_video_rate_change_state (GstElement * element, GstStateChange transition)
{
GstStateChangeReturn ret;
GstVideoRate *videorate;
videorate = GST_VIDEO_RATE (element);
switch (transition) {
default:
break;
}
ret = parent_class->change_state (element, transition);
switch (transition) {
case GST_STATE_CHANGE_PAUSED_TO_READY:
gst_video_rate_reset (videorate);
break;
default:
break;
}
return ret;
}
static gboolean
plugin_init (GstPlugin * plugin)
{
GST_DEBUG_CATEGORY_INIT (video_rate_debug, "videorate", 0,
"VideoRate stream fixer");
return gst_element_register (plugin, "videorate", GST_RANK_NONE,
GST_TYPE_VIDEO_RATE);
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"videorate",
"Adjusts video frames",
plugin_init, VERSION, GST_LICENSE, GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)