Mirror of https://gitlab.freedesktop.org/gstreamer/gstreamer.git (synced 2024-11-27 04:01:08 +00:00)
video: Initial port of video base classes and related things to 0.11
parent 3ab5be1cff
commit f7bc9cc5ba
7 changed files with 394 additions and 440 deletions
@@ -135,6 +135,7 @@
 * decoded).
 * * Add a flag/boolean for decoders that require keyframes, so the base
 * class can automatically discard non-keyframes before one has arrived
 * * Detect reordered frame/timestamps and fix the pts/dts
 * * Support for GstIndex (or shall we not care ?)
 * * Calculate actual latency based on input/output timestamp/frame_number
 * and if it exceeds the recorded one, save it and emit a GST_MESSAGE_LATENCY
@@ -142,10 +143,6 @@
 *
 */

/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
 * with newer GLib versions (>= 2.31.0) */
#define GLIB_DISABLE_DEPRECATION_WARNINGS

#include "gstvideodecoder.h"
#include "gstvideoutils.h"

@@ -158,9 +155,6 @@ GST_DEBUG_CATEGORY (videodecoder_debug);
    (G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_VIDEO_DECODER, \
        GstVideoDecoderPrivate))

/* FIXME : I really hope we never see streams that go over this */
#define MAX_DTS_PTS_REORDER_DEPTH 36

struct _GstVideoDecoderPrivate
{
  /* FIXME introduce a context ? */
@@ -198,18 +192,8 @@ struct _GstVideoDecoderPrivate
  /* combine to yield (presentation) ts */
  GstClockTime timestamp_offset;

  /* last incoming and outgoing ts */
  GstClockTime last_timestamp_in;
  GstClockTime last_timestamp_out;

  /* last outgoing system frame number (used to detect reordering) */
  guint last_out_frame_number;

  /* TRUE if input timestamp is not monotonically increasing */
  gboolean reordered_input;

  /* TRUE if frames come out in a different order than they were inputted */
  gboolean reordered_output;
  /* last outgoing ts */
  GstClockTime last_timestamp;

  /* reverse playback */
  /* collect input */
@ -254,25 +238,29 @@ struct _GstVideoDecoderPrivate
|
|||
|
||||
gint64 min_latency;
|
||||
gint64 max_latency;
|
||||
|
||||
/* Handle incoming buffers with DTS instead of PTS as timestamps */
|
||||
GstClockTime incoming_timestamps[MAX_DTS_PTS_REORDER_DEPTH];
|
||||
guint reorder_idx_in;
|
||||
guint reorder_idx_out;
|
||||
};
|
||||
|
||||
static GstElementClass *parent_class = NULL;
|
||||
static void gst_video_decoder_class_init (GstVideoDecoderClass * klass);
|
||||
static void gst_video_decoder_init (GstVideoDecoder * dec,
|
||||
GstVideoDecoderClass * klass);
|
||||
|
||||
static void gst_video_decoder_finalize (GObject * object);
|
||||
|
||||
static gboolean gst_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps);
|
||||
static gboolean gst_video_decoder_sink_event (GstPad * pad, GstEvent * event);
|
||||
static gboolean gst_video_decoder_src_event (GstPad * pad, GstEvent * event);
|
||||
static GstFlowReturn gst_video_decoder_chain (GstPad * pad, GstBuffer * buf);
|
||||
static gboolean gst_video_decoder_sink_query (GstPad * pad, GstQuery * query);
|
||||
static GstStateChangeReturn
|
||||
gst_video_decoder_change_state (GstElement * element,
|
||||
GstStateChange transition);
|
||||
static const GstQueryType *gst_video_decoder_get_query_types (GstPad * pad);
|
||||
static gboolean gst_video_decoder_src_query (GstPad * pad, GstQuery * query);
|
||||
static gboolean gst_video_decoder_setcaps (GstVideoDecoder * dec,
|
||||
GstCaps * caps);
|
||||
static gboolean gst_video_decoder_sink_event (GstPad * pad, GstObject * parent,
|
||||
GstEvent * event);
|
||||
static gboolean gst_video_decoder_src_event (GstPad * pad, GstObject * parent,
|
||||
GstEvent * event);
|
||||
static GstFlowReturn gst_video_decoder_chain (GstPad * pad, GstObject * parent,
|
||||
GstBuffer * buf);
|
||||
static gboolean gst_video_decoder_sink_query (GstPad * pad, GstObject * parent,
|
||||
GstQuery * query);
|
||||
static GstStateChangeReturn gst_video_decoder_change_state (GstElement *
|
||||
element, GstStateChange transition);
|
||||
static gboolean gst_video_decoder_src_query (GstPad * pad, GstObject * parent,
|
||||
GstQuery * query);
|
||||
static void gst_video_decoder_reset (GstVideoDecoder * decoder, gboolean full);
|
||||
|
||||
static GstFlowReturn gst_video_decoder_have_frame_2 (GstVideoDecoder * decoder);
|
||||
|
@ -287,14 +275,32 @@ static GstVideoCodecFrame *gst_video_decoder_new_frame (GstVideoDecoder *
|
|||
|
||||
static void gst_video_decoder_clear_queues (GstVideoDecoder * dec);
|
||||
|
||||
GST_BOILERPLATE (GstVideoDecoder, gst_video_decoder,
|
||||
GstElement, GST_TYPE_ELEMENT);
|
||||
|
||||
static void
|
||||
gst_video_decoder_base_init (gpointer g_class)
|
||||
/* we can't use G_DEFINE_ABSTRACT_TYPE because we need the klass in the _init
|
||||
* method to get to the padtemplates */
|
||||
GType
|
||||
gst_video_decoder_get_type (void)
|
||||
{
|
||||
GST_DEBUG_CATEGORY_INIT (videodecoder_debug, "videodecoder", 0,
|
||||
"Base Video Decoder");
|
||||
static volatile gsize type = 0;
|
||||
|
||||
if (g_once_init_enter (&type)) {
|
||||
GType _type;
|
||||
static const GTypeInfo info = {
|
||||
sizeof (GstVideoDecoderClass),
|
||||
NULL,
|
||||
NULL,
|
||||
(GClassInitFunc) gst_video_decoder_class_init,
|
||||
NULL,
|
||||
NULL,
|
||||
sizeof (GstVideoDecoder),
|
||||
0,
|
||||
(GInstanceInitFunc) gst_video_decoder_init,
|
||||
};
|
||||
|
||||
_type = g_type_register_static (GST_TYPE_ELEMENT,
|
||||
"GstVideoDecoder", &info, G_TYPE_FLAG_ABSTRACT);
|
||||
g_once_init_leave (&type, _type);
|
||||
}
|
||||
return type;
|
||||
}
|
||||
|
||||
static void
|
||||
|
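With GST_BOILERPLATE gone, the base class above registers its GType by hand because it needs the class pointer inside _init to reach the pad templates. Subclasses are not affected and can keep using the stock GObject macros; a minimal hypothetical subclass skeleton (GstFooDec and its functions are invented names, shown only to illustrate the registration side):

#include <gst/video/gstvideodecoder.h>

typedef struct _GstFooDec
{
  GstVideoDecoder parent;
} GstFooDec;

typedef struct _GstFooDecClass
{
  GstVideoDecoderClass parent_class;
} GstFooDecClass;

/* plain GObject registration; the base class is abstract, so this is
 * where a concrete decoder type actually gets created */
G_DEFINE_TYPE (GstFooDec, gst_foo_dec, GST_TYPE_VIDEO_DECODER);

static void
gst_foo_dec_class_init (GstFooDecClass * klass)
{
  /* a real decoder would set GstVideoDecoderClass vfuncs here,
   * e.g. ->set_format and ->handle_frame */
}

static void
gst_foo_dec_init (GstFooDec * self)
{
}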
@ -306,6 +312,10 @@ gst_video_decoder_class_init (GstVideoDecoderClass * klass)
|
|||
gobject_class = G_OBJECT_CLASS (klass);
|
||||
gstelement_class = GST_ELEMENT_CLASS (klass);
|
||||
|
||||
GST_DEBUG_CATEGORY_INIT (videodecoder_debug, "videodecoder", 0,
|
||||
"Base Video Decoder");
|
||||
|
||||
parent_class = g_type_class_peek_parent (klass);
|
||||
g_type_class_add_private (klass, sizeof (GstVideoDecoderPrivate));
|
||||
|
||||
gobject_class->finalize = gst_video_decoder_finalize;
|
||||
|
@ -333,8 +343,6 @@ gst_video_decoder_init (GstVideoDecoder * decoder, GstVideoDecoderClass * klass)
|
|||
gst_pad_set_chain_function (pad, GST_DEBUG_FUNCPTR (gst_video_decoder_chain));
|
||||
gst_pad_set_event_function (pad,
|
||||
GST_DEBUG_FUNCPTR (gst_video_decoder_sink_event));
|
||||
gst_pad_set_setcaps_function (pad,
|
||||
GST_DEBUG_FUNCPTR (gst_video_decoder_sink_setcaps));
|
||||
gst_pad_set_query_function (pad,
|
||||
GST_DEBUG_FUNCPTR (gst_video_decoder_sink_query));
|
||||
gst_element_add_pad (GST_ELEMENT (decoder), decoder->sinkpad);
|
||||
|
@ -347,8 +355,6 @@ gst_video_decoder_init (GstVideoDecoder * decoder, GstVideoDecoderClass * klass)
|
|||
|
||||
gst_pad_set_event_function (pad,
|
||||
GST_DEBUG_FUNCPTR (gst_video_decoder_src_event));
|
||||
gst_pad_set_query_type_function (pad,
|
||||
GST_DEBUG_FUNCPTR (gst_video_decoder_get_query_types));
|
||||
gst_pad_set_query_function (pad,
|
||||
GST_DEBUG_FUNCPTR (gst_video_decoder_src_query));
|
||||
gst_pad_use_fixed_caps (pad);
|
||||
|
@@ -357,7 +363,7 @@ gst_video_decoder_init (GstVideoDecoder * decoder, GstVideoDecoderClass * klass)
  gst_segment_init (&decoder->input_segment, GST_FORMAT_TIME);
  gst_segment_init (&decoder->output_segment, GST_FORMAT_TIME);

  g_static_rec_mutex_init (&decoder->stream_lock);
  g_rec_mutex_init (&decoder->stream_lock);

  decoder->priv->input_adapter = gst_adapter_new ();
  decoder->priv->output_adapter = gst_adapter_new ();
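The stream lock moves from the deprecated GStaticRecMutex to GLib's GRecMutex, as the hunk above shows. A minimal sketch of the new lifecycle, reusing the stream_lock field name from this class; the GST_VIDEO_DECODER_STREAM_LOCK/UNLOCK macros in the header wrap the lock/unlock calls:

static void
stream_lock_lifecycle_sketch (GstVideoDecoder * decoder)
{
  /* _init(): set up the recursive mutex */
  g_rec_mutex_init (&decoder->stream_lock);

  /* chain/event/query handlers: serialize all data processing */
  g_rec_mutex_lock (&decoder->stream_lock);
  /* ... decode, push, handle serialized events ... */
  g_rec_mutex_unlock (&decoder->stream_lock);

  /* _finalize(): release the mutex resources */
  g_rec_mutex_clear (&decoder->stream_lock);
}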
@ -499,7 +505,7 @@ _new_input_state (GstCaps * caps)
|
|||
|
||||
codec_data = gst_structure_get_value (structure, "codec_data");
|
||||
if (codec_data && G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER)
|
||||
state->codec_data = GST_BUFFER (gst_value_dup_mini_object (codec_data));
|
||||
state->codec_data = GST_BUFFER (g_value_dup_boxed (codec_data));
|
||||
|
||||
return state;
|
||||
|
||||
|
@ -548,14 +554,12 @@ _new_output_state (GstVideoFormat fmt, guint width, guint height,
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
|
||||
gst_video_decoder_setcaps (GstVideoDecoder * decoder, GstCaps * caps)
|
||||
{
|
||||
GstVideoDecoder *decoder;
|
||||
GstVideoDecoderClass *decoder_class;
|
||||
GstVideoCodecState *state;
|
||||
gboolean ret = TRUE;
|
||||
|
||||
decoder = GST_VIDEO_DECODER (gst_pad_get_parent (pad));
|
||||
decoder_class = GST_VIDEO_DECODER_GET_CLASS (decoder);
|
||||
|
||||
GST_DEBUG_OBJECT (decoder, "setcaps %" GST_PTR_FORMAT, caps);
|
||||
|
@ -578,7 +582,6 @@ gst_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
|
|||
decoder->priv->input_state = state;
|
||||
|
||||
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
||||
gst_object_unref (decoder);
|
||||
|
||||
return ret;
|
||||
|
||||
|
@ -587,7 +590,6 @@ gst_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
|
|||
parse_fail:
|
||||
{
|
||||
GST_WARNING_OBJECT (decoder, "Failed to parse caps");
|
||||
gst_object_unref (decoder);
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
|
@ -609,7 +611,7 @@ gst_video_decoder_finalize (GObject * object)
|
|||
|
||||
GST_DEBUG_OBJECT (object, "finalize");
|
||||
|
||||
g_static_rec_mutex_free (&decoder->stream_lock);
|
||||
g_rec_mutex_clear (&decoder->stream_lock);
|
||||
|
||||
if (decoder->priv->input_adapter) {
|
||||
g_object_unref (decoder->priv->input_adapter);
|
||||
|
@ -676,6 +678,15 @@ gst_video_decoder_sink_eventfunc (GstVideoDecoder * decoder, GstEvent * event)
|
|||
decoder_class = GST_VIDEO_DECODER_GET_CLASS (decoder);
|
||||
|
||||
switch (GST_EVENT_TYPE (event)) {
|
||||
case GST_EVENT_CAPS:
|
||||
{
|
||||
GstCaps *caps;
|
||||
|
||||
gst_event_parse_caps (event, &caps);
|
||||
handled = gst_video_decoder_setcaps (decoder, caps);
|
||||
gst_event_unref (event);
|
||||
break;
|
||||
}
|
||||
case GST_EVENT_EOS:
|
||||
{
|
||||
GstFlowReturn flow_ret = GST_FLOW_OK;
|
||||
|
@ -698,66 +709,51 @@ gst_video_decoder_sink_eventfunc (GstVideoDecoder * decoder, GstEvent * event)
|
|||
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
||||
break;
|
||||
}
|
||||
case GST_EVENT_NEWSEGMENT:
|
||||
case GST_EVENT_SEGMENT:
|
||||
{
|
||||
gboolean update;
|
||||
double rate, arate;
|
||||
GstFormat format;
|
||||
gint64 start;
|
||||
gint64 stop;
|
||||
gint64 pos;
|
||||
GstSegment *segment = &decoder->input_segment;
|
||||
GstSegment segment;
|
||||
|
||||
GST_VIDEO_DECODER_STREAM_LOCK (decoder);
|
||||
gst_event_parse_new_segment_full (event, &update, &rate,
|
||||
&arate, &format, &start, &stop, &pos);
|
||||
|
||||
if (format == GST_FORMAT_TIME) {
|
||||
gst_event_copy_segment (event, &segment);
|
||||
|
||||
if (segment.format == GST_FORMAT_TIME) {
|
||||
GST_DEBUG_OBJECT (decoder,
|
||||
"received TIME NEW_SEGMENT %" GST_TIME_FORMAT
|
||||
" -- %" GST_TIME_FORMAT ", pos %" GST_TIME_FORMAT
|
||||
", rate %g, applied_rate %g",
|
||||
GST_TIME_ARGS (start), GST_TIME_ARGS (stop), GST_TIME_ARGS (pos),
|
||||
rate, arate);
|
||||
"received TIME SEGMENT %" GST_SEGMENT_FORMAT, &segment);
|
||||
} else {
|
||||
GstFormat dformat = GST_FORMAT_TIME;
|
||||
gint64 start;
|
||||
|
||||
GST_DEBUG_OBJECT (decoder,
|
||||
"received NEW_SEGMENT %" G_GINT64_FORMAT
|
||||
" -- %" G_GINT64_FORMAT ", time %" G_GINT64_FORMAT
|
||||
", rate %g, applied_rate %g", start, stop, pos, rate, arate);
|
||||
"received SEGMENT %" GST_SEGMENT_FORMAT, &segment);
|
||||
|
||||
/* handle newsegment as a result from our legacy simple seeking */
|
||||
/* note that initial 0 should convert to 0 in any case */
|
||||
if (priv->do_estimate_rate &&
|
||||
gst_pad_query_convert (decoder->sinkpad, GST_FORMAT_BYTES, start,
|
||||
&dformat, &start)) {
|
||||
gst_pad_query_convert (decoder->sinkpad, GST_FORMAT_BYTES,
|
||||
segment.start, GST_FORMAT_TIME, &start)) {
|
||||
/* best attempt convert */
|
||||
/* as these are only estimates, stop is kept open-ended to avoid
|
||||
* premature cutting */
|
||||
GST_DEBUG_OBJECT (decoder,
|
||||
"converted to TIME start %" GST_TIME_FORMAT,
|
||||
GST_TIME_ARGS (start));
|
||||
pos = start;
|
||||
stop = GST_CLOCK_TIME_NONE;
|
||||
segment.start = start;
|
||||
segment.stop = GST_CLOCK_TIME_NONE;
|
||||
segment.time = start;
|
||||
/* replace event */
|
||||
gst_event_unref (event);
|
||||
event = gst_event_new_new_segment_full (update, rate, arate,
|
||||
GST_FORMAT_TIME, start, stop, pos);
|
||||
event = gst_event_new_segment (&segment);
|
||||
} else {
|
||||
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
||||
goto newseg_wrong_format;
|
||||
}
|
||||
}
|
||||
|
||||
if (!update) {
|
||||
gst_video_decoder_flush (decoder, FALSE);
|
||||
}
|
||||
gst_video_decoder_flush (decoder, FALSE);
|
||||
|
||||
priv->timestamp_offset = start;
|
||||
priv->timestamp_offset = segment.start;
|
||||
|
||||
gst_segment_set_newsegment_full (segment,
|
||||
update, rate, arate, format, start, stop, pos);
|
||||
decoder->input_segment = segment;
|
||||
|
||||
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
||||
break;
|
||||
|
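In 0.11 the NEWSEGMENT event becomes GST_EVENT_SEGMENT and carries a complete GstSegment, so the eight-argument parse and gst_segment_set_newsegment_full() rebuild above reduce to copying the structure. A condensed sketch of the TIME path from that hunk (the BYTES-to-TIME estimation branch and the flush are left out; handle_segment_sketch is an invented name):

static void
handle_segment_sketch (GstVideoDecoder * decoder, GstEvent * event)
{
  GstSegment segment;

  gst_event_copy_segment (event, &segment);

  if (segment.format == GST_FORMAT_TIME) {
    GST_DEBUG_OBJECT (decoder,
        "received TIME SEGMENT %" GST_SEGMENT_FORMAT, &segment);
    decoder->priv->timestamp_offset = segment.start;
    decoder->input_segment = segment;
  }
}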
@ -789,34 +785,23 @@ static gboolean
|
|||
gst_video_decoder_push_event (GstVideoDecoder * decoder, GstEvent * event)
|
||||
{
|
||||
switch (GST_EVENT_TYPE (event)) {
|
||||
case GST_EVENT_NEWSEGMENT:
|
||||
case GST_EVENT_SEGMENT:
|
||||
{
|
||||
gboolean update;
|
||||
double rate;
|
||||
double applied_rate;
|
||||
GstFormat format;
|
||||
gint64 start;
|
||||
gint64 stop;
|
||||
gint64 position;
|
||||
GstSegment segment;
|
||||
|
||||
GST_VIDEO_DECODER_STREAM_LOCK (decoder);
|
||||
gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
|
||||
&format, &start, &stop, &position);
|
||||
|
||||
GST_DEBUG_OBJECT (decoder, "newseg rate %g, applied rate %g, "
|
||||
"format %d, start = %" GST_TIME_FORMAT ", stop = %" GST_TIME_FORMAT
|
||||
", pos = %" GST_TIME_FORMAT, rate, applied_rate, format,
|
||||
GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
|
||||
GST_TIME_ARGS (position));
|
||||
gst_event_copy_segment (event, &segment);
|
||||
|
||||
if (format != GST_FORMAT_TIME) {
|
||||
GST_DEBUG_OBJECT (decoder, "segment %" GST_SEGMENT_FORMAT, &segment);
|
||||
|
||||
if (segment.format != GST_FORMAT_TIME) {
|
||||
GST_DEBUG_OBJECT (decoder, "received non TIME newsegment");
|
||||
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
||||
break;
|
||||
}
|
||||
|
||||
gst_segment_set_newsegment_full (&decoder->output_segment, update, rate,
|
||||
applied_rate, format, start, stop, position);
|
||||
decoder->output_segment = segment;
|
||||
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
||||
break;
|
||||
}
|
||||
|
@@ -828,14 +813,15 @@ gst_video_decoder_push_event (GstVideoDecoder * decoder, GstEvent * event)
}

static gboolean
gst_video_decoder_sink_event (GstPad * pad, GstEvent * event)
gst_video_decoder_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstVideoDecoder *decoder;
  GstVideoDecoderClass *decoder_class;
  gboolean ret = FALSE;
  gboolean handled = FALSE;

  decoder = GST_VIDEO_DECODER (gst_pad_get_parent (pad));
  decoder = GST_VIDEO_DECODER (parent);
  decoder_class = GST_VIDEO_DECODER_GET_CLASS (decoder);

  GST_DEBUG_OBJECT (decoder, "received event %d, %s", GST_EVENT_TYPE (event),
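As the hunk above shows, every pad callback now receives its parent element as an explicit GstObject *, so the gst_pad_get_parent()/gst_object_unref() pair in each handler disappears. The minimal 0.11 shape of such a callback, sketched for a generic event function (example_sink_event is an invented name):

static gboolean
example_sink_event (GstPad * pad, GstObject * parent, GstEvent * event)
{
  GstVideoDecoder *decoder = GST_VIDEO_DECODER (parent);

  GST_DEBUG_OBJECT (decoder, "received %s event",
      GST_EVENT_TYPE_NAME (event));

  /* no ref was taken, so there is nothing to unref before returning */
  return gst_pad_event_default (pad, parent, event);
}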
@ -871,9 +857,7 @@ gst_video_decoder_sink_event (GstPad * pad, GstEvent * event)
|
|||
}
|
||||
}
|
||||
|
||||
gst_object_unref (decoder);
|
||||
return ret;
|
||||
|
||||
}
|
||||
|
||||
/* perform upstream byte <-> time conversion (duration, seeking)
|
||||
|
@ -888,9 +872,9 @@ gst_video_decoder_do_byte (GstVideoDecoder * dec)
|
|||
static gboolean
|
||||
gst_video_decoder_do_seek (GstVideoDecoder * dec, GstEvent * event)
|
||||
{
|
||||
GstFormat format;
|
||||
GstSeekFlags flags;
|
||||
GstSeekType start_type, end_type;
|
||||
GstFormat format;
|
||||
gdouble rate;
|
||||
gint64 start, start_time, end_time;
|
||||
GstSegment seek_segment;
|
||||
|
@ -922,13 +906,12 @@ gst_video_decoder_do_seek (GstVideoDecoder * dec, GstEvent * event)
|
|||
}
|
||||
|
||||
memcpy (&seek_segment, &dec->output_segment, sizeof (seek_segment));
|
||||
gst_segment_set_seek (&seek_segment, rate, format, flags, start_type,
|
||||
gst_segment_do_seek (&seek_segment, rate, format, flags, start_type,
|
||||
start_time, end_type, end_time, NULL);
|
||||
start_time = seek_segment.last_stop;
|
||||
start_time = seek_segment.position;
|
||||
|
||||
format = GST_FORMAT_BYTES;
|
||||
if (!gst_pad_query_convert (dec->sinkpad, GST_FORMAT_TIME, start_time,
|
||||
&format, &start)) {
|
||||
GST_FORMAT_BYTES, &start)) {
|
||||
GST_DEBUG_OBJECT (dec, "conversion failed");
|
||||
return FALSE;
|
||||
}
|
||||
|
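gst_pad_query_convert() and the related peer-query helpers now take the destination format by value instead of through a GstFormat pointer, which is why the &format/&start juggling above goes away. A minimal sketch of the 0.11 call (time_to_bytes is an invented helper name):

static gboolean
time_to_bytes (GstPad * sinkpad, gint64 time_pos, gint64 * byte_pos)
{
  /* 0.10: gst_pad_query_convert (pad, GST_FORMAT_TIME, t, &fmt, &val)
   * 0.11: the target format is passed directly and is never rewritten */
  return gst_pad_query_convert (sinkpad, GST_FORMAT_TIME, time_pos,
      GST_FORMAT_BYTES, byte_pos);
}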
@ -945,13 +928,13 @@ gst_video_decoder_do_seek (GstVideoDecoder * dec, GstEvent * event)
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_video_decoder_src_event (GstPad * pad, GstEvent * event)
|
||||
gst_video_decoder_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
|
||||
{
|
||||
GstVideoDecoder *decoder;
|
||||
GstVideoDecoderPrivate *priv;
|
||||
gboolean res = FALSE;
|
||||
|
||||
decoder = GST_VIDEO_DECODER (gst_pad_get_parent (pad));
|
||||
decoder = GST_VIDEO_DECODER (parent);
|
||||
priv = decoder->priv;
|
||||
|
||||
GST_DEBUG_OBJECT (decoder,
|
||||
|
@ -961,7 +944,7 @@ gst_video_decoder_src_event (GstPad * pad, GstEvent * event)
|
|||
switch (GST_EVENT_TYPE (event)) {
|
||||
case GST_EVENT_SEEK:
|
||||
{
|
||||
GstFormat format, tformat;
|
||||
GstFormat format;
|
||||
gdouble rate;
|
||||
GstSeekFlags flags;
|
||||
GstSeekType cur_type, stop_type;
|
||||
|
@ -986,10 +969,12 @@ gst_video_decoder_src_event (GstPad * pad, GstEvent * event)
|
|||
|
||||
/* ... though a non-time seek can be aided as well */
|
||||
/* First bring the requested format to time */
|
||||
tformat = GST_FORMAT_TIME;
|
||||
if (!(res = gst_pad_query_convert (pad, format, cur, &tformat, &tcur)))
|
||||
if (!(res =
|
||||
gst_pad_query_convert (pad, format, cur, GST_FORMAT_TIME, &tcur)))
|
||||
goto convert_error;
|
||||
if (!(res = gst_pad_query_convert (pad, format, stop, &tformat, &tstop)))
|
||||
if (!(res =
|
||||
gst_pad_query_convert (pad, format, stop, GST_FORMAT_TIME,
|
||||
&tstop)))
|
||||
goto convert_error;
|
||||
|
||||
/* then seek with time on the peer */
|
||||
|
@ -1002,12 +987,13 @@ gst_video_decoder_src_event (GstPad * pad, GstEvent * event)
|
|||
}
|
||||
case GST_EVENT_QOS:
|
||||
{
|
||||
GstQOSType type;
|
||||
gdouble proportion;
|
||||
GstClockTimeDiff diff;
|
||||
GstClockTime timestamp;
|
||||
GstClockTime duration;
|
||||
|
||||
gst_event_parse_qos (event, &proportion, &diff, &timestamp);
gst_event_parse_qos (event, &type, &proportion, &diff, &timestamp);
|
||||
|
||||
GST_OBJECT_LOCK (decoder);
|
||||
priv->proportion = proportion;
|
||||
|
@ -1040,7 +1026,6 @@ gst_video_decoder_src_event (GstPad * pad, GstEvent * event)
|
|||
break;
|
||||
}
|
||||
done:
|
||||
gst_object_unref (decoder);
|
||||
return res;
|
||||
|
||||
convert_error:
|
||||
|
@ -1048,27 +1033,13 @@ convert_error:
|
|||
goto done;
|
||||
}
|
||||
|
||||
static const GstQueryType *
|
||||
gst_video_decoder_get_query_types (GstPad * pad)
|
||||
{
|
||||
static const GstQueryType query_types[] = {
|
||||
GST_QUERY_POSITION,
|
||||
GST_QUERY_DURATION,
|
||||
GST_QUERY_CONVERT,
|
||||
GST_QUERY_LATENCY,
|
||||
0
|
||||
};
|
||||
|
||||
return query_types;
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_video_decoder_src_query (GstPad * pad, GstQuery * query)
|
||||
gst_video_decoder_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
|
||||
{
|
||||
GstVideoDecoder *dec;
|
||||
gboolean res = TRUE;
|
||||
|
||||
dec = GST_VIDEO_DECODER (gst_pad_get_parent (pad));
|
||||
dec = GST_VIDEO_DECODER (parent);
|
||||
|
||||
GST_LOG_OBJECT (dec, "handling query: %" GST_PTR_FORMAT, query);
|
||||
|
||||
|
@ -1085,7 +1056,7 @@ gst_video_decoder_src_query (GstPad * pad, GstQuery * query)
|
|||
}
|
||||
|
||||
/* we start from the last seen time */
|
||||
time = dec->priv->last_timestamp_out;
|
||||
time = dec->priv->last_timestamp;
|
||||
/* correct for the segment values */
|
||||
time = gst_segment_to_stream_time (&dec->output_segment,
|
||||
GST_FORMAT_TIME, time);
|
||||
|
@ -1096,7 +1067,7 @@ gst_video_decoder_src_query (GstPad * pad, GstQuery * query)
|
|||
/* and convert to the final format */
|
||||
gst_query_parse_position (query, &format, NULL);
|
||||
if (!(res = gst_pad_query_convert (pad, GST_FORMAT_TIME, time,
|
||||
&format, &value)))
|
||||
format, &value)))
|
||||
break;
|
||||
|
||||
gst_query_set_position (query, format, value);
|
||||
|
@ -1111,7 +1082,7 @@ gst_video_decoder_src_query (GstPad * pad, GstQuery * query)
|
|||
GstFormat format;
|
||||
|
||||
/* upstream in any case */
|
||||
if ((res = gst_pad_query_default (pad, query)))
|
||||
if ((res = gst_pad_query_default (pad, parent, query)))
|
||||
break;
|
||||
|
||||
gst_query_parse_duration (query, &format, NULL);
|
||||
|
@ -1119,12 +1090,11 @@ gst_video_decoder_src_query (GstPad * pad, GstQuery * query)
|
|||
if (format == GST_FORMAT_TIME && gst_video_decoder_do_byte (dec)) {
|
||||
gint64 value;
|
||||
|
||||
format = GST_FORMAT_BYTES;
|
||||
if (gst_pad_query_peer_duration (dec->sinkpad, &format, &value)) {
|
||||
if (gst_pad_peer_query_duration (dec->sinkpad, GST_FORMAT_BYTES,
|
||||
&value)) {
|
||||
GST_LOG_OBJECT (dec, "upstream size %" G_GINT64_FORMAT, value);
|
||||
format = GST_FORMAT_TIME;
|
||||
if (gst_pad_query_convert (dec->sinkpad,
|
||||
GST_FORMAT_BYTES, value, &format, &value)) {
|
||||
GST_FORMAT_BYTES, value, GST_FORMAT_TIME, &value)) {
|
||||
gst_query_set_duration (query, GST_FORMAT_TIME, value);
|
||||
res = TRUE;
|
||||
}
|
||||
|
@ -1171,25 +1141,24 @@ gst_video_decoder_src_query (GstPad * pad, GstQuery * query)
|
|||
}
|
||||
break;
|
||||
default:
|
||||
res = gst_pad_query_default (pad, query);
|
||||
res = gst_pad_query_default (pad, parent, query);
|
||||
}
|
||||
gst_object_unref (dec);
|
||||
return res;
|
||||
|
||||
error:
|
||||
GST_ERROR_OBJECT (dec, "query failed");
|
||||
gst_object_unref (dec);
|
||||
return res;
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_video_decoder_sink_query (GstPad * pad, GstQuery * query)
|
||||
gst_video_decoder_sink_query (GstPad * pad, GstObject * parent,
|
||||
GstQuery * query)
|
||||
{
|
||||
GstVideoDecoder *decoder;
|
||||
GstVideoDecoderPrivate *priv;
|
||||
gboolean res = FALSE;
|
||||
|
||||
decoder = GST_VIDEO_DECODER (gst_pad_get_parent (pad));
|
||||
decoder = GST_VIDEO_DECODER (parent);
|
||||
priv = decoder->priv;
|
||||
|
||||
GST_LOG_OBJECT (decoder, "handling query: %" GST_PTR_FORMAT, query);
|
||||
|
@ -1210,11 +1179,10 @@ gst_video_decoder_sink_query (GstPad * pad, GstQuery * query)
|
|||
break;
|
||||
}
|
||||
default:
|
||||
res = gst_pad_query_default (pad, query);
|
||||
res = gst_pad_query_default (pad, parent, query);
|
||||
break;
|
||||
}
|
||||
done:
|
||||
gst_object_unref (decoder);
|
||||
|
||||
return res;
|
||||
error:
|
||||
|
@ -1333,11 +1301,7 @@ gst_video_decoder_reset (GstVideoDecoder * decoder, gboolean full)
|
|||
priv->discont = TRUE;
|
||||
|
||||
priv->timestamp_offset = GST_CLOCK_TIME_NONE;
|
||||
priv->last_timestamp_in = GST_CLOCK_TIME_NONE;
|
||||
priv->last_timestamp_out = GST_CLOCK_TIME_NONE;
|
||||
priv->last_out_frame_number = 0;
|
||||
priv->reordered_output = FALSE;
|
||||
priv->reordered_input = FALSE;
|
||||
priv->last_timestamp = GST_CLOCK_TIME_NONE;
|
||||
|
||||
priv->input_offset = 0;
|
||||
priv->frame_offset = 0;
|
||||
|
@ -1369,8 +1333,6 @@ gst_video_decoder_reset (GstVideoDecoder * decoder, gboolean full)
|
|||
priv->earliest_time = GST_CLOCK_TIME_NONE;
|
||||
priv->proportion = 0.5;
|
||||
|
||||
priv->reorder_idx_out = priv->reorder_idx_in = 0;
|
||||
|
||||
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
||||
}
|
||||
|
||||
|
@ -1393,7 +1355,7 @@ gst_video_decoder_chain_forward (GstVideoDecoder * decoder, GstBuffer * buf)
|
|||
if (GST_BUFFER_TIMESTAMP_IS_VALID (buf)) {
|
||||
gst_video_decoder_add_timestamp (decoder, buf);
|
||||
}
|
||||
priv->input_offset += GST_BUFFER_SIZE (buf);
|
||||
priv->input_offset += gst_buffer_get_size (buf);
|
||||
|
||||
if (priv->packetized) {
|
||||
priv->current_frame->input_buffer = buf;
|
||||
|
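GstBuffer no longer exposes its size as a struct field: GST_BUFFER_SIZE gives way to gst_buffer_get_size(), which returns a gsize (hence the G_GSIZE_FORMAT specifiers appearing in the debug lines of this patch), and gst_buffer_make_metadata_writable() becomes gst_buffer_make_writable(). A small sketch of the replacement calls used throughout (prepare_for_push is an invented helper name):

static GstBuffer *
prepare_for_push (GstBuffer * buf)
{
  gsize size = gst_buffer_get_size (buf);   /* was GST_BUFFER_SIZE (buf) */

  GST_LOG ("buffer of %" G_GSIZE_FORMAT " bytes", size);

  /* was gst_buffer_make_metadata_writable (buf) */
  buf = gst_buffer_make_writable (buf);
  GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);

  return buf;
}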
@ -1529,12 +1491,12 @@ gst_video_decoder_flush_parse (GstVideoDecoder * dec)
|
|||
GstBuffer *buf = GST_BUFFER_CAST (priv->queued->data);
|
||||
|
||||
if (G_LIKELY (res == GST_FLOW_OK)) {
|
||||
GST_DEBUG_OBJECT (dec, "pushing buffer %p of size %u, "
|
||||
GST_DEBUG_OBJECT (dec, "pushing buffer %p of size %" G_GSIZE_FORMAT ", "
|
||||
"time %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT, buf,
|
||||
GST_BUFFER_SIZE (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
|
||||
gst_buffer_get_size (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
|
||||
GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
|
||||
/* should be already, but let's be sure */
|
||||
buf = gst_buffer_make_metadata_writable (buf);
|
||||
buf = gst_buffer_make_writable (buf);
|
||||
/* avoid stray DISCONT from forward processing,
|
||||
* which have no meaning in reverse pushing */
|
||||
GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
|
||||
|
@ -1572,9 +1534,9 @@ gst_video_decoder_chain_reverse (GstVideoDecoder * dec, GstBuffer * buf)
|
|||
}
|
||||
|
||||
if (G_LIKELY (buf)) {
|
||||
GST_DEBUG_OBJECT (dec, "gathering buffer %p of size %u, "
|
||||
GST_DEBUG_OBJECT (dec, "gathering buffer %p of size %" G_GSIZE_FORMAT ", "
|
||||
"time %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT, buf,
|
||||
GST_BUFFER_SIZE (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
|
||||
gst_buffer_get_size (buf), GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
|
||||
GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
|
||||
|
||||
/* add buffer to gather queue */
|
||||
|
@ -1585,19 +1547,19 @@ gst_video_decoder_chain_reverse (GstVideoDecoder * dec, GstBuffer * buf)
|
|||
}
|
||||
|
||||
static GstFlowReturn
|
||||
gst_video_decoder_chain (GstPad * pad, GstBuffer * buf)
|
||||
gst_video_decoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
|
||||
{
|
||||
GstVideoDecoder *decoder;
|
||||
GstVideoDecoderPrivate *priv;
|
||||
GstFlowReturn ret = GST_FLOW_OK;
|
||||
|
||||
decoder = GST_VIDEO_DECODER (GST_PAD_PARENT (pad));
|
||||
decoder = GST_VIDEO_DECODER (parent);
|
||||
priv = decoder->priv;
|
||||
|
||||
GST_LOG_OBJECT (decoder,
|
||||
"chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT " size %d",
|
||||
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
|
||||
GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), GST_BUFFER_SIZE (buf));
|
||||
"chain %" GST_TIME_FORMAT " duration %" GST_TIME_FORMAT " size %"
|
||||
G_GSIZE_FORMAT, GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
|
||||
GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), gst_buffer_get_size (buf));
|
||||
|
||||
GST_VIDEO_DECODER_STREAM_LOCK (decoder);
|
||||
|
||||
|
@ -1607,16 +1569,15 @@ gst_video_decoder_chain (GstPad * pad, GstBuffer * buf)
|
|||
|
||||
if (decoder->input_segment.format == GST_FORMAT_UNDEFINED) {
|
||||
GstEvent *event;
|
||||
GstSegment *segment = &decoder->input_segment;
|
||||
|
||||
GST_WARNING_OBJECT (decoder,
|
||||
"Received buffer without a new-segment. "
|
||||
"Assuming timestamps start from 0.");
|
||||
|
||||
gst_segment_set_newsegment_full (&decoder->input_segment, FALSE, 1.0, 1.0,
|
||||
GST_FORMAT_TIME, 0, GST_CLOCK_TIME_NONE, 0);
|
||||
gst_segment_init (segment, GST_FORMAT_TIME);
|
||||
|
||||
event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
|
||||
GST_CLOCK_TIME_NONE, 0);
|
||||
event = gst_event_new_segment (segment);
|
||||
|
||||
decoder->priv->current_frame_events =
|
||||
g_list_prepend (decoder->priv->current_frame_events, event);
|
||||
|
@ -1766,7 +1727,6 @@ gst_video_decoder_prepare_finish_frame (GstVideoDecoder *
|
|||
{
|
||||
GstVideoDecoderPrivate *priv = decoder->priv;
|
||||
GList *l, *events = NULL;
|
||||
GstClockTime reorder_pts;
|
||||
|
||||
#ifndef GST_DISABLE_GST_DEBUG
|
||||
GST_LOG_OBJECT (decoder, "n %d in %d out %d",
|
||||
|
@ -1775,22 +1735,9 @@ gst_video_decoder_prepare_finish_frame (GstVideoDecoder *
|
|||
gst_adapter_available (priv->output_adapter));
|
||||
#endif
|
||||
|
||||
reorder_pts = priv->incoming_timestamps[priv->reorder_idx_out];
|
||||
priv->reorder_idx_out =
|
||||
(priv->reorder_idx_out + 1) % MAX_DTS_PTS_REORDER_DEPTH;
|
||||
|
||||
if (!priv->reordered_output && frame->system_frame_number &&
|
||||
frame->system_frame_number != (priv->last_out_frame_number + 1)) {
|
||||
GST_DEBUG_OBJECT (decoder, "Detected reordered output");
|
||||
priv->reordered_output = TRUE;
|
||||
}
|
||||
|
||||
GST_LOG_OBJECT (decoder,
|
||||
"finish frame (#%d) sync:%d pts:%" GST_TIME_FORMAT " dts:%"
|
||||
GST_TIME_FORMAT " reorder_pts:%" GST_TIME_FORMAT,
|
||||
frame->system_frame_number, GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame),
|
||||
GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (frame->dts),
|
||||
GST_TIME_ARGS (reorder_pts));
|
||||
"finish frame sync=%d pts=%" GST_TIME_FORMAT,
|
||||
GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame), GST_TIME_ARGS (frame->pts));
|
||||
|
||||
/* Push all pending events that arrived before this frame */
|
||||
for (l = priv->frames; l; l = l->next) {
|
||||
|
@ -1813,9 +1760,14 @@ gst_video_decoder_prepare_finish_frame (GstVideoDecoder *
|
|||
|
||||
/* Check if the data should not be displayed. For example altref/invisible
|
||||
* frame in vp8. In this case we should not update the timestamps. */
|
||||
if (GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY (frame) || !frame->output_buffer)
|
||||
if (GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY (frame))
|
||||
return;
|
||||
|
||||
/* If the frame is meant to be outputted but we don't have an output buffer
|
||||
* we have a problem :) */
|
||||
if (G_UNLIKELY (frame->output_buffer == NULL))
|
||||
goto no_output_buffer;
|
||||
|
||||
if (GST_CLOCK_TIME_IS_VALID (frame->pts)) {
|
||||
if (frame->pts != priv->timestamp_offset) {
|
||||
GST_DEBUG_OBJECT (decoder,
|
||||
|
@ -1839,38 +1791,32 @@ gst_video_decoder_prepare_finish_frame (GstVideoDecoder *
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (frame->pts == GST_CLOCK_TIME_NONE) {
|
||||
frame->pts =
|
||||
gst_video_decoder_get_timestamp (decoder, frame->decode_frame_number);
|
||||
frame->duration = GST_CLOCK_TIME_NONE;
|
||||
}
|
||||
|
||||
if (frame->duration == GST_CLOCK_TIME_NONE) {
|
||||
frame->duration = gst_video_decoder_get_frame_duration (decoder, frame);
|
||||
}
|
||||
|
||||
/* Fix buffers that came in with DTS and were reordered */
|
||||
if (!priv->reordered_input && priv->reordered_output) {
|
||||
GST_DEBUG_OBJECT (decoder,
|
||||
"Correcting PTS, input buffers had DTS on their timestamps");
|
||||
frame->pts = reorder_pts;
|
||||
}
|
||||
|
||||
if (GST_CLOCK_TIME_IS_VALID (priv->last_timestamp_out)) {
|
||||
if (frame->pts < priv->last_timestamp_out) {
|
||||
if (GST_CLOCK_TIME_IS_VALID (priv->last_timestamp)) {
|
||||
if (frame->pts < priv->last_timestamp) {
|
||||
GST_WARNING_OBJECT (decoder,
|
||||
"decreasing timestamp (%" GST_TIME_FORMAT " < %"
|
||||
GST_TIME_FORMAT ")",
|
||||
GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (priv->last_timestamp_out));
|
||||
frame->pts = reorder_pts;
|
||||
GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (priv->last_timestamp));
|
||||
}
|
||||
}
|
||||
|
||||
priv->last_timestamp_out = frame->pts;
|
||||
priv->last_out_frame_number = frame->system_frame_number;
|
||||
priv->last_timestamp = frame->pts;
|
||||
|
||||
return;
|
||||
|
||||
/* ERRORS */
|
||||
no_output_buffer:
|
||||
{
|
||||
GST_ERROR_OBJECT (decoder, "No buffer to output !");
|
||||
}
|
||||
}
|
||||
|
||||
static void
|
||||
|
@ -1961,7 +1907,7 @@ gst_video_decoder_finish_frame (GstVideoDecoder * decoder,
|
|||
GstVideoCodecState *state = priv->output_state;
|
||||
GstBuffer *output_buffer;
|
||||
GstFlowReturn ret = GST_FLOW_OK;
|
||||
gint64 start, stop;
|
||||
guint64 start, stop;
|
||||
GstSegment *segment;
|
||||
|
||||
GST_LOG_OBJECT (decoder, "finish frame");
|
||||
|
@ -1980,28 +1926,28 @@ gst_video_decoder_finish_frame (GstVideoDecoder * decoder,
|
|||
goto done;
|
||||
}
|
||||
|
||||
output_buffer = gst_buffer_make_metadata_writable (frame->output_buffer);
|
||||
output_buffer = gst_buffer_make_writable (frame->output_buffer);
|
||||
frame->output_buffer = NULL;
|
||||
|
||||
GST_BUFFER_FLAG_UNSET (output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
|
||||
if (GST_VIDEO_INFO_IS_INTERLACED (&state->info)) {
|
||||
if (GST_VIDEO_CODEC_FRAME_FLAG_IS_SET (frame,
|
||||
GST_VIDEO_CODEC_FRAME_FLAG_TFF)) {
|
||||
GST_BUFFER_FLAG_SET (output_buffer, GST_VIDEO_BUFFER_TFF);
|
||||
GST_BUFFER_FLAG_SET (output_buffer, GST_VIDEO_BUFFER_FLAG_TFF);
|
||||
} else {
|
||||
GST_BUFFER_FLAG_UNSET (output_buffer, GST_VIDEO_BUFFER_TFF);
|
||||
GST_BUFFER_FLAG_UNSET (output_buffer, GST_VIDEO_BUFFER_FLAG_TFF);
|
||||
}
|
||||
if (GST_VIDEO_CODEC_FRAME_FLAG_IS_SET (frame,
|
||||
GST_VIDEO_CODEC_FRAME_FLAG_RFF)) {
|
||||
GST_BUFFER_FLAG_SET (output_buffer, GST_VIDEO_BUFFER_RFF);
|
||||
GST_BUFFER_FLAG_SET (output_buffer, GST_VIDEO_BUFFER_FLAG_RFF);
|
||||
} else {
|
||||
GST_BUFFER_FLAG_UNSET (output_buffer, GST_VIDEO_BUFFER_RFF);
|
||||
GST_BUFFER_FLAG_UNSET (output_buffer, GST_VIDEO_BUFFER_FLAG_RFF);
|
||||
}
|
||||
if (GST_VIDEO_CODEC_FRAME_FLAG_IS_SET (frame,
|
||||
GST_VIDEO_CODEC_FRAME_FLAG_ONEFIELD)) {
|
||||
GST_BUFFER_FLAG_SET (output_buffer, GST_VIDEO_BUFFER_ONEFIELD);
|
||||
GST_BUFFER_FLAG_SET (output_buffer, GST_VIDEO_BUFFER_FLAG_ONEFIELD);
|
||||
} else {
|
||||
GST_BUFFER_FLAG_UNSET (output_buffer, GST_VIDEO_BUFFER_ONEFIELD);
|
||||
GST_BUFFER_FLAG_UNSET (output_buffer, GST_VIDEO_BUFFER_FLAG_ONEFIELD);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2046,7 +1992,7 @@ gst_video_decoder_finish_frame (GstVideoDecoder * decoder,
|
|||
GST_BUFFER_OFFSET_END (output_buffer) = GST_BUFFER_OFFSET_NONE;
|
||||
|
||||
/* update rate estimate */
|
||||
priv->bytes_out += GST_BUFFER_SIZE (output_buffer);
|
||||
priv->bytes_out += gst_buffer_get_size (output_buffer);
|
||||
if (GST_CLOCK_TIME_IS_VALID (frame->duration)) {
|
||||
priv->time += frame->duration;
|
||||
} else {
|
||||
|
@ -2057,15 +2003,11 @@ gst_video_decoder_finish_frame (GstVideoDecoder * decoder,
|
|||
priv->time = GST_CLOCK_TIME_NONE;
|
||||
}
|
||||
|
||||
gst_buffer_set_caps (output_buffer, GST_PAD_CAPS (decoder->srcpad));
|
||||
|
||||
GST_LOG_OBJECT (decoder, "pushing frame ts %" GST_TIME_FORMAT
|
||||
", duration %" GST_TIME_FORMAT,
|
||||
GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (output_buffer)),
|
||||
GST_TIME_ARGS (GST_BUFFER_DURATION (output_buffer)));
|
||||
|
||||
|
||||
|
||||
/* we got data, so note things are looking up again */
|
||||
/* FIXME : Shouldn't we avoid going under zero ? */
|
||||
if (G_UNLIKELY (priv->error_count))
|
||||
|
@ -2249,17 +2191,6 @@ gst_video_decoder_have_frame_2 (GstVideoDecoder * decoder)
|
|||
gst_segment_to_running_time (&decoder->input_segment, GST_FORMAT_TIME,
|
||||
frame->pts);
|
||||
|
||||
/* Store pts */
|
||||
if (GST_CLOCK_TIME_IS_VALID (frame->pts)
|
||||
&& GST_CLOCK_TIME_IS_VALID (priv->last_timestamp_in)
|
||||
&& frame->pts < priv->last_timestamp_in) {
|
||||
GST_DEBUG_OBJECT (decoder, "Incoming timestamps are out of order");
|
||||
priv->reordered_input = TRUE;
|
||||
}
|
||||
priv->last_timestamp_in = frame->pts;
|
||||
priv->incoming_timestamps[priv->reorder_idx_in] = frame->pts;
|
||||
priv->reorder_idx_in = (priv->reorder_idx_in + 1) % MAX_DTS_PTS_REORDER_DEPTH;
|
||||
|
||||
/* do something with frame */
|
||||
ret = decoder_class->handle_frame (decoder, frame);
|
||||
if (ret != GST_FLOW_OK)
|
||||
|
@ -2467,22 +2398,14 @@ gst_video_decoder_alloc_output_buffer (GstVideoDecoder * decoder)
|
|||
GstVideoCodecState *state = decoder->priv->output_state;
|
||||
int num_bytes = GST_VIDEO_INFO_SIZE (&state->info);
|
||||
|
||||
GST_DEBUG ("alloc src buffer caps=%" GST_PTR_FORMAT,
|
||||
GST_PAD_CAPS (decoder->srcpad));
|
||||
GST_DEBUG ("alloc src buffer");
|
||||
|
||||
GST_VIDEO_DECODER_STREAM_LOCK (decoder);
|
||||
if (G_UNLIKELY (decoder->priv->output_state_changed))
|
||||
gst_video_decoder_set_src_caps (decoder);
|
||||
|
||||
flow_ret =
|
||||
gst_pad_alloc_buffer_and_set_caps (decoder->srcpad,
|
||||
GST_BUFFER_OFFSET_NONE, num_bytes, GST_PAD_CAPS (decoder->srcpad),
|
||||
&buffer);
|
||||
|
||||
if (flow_ret != GST_FLOW_OK) {
|
||||
buffer = gst_buffer_new_and_alloc (num_bytes);
|
||||
gst_buffer_set_caps (buffer, GST_PAD_CAPS (decoder->srcpad));
|
||||
}
|
||||
flow_ret = GST_FLOW_OK;
|
||||
buffer = gst_buffer_new_allocate (NULL, num_bytes, NULL);
|
||||
|
||||
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
||||
|
||||
|
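Since downstream allocation via gst_pad_alloc_buffer_and_set_caps() no longer exists, this initial port simply allocates output buffers from the default allocator, and caps now travel in CAPS events rather than on each buffer. The call in the hunk above boils down to (alloc_output_sketch is an invented name):

static GstBuffer *
alloc_output_sketch (gsize num_bytes)
{
  /* NULL allocator and NULL params select default system memory */
  return gst_buffer_new_allocate (NULL, num_bytes, NULL);
}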
@ -2516,20 +2439,11 @@ gst_video_decoder_alloc_output_frame (GstVideoDecoder *
|
|||
if (G_UNLIKELY (decoder->priv->output_state_changed))
|
||||
gst_video_decoder_set_src_caps (decoder);
|
||||
|
||||
g_return_val_if_fail (GST_PAD_CAPS (decoder->srcpad) != NULL, GST_FLOW_ERROR);
|
||||
|
||||
GST_LOG_OBJECT (decoder, "alloc buffer size %d", num_bytes);
|
||||
GST_VIDEO_DECODER_STREAM_LOCK (decoder);
|
||||
|
||||
flow_ret =
|
||||
gst_pad_alloc_buffer_and_set_caps (decoder->srcpad,
|
||||
GST_BUFFER_OFFSET_NONE, num_bytes, GST_PAD_CAPS (decoder->srcpad),
|
||||
&frame->output_buffer);
|
||||
|
||||
if (flow_ret != GST_FLOW_OK) {
|
||||
GST_WARNING_OBJECT (decoder, "failed to get buffer %s",
|
||||
gst_flow_get_name (flow_ret));
|
||||
}
|
||||
flow_ret = GST_FLOW_OK;
|
||||
frame->output_buffer = gst_buffer_new_allocate (NULL, num_bytes, NULL);
|
||||
|
||||
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
||||
|
||||
|
@@ -26,6 +26,9 @@
#define _GST_VIDEO_DECODER_H_

#include <gst/base/gstadapter.h>
#include <gst/video/video.h>
#include <gst/video/gstvideopool.h>
#include <gst/video/gstvideometa.h>
#include <gst/video/gstvideoutils.h>

G_BEGIN_DECLS
@@ -125,7 +128,7 @@ G_BEGIN_DECLS
 *
 * Since: 0.10.36
 */
#define GST_VIDEO_DECODER_STREAM_LOCK(decoder) g_static_rec_mutex_lock (&GST_VIDEO_DECODER (decoder)->stream_lock)
#define GST_VIDEO_DECODER_STREAM_LOCK(decoder) g_rec_mutex_lock (&GST_VIDEO_DECODER (decoder)->stream_lock)

/**
 * GST_VIDEO_DECODER_STREAM_UNLOCK:
@@ -135,7 +138,7 @@ G_BEGIN_DECLS
 *
 * Since: 0.10.36
 */
#define GST_VIDEO_DECODER_STREAM_UNLOCK(decoder) g_static_rec_mutex_unlock (&GST_VIDEO_DECODER (decoder)->stream_lock)
#define GST_VIDEO_DECODER_STREAM_UNLOCK(decoder) g_rec_mutex_unlock (&GST_VIDEO_DECODER (decoder)->stream_lock)

typedef struct _GstVideoDecoder GstVideoDecoder;
typedef struct _GstVideoDecoderClass GstVideoDecoderClass;
@@ -211,7 +214,7 @@ struct _GstVideoDecoder
  /* protects all data processing, i.e. is locked
   * in the chain function, finish_frame and when
   * processing serialized events */
  GStaticRecMutex stream_lock;
  GRecMutex stream_lock;

  /* MT-protected (with STREAM_LOCK) */
  GstSegment input_segment;
|
|
@ -189,41 +189,65 @@ forced_key_unit_event_new (GstClockTime running_time, gboolean all_headers,
|
|||
return evt;
|
||||
}
|
||||
|
||||
static GstElementClass *parent_class = NULL;
|
||||
static void gst_video_encoder_class_init (GstVideoEncoderClass * klass);
|
||||
static void gst_video_encoder_init (GstVideoEncoder * enc,
|
||||
GstVideoEncoderClass * klass);
|
||||
|
||||
static void gst_video_encoder_finalize (GObject * object);
|
||||
|
||||
static gboolean gst_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps);
|
||||
static GstCaps *gst_video_encoder_sink_getcaps (GstPad * pad);
|
||||
static gboolean gst_video_encoder_src_event (GstPad * pad, GstEvent * event);
|
||||
static gboolean gst_video_encoder_sink_event (GstPad * pad, GstEvent * event);
|
||||
static GstFlowReturn gst_video_encoder_chain (GstPad * pad, GstBuffer * buf);
|
||||
static gboolean gst_video_encoder_setcaps (GstVideoEncoder * enc,
|
||||
GstCaps * caps);
|
||||
static GstCaps *gst_video_encoder_sink_getcaps (GstVideoEncoder * encoder,
|
||||
GstCaps * filter);
|
||||
static gboolean gst_video_encoder_src_event (GstPad * pad, GstObject * parent,
|
||||
GstEvent * event);
|
||||
static gboolean gst_video_encoder_sink_event (GstPad * pad, GstObject * parent,
|
||||
GstEvent * event);
|
||||
static GstFlowReturn gst_video_encoder_chain (GstPad * pad, GstObject * parent,
|
||||
GstBuffer * buf);
|
||||
static GstStateChangeReturn gst_video_encoder_change_state (GstElement *
|
||||
element, GstStateChange transition);
|
||||
static const GstQueryType *gst_video_encoder_get_query_types (GstPad * pad);
|
||||
static gboolean gst_video_encoder_src_query (GstPad * pad, GstQuery * query);
|
||||
static gboolean gst_video_encoder_sink_query (GstPad * pad, GstObject * parent,
|
||||
GstQuery * query);
|
||||
static gboolean gst_video_encoder_src_query (GstPad * pad, GstObject * parent,
|
||||
GstQuery * query);
|
||||
static GstVideoCodecFrame *gst_video_encoder_new_frame (GstVideoEncoder *
|
||||
encoder, GstBuffer * buf, GstClockTime timestamp, GstClockTime duration);
|
||||
|
||||
static void
|
||||
_do_init (GType object_type)
|
||||
/* we can't use G_DEFINE_ABSTRACT_TYPE because we need the klass in the _init
|
||||
* method to get to the padtemplates */
|
||||
GType
|
||||
gst_video_encoder_get_type (void)
|
||||
{
|
||||
const GInterfaceInfo preset_interface_info = {
|
||||
NULL, /* interface_init */
|
||||
NULL, /* interface_finalize */
|
||||
NULL /* interface_data */
|
||||
};
|
||||
static volatile gsize type = 0;
|
||||
|
||||
g_type_add_interface_static (object_type, GST_TYPE_PRESET,
|
||||
&preset_interface_info);
|
||||
}
|
||||
if (g_once_init_enter (&type)) {
|
||||
GType _type;
|
||||
static const GTypeInfo info = {
|
||||
sizeof (GstVideoEncoderClass),
|
||||
NULL,
|
||||
NULL,
|
||||
(GClassInitFunc) gst_video_encoder_class_init,
|
||||
NULL,
|
||||
NULL,
|
||||
sizeof (GstVideoEncoder),
|
||||
0,
|
||||
(GInstanceInitFunc) gst_video_encoder_init,
|
||||
};
|
||||
const GInterfaceInfo preset_interface_info = {
|
||||
NULL, /* interface_init */
|
||||
NULL, /* interface_finalize */
|
||||
NULL /* interface_data */
|
||||
};
|
||||
|
||||
GST_BOILERPLATE_FULL (GstVideoEncoder, gst_video_encoder,
|
||||
GstElement, GST_TYPE_ELEMENT, _do_init);
|
||||
|
||||
static void
|
||||
gst_video_encoder_base_init (gpointer g_class)
|
||||
{
|
||||
GST_DEBUG_CATEGORY_INIT (videoencoder_debug, "videoencoder", 0,
|
||||
"Base Video Encoder");
|
||||
_type = g_type_register_static (GST_TYPE_ELEMENT,
|
||||
"GstVideoEncoder", &info, G_TYPE_FLAG_ABSTRACT);
|
||||
g_type_add_interface_static (_type, GST_TYPE_PRESET,
|
||||
&preset_interface_info);
|
||||
g_once_init_leave (&type, _type);
|
||||
}
|
||||
return type;
|
||||
}
|
||||
|
||||
static void
|
||||
|
@ -235,6 +259,11 @@ gst_video_encoder_class_init (GstVideoEncoderClass * klass)
|
|||
gobject_class = G_OBJECT_CLASS (klass);
|
||||
gstelement_class = GST_ELEMENT_CLASS (klass);
|
||||
|
||||
GST_DEBUG_CATEGORY_INIT (videoencoder_debug, "videoencoder", 0,
|
||||
"Base Video Encoder");
|
||||
|
||||
parent_class = g_type_class_peek_parent (klass);
|
||||
|
||||
g_type_class_add_private (klass, sizeof (GstVideoEncoderPrivate));
|
||||
|
||||
gobject_class->finalize = gst_video_encoder_finalize;
|
||||
|
@ -311,10 +340,8 @@ gst_video_encoder_init (GstVideoEncoder * encoder, GstVideoEncoderClass * klass)
|
|||
gst_pad_set_chain_function (pad, GST_DEBUG_FUNCPTR (gst_video_encoder_chain));
|
||||
gst_pad_set_event_function (pad,
|
||||
GST_DEBUG_FUNCPTR (gst_video_encoder_sink_event));
|
||||
gst_pad_set_setcaps_function (pad,
|
||||
GST_DEBUG_FUNCPTR (gst_video_encoder_sink_setcaps));
|
||||
gst_pad_set_getcaps_function (pad,
|
||||
GST_DEBUG_FUNCPTR (gst_video_encoder_sink_getcaps));
|
||||
gst_pad_set_query_function (pad,
|
||||
GST_DEBUG_FUNCPTR (gst_video_encoder_sink_query));
|
||||
gst_element_add_pad (GST_ELEMENT (encoder), encoder->sinkpad);
|
||||
|
||||
pad_template =
|
||||
|
@ -323,8 +350,6 @@ gst_video_encoder_init (GstVideoEncoder * encoder, GstVideoEncoderClass * klass)
|
|||
|
||||
encoder->srcpad = pad = gst_pad_new_from_template (pad_template, "src");
|
||||
|
||||
gst_pad_set_query_type_function (pad,
|
||||
GST_DEBUG_FUNCPTR (gst_video_encoder_get_query_types));
|
||||
gst_pad_set_query_function (pad,
|
||||
GST_DEBUG_FUNCPTR (gst_video_encoder_src_query));
|
||||
gst_pad_set_event_function (pad,
|
||||
|
@ -334,7 +359,7 @@ gst_video_encoder_init (GstVideoEncoder * encoder, GstVideoEncoderClass * klass)
|
|||
gst_segment_init (&encoder->input_segment, GST_FORMAT_TIME);
|
||||
gst_segment_init (&encoder->output_segment, GST_FORMAT_TIME);
|
||||
|
||||
g_static_rec_mutex_init (&encoder->stream_lock);
|
||||
g_rec_mutex_init (&encoder->stream_lock);
|
||||
|
||||
priv->at_eos = FALSE;
|
||||
priv->headers = NULL;
|
||||
|
@ -510,15 +535,13 @@ parse_fail:
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
|
||||
gst_video_encoder_setcaps (GstVideoEncoder * encoder, GstCaps * caps)
|
||||
{
|
||||
GstVideoEncoder *encoder;
|
||||
GstVideoEncoderClass *encoder_class;
|
||||
GstVideoCodecState *state;
|
||||
gboolean ret;
|
||||
gboolean samecaps = FALSE;
|
||||
|
||||
encoder = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
|
||||
encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
|
||||
|
||||
/* subclass should do something here ... */
|
||||
|
@ -588,9 +611,10 @@ parse_fail:
|
|||
* Since: 0.10.36
|
||||
*/
|
||||
GstCaps *
|
||||
gst_video_encoder_proxy_getcaps (GstVideoEncoder * encoder, GstCaps * caps)
|
||||
gst_video_encoder_proxy_getcaps (GstVideoEncoder * encoder, GstCaps * caps,
|
||||
GstCaps * filter)
|
||||
{
|
||||
const GstCaps *templ_caps;
|
||||
GstCaps *templ_caps;
|
||||
GstCaps *allowed;
|
||||
GstCaps *fcaps, *filter_caps;
|
||||
gint i, j;
|
||||
|
@ -602,7 +626,7 @@ gst_video_encoder_proxy_getcaps (GstVideoEncoder * encoder, GstCaps * caps)
|
|||
allowed = gst_pad_get_allowed_caps (encoder->srcpad);
|
||||
|
||||
if (!allowed || gst_caps_is_empty (allowed) || gst_caps_is_any (allowed)) {
|
||||
fcaps = gst_caps_copy (templ_caps);
|
||||
fcaps = templ_caps;
|
||||
goto done;
|
||||
}
|
||||
|
||||
|
@ -620,7 +644,7 @@ gst_video_encoder_proxy_getcaps (GstVideoEncoder * encoder, GstCaps * caps)
|
|||
const GValue *val;
|
||||
GstStructure *s;
|
||||
|
||||
s = gst_structure_id_empty_new (q_name);
|
||||
s = gst_structure_new_id_empty (q_name);
|
||||
if ((val = gst_structure_get_value (allowed_s, "width")))
|
||||
gst_structure_set_value (s, "width", val);
|
||||
if ((val = gst_structure_get_value (allowed_s, "height")))
|
||||
|
@ -630,12 +654,20 @@ gst_video_encoder_proxy_getcaps (GstVideoEncoder * encoder, GstCaps * caps)
|
|||
if ((val = gst_structure_get_value (allowed_s, "pixel-aspect-ratio")))
|
||||
gst_structure_set_value (s, "pixel-aspect-ratio", val);
|
||||
|
||||
gst_caps_merge_structure (filter_caps, s);
|
||||
filter_caps = gst_caps_merge_structure (filter_caps, s);
|
||||
}
|
||||
}
|
||||
|
||||
fcaps = gst_caps_intersect (filter_caps, templ_caps);
|
||||
gst_caps_unref (filter_caps);
|
||||
gst_caps_unref (templ_caps);
|
||||
|
||||
if (filter) {
|
||||
GST_LOG_OBJECT (encoder, "intersecting with %" GST_PTR_FORMAT, filter);
|
||||
filter_caps = gst_caps_intersect (fcaps, filter);
|
||||
gst_caps_unref (fcaps);
|
||||
fcaps = filter_caps;
|
||||
}
|
||||
|
||||
done:
|
||||
gst_caps_replace (&allowed, NULL);
|
||||
|
@ -646,26 +678,52 @@ done:
|
|||
}
|
||||
|
||||
static GstCaps *
|
||||
gst_video_encoder_sink_getcaps (GstPad * pad)
|
||||
gst_video_encoder_sink_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
|
||||
{
|
||||
GstVideoEncoder *encoder;
|
||||
GstVideoEncoderClass *klass;
|
||||
GstCaps *caps;
|
||||
|
||||
encoder = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
|
||||
klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
|
||||
|
||||
if (klass->getcaps)
|
||||
caps = klass->getcaps (encoder);
|
||||
caps = klass->getcaps (encoder, filter);
|
||||
else
|
||||
caps = gst_video_encoder_proxy_getcaps (encoder, NULL);
|
||||
gst_object_unref (encoder);
|
||||
caps = gst_video_encoder_proxy_getcaps (encoder, NULL, filter);
|
||||
|
||||
GST_LOG_OBJECT (encoder, "Returning caps %" GST_PTR_FORMAT, caps);
|
||||
|
||||
return caps;
|
||||
}
|
||||
|
||||
|
||||
static gboolean
|
||||
gst_video_encoder_sink_query (GstPad * pad, GstObject * parent,
|
||||
GstQuery * query)
|
||||
{
|
||||
GstVideoEncoder *encoder;
|
||||
gboolean res = FALSE;
|
||||
|
||||
encoder = GST_VIDEO_ENCODER (parent);
|
||||
|
||||
switch (GST_QUERY_TYPE (query)) {
|
||||
case GST_QUERY_CAPS:
|
||||
{
|
||||
GstCaps *filter, *caps;
|
||||
|
||||
gst_query_parse_caps (query, &filter);
|
||||
caps = gst_video_encoder_sink_getcaps (encoder, filter);
|
||||
gst_query_set_caps_result (query, caps);
|
||||
gst_caps_unref (caps);
|
||||
res = TRUE;
|
||||
break;
|
||||
}
|
||||
default:
|
||||
res = gst_pad_query_default (pad, parent, query);
|
||||
break;
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
static void
|
||||
gst_video_encoder_finalize (GObject * object)
|
||||
{
|
||||
|
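On the encoder side the setcaps/getcaps pad functions are gone as well: caps queries are answered in the new sink query handler above, and both the ->getcaps vfunc and gst_video_encoder_proxy_getcaps() grow a filter argument. A hypothetical subclass override (gst_foo_enc_getcaps is an invented name) would now look like:

static GstCaps *
gst_foo_enc_getcaps (GstVideoEncoder * encoder, GstCaps * filter)
{
  /* proxy the srcpad's allowed caps back to the sinkpad,
   * honouring the filter supplied with the CAPS query */
  return gst_video_encoder_proxy_getcaps (encoder, NULL, filter);
}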
@ -678,7 +736,7 @@ gst_video_encoder_finalize (GObject * object)
|
|||
g_list_foreach (encoder->priv->headers, (GFunc) gst_buffer_unref, NULL);
|
||||
g_list_free (encoder->priv->headers);
|
||||
}
|
||||
g_static_rec_mutex_free (&encoder->stream_lock);
|
||||
g_rec_mutex_clear (&encoder->stream_lock);
|
||||
|
||||
G_OBJECT_CLASS (parent_class)->finalize (object);
|
||||
}
|
||||
|
@ -687,34 +745,23 @@ static gboolean
|
|||
gst_video_encoder_push_event (GstVideoEncoder * encoder, GstEvent * event)
|
||||
{
|
||||
switch (GST_EVENT_TYPE (event)) {
|
||||
case GST_EVENT_NEWSEGMENT:
|
||||
case GST_EVENT_SEGMENT:
|
||||
{
|
||||
gboolean update;
|
||||
double rate;
|
||||
double applied_rate;
|
||||
GstFormat format;
|
||||
gint64 start;
|
||||
gint64 stop;
|
||||
gint64 position;
|
||||
GstSegment segment;
|
||||
|
||||
GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
|
||||
gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
|
||||
&format, &start, &stop, &position);
|
||||
|
||||
GST_DEBUG_OBJECT (encoder, "newseg rate %g, applied rate %g, "
|
||||
"format %d, start = %" GST_TIME_FORMAT ", stop = %" GST_TIME_FORMAT
|
||||
", pos = %" GST_TIME_FORMAT, rate, applied_rate, format,
|
||||
GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
|
||||
GST_TIME_ARGS (position));
|
||||
gst_event_copy_segment (event, &segment);
|
||||
|
||||
if (format != GST_FORMAT_TIME) {
|
||||
GST_DEBUG_OBJECT (encoder, "received non TIME newsegment");
|
||||
GST_DEBUG_OBJECT (encoder, "segment %" GST_SEGMENT_FORMAT, &segment);
|
||||
|
||||
if (segment.format != GST_FORMAT_TIME) {
|
||||
GST_DEBUG_OBJECT (encoder, "received non TIME segment");
|
||||
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
|
||||
break;
|
||||
}
|
||||
|
||||
gst_segment_set_newsegment_full (&encoder->output_segment, update, rate,
|
||||
applied_rate, format, start, stop, position);
|
||||
encoder->output_segment = segment;
|
||||
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
|
||||
break;
|
||||
}
|
||||
|
@ -734,6 +781,15 @@ gst_video_encoder_sink_eventfunc (GstVideoEncoder * encoder, GstEvent * event)
|
|||
encoder_class = GST_VIDEO_ENCODER_GET_CLASS (encoder);
|
||||
|
||||
switch (GST_EVENT_TYPE (event)) {
|
||||
case GST_EVENT_CAPS:
|
||||
{
|
||||
GstCaps *caps;
|
||||
|
||||
gst_event_parse_caps (event, &caps);
|
||||
ret = gst_video_encoder_setcaps (encoder, caps);
|
||||
gst_event_unref (event);
|
||||
break;
|
||||
}
|
||||
case GST_EVENT_EOS:
|
||||
{
|
||||
GstFlowReturn flow_ret;
|
||||
|
@ -751,27 +807,17 @@ gst_video_encoder_sink_eventfunc (GstVideoEncoder * encoder, GstEvent * event)
|
|||
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
|
||||
break;
|
||||
}
|
||||
case GST_EVENT_NEWSEGMENT:
|
||||
case GST_EVENT_SEGMENT:
|
||||
{
|
||||
gboolean update;
|
||||
double rate;
|
||||
double applied_rate;
|
||||
GstFormat format;
|
||||
gint64 start;
|
||||
gint64 stop;
|
||||
gint64 position;
|
||||
GstSegment segment;
|
||||
|
||||
GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
|
||||
gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
|
||||
&format, &start, &stop, &position);
|
||||
|
||||
GST_DEBUG_OBJECT (encoder, "newseg rate %g, applied rate %g, "
|
||||
"format %d, start = %" GST_TIME_FORMAT ", stop = %" GST_TIME_FORMAT
|
||||
", pos = %" GST_TIME_FORMAT, rate, applied_rate, format,
|
||||
GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
|
||||
GST_TIME_ARGS (position));
|
||||
gst_event_copy_segment (event, &segment);
|
||||
|
||||
if (format != GST_FORMAT_TIME) {
|
||||
GST_DEBUG_OBJECT (encoder, "segment %" GST_SEGMENT_FORMAT, &segment);
|
||||
|
||||
if (segment.format != GST_FORMAT_TIME) {
|
||||
GST_DEBUG_OBJECT (encoder, "received non TIME newsegment");
|
||||
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
|
||||
break;
|
||||
|
@ -779,8 +825,7 @@ gst_video_encoder_sink_eventfunc (GstVideoEncoder * encoder, GstEvent * event)
|
|||
|
||||
encoder->priv->at_eos = FALSE;
|
||||
|
||||
gst_segment_set_newsegment_full (&encoder->input_segment, update, rate,
|
||||
applied_rate, format, start, stop, position);
|
||||
encoder->input_segment = segment;
|
||||
GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
|
||||
break;
|
||||
}
|
||||
|
@ -819,14 +864,15 @@ gst_video_encoder_sink_eventfunc (GstVideoEncoder * encoder, GstEvent * event)
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_video_encoder_sink_event (GstPad * pad, GstEvent * event)
|
||||
gst_video_encoder_sink_event (GstPad * pad, GstObject * parent,
|
||||
GstEvent * event)
|
||||
{
|
||||
GstVideoEncoder *enc;
|
||||
GstVideoEncoderClass *klass;
|
||||
gboolean handled = FALSE;
|
||||
gboolean ret = TRUE;
|
||||
|
||||
enc = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
|
||||
enc = GST_VIDEO_ENCODER (parent);
|
||||
klass = GST_VIDEO_ENCODER_GET_CLASS (enc);
|
||||
|
||||
GST_DEBUG_OBJECT (enc, "received event %d, %s", GST_EVENT_TYPE (event),
|
||||
|
@ -864,7 +910,6 @@ gst_video_encoder_sink_event (GstPad * pad, GstEvent * event)
|
|||
|
||||
GST_DEBUG_OBJECT (enc, "event handled");
|
||||
|
||||
gst_object_unref (enc);
|
||||
return ret;
|
||||
}
|
||||
|
||||
|
@ -909,14 +954,14 @@ gst_video_encoder_src_eventfunc (GstVideoEncoder * encoder, GstEvent * event)
|
|||
}
|
||||
|
||||
static gboolean
|
||||
gst_video_encoder_src_event (GstPad * pad, GstEvent * event)
|
||||
gst_video_encoder_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
|
||||
{
|
||||
GstVideoEncoder *encoder;
|
||||
GstVideoEncoderClass *klass;
|
||||
gboolean ret = FALSE;
|
||||
gboolean handled = FALSE;
|
||||
|
||||
encoder = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
|
||||
encoder = GST_VIDEO_ENCODER (parent);
|
||||
klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
|
||||
|
||||
GST_LOG_OBJECT (encoder, "handling event: %" GST_PTR_FORMAT, event);
|
||||
|
@ -928,36 +973,20 @@ gst_video_encoder_src_event (GstPad * pad, GstEvent * event)
|
|||
handled = gst_video_encoder_src_eventfunc (encoder, event);
|
||||
|
||||
if (!handled)
|
||||
ret = gst_pad_event_default (pad, event);
|
||||
|
||||
gst_object_unref (encoder);
|
||||
ret = gst_pad_event_default (pad, parent, event);
|
||||
|
||||
return ret;
|
||||
}
|
||||
|
||||
static const GstQueryType *
|
||||
gst_video_encoder_get_query_types (GstPad * pad)
|
||||
{
|
||||
static const GstQueryType query_types[] = {
|
||||
GST_QUERY_CONVERT,
|
||||
GST_QUERY_LATENCY,
|
||||
0
|
||||
};
|
||||
|
||||
return query_types;
|
||||
}
|
||||
|
||||
static gboolean
|
||||
gst_video_encoder_src_query (GstPad * pad, GstQuery * query)
|
||||
gst_video_encoder_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
|
||||
{
|
||||
GstVideoEncoderPrivate *priv;
|
||||
GstVideoEncoder *enc;
|
||||
gboolean res;
|
||||
GstPad *peerpad;
|
||||
|
||||
enc = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
|
||||
enc = GST_VIDEO_ENCODER (parent);
|
||||
priv = enc->priv;
|
||||
peerpad = gst_pad_get_peer (enc->sinkpad);
|
||||
|
||||
GST_LOG_OBJECT (enc, "handling query: %" GST_PTR_FORMAT, query);
|
||||
|
||||
|
@@ -981,7 +1010,7 @@ gst_video_encoder_src_query (GstPad * pad, GstQuery * query)
       gboolean live;
       GstClockTime min_latency, max_latency;
 
-      res = gst_pad_query (peerpad, query);
+      res = gst_pad_peer_query (enc->sinkpad, query);
       if (res) {
         gst_query_parse_latency (query, &live, &min_latency, &max_latency);
         GST_DEBUG_OBJECT (enc, "Peer latency: live %d, min %"
@@ -1000,16 +1029,12 @@ gst_video_encoder_src_query (GstPad * pad, GstQuery * query)
     }
       break;
     default:
-      res = gst_pad_query_default (pad, query);
+      res = gst_pad_query_default (pad, parent, query);
   }
 
-  gst_object_unref (peerpad);
-  gst_object_unref (enc);
   return res;
 
 error:
   GST_DEBUG_OBJECT (enc, "query failed");
-  gst_object_unref (peerpad);
-  gst_object_unref (enc);
   return res;
 }
 
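gst_pad_peer_query() above replaces the 0.10 get-peer/query/unref sequence, which is why the peerpad bookkeeping can go away entirely. A hedged sketch of issuing and parsing a latency query against the sink pad's peer (the helper is hypothetical, not part of the patch):

static void
query_peer_latency (GstVideoEncoder * enc)
{
  GstQuery *query = gst_query_new_latency ();

  /* queries whatever is linked to the sink pad, no peer ref needed */
  if (gst_pad_peer_query (enc->sinkpad, query)) {
    gboolean live;
    GstClockTime min_latency, max_latency;

    gst_query_parse_latency (query, &live, &min_latency, &max_latency);
    GST_DEBUG_OBJECT (enc, "peer latency: live %d, min %" GST_TIME_FORMAT
        ", max %" GST_TIME_FORMAT, live, GST_TIME_ARGS (min_latency),
        GST_TIME_ARGS (max_latency));
  }
  gst_query_unref (query);
}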
@@ -1043,16 +1068,16 @@ gst_video_encoder_new_frame (GstVideoEncoder * encoder, GstBuffer * buf,
 
 
 static GstFlowReturn
-gst_video_encoder_chain (GstPad * pad, GstBuffer * buf)
+gst_video_encoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
 {
   GstVideoEncoder *encoder;
   GstVideoEncoderPrivate *priv;
   GstVideoEncoderClass *klass;
   GstVideoCodecFrame *frame;
   GstFlowReturn ret = GST_FLOW_OK;
-  gint64 start, stop = GST_CLOCK_TIME_NONE, cstart, cstop;
+  guint64 start, stop = GST_CLOCK_TIME_NONE, cstart, cstop;
 
-  encoder = GST_VIDEO_ENCODER (gst_pad_get_parent (pad));
+  encoder = GST_VIDEO_ENCODER (parent);
   priv = encoder->priv;
   klass = GST_VIDEO_ENCODER_GET_CLASS (encoder);
 
@@ -1060,23 +1085,17 @@ gst_video_encoder_chain (GstPad * pad, GstBuffer * buf)
 
   GST_VIDEO_ENCODER_STREAM_LOCK (encoder);
 
-  /* .... ?? */
-  if (!GST_PAD_CAPS (pad)) {
-    ret = GST_FLOW_NOT_NEGOTIATED;
-    goto done;
-  }
-
   start = GST_BUFFER_TIMESTAMP (buf);
   if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_DURATION (buf)))
     stop = start + GST_BUFFER_DURATION (buf);
 
   GST_LOG_OBJECT (encoder,
       "received buffer of size %d with ts %" GST_TIME_FORMAT
-      ", duration %" GST_TIME_FORMAT, GST_BUFFER_SIZE (buf),
+      ", duration %" GST_TIME_FORMAT, gst_buffer_get_size (buf),
       GST_TIME_ARGS (start), GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
 
   if (priv->at_eos) {
-    ret = GST_FLOW_UNEXPECTED;
+    ret = GST_FLOW_EOS;
     goto done;
   }
 
@@ -1145,8 +1164,6 @@ gst_video_encoder_chain (GstPad * pad, GstBuffer * buf)
 done:
   GST_VIDEO_ENCODER_STREAM_UNLOCK (encoder);
 
-  gst_object_unref (encoder);
-
   return ret;
 }
 
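The chain function follows the same porting pattern, plus two renames visible above: GST_BUFFER_SIZE() becomes gst_buffer_get_size() and GST_FLOW_UNEXPECTED becomes GST_FLOW_EOS. A rough, illustrative chain skeleton under those assumptions (my_chain is a made-up name, not the base class implementation):

static GstFlowReturn
my_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
{
  GstElement *element = GST_ELEMENT (parent);

  /* GST_BUFFER_SIZE () is gone in 0.11; the size is queried from the buffer */
  GST_LOG_OBJECT (element, "received %" G_GSIZE_FORMAT " bytes, ts %"
      GST_TIME_FORMAT, gst_buffer_get_size (buf),
      GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)));

  gst_buffer_unref (buf);

  /* a handler that is already past EOS would return GST_FLOW_EOS here,
   * the renamed GST_FLOW_UNEXPECTED */
  return GST_FLOW_OK;
}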
@@ -1399,7 +1416,7 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
   GST_BUFFER_DURATION (frame->output_buffer) = frame->duration;
 
   /* update rate estimate */
-  priv->bytes += GST_BUFFER_SIZE (frame->output_buffer);
+  priv->bytes += gst_buffer_get_size (frame->output_buffer);
   if (GST_CLOCK_TIME_IS_VALID (frame->duration)) {
     priv->time += frame->duration;
   } else {
@@ -1416,7 +1433,7 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
     for (tmp = priv->headers; tmp; tmp = tmp->next) {
       GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
 
-      copy = g_list_append (copy, gst_buffer_make_metadata_writable (tmpbuf));
+      copy = g_list_append (copy, gst_buffer_make_writable (tmpbuf));
     }
     g_list_free (priv->headers);
     priv->headers = copy;
@@ -1424,9 +1441,8 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
     for (tmp = priv->headers; tmp; tmp = tmp->next) {
       GstBuffer *tmpbuf = GST_BUFFER (tmp->data);
 
-      gst_buffer_set_caps (tmpbuf, GST_PAD_CAPS (encoder->srcpad));
       gst_buffer_ref (tmpbuf);
-      priv->bytes += GST_BUFFER_SIZE (tmpbuf);
+      priv->bytes += gst_buffer_get_size (tmpbuf);
       if (G_UNLIKELY (discont)) {
         GST_LOG_OBJECT (encoder, "marking discont");
         GST_BUFFER_FLAG_SET (tmpbuf, GST_BUFFER_FLAG_DISCONT);
@@ -1443,9 +1459,6 @@ gst_video_encoder_finish_frame (GstVideoEncoder * encoder,
     GST_BUFFER_FLAG_SET (frame->output_buffer, GST_BUFFER_FLAG_DISCONT);
   }
 
-  gst_buffer_set_caps (GST_BUFFER (frame->output_buffer),
-      GST_PAD_CAPS (encoder->srcpad));
-
   if (encoder_class->pre_push)
     ret = encoder_class->pre_push (encoder, frame);
 
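The gst_buffer_set_caps() removals above have no direct replacement in this function because 0.11 buffers no longer carry caps; the format is set once on the pad instead. A hedged illustration of the pad-level equivalent (the caps value is a placeholder, not taken from this patch):

static gboolean
set_output_caps (GstVideoEncoder * encoder)
{
  /* placeholder caps purely for illustration */
  GstCaps *caps = gst_caps_new_empty_simple ("video/x-raw");
  gboolean res;

  /* the format travels as a caps event on the pad, so individual output
   * buffers no longer need gst_buffer_set_caps () */
  res = gst_pad_set_caps (encoder->srcpad, caps);
  gst_caps_unref (caps);

  return res;
}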
@@ -135,7 +135,7 @@ G_BEGIN_DECLS
  *
  * Since: 0.10.36
  */
-#define GST_VIDEO_ENCODER_STREAM_LOCK(encoder) g_static_rec_mutex_lock (&GST_VIDEO_ENCODER (encoder)->stream_lock)
+#define GST_VIDEO_ENCODER_STREAM_LOCK(encoder) g_rec_mutex_lock (&GST_VIDEO_ENCODER (encoder)->stream_lock)
 
 /**
  * GST_VIDEO_ENCODER_STREAM_UNLOCK:
@@ -145,7 +145,7 @@ G_BEGIN_DECLS
  *
  * Since: 0.10.36
  */
-#define GST_VIDEO_ENCODER_STREAM_UNLOCK(encoder) g_static_rec_mutex_unlock (&GST_VIDEO_ENCODER (encoder)->stream_lock)
+#define GST_VIDEO_ENCODER_STREAM_UNLOCK(encoder) g_rec_mutex_unlock (&GST_VIDEO_ENCODER (encoder)->stream_lock)
 
 typedef struct _GstVideoEncoder GstVideoEncoder;
 typedef struct _GstVideoEncoderPrivate GstVideoEncoderPrivate;
@@ -170,7 +170,7 @@ struct _GstVideoEncoder
   /* protects all data processing, i.e. is locked
    * in the chain function, finish_frame and when
    * processing serialized events */
-  GStaticRecMutex stream_lock;
+  GRecMutex stream_lock;
 
   /* MT-protected (with STREAM_LOCK) */
   GstSegment input_segment;
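GStaticRecMutex is deprecated in newer GLib, and the GRecMutex replacement changes the init/clear calls as well as the lock/unlock functions the macros above now expand to. A standalone GLib-only sketch of the API involved:

#include <glib.h>

static GRecMutex stream_lock;

static void
stream_lock_example (void)
{
  g_rec_mutex_init (&stream_lock);      /* replaces g_static_rec_mutex_init () */

  g_rec_mutex_lock (&stream_lock);      /* what STREAM_LOCK now expands to */
  /* ... serialized data processing ... */
  g_rec_mutex_unlock (&stream_lock);    /* what STREAM_UNLOCK now expands to */

  g_rec_mutex_clear (&stream_lock);     /* replaces g_static_rec_mutex_free () */
}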
@@ -258,7 +258,8 @@ struct _GstVideoEncoderClass
   GstFlowReturn (*pre_push)   (GstVideoEncoder *encoder,
                                GstVideoCodecFrame *frame);
 
-  GstCaps *     (*getcaps)    (GstVideoEncoder *enc);
+  GstCaps *     (*getcaps)    (GstVideoEncoder *enc,
+                               GstCaps *filter);
 
   gboolean      (*sink_event) (GstVideoEncoder *encoder,
                                GstEvent *event);
@@ -287,7 +288,8 @@ GstFlowReturn gst_video_encoder_finish_frame (GstVideoEncoder *encoder,
                                                GstVideoCodecFrame *frame);
 
 GstCaps *     gst_video_encoder_proxy_getcaps (GstVideoEncoder * enc,
                                                GstCaps * caps);
+                                               GstCaps * caps,
+                                               GstCaps * filter);
 
 void          gst_video_encoder_set_discont (GstVideoEncoder *encoder);
 gboolean      gst_video_encoder_get_discont (GstVideoEncoder *encoder);
 
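Both the getcaps vfunc and gst_video_encoder_proxy_getcaps() gain a filter caps argument, matching 0.11's filtered caps queries. A hedged sketch of a subclass getcaps implementation that forwards the filter (my_encoder_getcaps and the template caps are made up for illustration):

static GstCaps *
my_encoder_getcaps (GstVideoEncoder * enc, GstCaps * filter)
{
  /* hypothetical sink template caps for illustration */
  GstCaps *templ = gst_caps_new_empty_simple ("video/x-raw");
  GstCaps *res;

  /* proxy downstream caps against our own, honouring the caller's filter */
  res = gst_video_encoder_proxy_getcaps (enc, templ, filter);
  gst_caps_unref (templ);

  return res;
}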
@@ -27,23 +27,9 @@
 
 #include <string.h>
 
-GType
-gst_video_codec_frame_get_type (void)
-{
-  static volatile gsize type = 0;
-
-  if (g_once_init_enter (&type)) {
-    GType _type;
-
-    _type = g_boxed_type_register_static ("GstVideoCodecFrame",
-        (GBoxedCopyFunc) gst_video_codec_frame_ref,
-        (GBoxedFreeFunc) gst_video_codec_frame_unref);
-    g_once_init_leave (&type, _type);
-  }
-  return (GType) type;
-}
-
+G_DEFINE_BOXED_TYPE (GstVideoCodecFrame, gst_video_codec_frame,
+    (GBoxedCopyFunc) gst_video_codec_frame_ref,
+    (GBoxedFreeFunc) gst_video_codec_frame_unref);
 
 static void
 _gst_video_codec_frame_free (GstVideoCodecFrame * frame)
@@ -172,18 +158,6 @@ gst_video_codec_state_unref (GstVideoCodecState * state)
   }
 }
 
-GType
-gst_video_codec_state_get_type (void)
-{
-  static volatile gsize type = 0;
-
-  if (g_once_init_enter (&type)) {
-    GType _type;
-
-    _type = g_boxed_type_register_static ("GstVideoCodecState",
-        (GBoxedCopyFunc) gst_video_codec_state_ref,
-        (GBoxedFreeFunc) gst_video_codec_state_unref);
-    g_once_init_leave (&type, _type);
-  }
-  return (GType) type;
-}
+G_DEFINE_BOXED_TYPE (GstVideoCodecState, gst_video_codec_state,
+    (GBoxedCopyFunc) gst_video_codec_state_ref,
+    (GBoxedFreeFunc) gst_video_codec_state_unref);
 
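The removed hand-rolled *_get_type() functions are essentially what G_DEFINE_BOXED_TYPE expands to: a one-time, thread-safe g_boxed_type_register_static() call wired to the given ref/unref functions. A small usage sketch of the resulting boxed type, assuming the GST_TYPE_VIDEO_CODEC_STATE macro from the accompanying header:

/* copying through the GBoxed machinery ends up in the registered copy
 * function, i.e. gst_video_codec_state_ref () */
static GstVideoCodecState *
copy_state (GstVideoCodecState * state)
{
  return g_boxed_copy (GST_TYPE_VIDEO_CODEC_STATE, state);
}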
@@ -123,7 +123,6 @@ static VideoFormat formats[] = {
   {0x00000000, {GST_VIDEO_FORMAT_UNKNOWN, "UNKNOWN", "unknown video", 0, DPTH0,
           PSTR0, PLANE_NA,
           OFFS0}},
-
   MAKE_YUV_FORMAT (I420, "raw video", GST_MAKE_FOURCC ('I', '4', '2', '0'),
       DPTH888, PSTR111,
       PLANE012, OFFS0, SUB420),
@@ -240,6 +239,8 @@ static VideoFormat formats[] = {
   MAKE_YUV_FORMAT (r210, "raw video", GST_MAKE_FOURCC ('r', '2', '1', '0'),
       DPTH10_10_10,
       PSTR444, PLANE0, OFFS0, SUB444),
+  {0x00000000, {GST_VIDEO_FORMAT_ENCODED, "ENCODED", "encoded video",
+          GST_VIDEO_FORMAT_FLAG_COMPLEX, DPTH0, PSTR0, PLANE_NA, OFFS0}},
 };
 
 /**
@@ -839,8 +840,8 @@ gst_video_info_from_caps (GstVideoInfo * info, const GstCaps * caps)
 {
   GstStructure *structure;
   const gchar *s;
-  GstVideoFormat format;
-  gint width, height, views;
+  GstVideoFormat format = GST_VIDEO_FORMAT_UNKNOWN;
+  gint width = 0, height = 0, views;
   gint fps_n, fps_d;
   gint par_n, par_d;
 
@@ -852,19 +853,26 @@ gst_video_info_from_caps (GstVideoInfo * info, const GstCaps * caps)
 
   structure = gst_caps_get_structure (caps, 0);
 
-  if (!gst_structure_has_name (structure, "video/x-raw"))
-    goto wrong_name;
-
-  if (!(s = gst_structure_get_string (structure, "format")))
+  if (gst_structure_has_name (structure, "video/x-raw") &&
+      !(s = gst_structure_get_string (structure, "format"))) {
     goto no_format;
+  } else if (g_str_has_prefix (gst_structure_get_name (structure), "video/") ||
+      g_str_has_prefix (gst_structure_get_name (structure), "image/")) {
+    format = GST_VIDEO_FORMAT_ENCODED;
+  } else {
+    goto wrong_name;
+  }
 
   format = gst_video_format_from_string (s);
   if (format == GST_VIDEO_FORMAT_UNKNOWN)
     goto unknown_format;
 
-  if (!gst_structure_get_int (structure, "width", &width))
+  /* width and height are mandatory, except for non-raw-formats */
+  if (!gst_structure_get_int (structure, "width", &width) &&
+      format != GST_VIDEO_FORMAT_ENCODED)
     goto no_width;
-  if (!gst_structure_get_int (structure, "height", &height))
+  if (!gst_structure_get_int (structure, "height", &height) &&
+      format != GST_VIDEO_FORMAT_ENCODED)
     goto no_height;
 
   gst_video_info_set_format (info, format, width, height);
@@ -917,7 +925,7 @@ gst_video_info_from_caps (GstVideoInfo * info, const GstCaps * caps)
 /* ERROR */
 wrong_name:
   {
-    GST_ERROR ("wrong name '%s', expected video/x-raw",
+    GST_ERROR ("wrong name '%s', expected video/ or image/",
         gst_structure_get_name (structure));
     return FALSE;
   }
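With the changes above, gst_video_info_from_caps() accepts any video/* or image/* caps and maps non-raw ones to GST_VIDEO_FORMAT_ENCODED, with width and height optional for that case. A hedged usage sketch (the caps string is only an example):

static gboolean
parses_encoded_caps (void)
{
  GstVideoInfo info;
  GstCaps *caps = gst_caps_from_string ("video/x-h264, width=320, height=240");
  gboolean res;

  /* non-raw caps now parse too instead of failing the name check */
  res = gst_video_info_from_caps (&info, caps);
  gst_caps_unref (caps);

  return res && GST_VIDEO_INFO_FORMAT (&info) == GST_VIDEO_FORMAT_ENCODED;
}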
@@ -943,6 +951,42 @@ no_height:
   }
 }
 
+/**
+ * gst_video_info_is_equal:
+ * @info: a #GstVideoInfo
+ * @other: a #GstVideoInfo
+ *
+ * Compares two #GstVideoInfo and returns whether they are equal or not
+ *
+ * Returns: %TRUE if @info and @other are equal, else %FALSE.
+ */
+gboolean
+gst_video_info_is_equal (const GstVideoInfo * info, const GstVideoInfo * other)
+{
+  if (GST_VIDEO_INFO_FORMAT (info) != GST_VIDEO_INFO_FORMAT (other))
+    return FALSE;
+  if (GST_VIDEO_INFO_INTERLACE_MODE (info) !=
+      GST_VIDEO_INFO_INTERLACE_MODE (other))
+    return FALSE;
+  if (GST_VIDEO_INFO_FLAGS (info) != GST_VIDEO_INFO_FLAGS (other))
+    return FALSE;
+  if (GST_VIDEO_INFO_WIDTH (info) != GST_VIDEO_INFO_WIDTH (other))
+    return FALSE;
+  if (GST_VIDEO_INFO_HEIGHT (info) != GST_VIDEO_INFO_HEIGHT (other))
+    return FALSE;
+  if (GST_VIDEO_INFO_SIZE (info) != GST_VIDEO_INFO_SIZE (other))
+    return FALSE;
+  if (GST_VIDEO_INFO_PAR_N (info) != GST_VIDEO_INFO_PAR_N (other))
+    return FALSE;
+  if (GST_VIDEO_INFO_PAR_D (info) != GST_VIDEO_INFO_PAR_D (other))
+    return FALSE;
+  if (GST_VIDEO_INFO_FPS_N (info) != GST_VIDEO_INFO_FPS_N (other))
+    return FALSE;
+  if (GST_VIDEO_INFO_FPS_D (info) != GST_VIDEO_INFO_FPS_D (other))
+    return FALSE;
+  return TRUE;
+}
+
 /**
  * gst_video_info_to_caps:
  * @info: a #GstVideoInfo
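The new gst_video_info_is_equal() is a plain field-by-field comparison, useful for deciding whether output state or allocation needs to be renegotiated. A tiny sketch of the intended kind of use (the helper name is hypothetical):

static gboolean
needs_renegotiation (const GstVideoInfo * current, const GstVideoInfo * wanted)
{
  /* TRUE when format, flags, dimensions, PAR or framerate differ */
  return !gst_video_info_is_equal (current, wanted);
}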
@@ -1387,6 +1431,7 @@ fill_planes (GstVideoInfo * info)
       info->size = info->offset[2] +
           info->stride[2] * (GST_ROUND_UP_4 (height) / 4);
       break;
+    case GST_VIDEO_FORMAT_ENCODED:
     case GST_VIDEO_FORMAT_UNKNOWN:
       GST_ERROR ("invalid format");
       g_warning ("invalid format");

@@ -29,6 +29,7 @@ G_BEGIN_DECLS
 /**
  * GstVideoFormat:
  * @GST_VIDEO_FORMAT_UNKNOWN: Unknown or unset video format id
+ * @GST_VIDEO_FORMAT_ENCODED: Encoded video format
  * @GST_VIDEO_FORMAT_I420: planar 4:2:0 YUV
  * @GST_VIDEO_FORMAT_YV12: planar 4:2:0 YVU (like I420 but UV planes swapped)
  * @GST_VIDEO_FORMAT_YUY2: packed 4:2:2 YUV (Y0-U0-Y1-V0 Y2-U2-Y3-V2 Y4 ...)
@@ -117,7 +118,8 @@ typedef enum {
   GST_VIDEO_FORMAT_IYU1,
   GST_VIDEO_FORMAT_ARGB64,
   GST_VIDEO_FORMAT_AYUV64,
-  GST_VIDEO_FORMAT_r210
+  GST_VIDEO_FORMAT_r210,
+  GST_VIDEO_FORMAT_ENCODED
 } GstVideoFormat;
 
 #define GST_VIDEO_MAX_PLANES 4
@@ -265,6 +267,7 @@ struct _GstVideoFormatInfo {
 #define GST_VIDEO_FORMAT_INFO_HAS_ALPHA(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_ALPHA)
 #define GST_VIDEO_FORMAT_INFO_IS_LE(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_LE)
 #define GST_VIDEO_FORMAT_INFO_HAS_PALETTE(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_PALETTE)
+#define GST_VIDEO_FORMAT_INFO_IS_COMPLEX(info) ((info)->flags & GST_VIDEO_FORMAT_FLAG_COMPLEX)
 
 #define GST_VIDEO_FORMAT_INFO_BITS(info)         ((info)->bits)
 #define GST_VIDEO_FORMAT_INFO_N_COMPONENTS(info) ((info)->n_components)
@@ -313,8 +316,6 @@ typedef struct _GstVideoFrame GstVideoFrame;
  *    are interlaced in one frame.
  * @GST_VIDEO_INTERLACE_MODE_MIXED: video contains both interlaced and
  *    progressive frames, the buffer flags describe the frame and fields.
- * @GST_VIDEO_INTERLACE_MODE_FIELDS: video is interlaced and fields are stored
- *    separately. Use the id property to get access to the required field.
  *
  * The possible values of the #GstVideoInterlaceMode describing the interlace
  * mode of the stream.
@@ -322,8 +323,7 @@ typedef struct _GstVideoFrame GstVideoFrame;
 typedef enum {
   GST_VIDEO_INTERLACE_MODE_PROGRESSIVE = 0,
   GST_VIDEO_INTERLACE_MODE_INTERLEAVED,
-  GST_VIDEO_INTERLACE_MODE_MIXED,
-  GST_VIDEO_INTERLACE_MODE_FIELDS
+  GST_VIDEO_INTERLACE_MODE_MIXED
 } GstVideoInterlaceMode;
 
 /**
@@ -593,6 +593,9 @@ gboolean gst_video_info_convert (GstVideoInfo *info,
                                            gint64 src_value,
                                            GstFormat dest_format,
                                            gint64 *dest_value);
+gboolean       gst_video_info_is_equal    (const GstVideoInfo *info,
+                                           const GstVideoInfo *other);
 
 /**
  * GstVideoFrameFlags:
 * @GST_VIDEO_FRAME_FLAG_NONE: no flags