/* GStreamer
 * Copyright (C) 2008 David Schleef <ds@schleef.org>
 * Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
 * Copyright (C) 2011 Nokia Corporation. All rights reserved.
 *   Contact: Stefan Kost <stefan.kost@nokia.com>
 * Copyright (C) 2012 Collabora Ltd.
 *   Author : Edward Hervey <edward@collabora.com>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
 * Boston, MA 02110-1301, USA.
 */

/**
 * SECTION:gstvideodecoder
 * @short_description: Base class for video decoders
 * @see_also:
 *
 * This base class is for video decoders turning encoded data into raw video
 * frames.
 *
 * The GstVideoDecoder base class and derived subclasses should cooperate as follows:
 * <orderedlist>
 * <listitem>
 *   <itemizedlist><title>Configuration</title>
 *   <listitem><para>
 *     Initially, GstVideoDecoder calls @start when the decoder element
 *     is activated, which allows the subclass to perform any global setup.
 *   </para></listitem>
 *   <listitem><para>
 *     GstVideoDecoder calls @set_format to inform the subclass of caps
 *     describing input video data that it is about to receive, including
 *     possibly configuration data.
 *     While unlikely, it might be called more than once, if changing input
 *     parameters require reconfiguration.
 *   </para></listitem>
 *   <listitem><para>
 *     Incoming data buffers are processed as needed, described in Data Processing below.
 *   </para></listitem>
 *   <listitem><para>
 *     GstVideoDecoder calls @stop at end of all processing.
 *   </para></listitem>
 *   </itemizedlist>
 * </listitem>
 * <listitem>
 *   <itemizedlist>
 *   <title>Data processing</title>
 *     <listitem><para>
 *       The base class gathers input data, and optionally allows subclass
 *       to parse this into subsequently manageable chunks, typically
 *       corresponding to and referred to as 'frames'.
 *     </para></listitem>
 *     <listitem><para>
 *       Each input frame is provided in turn to the subclass' @handle_frame callback.
 *       The ownership of the frame is given to the @handle_frame callback.
 *     </para></listitem>
 *     <listitem><para>
 *       If codec processing results in decoded data, the subclass should call
 *       @gst_video_decoder_finish_frame to have decoded data pushed
 *       downstream. Otherwise, the subclass must call @gst_video_decoder_drop_frame, to
 *       allow the base class to do timestamp and offset tracking, and possibly to
 *       requeue the frame for a later attempt in the case of reverse playback.
 *     </para></listitem>
 *   </itemizedlist>
 * </listitem>
 * <listitem>
 *   <itemizedlist><title>Shutdown phase</title>
 *   <listitem><para>
 *     The GstVideoDecoder class calls @stop to inform the subclass that data
 *     parsing will be stopped.
 *   </para></listitem>
 *   </itemizedlist>
 * </listitem>
 * <listitem>
 *   <itemizedlist><title>Additional Notes</title>
 *   <listitem>
 *     <itemizedlist><title>Seeking/Flushing</title>
 *     <listitem><para>
 *       When the pipeline is seeked or otherwise flushed, the subclass is informed via a call
 *       to its @reset callback, with the hard parameter set to true. This indicates the
 *       subclass should drop any internal data queues and timestamps and prepare for a fresh
 *       set of buffers to arrive for parsing and decoding.
 *     </para></listitem>
 *     </itemizedlist>
 *   </listitem>
 *   <listitem>
 *     <itemizedlist><title>End Of Stream</title>
 *     <listitem><para>
 *       At end-of-stream, the subclass @parse function may be called some final times with the
 *       at_eos parameter set to true, indicating that the element should not expect any more data
 *       to be arriving, and it should parse any remaining frames and call
 *       gst_video_decoder_have_frame() if possible.
 *     </para></listitem>
 *     </itemizedlist>
 *   </listitem>
 *   </itemizedlist>
 * </listitem>
 * </orderedlist>
 *
 * The subclass is responsible for providing pad template caps for
 * source and sink pads. The pads need to be named "sink" and "src". It also
 * needs to provide information about the output caps, when they are known.
 * This may be when the base class calls the subclass' @set_format function,
 * though it might be during decoding, before calling
 * @gst_video_decoder_finish_frame. This is done via
 * @gst_video_decoder_set_output_state.
 *
 * The subclass is also responsible for providing (presentation) timestamps
 * (likely based on corresponding input ones). If that is not applicable
 * or possible, the base class provides limited framerate based interpolation.
 *
 * Similarly, the base class provides some limited (legacy) seeking support
 * if specifically requested by the subclass, as full-fledged support
 * should rather be left to an upstream demuxer, parser or similar element. This simple
 * approach caters for seeking and duration reporting using estimated input
 * bitrates. To enable it, a subclass should call
 * @gst_video_decoder_set_estimate_rate to enable handling of incoming byte-streams.
 *
 * The base class provides some support for reverse playback, in particular
 * in case incoming data is not packetized or upstream does not provide
 * fragments on keyframe boundaries. However, the subclass should then be prepared
 * for the parsing and frame processing stage to occur separately (in normal
 * forward processing, the latter immediately follows the former).
 * The subclass also needs to ensure the parsing stage properly marks keyframes,
 * unless it knows the upstream elements will do so properly for incoming data.
 *
 * The bare minimum that a functional subclass needs to implement is:
 * <itemizedlist>
 *   <listitem><para>Provide pad templates</para></listitem>
 *   <listitem><para>
 *      Inform the base class of output caps via @gst_video_decoder_set_output_state
 *   </para></listitem>
 *   <listitem><para>
 *      Parse input data, if it is not considered packetized from upstream.
 *      Data will be provided to @parse which should invoke @gst_video_decoder_add_to_frame and
 *      @gst_video_decoder_have_frame to separate the data belonging to each video frame.
 *   </para></listitem>
 *   <listitem><para>
 *      Accept data in @handle_frame and provide decoded results to
 *      @gst_video_decoder_finish_frame, or call @gst_video_decoder_drop_frame.
 *   </para></listitem>
 * </itemizedlist>
 */
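
/* A minimal sketch of a subclass along the lines described above. This is
 * illustrative only: the MyDecoder type, its width/height/input_state and
 * output_configured fields, and the simple_decode() helper are hypothetical
 * placeholders that a real decoder would replace with its own codec logic.
 *
 *   static GstStaticPadTemplate sink_template = GST_STATIC_PAD_TEMPLATE ("sink",
 *       GST_PAD_SINK, GST_PAD_ALWAYS, GST_STATIC_CAPS ("video/x-mycodec"));
 *   static GstStaticPadTemplate src_template = GST_STATIC_PAD_TEMPLATE ("src",
 *       GST_PAD_SRC, GST_PAD_ALWAYS, GST_STATIC_CAPS ("video/x-raw, format=I420"));
 *
 *   static GstFlowReturn
 *   my_decoder_handle_frame (GstVideoDecoder * dec, GstVideoCodecFrame * frame)
 *   {
 *     MyDecoder *self = MY_DECODER (dec);
 *
 *     // Tell the base class about the output caps once they are known
 *     if (!self->output_configured) {
 *       GstVideoCodecState *state;
 *
 *       state = gst_video_decoder_set_output_state (dec, GST_VIDEO_FORMAT_I420,
 *           self->width, self->height, self->input_state);
 *       gst_video_codec_state_unref (state);
 *       self->output_configured = TRUE;
 *     }
 *
 *     // Get an output buffer from the negotiated pool and decode into it
 *     if (gst_video_decoder_allocate_output_frame (dec, frame) != GST_FLOW_OK)
 *       return gst_video_decoder_drop_frame (dec, frame);
 *
 *     if (!simple_decode (self, frame->input_buffer, frame->output_buffer))
 *       return gst_video_decoder_drop_frame (dec, frame);
 *
 *     // Hand the decoded frame back so the base class can push it downstream
 *     return gst_video_decoder_finish_frame (dec, frame);
 *   }
 */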

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

/* TODO
 *
 * * Add a flag/boolean for I-frame-only/image decoders so we can do extra
 *   features, like applying QoS on input (as opposed to after the frame is
 *   decoded).
 * * Add a flag/boolean for decoders that require keyframes, so the base
 *   class can automatically discard non-keyframes before one has arrived
 * * Detect reordered frame/timestamps and fix the pts/dts
 * * Support for GstIndex (or shall we not care ?)
 * * Calculate actual latency based on input/output timestamp/frame_number
 *   and if it exceeds the recorded one, save it and emit a GST_MESSAGE_LATENCY
 * * Emit latency message when it changes
 *
 */

/* Implementation notes:
 * The Video Decoder base class operates in 2 primary processing modes, depending
 * on whether forward or reverse playback is requested.
 *
 * Forward playback:
 *   * Incoming buffer -> @parse() -> add_to_frame()/have_frame() ->
 *     handle_frame() -> push downstream
 *
 * Reverse playback is more complicated, since it involves gathering incoming data regions
 * as we loop backwards through the upstream data. The processing concept (using incoming
 * buffers as containing one frame each to simplify things) is:
 *
 * Upstream data we want to play:
 *  Buffer encoded order:  1  2  3  4  5  6  7  8  9  EOS
 *  Keyframe flag:         K        K        K
 *  Groupings:             AAAAAAA  BBBBBBB  CCCCCCC
 *
 * Input:
 *  Buffer reception order:  7  8  9  4  5  6  1  2  3  EOS
 *  Keyframe flag:           K        K        K
 *  Discont flag:            D        D        D
 *
 * - Each Discont marks a discont in the decoding order.
 * - The keyframes mark where we can start decoding.
 *
 * Initially, we prepend incoming buffers to the gather queue. Whenever the
 * discont flag is set on an incoming buffer, the gather queue is flushed out
 * before the new buffer is collected.
 *
 * The above data will be accumulated in the gather queue like this:
 *
 *   gather queue:    9  8  7
 *                          D
 *
 * When buffer 4 is received (with a DISCONT), we flush the gather queue like
 * this:
 *
 *   while (gather)
 *     take head of queue and prepend to parse queue (this reverses the
 *     sequence, so parse queue is 7 -> 8 -> 9)
 *
 *   Next, we process the parse queue, which now contains all un-parsed packets
 *   (including any leftover ones from the previous decode section)
 *
 *   for each buffer now in the parse queue:
 *     Call the subclass parse function, prepending each resulting frame to
 *     the parse_gather queue. Buffers which precede the first one that
 *     produces a parsed frame are retained in the parse queue for
 *     re-processing on the next cycle of parsing.
 *
 *   The parse_gather queue now contains frame objects ready for decoding,
 *   in reverse order.
 *   parse_gather: 9 -> 8 -> 7
 *
 *   while (parse_gather)
 *     Take the head of the queue and prepend it to the decode queue
 *     If the frame was a keyframe, process the decode queue
 *   decode is now 7-8-9
 *
 *   Processing the decode queue results in frames with attached output buffers
 *   stored in the 'output_queue' ready for outputting in reverse order.
 *
 * After we flushed the gather queue and parsed it, we add 4 to the (now empty)
 * gather queue. We get the following situation:
 *
 *  gather queue:    4
 *  decode queue:    7  8  9
 *
 * After we received 5 (Keyframe) and 6:
 *
 *  gather queue:    6  5  4
 *  decode queue:    7  8  9
 *
 * When we receive 1 (DISCONT) which triggers a flush of the gather queue:
 *
 *   Copy head of the gather queue (6) to decode queue:
 *
 *    gather queue:    5  4
 *    decode queue:    6  7  8  9
 *
 *   Copy head of the gather queue (5) to decode queue. This is a keyframe so we
 *   can start decoding.
 *
 *    gather queue:    4
 *    decode queue:    5  6  7  8  9
 *
 *   Decode frames in decode queue, store raw decoded data in output queue, we
 *   can take the head of the decode queue and prepend the decoded result in the
 *   output queue:
 *
 *    gather queue:    4
 *    decode queue:
 *    output queue:    9  8  7  6  5
 *
 *   Now output all the frames in the output queue, picking a frame from the
 *   head of the queue.
 *
 *   Copy head of the gather queue (4) to decode queue, we flushed the gather
 *   queue and can now store input buffer in the gather queue:
 *
 *    gather queue:    1
 *    decode queue:    4
 *
 *  When we receive EOS, the queue looks like:
 *
 *   gather queue:    3  2  1
 *   decode queue:    4
 *
 *  Fill decode queue, first keyframe we copy is 2:
 *
 *   gather queue:    1
 *   decode queue:    2  3  4
 *
 *  Decoded output:
 *
 *   gather queue:    1
 *   decode queue:
 *   output queue:    4  3  2
 *
 *  Leftover buffer 1 cannot be decoded and must be discarded.
 */
#include "gstvideodecoder.h"
|
|
|
|
#include "gstvideoutils.h"
|
|
|
|
|
2012-12-12 17:13:10 +00:00
|
|
|
#include <gst/video/video.h>
|
2012-09-03 06:19:09 +00:00
|
|
|
#include <gst/video/video-event.h>
|
2012-04-24 18:04:48 +00:00
|
|
|
#include <gst/video/gstvideopool.h>
|
|
|
|
#include <gst/video/gstvideometa.h>
|
2012-03-07 09:18:49 +00:00
|
|
|
#include <string.h>
|
|
|
|
|
|
|
|
GST_DEBUG_CATEGORY (videodecoder_debug);
|
|
|
|
#define GST_CAT_DEFAULT videodecoder_debug
|
|
|
|
|
|
|
|
#define GST_VIDEO_DECODER_GET_PRIVATE(obj) \
|
|
|
|
(G_TYPE_INSTANCE_GET_PRIVATE ((obj), GST_TYPE_VIDEO_DECODER, \
|
|
|
|
GstVideoDecoderPrivate))
|
|
|
|
|
|
|
|

struct _GstVideoDecoderPrivate
{
  /* FIXME introduce a context ? */

  GstBufferPool *pool;
  GstAllocator *allocator;
  GstAllocationParams params;

  /* parse tracking */
  /* input data */
  GstAdapter *input_adapter;
  /* assembles current frame */
  GstAdapter *output_adapter;

  /* Whether we attempt to convert newsegment from bytes to
   * time using a bitrate estimation */
  gboolean do_estimate_rate;

  /* Whether input is considered packetized or not */
  gboolean packetized;

  /* Error handling */
  gint max_errors;
  gint error_count;

  gboolean do_caps;

  /* ... being tracked here;
   * only available during parsing */
  GstVideoCodecFrame *current_frame;
  /* events that should apply to the current frame */
  GList *current_frame_events;
  /* events that should be pushed before the next frame */
  GList *pending_events;

  /* relative offset of input data */
  guint64 input_offset;
  /* relative offset of frame */
  guint64 frame_offset;
  /* tracking ts and offsets */
  GList *timestamps;

  /* last outgoing ts */
  GstClockTime last_timestamp_out;
  /* incoming pts - dts */
  GstClockTime pts_delta;
  gboolean reordered_output;

  /* reverse playback */
  /* collect input */
  GList *gather;
  /* to-be-parsed */
  GList *parse;
  /* collected parsed frames */
  GList *parse_gather;
  /* frames to be handled == decoded */
  GList *decode;
  /* collected output - of buffer objects, not frames */
  GList *output_queued;

  /* base_picture_number is the picture number of the reference picture */
  guint64 base_picture_number;
  /* combine with base_picture_number, framerate and calcs to yield (presentation) ts */
  GstClockTime base_timestamp;

  /* FIXME : reorder_depth is never set */
  int reorder_depth;
  int distance_from_sync;

  guint32 system_frame_number;
  guint32 decode_frame_number;

  GList *frames;                /* Protected with OBJECT_LOCK */
  GstVideoCodecState *input_state;
  GstVideoCodecState *output_state;     /* OBJECT_LOCK and STREAM_LOCK */
  gboolean output_state_changed;

  /* QoS properties */
  gdouble proportion;           /* OBJECT_LOCK */
  GstClockTime earliest_time;   /* OBJECT_LOCK */
  GstClockTime qos_frame_duration;      /* OBJECT_LOCK */
  gboolean discont;
  /* qos messages: frames dropped/processed */
  guint dropped;
  guint processed;

  /* Outgoing byte size ? */
  gint64 bytes_out;
  gint64 time;

  gint64 min_latency;
  gint64 max_latency;

  GstTagList *tags;
  gboolean tags_changed;
};

static GstElementClass *parent_class = NULL;
static void gst_video_decoder_class_init (GstVideoDecoderClass * klass);
static void gst_video_decoder_init (GstVideoDecoder * dec,
    GstVideoDecoderClass * klass);

static void gst_video_decoder_finalize (GObject * object);

static gboolean gst_video_decoder_setcaps (GstVideoDecoder * dec,
    GstCaps * caps);
static gboolean gst_video_decoder_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event);
static gboolean gst_video_decoder_src_event (GstPad * pad, GstObject * parent,
    GstEvent * event);
static GstFlowReturn gst_video_decoder_chain (GstPad * pad, GstObject * parent,
    GstBuffer * buf);
static gboolean gst_video_decoder_sink_query (GstPad * pad, GstObject * parent,
    GstQuery * query);
static GstStateChangeReturn gst_video_decoder_change_state (GstElement *
    element, GstStateChange transition);
static gboolean gst_video_decoder_src_query (GstPad * pad, GstObject * parent,
    GstQuery * query);
static void gst_video_decoder_reset (GstVideoDecoder * decoder, gboolean full,
    gboolean flush_hard);

static GstFlowReturn gst_video_decoder_decode_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame);

static void gst_video_decoder_release_frame (GstVideoDecoder * dec,
    GstVideoCodecFrame * frame);
static GstClockTime gst_video_decoder_get_frame_duration (GstVideoDecoder *
    decoder, GstVideoCodecFrame * frame);
static GstVideoCodecFrame *gst_video_decoder_new_frame (GstVideoDecoder *
    decoder);
static GstFlowReturn gst_video_decoder_clip_and_push_buf (GstVideoDecoder *
    decoder, GstBuffer * buf);
static GstFlowReturn gst_video_decoder_flush_parse (GstVideoDecoder * dec,
    gboolean at_eos);

static void gst_video_decoder_clear_queues (GstVideoDecoder * dec);

static gboolean gst_video_decoder_sink_event_default (GstVideoDecoder * decoder,
    GstEvent * event);
static gboolean gst_video_decoder_src_event_default (GstVideoDecoder * decoder,
    GstEvent * event);
static gboolean gst_video_decoder_decide_allocation_default (GstVideoDecoder *
    decoder, GstQuery * query);
static gboolean gst_video_decoder_propose_allocation_default (GstVideoDecoder *
    decoder, GstQuery * query);
static gboolean gst_video_decoder_negotiate_default (GstVideoDecoder * decoder);
static GstFlowReturn gst_video_decoder_parse_available (GstVideoDecoder * dec,
    gboolean at_eos, gboolean new_buffer);

/* we can't use G_DEFINE_ABSTRACT_TYPE because we need the klass in the _init
 * method to get to the padtemplates */
GType
gst_video_decoder_get_type (void)
{
  static volatile gsize type = 0;

  if (g_once_init_enter (&type)) {
    GType _type;
    static const GTypeInfo info = {
      sizeof (GstVideoDecoderClass),
      NULL,
      NULL,
      (GClassInitFunc) gst_video_decoder_class_init,
      NULL,
      NULL,
      sizeof (GstVideoDecoder),
      0,
      (GInstanceInitFunc) gst_video_decoder_init,
    };

    _type = g_type_register_static (GST_TYPE_ELEMENT,
        "GstVideoDecoder", &info, G_TYPE_FLAG_ABSTRACT);
    g_once_init_leave (&type, _type);
  }
  return type;
}

static void
gst_video_decoder_class_init (GstVideoDecoderClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;

  gobject_class = G_OBJECT_CLASS (klass);
  gstelement_class = GST_ELEMENT_CLASS (klass);

  GST_DEBUG_CATEGORY_INIT (videodecoder_debug, "videodecoder", 0,
      "Base Video Decoder");

  parent_class = g_type_class_peek_parent (klass);
  g_type_class_add_private (klass, sizeof (GstVideoDecoderPrivate));

  gobject_class->finalize = gst_video_decoder_finalize;

  gstelement_class->change_state =
      GST_DEBUG_FUNCPTR (gst_video_decoder_change_state);

  klass->sink_event = gst_video_decoder_sink_event_default;
  klass->src_event = gst_video_decoder_src_event_default;
  klass->decide_allocation = gst_video_decoder_decide_allocation_default;
  klass->propose_allocation = gst_video_decoder_propose_allocation_default;
  klass->negotiate = gst_video_decoder_negotiate_default;
}

static void
gst_video_decoder_init (GstVideoDecoder * decoder, GstVideoDecoderClass * klass)
{
  GstPadTemplate *pad_template;
  GstPad *pad;

  GST_DEBUG_OBJECT (decoder, "gst_video_decoder_init");

  decoder->priv = GST_VIDEO_DECODER_GET_PRIVATE (decoder);

  pad_template =
      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "sink");
  g_return_if_fail (pad_template != NULL);

  decoder->sinkpad = pad = gst_pad_new_from_template (pad_template, "sink");

  gst_pad_set_chain_function (pad, GST_DEBUG_FUNCPTR (gst_video_decoder_chain));
  gst_pad_set_event_function (pad,
      GST_DEBUG_FUNCPTR (gst_video_decoder_sink_event));
  gst_pad_set_query_function (pad,
      GST_DEBUG_FUNCPTR (gst_video_decoder_sink_query));
  gst_element_add_pad (GST_ELEMENT (decoder), decoder->sinkpad);

  pad_template =
      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "src");
  g_return_if_fail (pad_template != NULL);

  decoder->srcpad = pad = gst_pad_new_from_template (pad_template, "src");

  gst_pad_set_event_function (pad,
      GST_DEBUG_FUNCPTR (gst_video_decoder_src_event));
  gst_pad_set_query_function (pad,
      GST_DEBUG_FUNCPTR (gst_video_decoder_src_query));
  gst_pad_use_fixed_caps (pad);
  gst_element_add_pad (GST_ELEMENT (decoder), decoder->srcpad);

  gst_segment_init (&decoder->input_segment, GST_FORMAT_TIME);
  gst_segment_init (&decoder->output_segment, GST_FORMAT_TIME);

  g_rec_mutex_init (&decoder->stream_lock);

  decoder->priv->input_adapter = gst_adapter_new ();
  decoder->priv->output_adapter = gst_adapter_new ();
  decoder->priv->packetized = TRUE;

  gst_video_decoder_reset (decoder, TRUE, TRUE);
}

static gboolean
gst_video_rawvideo_convert (GstVideoCodecState * state,
    GstFormat src_format, gint64 src_value,
    GstFormat * dest_format, gint64 * dest_value)
{
  gboolean res = FALSE;
  guint vidsize;
  guint fps_n, fps_d;

  g_return_val_if_fail (dest_format != NULL, FALSE);
  g_return_val_if_fail (dest_value != NULL, FALSE);

  if (src_format == *dest_format || src_value == 0 || src_value == -1) {
    *dest_value = src_value;
    return TRUE;
  }

  vidsize = GST_VIDEO_INFO_SIZE (&state->info);
  fps_n = GST_VIDEO_INFO_FPS_N (&state->info);
  fps_d = GST_VIDEO_INFO_FPS_D (&state->info);

  if (src_format == GST_FORMAT_BYTES &&
      *dest_format == GST_FORMAT_DEFAULT && vidsize) {
    /* convert bytes to frames */
    *dest_value = gst_util_uint64_scale_int (src_value, 1, vidsize);
    res = TRUE;
  } else if (src_format == GST_FORMAT_DEFAULT &&
      *dest_format == GST_FORMAT_BYTES && vidsize) {
    /* convert frames to bytes */
    *dest_value = src_value * vidsize;
    res = TRUE;
  } else if (src_format == GST_FORMAT_DEFAULT &&
      *dest_format == GST_FORMAT_TIME && fps_n) {
    /* convert frames to time */
    *dest_value = gst_util_uint64_scale (src_value, GST_SECOND * fps_d, fps_n);
    res = TRUE;
  } else if (src_format == GST_FORMAT_TIME &&
      *dest_format == GST_FORMAT_DEFAULT && fps_d) {
    /* convert time to frames */
    *dest_value = gst_util_uint64_scale (src_value, fps_n, GST_SECOND * fps_d);
    res = TRUE;
  } else if (src_format == GST_FORMAT_TIME &&
      *dest_format == GST_FORMAT_BYTES && fps_d && vidsize) {
    /* convert time to bytes */
    *dest_value = gst_util_uint64_scale (src_value,
        fps_n * vidsize, GST_SECOND * fps_d);
    res = TRUE;
  } else if (src_format == GST_FORMAT_BYTES &&
      *dest_format == GST_FORMAT_TIME && fps_n && vidsize) {
    /* convert bytes to time */
    *dest_value = gst_util_uint64_scale (src_value,
        GST_SECOND * fps_d, fps_n * vidsize);
    res = TRUE;
  }

  return res;
}

static gboolean
gst_video_encoded_video_convert (gint64 bytes, gint64 time,
    GstFormat src_format, gint64 src_value, GstFormat * dest_format,
    gint64 * dest_value)
{
  gboolean res = FALSE;

  g_return_val_if_fail (dest_format != NULL, FALSE);
  g_return_val_if_fail (dest_value != NULL, FALSE);

  if (G_UNLIKELY (src_format == *dest_format || src_value == 0 ||
          src_value == -1)) {
    if (dest_value)
      *dest_value = src_value;
    return TRUE;
  }

  if (bytes <= 0 || time <= 0) {
    GST_DEBUG ("not enough metadata yet to convert");
    goto exit;
  }

  switch (src_format) {
    case GST_FORMAT_BYTES:
      switch (*dest_format) {
        case GST_FORMAT_TIME:
          *dest_value = gst_util_uint64_scale (src_value, time, bytes);
          res = TRUE;
          break;
        default:
          res = FALSE;
      }
      break;
    case GST_FORMAT_TIME:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          *dest_value = gst_util_uint64_scale (src_value, bytes, time);
          res = TRUE;
          break;
        default:
          res = FALSE;
      }
      break;
    default:
      GST_DEBUG ("unhandled conversion from %d to %d", src_format,
          *dest_format);
      res = FALSE;
  }

exit:
  return res;
}

static GstVideoCodecState *
_new_input_state (GstCaps * caps)
{
  GstVideoCodecState *state;
  GstStructure *structure;
  const GValue *codec_data;

  state = g_slice_new0 (GstVideoCodecState);
  state->ref_count = 1;
  gst_video_info_init (&state->info);
  if (G_UNLIKELY (!gst_video_info_from_caps (&state->info, caps)))
    goto parse_fail;
  state->caps = gst_caps_ref (caps);

  structure = gst_caps_get_structure (caps, 0);

  codec_data = gst_structure_get_value (structure, "codec_data");
  if (codec_data && G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER)
    state->codec_data = GST_BUFFER (g_value_dup_boxed (codec_data));

  return state;

parse_fail:
  {
    g_slice_free (GstVideoCodecState, state);
    return NULL;
  }
}

static GstVideoCodecState *
_new_output_state (GstVideoFormat fmt, guint width, guint height,
    GstVideoCodecState * reference)
{
  GstVideoCodecState *state;

  state = g_slice_new0 (GstVideoCodecState);
  state->ref_count = 1;
  gst_video_info_init (&state->info);
  gst_video_info_set_format (&state->info, fmt, width, height);

  if (reference) {
    GstVideoInfo *tgt, *ref;

    tgt = &state->info;
    ref = &reference->info;

    /* Copy over extra fields from reference state */
    tgt->interlace_mode = ref->interlace_mode;
    tgt->flags = ref->flags;
    tgt->chroma_site = ref->chroma_site;
    /* only copy values that are not unknown so that we don't override the
     * defaults. subclasses should really fill these in when they know. */
    if (ref->colorimetry.range)
      tgt->colorimetry.range = ref->colorimetry.range;
    if (ref->colorimetry.matrix)
      tgt->colorimetry.matrix = ref->colorimetry.matrix;
    if (ref->colorimetry.transfer)
      tgt->colorimetry.transfer = ref->colorimetry.transfer;
    if (ref->colorimetry.primaries)
      tgt->colorimetry.primaries = ref->colorimetry.primaries;
    GST_DEBUG ("reference par %d/%d fps %d/%d",
        ref->par_n, ref->par_d, ref->fps_n, ref->fps_d);
    tgt->par_n = ref->par_n;
    tgt->par_d = ref->par_d;
    tgt->fps_n = ref->fps_n;
    tgt->fps_d = ref->fps_d;
  }

  GST_DEBUG ("output state par %d/%d fps %d/%d",
      state->info.par_n, state->info.par_d,
      state->info.fps_n, state->info.fps_d);

  return state;
}

static gboolean
gst_video_decoder_setcaps (GstVideoDecoder * decoder, GstCaps * caps)
{
  GstVideoDecoderClass *decoder_class;
  GstVideoCodecState *state;
  gboolean ret = TRUE;

  decoder_class = GST_VIDEO_DECODER_GET_CLASS (decoder);

  GST_DEBUG_OBJECT (decoder, "setcaps %" GST_PTR_FORMAT, caps);

  GST_VIDEO_DECODER_STREAM_LOCK (decoder);

  if (decoder->priv->input_state) {
    GST_DEBUG_OBJECT (decoder,
        "Checking if caps changed old %" GST_PTR_FORMAT " new %" GST_PTR_FORMAT,
        decoder->priv->input_state->caps, caps);
    if (gst_caps_is_equal (decoder->priv->input_state->caps, caps))
      goto caps_not_changed;
  }

  state = _new_input_state (caps);

  if (G_UNLIKELY (state == NULL))
    goto parse_fail;

  if (decoder_class->set_format)
    ret = decoder_class->set_format (decoder, state);

  if (!ret)
    goto refused_format;

  if (decoder->priv->input_state)
    gst_video_codec_state_unref (decoder->priv->input_state);
  decoder->priv->input_state = state;

  GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);

  return ret;

caps_not_changed:
  {
    GST_DEBUG_OBJECT (decoder, "Caps did not change - ignore");
    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
    return TRUE;
  }

  /* ERRORS */
parse_fail:
  {
    GST_WARNING_OBJECT (decoder, "Failed to parse caps");
    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
    return FALSE;
  }

refused_format:
  {
    GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
    GST_WARNING_OBJECT (decoder, "Subclass refused caps");
    gst_video_codec_state_unref (state);
    return FALSE;
  }
}

static void
gst_video_decoder_finalize (GObject * object)
{
  GstVideoDecoder *decoder;

  decoder = GST_VIDEO_DECODER (object);

  GST_DEBUG_OBJECT (object, "finalize");

  g_rec_mutex_clear (&decoder->stream_lock);

  if (decoder->priv->input_adapter) {
    g_object_unref (decoder->priv->input_adapter);
    decoder->priv->input_adapter = NULL;
  }
  if (decoder->priv->output_adapter) {
    g_object_unref (decoder->priv->output_adapter);
    decoder->priv->output_adapter = NULL;
  }

  if (decoder->priv->input_state)
    gst_video_codec_state_unref (decoder->priv->input_state);
  if (decoder->priv->output_state)
    gst_video_codec_state_unref (decoder->priv->output_state);

  if (decoder->priv->pool) {
    gst_object_unref (decoder->priv->pool);
    decoder->priv->pool = NULL;
  }

  if (decoder->priv->allocator) {
    gst_object_unref (decoder->priv->allocator);
    decoder->priv->allocator = NULL;
  }

  G_OBJECT_CLASS (parent_class)->finalize (object);
}

/* hard == FLUSH, otherwise discont */
static GstFlowReturn
gst_video_decoder_flush (GstVideoDecoder * dec, gboolean hard)
{
  GstVideoDecoderClass *klass = GST_VIDEO_DECODER_GET_CLASS (dec);
  GstFlowReturn ret = GST_FLOW_OK;

  GST_LOG_OBJECT (dec, "flush hard %d", hard);

  /* Inform subclass */
  if (klass->reset) {
    GST_FIXME_OBJECT (dec, "GstVideoDecoder::reset() is deprecated");
    klass->reset (dec, hard);
  }

  if (klass->flush)
    klass->flush (dec);

  /* and get (re)set for the sequel */
  gst_video_decoder_reset (dec, FALSE, hard);

  return ret;
}
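
/* Push @event on the source pad. A TIME SEGMENT event is additionally
 * stored as the output segment before being forwarded, so later output
 * buffer clipping uses the values that were actually sent downstream. */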
static gboolean
gst_video_decoder_push_event (GstVideoDecoder * decoder, GstEvent * event)
{
  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_SEGMENT:
    {
      GstSegment segment;

      GST_VIDEO_DECODER_STREAM_LOCK (decoder);

      gst_event_copy_segment (event, &segment);

      GST_DEBUG_OBJECT (decoder, "segment %" GST_SEGMENT_FORMAT, &segment);

      if (segment.format != GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (decoder, "received non TIME newsegment");
        GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
        break;
      }

      decoder->output_segment = segment;
      GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
      break;
    }
    default:
      break;
  }

  return gst_pad_push_event (decoder->srcpad, event);
}
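
/* Keep calling the subclass ->parse() on whatever sits in the input adapter
 * until it stops making progress (no bytes consumed) or returns an error.
 * A fresh GstVideoCodecFrame is set up whenever the previous current frame
 * was consumed by have_frame()/handle_frame(). */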
static GstFlowReturn
gst_video_decoder_parse_available (GstVideoDecoder * dec, gboolean at_eos,
    gboolean new_buffer)
{
  GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_GET_CLASS (dec);
  GstVideoDecoderPrivate *priv = dec->priv;
  GstFlowReturn ret = GST_FLOW_OK;
  gsize start_size, available;

  available = gst_adapter_available (priv->input_adapter);
  start_size = 0;

  while (ret == GST_FLOW_OK && ((available && start_size != available)
          || new_buffer)) {
    new_buffer = FALSE;
    /* current frame may have been parsed and handled,
     * so we need to set up a new one when asking subclass to parse */
    if (priv->current_frame == NULL)
      priv->current_frame = gst_video_decoder_new_frame (dec);

    start_size = available;
    ret = decoder_class->parse (dec, priv->current_frame,
        priv->input_adapter, at_eos);
    available = gst_adapter_available (priv->input_adapter);
  }

  return ret;
}
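
/* Drain all pending data out of the decoder: in forward playback give an
 * unpacketized subclass a last chance to parse the adapter contents, in
 * reverse playback flush the gathered/parse queues. When at_eos is TRUE
 * the subclass ->finish() vfunc is called as well. */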
static GstFlowReturn
gst_video_decoder_drain_out (GstVideoDecoder * dec, gboolean at_eos)
{
  GstVideoDecoderClass *decoder_class = GST_VIDEO_DECODER_GET_CLASS (dec);
  GstVideoDecoderPrivate *priv = dec->priv;
  GstFlowReturn ret = GST_FLOW_OK;

  GST_VIDEO_DECODER_STREAM_LOCK (dec);

  if (dec->input_segment.rate > 0.0) {
    /* Forward mode, if unpacketized, give the child class
     * a final chance to flush out packets */
    if (!priv->packetized) {
      ret = gst_video_decoder_parse_available (dec, TRUE, FALSE);
    }
  } else {
    /* Reverse playback mode */
    ret = gst_video_decoder_flush_parse (dec, TRUE);
  }

  if (at_eos) {
    if (decoder_class->finish)
      ret = decoder_class->finish (dec);
  }

  GST_VIDEO_DECODER_STREAM_UNLOCK (dec);

  return ret;
}

static gboolean
gst_video_decoder_sink_event_default (GstVideoDecoder * decoder,
    GstEvent * event)
{
  GstVideoDecoderPrivate *priv;
  gboolean ret = FALSE;
  gboolean forward_immediate = FALSE;

  priv = decoder->priv;

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_STREAM_START:
    {
      GstFlowReturn flow_ret = GST_FLOW_OK;

      flow_ret = gst_video_decoder_drain_out (decoder, FALSE);
      ret = (flow_ret == GST_FLOW_OK);

      GST_DEBUG_OBJECT (decoder, "received STREAM_START. Clearing taglist");
      GST_VIDEO_DECODER_STREAM_LOCK (decoder);
      /* Flush our merged taglist after a STREAM_START */
      if (priv->tags)
        gst_tag_list_unref (priv->tags);
      priv->tags = NULL;
      priv->tags_changed = FALSE;
      GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);

      /* Forward STREAM_START immediately. Everything is drained after
       * the STREAM_START event and we can forward this event immediately
       * now without having buffers out of order.
       */
      forward_immediate = TRUE;
      break;
    }
    case GST_EVENT_CAPS:
    {
      GstCaps *caps;

      gst_event_parse_caps (event, &caps);
      ret = TRUE;
      decoder->priv->do_caps = TRUE;
      gst_event_unref (event);
      event = NULL;
      break;
    }
    case GST_EVENT_EOS:
    {
      GstFlowReturn flow_ret = GST_FLOW_OK;

      flow_ret = gst_video_decoder_drain_out (decoder, TRUE);
      ret = (flow_ret == GST_FLOW_OK);
      /* Forward EOS immediately. This is required because no
       * buffer or serialized event will come after EOS and
       * nothing could trigger another _finish_frame() call.
       *
       * The subclass can override this behaviour by overriding
       * the ::sink_event() vfunc and not chaining up to the
       * parent class' ::sink_event() until a later time.
       */
      forward_immediate = TRUE;
      break;
    }
    case GST_EVENT_GAP:
    {
      GstFlowReturn flow_ret = GST_FLOW_OK;

      flow_ret = gst_video_decoder_drain_out (decoder, FALSE);
      ret = (flow_ret == GST_FLOW_OK);

      /* Forward GAP immediately. Everything is drained after
       * the GAP event and we can forward this event immediately
       * now without having buffers out of order.
       */
      forward_immediate = TRUE;
      break;
    }
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    {
      gboolean in_still;
      GstFlowReturn flow_ret = GST_FLOW_OK;

      if (gst_video_event_parse_still_frame (event, &in_still)) {
        if (in_still) {
          GST_DEBUG_OBJECT (decoder, "draining current data for still-frame");
          flow_ret = gst_video_decoder_drain_out (decoder, FALSE);
          ret = (flow_ret == GST_FLOW_OK);
        }
        /* Forward STILL_FRAME immediately. Everything is drained after
         * the STILL_FRAME event and we can forward this event immediately
         * now without having buffers out of order.
         */
        forward_immediate = TRUE;
      }
      break;
    }
    case GST_EVENT_SEGMENT:
    {
      GstSegment segment;

      GST_VIDEO_DECODER_STREAM_LOCK (decoder);

      gst_event_copy_segment (event, &segment);

      if (segment.format == GST_FORMAT_TIME) {
        GST_DEBUG_OBJECT (decoder,
            "received TIME SEGMENT %" GST_SEGMENT_FORMAT, &segment);
      } else {
        gint64 start;

        GST_DEBUG_OBJECT (decoder,
            "received SEGMENT %" GST_SEGMENT_FORMAT, &segment);

        /* handle newsegment as a result from our legacy simple seeking */
        /* note that initial 0 should convert to 0 in any case */
        if (priv->do_estimate_rate &&
            gst_pad_query_convert (decoder->sinkpad, GST_FORMAT_BYTES,
                segment.start, GST_FORMAT_TIME, &start)) {
          /* best attempt convert */
          /* as these are only estimates, stop is kept open-ended to avoid
           * premature cutting */
          GST_DEBUG_OBJECT (decoder,
              "converted to TIME start %" GST_TIME_FORMAT,
              GST_TIME_ARGS (start));
          segment.start = start;
          segment.stop = GST_CLOCK_TIME_NONE;
          segment.time = start;
          /* replace event */
          gst_event_unref (event);
          event = gst_event_new_segment (&segment);
        } else {
          GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
          goto newseg_wrong_format;
        }
      }

      priv->base_timestamp = GST_CLOCK_TIME_NONE;
      priv->base_picture_number = 0;

      decoder->input_segment = segment;

      GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
      break;
    }
    case GST_EVENT_FLUSH_STOP:
    {
      GST_VIDEO_DECODER_STREAM_LOCK (decoder);
      /* well, this is kind of worse than a DISCONT */
      gst_video_decoder_flush (decoder, TRUE);
      GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
      /* Forward FLUSH_STOP immediately. This is required because it is
       * expected to be forwarded immediately and no buffers are queued
       * anyway.
       */
      forward_immediate = TRUE;
      break;
    }
    case GST_EVENT_TAG:
    {
      GstTagList *tags;

      gst_event_parse_tag (event, &tags);

      if (gst_tag_list_get_scope (tags) == GST_TAG_SCOPE_STREAM) {
        gst_video_decoder_merge_tags (decoder, tags, GST_TAG_MERGE_REPLACE);
        gst_event_unref (event);
        event = NULL;
        ret = TRUE;
      }
      break;
    }
    default:
      break;
  }

  /* Forward non-serialized events immediately, and all other
   * events which can be forwarded immediately without potentially
   * causing the event to go out of order with other events and
   * buffers as decided above.
   */
  if (event) {
    if (!GST_EVENT_IS_SERIALIZED (event) || forward_immediate) {
      ret = gst_video_decoder_push_event (decoder, event);
    } else {
      GST_VIDEO_DECODER_STREAM_LOCK (decoder);
      decoder->priv->current_frame_events =
          g_list_prepend (decoder->priv->current_frame_events, event);
      GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
      ret = TRUE;
    }
  }

  return ret;

newseg_wrong_format:
  {
    GST_DEBUG_OBJECT (decoder, "received non TIME newsegment");
    gst_event_unref (event);
    /* SWALLOW EVENT */
    return TRUE;
  }
}

static gboolean
gst_video_decoder_sink_event (GstPad * pad, GstObject * parent,
    GstEvent * event)
{
  GstVideoDecoder *decoder;
  GstVideoDecoderClass *decoder_class;
  gboolean ret = FALSE;

  decoder = GST_VIDEO_DECODER (parent);
  decoder_class = GST_VIDEO_DECODER_GET_CLASS (decoder);

  GST_DEBUG_OBJECT (decoder, "received event %d, %s", GST_EVENT_TYPE (event),
      GST_EVENT_TYPE_NAME (event));

  if (decoder_class->sink_event)
    ret = decoder_class->sink_event (decoder, event);

  return ret;
}

/* perform upstream byte <-> time conversion (duration, seeking)
 * if subclass allows and if enough data for moderately decent conversion */
static inline gboolean
gst_video_decoder_do_byte (GstVideoDecoder * dec)
{
  return dec->priv->do_estimate_rate && (dec->priv->bytes_out > 0)
      && (dec->priv->time > GST_SECOND);
}

static gboolean
gst_video_decoder_do_seek (GstVideoDecoder * dec, GstEvent * event)
{
  GstFormat format;
  GstSeekFlags flags;
  GstSeekType start_type, end_type;
  gdouble rate;
  gint64 start, start_time, end_time;
  GstSegment seek_segment;
  guint32 seqnum;

  gst_event_parse_seek (event, &rate, &format, &flags, &start_type,
      &start_time, &end_type, &end_time);

  /* we'll handle plain open-ended flushing seeks with the simple approach */
  if (rate != 1.0) {
    GST_DEBUG_OBJECT (dec, "unsupported seek: rate");
    return FALSE;
  }

  if (start_type != GST_SEEK_TYPE_SET) {
    GST_DEBUG_OBJECT (dec, "unsupported seek: start time");
    return FALSE;
  }

  if (end_type != GST_SEEK_TYPE_NONE ||
      (end_type == GST_SEEK_TYPE_SET && end_time != GST_CLOCK_TIME_NONE)) {
    GST_DEBUG_OBJECT (dec, "unsupported seek: end time");
    return FALSE;
  }

  if (!(flags & GST_SEEK_FLAG_FLUSH)) {
    GST_DEBUG_OBJECT (dec, "unsupported seek: not flushing");
    return FALSE;
  }

  memcpy (&seek_segment, &dec->output_segment, sizeof (seek_segment));
  gst_segment_do_seek (&seek_segment, rate, format, flags, start_type,
      start_time, end_type, end_time, NULL);
  start_time = seek_segment.position;

  if (!gst_pad_query_convert (dec->sinkpad, GST_FORMAT_TIME, start_time,
          GST_FORMAT_BYTES, &start)) {
    GST_DEBUG_OBJECT (dec, "conversion failed");
    return FALSE;
  }

  seqnum = gst_event_get_seqnum (event);
  event = gst_event_new_seek (1.0, GST_FORMAT_BYTES, flags,
      GST_SEEK_TYPE_SET, start, GST_SEEK_TYPE_NONE, -1);
  gst_event_set_seqnum (event, seqnum);

  GST_DEBUG_OBJECT (dec, "seeking to %" GST_TIME_FORMAT " at byte offset %"
      G_GINT64_FORMAT, GST_TIME_ARGS (start_time), start);

  return gst_pad_push_event (dec->sinkpad, event);
}
static gboolean
|
2012-04-24 17:51:30 +00:00
|
|
|
gst_video_decoder_src_event_default (GstVideoDecoder * decoder,
|
|
|
|
GstEvent * event)
|
2012-03-07 09:18:49 +00:00
|
|
|
{
|
|
|
|
GstVideoDecoderPrivate *priv;
|
|
|
|
gboolean res = FALSE;
|
|
|
|
|
|
|
|
priv = decoder->priv;
|
|
|
|
|
|
|
|
GST_DEBUG_OBJECT (decoder,
|
|
|
|
"received event %d, %s", GST_EVENT_TYPE (event),
|
|
|
|
GST_EVENT_TYPE_NAME (event));
|
|
|
|
|
|
|
|
switch (GST_EVENT_TYPE (event)) {
|
|
|
|
case GST_EVENT_SEEK:
|
|
|
|
{
|
2012-04-24 17:35:24 +00:00
|
|
|
GstFormat format;
|
2012-03-07 09:18:49 +00:00
|
|
|
gdouble rate;
|
|
|
|
GstSeekFlags flags;
|
2012-07-27 13:21:51 +00:00
|
|
|
GstSeekType start_type, stop_type;
|
|
|
|
gint64 start, stop;
|
|
|
|
gint64 tstart, tstop;
|
2012-03-07 09:18:49 +00:00
|
|
|
guint32 seqnum;
|
|
|
|
|
2012-07-27 13:21:51 +00:00
|
|
|
gst_event_parse_seek (event, &rate, &format, &flags, &start_type, &start,
|
2012-03-07 09:18:49 +00:00
|
|
|
&stop_type, &stop);
|
|
|
|
seqnum = gst_event_get_seqnum (event);
|
|
|
|
|
|
|
|
/* upstream gets a chance first */
|
|
|
|
if ((res = gst_pad_push_event (decoder->sinkpad, event)))
|
|
|
|
break;
|
|
|
|
|
|
|
|
/* if upstream fails for a time seek, maybe we can help if allowed */
|
|
|
|
if (format == GST_FORMAT_TIME) {
|
|
|
|
if (gst_video_decoder_do_byte (decoder))
|
|
|
|
res = gst_video_decoder_do_seek (decoder, event);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* ... though a non-time seek can be aided as well */
|
|
|
|
/* First bring the requested format to time */
|
2012-04-24 17:35:24 +00:00
|
|
|
if (!(res =
|
2012-07-27 13:21:51 +00:00
|
|
|
gst_pad_query_convert (decoder->srcpad, format, start,
|
|
|
|
GST_FORMAT_TIME, &tstart)))
|
2012-03-07 09:18:49 +00:00
|
|
|
goto convert_error;
|
2012-04-24 17:35:24 +00:00
|
|
|
if (!(res =
|
2012-04-24 17:51:30 +00:00
|
|
|
gst_pad_query_convert (decoder->srcpad, format, stop,
|
|
|
|
GST_FORMAT_TIME, &tstop)))
|
2012-03-07 09:18:49 +00:00
|
|
|
goto convert_error;
|
|
|
|
|
|
|
|
/* then seek with time on the peer */
|
|
|
|
event = gst_event_new_seek (rate, GST_FORMAT_TIME,
|
2012-07-27 13:21:51 +00:00
|
|
|
flags, start_type, tstart, stop_type, tstop);
|
2012-03-07 09:18:49 +00:00
|
|
|
gst_event_set_seqnum (event, seqnum);
|
|
|
|
|
|
|
|
res = gst_pad_push_event (decoder->sinkpad, event);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case GST_EVENT_QOS:
|
|
|
|
{
|
2012-04-24 17:35:24 +00:00
|
|
|
GstQOSType type;
|
2012-03-07 09:18:49 +00:00
|
|
|
gdouble proportion;
|
|
|
|
GstClockTimeDiff diff;
|
|
|
|
GstClockTime timestamp;
|
|
|
|
|
2012-04-24 17:35:24 +00:00
|
|
|
gst_event_parse_qos (event, &type, &proportion, &diff, ×tamp);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
GST_OBJECT_LOCK (decoder);
|
|
|
|
priv->proportion = proportion;
|
|
|
|
if (G_LIKELY (GST_CLOCK_TIME_IS_VALID (timestamp))) {
|
|
|
|
if (G_UNLIKELY (diff > 0)) {
|
2012-09-24 09:16:09 +00:00
|
|
|
priv->earliest_time = timestamp + 2 * diff + priv->qos_frame_duration;
|
2012-03-07 09:18:49 +00:00
|
|
|
} else {
|
|
|
|
priv->earliest_time = timestamp + diff;
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
priv->earliest_time = GST_CLOCK_TIME_NONE;
|
|
|
|
}
|
|
|
|
GST_OBJECT_UNLOCK (decoder);
|
|
|
|
|
|
|
|
GST_DEBUG_OBJECT (decoder,
|
|
|
|
"got QoS %" GST_TIME_FORMAT ", %" G_GINT64_FORMAT ", %g",
|
|
|
|
GST_TIME_ARGS (timestamp), diff, proportion);
|
|
|
|
|
|
|
|
res = gst_pad_push_event (decoder->sinkpad, event);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
default:
|
|
|
|
res = gst_pad_push_event (decoder->sinkpad, event);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
done:
|
|
|
|
return res;
|
|
|
|
|
|
|
|
convert_error:
|
|
|
|
GST_DEBUG_OBJECT (decoder, "could not convert format");
|
|
|
|
goto done;
|
|
|
|
}
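
/* A minimal sketch (not part of this file) of how a subclass could override
 * the ::src_event vfunc and still fall back to the default handling above;
 * "my_dec_parent_class" is the hypothetical name defined by the subclass'
 * G_DEFINE_TYPE:
 *
 *   static gboolean
 *   my_dec_src_event (GstVideoDecoder * dec, GstEvent * event)
 *   {
 *     if (GST_EVENT_TYPE (event) == GST_EVENT_SEEK) {
 *       // inspect or tweak the seek here if needed
 *     }
 *     // chain up so SEEK and QOS keep their default handling
 *     return GST_VIDEO_DECODER_CLASS (my_dec_parent_class)->src_event (dec,
 *         event);
 *   }
 */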
|
|
|
|
|
2012-04-24 17:51:30 +00:00
|
|
|
static gboolean
|
|
|
|
gst_video_decoder_src_event (GstPad * pad, GstObject * parent, GstEvent * event)
|
|
|
|
{
|
|
|
|
GstVideoDecoder *decoder;
|
|
|
|
GstVideoDecoderClass *decoder_class;
|
|
|
|
gboolean ret = FALSE;
|
|
|
|
|
|
|
|
decoder = GST_VIDEO_DECODER (parent);
|
|
|
|
decoder_class = GST_VIDEO_DECODER_GET_CLASS (decoder);
|
|
|
|
|
|
|
|
GST_DEBUG_OBJECT (decoder, "received event %d, %s", GST_EVENT_TYPE (event),
|
|
|
|
GST_EVENT_TYPE_NAME (event));
|
|
|
|
|
|
|
|
if (decoder_class->src_event)
|
|
|
|
ret = decoder_class->src_event (decoder, event);
|
|
|
|
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
2012-03-07 09:18:49 +00:00
|
|
|
static gboolean
|
2012-04-24 17:35:24 +00:00
|
|
|
gst_video_decoder_src_query (GstPad * pad, GstObject * parent, GstQuery * query)
|
2012-03-07 09:18:49 +00:00
|
|
|
{
|
|
|
|
GstVideoDecoder *dec;
|
|
|
|
gboolean res = TRUE;
|
|
|
|
|
2012-04-24 17:35:24 +00:00
|
|
|
dec = GST_VIDEO_DECODER (parent);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
GST_LOG_OBJECT (dec, "handling query: %" GST_PTR_FORMAT, query);
|
|
|
|
|
|
|
|
switch (GST_QUERY_TYPE (query)) {
|
|
|
|
case GST_QUERY_POSITION:
|
|
|
|
{
|
|
|
|
GstFormat format;
|
|
|
|
gint64 time, value;
|
|
|
|
|
|
|
|
/* upstream gets a chance first */
|
|
|
|
if ((res = gst_pad_peer_query (dec->sinkpad, query))) {
|
|
|
|
GST_LOG_OBJECT (dec, "returning peer response");
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* we start from the last seen time */
|
2012-06-19 14:36:38 +00:00
|
|
|
time = dec->priv->last_timestamp_out;
|
2012-03-07 09:18:49 +00:00
|
|
|
/* correct for the segment values */
|
|
|
|
time = gst_segment_to_stream_time (&dec->output_segment,
|
|
|
|
GST_FORMAT_TIME, time);
|
|
|
|
|
|
|
|
GST_LOG_OBJECT (dec,
|
|
|
|
"query %p: our time: %" GST_TIME_FORMAT, query, GST_TIME_ARGS (time));
|
|
|
|
|
|
|
|
/* and convert to the final format */
|
|
|
|
gst_query_parse_position (query, &format, NULL);
|
|
|
|
if (!(res = gst_pad_query_convert (pad, GST_FORMAT_TIME, time,
|
2012-04-24 17:35:24 +00:00
|
|
|
format, &value)))
|
2012-03-07 09:18:49 +00:00
|
|
|
break;
|
|
|
|
|
|
|
|
gst_query_set_position (query, format, value);
|
|
|
|
|
|
|
|
GST_LOG_OBJECT (dec,
|
|
|
|
"query %p: we return %" G_GINT64_FORMAT " (format %u)", query, value,
|
|
|
|
format);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case GST_QUERY_DURATION:
|
|
|
|
{
|
|
|
|
GstFormat format;
|
|
|
|
|
|
|
|
/* upstream in any case */
|
2012-04-24 17:35:24 +00:00
|
|
|
if ((res = gst_pad_query_default (pad, parent, query)))
|
2012-03-07 09:18:49 +00:00
|
|
|
break;
|
|
|
|
|
|
|
|
gst_query_parse_duration (query, &format, NULL);
|
|
|
|
/* try answering TIME by converting from BYTES if subclass allows */
|
|
|
|
if (format == GST_FORMAT_TIME && gst_video_decoder_do_byte (dec)) {
|
|
|
|
gint64 value;
|
|
|
|
|
2012-04-24 17:35:24 +00:00
|
|
|
if (gst_pad_peer_query_duration (dec->sinkpad, GST_FORMAT_BYTES,
|
|
|
|
&value)) {
|
2012-03-07 09:18:49 +00:00
|
|
|
GST_LOG_OBJECT (dec, "upstream size %" G_GINT64_FORMAT, value);
|
|
|
|
if (gst_pad_query_convert (dec->sinkpad,
|
2012-04-24 17:35:24 +00:00
|
|
|
GST_FORMAT_BYTES, value, GST_FORMAT_TIME, &value)) {
|
2012-03-07 09:18:49 +00:00
|
|
|
gst_query_set_duration (query, GST_FORMAT_TIME, value);
|
|
|
|
res = TRUE;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case GST_QUERY_CONVERT:
|
|
|
|
{
|
|
|
|
GstFormat src_fmt, dest_fmt;
|
|
|
|
gint64 src_val, dest_val;
|
|
|
|
|
|
|
|
GST_DEBUG_OBJECT (dec, "convert query");
|
|
|
|
|
|
|
|
gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
|
2012-09-29 00:07:43 +00:00
|
|
|
GST_OBJECT_LOCK (dec);
|
2012-09-20 01:16:01 +00:00
|
|
|
if (dec->priv->output_state != NULL)
|
|
|
|
res = gst_video_rawvideo_convert (dec->priv->output_state,
|
|
|
|
src_fmt, src_val, &dest_fmt, &dest_val);
|
|
|
|
else
|
|
|
|
res = FALSE;
|
2012-09-29 00:07:43 +00:00
|
|
|
GST_OBJECT_UNLOCK (dec);
|
2012-03-07 09:18:49 +00:00
|
|
|
if (!res)
|
|
|
|
goto error;
|
|
|
|
gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
case GST_QUERY_LATENCY:
|
|
|
|
{
|
|
|
|
gboolean live;
|
|
|
|
GstClockTime min_latency, max_latency;
|
|
|
|
|
|
|
|
res = gst_pad_peer_query (dec->sinkpad, query);
|
|
|
|
if (res) {
|
|
|
|
gst_query_parse_latency (query, &live, &min_latency, &max_latency);
|
2012-09-20 01:16:01 +00:00
|
|
|
GST_DEBUG_OBJECT (dec, "Peer qlatency: live %d, min %"
|
2012-03-07 09:18:49 +00:00
|
|
|
GST_TIME_FORMAT " max %" GST_TIME_FORMAT, live,
|
|
|
|
GST_TIME_ARGS (min_latency), GST_TIME_ARGS (max_latency));
|
|
|
|
|
|
|
|
GST_OBJECT_LOCK (dec);
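        /* add our own latency on top of what upstream reports; if either side
         * has an unbounded maximum, the combined maximum stays unbounded */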
|
|
|
|
min_latency += dec->priv->min_latency;
|
2012-04-25 11:52:51 +00:00
|
|
|
if (dec->priv->max_latency == GST_CLOCK_TIME_NONE) {
|
|
|
|
max_latency = GST_CLOCK_TIME_NONE;
|
|
|
|
} else if (max_latency != GST_CLOCK_TIME_NONE) {
|
2012-03-07 09:18:49 +00:00
|
|
|
max_latency += dec->priv->max_latency;
|
|
|
|
}
|
|
|
|
GST_OBJECT_UNLOCK (dec);
|
|
|
|
|
|
|
|
gst_query_set_latency (query, live, min_latency, max_latency);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
break;
|
|
|
|
default:
|
2012-04-24 17:35:24 +00:00
|
|
|
res = gst_pad_query_default (pad, parent, query);
|
2012-03-07 09:18:49 +00:00
|
|
|
}
|
|
|
|
return res;
|
|
|
|
|
|
|
|
error:
|
|
|
|
GST_ERROR_OBJECT (dec, "query failed");
|
|
|
|
return res;
|
|
|
|
}
|
|
|
|
|
|
|
|
static gboolean
|
2012-04-24 17:35:24 +00:00
|
|
|
gst_video_decoder_sink_query (GstPad * pad, GstObject * parent,
|
|
|
|
GstQuery * query)
|
2012-03-07 09:18:49 +00:00
|
|
|
{
|
|
|
|
GstVideoDecoder *decoder;
|
|
|
|
GstVideoDecoderPrivate *priv;
|
|
|
|
gboolean res = FALSE;
|
|
|
|
|
2012-04-24 17:35:24 +00:00
|
|
|
decoder = GST_VIDEO_DECODER (parent);
|
2012-03-07 09:18:49 +00:00
|
|
|
priv = decoder->priv;
|
|
|
|
|
|
|
|
GST_LOG_OBJECT (decoder, "handling query: %" GST_PTR_FORMAT, query);
|
|
|
|
|
|
|
|
switch (GST_QUERY_TYPE (query)) {
|
|
|
|
case GST_QUERY_CONVERT:
|
|
|
|
{
|
|
|
|
GstFormat src_fmt, dest_fmt;
|
|
|
|
gint64 src_val, dest_val;
|
|
|
|
|
|
|
|
gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
|
|
|
|
res =
|
|
|
|
gst_video_encoded_video_convert (priv->bytes_out, priv->time, src_fmt,
|
|
|
|
src_val, &dest_fmt, &dest_val);
|
|
|
|
if (!res)
|
|
|
|
goto error;
|
|
|
|
gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
|
|
|
|
break;
|
|
|
|
}
|
2012-06-15 14:06:12 +00:00
|
|
|
case GST_QUERY_ALLOCATION:{
|
|
|
|
GstVideoDecoderClass *klass = GST_VIDEO_DECODER_GET_CLASS (decoder);
|
|
|
|
|
|
|
|
if (klass->propose_allocation)
|
|
|
|
res = klass->propose_allocation (decoder, query);
|
|
|
|
break;
|
|
|
|
}
|
2012-03-07 09:18:49 +00:00
|
|
|
default:
|
2012-04-24 17:35:24 +00:00
|
|
|
res = gst_pad_query_default (pad, parent, query);
|
2012-03-07 09:18:49 +00:00
|
|
|
break;
|
|
|
|
}
|
|
|
|
done:
|
|
|
|
|
|
|
|
return res;
|
|
|
|
error:
|
|
|
|
GST_DEBUG_OBJECT (decoder, "query failed");
|
|
|
|
goto done;
|
|
|
|
}
|
|
|
|
|
|
|
|
typedef struct _Timestamp Timestamp;
|
|
|
|
struct _Timestamp
|
|
|
|
{
|
|
|
|
guint64 offset;
|
2012-06-27 11:48:58 +00:00
|
|
|
GstClockTime pts;
|
|
|
|
GstClockTime dts;
|
2012-03-07 09:18:49 +00:00
|
|
|
GstClockTime duration;
|
|
|
|
};
|
|
|
|
|
2012-06-27 14:42:10 +00:00
|
|
|
static void
|
|
|
|
timestamp_free (Timestamp * ts)
|
|
|
|
{
|
|
|
|
g_slice_free (Timestamp, ts);
|
|
|
|
}
|
|
|
|
|
2012-03-07 09:18:49 +00:00
|
|
|
static void
|
|
|
|
gst_video_decoder_add_timestamp (GstVideoDecoder * decoder, GstBuffer * buffer)
|
|
|
|
{
|
|
|
|
GstVideoDecoderPrivate *priv = decoder->priv;
|
|
|
|
Timestamp *ts;
|
|
|
|
|
2012-06-27 14:42:10 +00:00
|
|
|
ts = g_slice_new (Timestamp);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
GST_LOG_OBJECT (decoder,
|
2012-06-27 11:48:58 +00:00
|
|
|
"adding PTS %" GST_TIME_FORMAT " DTS %" GST_TIME_FORMAT
|
|
|
|
" (offset:%" G_GUINT64_FORMAT ")",
|
|
|
|
GST_TIME_ARGS (GST_BUFFER_PTS (buffer)),
|
|
|
|
GST_TIME_ARGS (GST_BUFFER_DTS (buffer)), priv->input_offset);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
ts->offset = priv->input_offset;
|
2012-06-27 11:48:58 +00:00
|
|
|
ts->pts = GST_BUFFER_PTS (buffer);
|
|
|
|
ts->dts = GST_BUFFER_DTS (buffer);
|
2012-03-07 09:18:49 +00:00
|
|
|
ts->duration = GST_BUFFER_DURATION (buffer);
|
|
|
|
|
|
|
|
priv->timestamps = g_list_append (priv->timestamps, ts);
|
|
|
|
}
|
|
|
|
|
|
|
|
static void
|
|
|
|
gst_video_decoder_get_timestamp_at_offset (GstVideoDecoder *
|
2012-06-27 11:48:58 +00:00
|
|
|
decoder, guint64 offset, GstClockTime * pts, GstClockTime * dts,
|
|
|
|
GstClockTime * duration)
|
2012-03-07 09:18:49 +00:00
|
|
|
{
|
2012-06-19 13:43:27 +00:00
|
|
|
#ifndef GST_DISABLE_GST_DEBUG
|
|
|
|
guint64 got_offset = 0;
|
|
|
|
#endif
|
2012-03-07 09:18:49 +00:00
|
|
|
Timestamp *ts;
|
|
|
|
GList *g;
|
|
|
|
|
2012-06-27 11:48:58 +00:00
|
|
|
*pts = GST_CLOCK_TIME_NONE;
|
|
|
|
*dts = GST_CLOCK_TIME_NONE;
|
2012-03-07 09:18:49 +00:00
|
|
|
*duration = GST_CLOCK_TIME_NONE;
|
|
|
|
|
|
|
|
g = decoder->priv->timestamps;
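  /* walk the stored input timestamps: every entry recorded at or before the
   * requested byte offset is consumed, and the last such entry supplies the
   * PTS/DTS/duration for the frame that starts at this offset */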
|
|
|
|
while (g) {
|
|
|
|
ts = g->data;
|
|
|
|
if (ts->offset <= offset) {
|
2012-06-19 13:43:27 +00:00
|
|
|
#ifndef GST_DISABLE_GST_DEBUG
|
|
|
|
got_offset = ts->offset;
|
|
|
|
#endif
|
2012-06-27 11:48:58 +00:00
|
|
|
*pts = ts->pts;
|
|
|
|
*dts = ts->dts;
|
2012-03-07 09:18:49 +00:00
|
|
|
*duration = ts->duration;
|
2012-06-27 14:42:10 +00:00
|
|
|
timestamp_free (ts);
|
2012-03-07 09:18:49 +00:00
|
|
|
g = g->next;
|
|
|
|
decoder->priv->timestamps = g_list_remove (decoder->priv->timestamps, ts);
|
|
|
|
} else {
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
GST_LOG_OBJECT (decoder,
|
2012-06-27 11:48:58 +00:00
|
|
|
"got PTS %" GST_TIME_FORMAT " DTS %" GST_TIME_FORMAT " @ offs %"
|
|
|
|
G_GUINT64_FORMAT " (wanted offset:%" G_GUINT64_FORMAT ")",
|
|
|
|
GST_TIME_ARGS (*pts), GST_TIME_ARGS (*dts), got_offset, offset);
|
2012-03-07 09:18:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
static void
|
|
|
|
gst_video_decoder_clear_queues (GstVideoDecoder * dec)
|
|
|
|
{
|
|
|
|
GstVideoDecoderPrivate *priv = dec->priv;
|
|
|
|
|
2012-06-19 13:46:44 +00:00
|
|
|
g_list_free_full (priv->output_queued,
|
|
|
|
(GDestroyNotify) gst_mini_object_unref);
|
|
|
|
priv->output_queued = NULL;
|
|
|
|
|
2012-06-27 12:13:02 +00:00
|
|
|
g_list_free_full (priv->gather, (GDestroyNotify) gst_mini_object_unref);
|
2012-03-07 09:18:49 +00:00
|
|
|
priv->gather = NULL;
|
2012-06-27 12:13:02 +00:00
|
|
|
g_list_free_full (priv->decode, (GDestroyNotify) gst_video_codec_frame_unref);
|
2012-03-07 09:18:49 +00:00
|
|
|
priv->decode = NULL;
|
2012-06-27 12:13:02 +00:00
|
|
|
g_list_free_full (priv->parse, (GDestroyNotify) gst_mini_object_unref);
|
2012-03-07 09:18:49 +00:00
|
|
|
priv->parse = NULL;
|
2012-06-27 12:13:02 +00:00
|
|
|
g_list_free_full (priv->parse_gather,
|
|
|
|
(GDestroyNotify) gst_video_codec_frame_unref);
|
2012-03-07 09:18:49 +00:00
|
|
|
priv->parse_gather = NULL;
|
2012-06-27 12:13:02 +00:00
|
|
|
g_list_free_full (priv->frames, (GDestroyNotify) gst_video_codec_frame_unref);
|
2012-03-07 09:18:49 +00:00
|
|
|
priv->frames = NULL;
|
|
|
|
}
|
|
|
|
|
|
|
|
static void
|
2013-08-15 12:15:05 +00:00
|
|
|
gst_video_decoder_reset (GstVideoDecoder * decoder, gboolean full,
|
|
|
|
gboolean flush_hard)
|
2012-03-07 09:18:49 +00:00
|
|
|
{
|
|
|
|
GstVideoDecoderPrivate *priv = decoder->priv;
|
|
|
|
|
|
|
|
GST_DEBUG_OBJECT (decoder, "reset full %d", full);
|
|
|
|
|
|
|
|
GST_VIDEO_DECODER_STREAM_LOCK (decoder);
|
|
|
|
|
2013-08-15 12:15:05 +00:00
|
|
|
if (full || flush_hard) {
|
2012-03-07 09:18:49 +00:00
|
|
|
gst_segment_init (&decoder->input_segment, GST_FORMAT_UNDEFINED);
|
|
|
|
gst_segment_init (&decoder->output_segment, GST_FORMAT_UNDEFINED);
|
|
|
|
gst_video_decoder_clear_queues (decoder);
|
2013-08-15 12:15:05 +00:00
|
|
|
|
|
|
|
if (priv->current_frame) {
|
|
|
|
gst_video_codec_frame_unref (priv->current_frame);
|
|
|
|
priv->current_frame = NULL;
|
|
|
|
}
|
|
|
|
|
|
|
|
g_list_free_full (priv->current_frame_events,
|
|
|
|
(GDestroyNotify) gst_event_unref);
|
|
|
|
priv->current_frame_events = NULL;
|
|
|
|
g_list_free_full (priv->pending_events, (GDestroyNotify) gst_event_unref);
|
|
|
|
priv->pending_events = NULL;
|
|
|
|
|
|
|
|
priv->error_count = 0;
|
|
|
|
priv->max_errors = GST_VIDEO_DECODER_MAX_ERRORS;
|
|
|
|
|
|
|
|
GST_OBJECT_LOCK (decoder);
|
|
|
|
priv->earliest_time = GST_CLOCK_TIME_NONE;
|
|
|
|
priv->proportion = 0.5;
|
|
|
|
GST_OBJECT_UNLOCK (decoder);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (full) {
|
2012-03-07 09:18:49 +00:00
|
|
|
if (priv->input_state)
|
|
|
|
gst_video_codec_state_unref (priv->input_state);
|
|
|
|
priv->input_state = NULL;
|
2012-09-29 00:07:43 +00:00
|
|
|
GST_OBJECT_LOCK (decoder);
|
2012-03-07 09:18:49 +00:00
|
|
|
if (priv->output_state)
|
|
|
|
gst_video_codec_state_unref (priv->output_state);
|
|
|
|
priv->output_state = NULL;
|
2012-09-24 09:16:09 +00:00
|
|
|
|
|
|
|
priv->qos_frame_duration = 0;
|
|
|
|
GST_OBJECT_UNLOCK (decoder);
|
|
|
|
|
2012-03-07 09:18:49 +00:00
|
|
|
priv->min_latency = 0;
|
|
|
|
priv->max_latency = 0;
|
2012-08-09 14:02:42 +00:00
|
|
|
|
|
|
|
if (priv->tags)
|
|
|
|
gst_tag_list_unref (priv->tags);
|
|
|
|
priv->tags = NULL;
|
|
|
|
priv->tags_changed = FALSE;
|
2012-10-10 13:04:07 +00:00
|
|
|
priv->reordered_output = FALSE;
|
2013-08-15 12:15:05 +00:00
|
|
|
|
|
|
|
priv->dropped = 0;
|
|
|
|
priv->processed = 0;
|
|
|
|
|
|
|
|
priv->decode_frame_number = 0;
|
|
|
|
priv->base_picture_number = 0;
|
2012-03-07 09:18:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
priv->discont = TRUE;
|
|
|
|
|
2012-06-19 14:36:38 +00:00
|
|
|
priv->base_timestamp = GST_CLOCK_TIME_NONE;
|
|
|
|
priv->last_timestamp_out = GST_CLOCK_TIME_NONE;
|
2012-09-27 09:31:34 +00:00
|
|
|
priv->pts_delta = GST_CLOCK_TIME_NONE;
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
priv->input_offset = 0;
|
|
|
|
priv->frame_offset = 0;
|
|
|
|
gst_adapter_clear (priv->input_adapter);
|
|
|
|
gst_adapter_clear (priv->output_adapter);
|
2012-06-27 14:42:10 +00:00
|
|
|
g_list_free_full (priv->timestamps, (GDestroyNotify) timestamp_free);
|
2012-03-07 09:18:49 +00:00
|
|
|
priv->timestamps = NULL;
|
|
|
|
|
|
|
|
priv->bytes_out = 0;
|
|
|
|
priv->time = 0;
|
|
|
|
|
|
|
|
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
|
|
|
}
|
|
|
|
|
|
|
|
static GstFlowReturn
|
2012-06-19 14:46:05 +00:00
|
|
|
gst_video_decoder_chain_forward (GstVideoDecoder * decoder,
|
|
|
|
GstBuffer * buf, gboolean at_eos)
|
2012-03-07 09:18:49 +00:00
|
|
|
{
|
|
|
|
GstVideoDecoderPrivate *priv;
|
|
|
|
GstVideoDecoderClass *klass;
|
|
|
|
GstFlowReturn ret = GST_FLOW_OK;
|
|
|
|
|
|
|
|
klass = GST_VIDEO_DECODER_GET_CLASS (decoder);
|
|
|
|
priv = decoder->priv;
|
|
|
|
|
|
|
|
g_return_val_if_fail (priv->packetized || klass->parse, GST_FLOW_ERROR);
|
|
|
|
|
2012-06-19 13:28:08 +00:00
|
|
|
if (priv->current_frame == NULL)
|
2012-03-07 09:18:49 +00:00
|
|
|
priv->current_frame = gst_video_decoder_new_frame (decoder);
|
|
|
|
|
2012-12-10 11:49:46 +00:00
|
|
|
if (GST_BUFFER_PTS_IS_VALID (buf) && !priv->packetized) {
|
2012-03-07 09:18:49 +00:00
|
|
|
gst_video_decoder_add_timestamp (decoder, buf);
|
|
|
|
}
|
2012-04-24 17:35:24 +00:00
|
|
|
priv->input_offset += gst_buffer_get_size (buf);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
if (priv->packetized) {
|
|
|
|
if (!GST_BUFFER_FLAG_IS_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT)) {
|
|
|
|
GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (priv->current_frame);
|
|
|
|
}
|
|
|
|
|
2012-06-19 13:28:08 +00:00
|
|
|
priv->current_frame->input_buffer = buf;
|
|
|
|
|
|
|
|
if (decoder->input_segment.rate < 0.0) {
|
|
|
|
priv->parse_gather =
|
|
|
|
g_list_prepend (priv->parse_gather, priv->current_frame);
|
|
|
|
} else {
|
|
|
|
ret = gst_video_decoder_decode_frame (decoder, priv->current_frame);
|
|
|
|
}
|
|
|
|
priv->current_frame = NULL;
|
2012-03-07 09:18:49 +00:00
|
|
|
} else {
|
|
|
|
gst_adapter_push (priv->input_adapter, buf);
|
|
|
|
|
2013-03-31 16:29:07 +00:00
|
|
|
ret = gst_video_decoder_parse_available (decoder, at_eos, TRUE);
|
2012-03-07 09:18:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if (ret == GST_VIDEO_DECODER_FLOW_NEED_DATA)
|
|
|
|
return GST_FLOW_OK;
|
|
|
|
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
|
|
|
static GstFlowReturn
|
|
|
|
gst_video_decoder_flush_decode (GstVideoDecoder * dec)
|
|
|
|
{
|
|
|
|
GstVideoDecoderPrivate *priv = dec->priv;
|
|
|
|
GstFlowReturn res = GST_FLOW_OK;
|
|
|
|
GList *walk;
|
|
|
|
|
|
|
|
GST_DEBUG_OBJECT (dec, "flushing buffers to decode");
|
|
|
|
|
|
|
|
/* clear buffer and decoder state */
|
|
|
|
gst_video_decoder_flush (dec, FALSE);
|
|
|
|
|
2012-06-19 14:22:25 +00:00
|
|
|
walk = priv->decode;
|
2012-03-07 09:18:49 +00:00
|
|
|
while (walk) {
|
|
|
|
GList *next;
|
|
|
|
GstVideoCodecFrame *frame = (GstVideoCodecFrame *) (walk->data);
|
|
|
|
|
2012-06-27 11:48:58 +00:00
|
|
|
GST_DEBUG_OBJECT (dec, "decoding frame %p buffer %p, PTS %" GST_TIME_FORMAT
|
2012-10-02 07:29:27 +00:00
|
|
|
", DTS %" GST_TIME_FORMAT, frame, frame->input_buffer,
|
|
|
|
GST_TIME_ARGS (GST_BUFFER_PTS (frame->input_buffer)),
|
|
|
|
GST_TIME_ARGS (GST_BUFFER_DTS (frame->input_buffer)));
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
next = walk->next;
|
|
|
|
|
2012-06-19 14:22:25 +00:00
|
|
|
priv->decode = g_list_delete_link (priv->decode, walk);
|
|
|
|
|
2012-03-07 09:18:49 +00:00
|
|
|
/* decode buffer, resulting data prepended to queue */
|
2012-06-19 13:28:08 +00:00
|
|
|
res = gst_video_decoder_decode_frame (dec, frame);
|
2012-06-19 14:22:25 +00:00
|
|
|
if (res != GST_FLOW_OK)
|
|
|
|
break;
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
walk = next;
|
|
|
|
}
|
|
|
|
|
|
|
|
return res;
|
|
|
|
}
|
|
|
|
|
2012-06-19 14:22:25 +00:00
|
|
|
/* gst_video_decoder_flush_parse is called from the
|
|
|
|
* chain_reverse() function when a buffer containing
|
|
|
|
 * a DISCONT is received - indicating that reverse playback
|
|
|
|
* looped back to the next data block, and therefore
|
|
|
|
* all available data should be fed through the
|
|
|
|
* decoder and frames gathered for reversed output
|
|
|
|
*/
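
/* Roughly, for reverse playback: upstream buffers are prepended to the
 * 'gather' list until a DISCONT arrives; they are then parsed oldest-first,
 * with the resulting frames collecting on 'parse_gather'; frames are moved
 * onto the 'decode' list until a keyframe is found, at which point that group
 * is decoded, and the decoded buffers queued on 'output_queued' are finally
 * pushed downstream newest-first with timestamps recomputed backwards. */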
|
2012-03-07 09:18:49 +00:00
|
|
|
static GstFlowReturn
|
2012-06-19 14:46:05 +00:00
|
|
|
gst_video_decoder_flush_parse (GstVideoDecoder * dec, gboolean at_eos)
|
2012-03-07 09:18:49 +00:00
|
|
|
{
|
|
|
|
GstVideoDecoderPrivate *priv = dec->priv;
|
|
|
|
GstFlowReturn res = GST_FLOW_OK;
|
|
|
|
GList *walk;
|
|
|
|
|
|
|
|
GST_DEBUG_OBJECT (dec, "flushing buffers to parsing");
|
|
|
|
|
2012-06-19 14:22:25 +00:00
|
|
|
/* Reverse the gather list, and prepend it to the parse list,
|
|
|
|
* then flush to parse whatever we can */
|
|
|
|
priv->gather = g_list_reverse (priv->gather);
|
|
|
|
priv->parse = g_list_concat (priv->gather, priv->parse);
|
|
|
|
priv->gather = NULL;
|
|
|
|
|
2012-03-07 09:18:49 +00:00
|
|
|
/* clear buffer and decoder state */
|
|
|
|
gst_video_decoder_flush (dec, FALSE);
|
|
|
|
|
2012-06-19 14:22:25 +00:00
|
|
|
walk = priv->parse;
|
2012-03-07 09:18:49 +00:00
|
|
|
while (walk) {
|
|
|
|
GstBuffer *buf = GST_BUFFER_CAST (walk->data);
|
2012-06-19 14:22:25 +00:00
|
|
|
GList *next = walk->next;
|
2012-03-07 09:18:49 +00:00
|
|
|
|
2012-06-27 11:48:58 +00:00
|
|
|
GST_DEBUG_OBJECT (dec, "parsing buffer %p, PTS %" GST_TIME_FORMAT
|
|
|
|
", DTS %" GST_TIME_FORMAT, buf, GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
|
|
|
|
GST_TIME_ARGS (GST_BUFFER_DTS (buf)));
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
/* parse buffer, resulting frames prepended to parse_gather queue */
|
|
|
|
gst_buffer_ref (buf);
|
2012-06-19 14:46:05 +00:00
|
|
|
res = gst_video_decoder_chain_forward (dec, buf, at_eos);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
/* if we generated output, we can discard the buffer, else we
|
|
|
|
* keep it in the queue */
|
|
|
|
if (priv->parse_gather) {
|
|
|
|
GST_DEBUG_OBJECT (dec, "parsed buffer to %p", priv->parse_gather->data);
|
|
|
|
priv->parse = g_list_delete_link (priv->parse, walk);
|
|
|
|
gst_buffer_unref (buf);
|
|
|
|
} else {
|
|
|
|
GST_DEBUG_OBJECT (dec, "buffer did not decode, keeping");
|
|
|
|
}
|
|
|
|
walk = next;
|
|
|
|
}
|
|
|
|
|
2012-06-19 14:22:25 +00:00
|
|
|
/* now we can process frames. Start by moving each frame from the parse_gather
|
|
|
|
 * to the decode list, reversing the order as we go, and stopping when/if we
|
|
|
|
* copy a keyframe. */
|
|
|
|
GST_DEBUG_OBJECT (dec, "checking parsed frames for a keyframe to decode");
|
|
|
|
walk = priv->parse_gather;
|
|
|
|
while (walk) {
|
|
|
|
GstVideoCodecFrame *frame = (GstVideoCodecFrame *) (walk->data);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
/* remove from the gather list */
|
2012-06-19 14:22:25 +00:00
|
|
|
priv->parse_gather = g_list_remove_link (priv->parse_gather, walk);
|
|
|
|
|
|
|
|
/* move it to the front of the decode queue */
|
|
|
|
priv->decode = g_list_concat (walk, priv->decode);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
/* if we copied a keyframe, flush and decode the decode queue */
|
|
|
|
if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
|
2012-06-27 11:48:58 +00:00
|
|
|
GST_DEBUG_OBJECT (dec, "found keyframe %p with PTS %" GST_TIME_FORMAT
|
|
|
|
", DTS %" GST_TIME_FORMAT, frame,
|
|
|
|
GST_TIME_ARGS (GST_BUFFER_PTS (frame->input_buffer)),
|
|
|
|
GST_TIME_ARGS (GST_BUFFER_DTS (frame->input_buffer)));
|
2012-03-07 09:18:49 +00:00
|
|
|
res = gst_video_decoder_flush_decode (dec);
|
2012-06-19 14:22:25 +00:00
|
|
|
if (res != GST_FLOW_OK)
|
|
|
|
goto done;
|
2012-03-07 09:18:49 +00:00
|
|
|
}
|
2012-06-19 14:22:25 +00:00
|
|
|
|
|
|
|
walk = priv->parse_gather;
|
2012-03-07 09:18:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/* now send queued data downstream */
|
2012-06-19 13:46:44 +00:00
|
|
|
walk = priv->output_queued;
|
|
|
|
while (walk) {
|
|
|
|
GstBuffer *buf = GST_BUFFER_CAST (walk->data);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
if (G_LIKELY (res == GST_FLOW_OK)) {
|
|
|
|
/* avoid stray DISCONT from forward processing,
|
|
|
|
 * which has no meaning in reverse pushing */
|
|
|
|
GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DISCONT);
|
2012-06-19 14:22:25 +00:00
|
|
|
|
|
|
|
/* Last chance to calculate a timestamp as we loop backwards
|
|
|
|
* through the list */
|
|
|
|
if (GST_BUFFER_TIMESTAMP (buf) != GST_CLOCK_TIME_NONE)
|
2012-06-19 14:36:38 +00:00
|
|
|
priv->last_timestamp_out = GST_BUFFER_TIMESTAMP (buf);
|
|
|
|
else if (priv->last_timestamp_out != GST_CLOCK_TIME_NONE &&
|
2012-06-19 14:22:25 +00:00
|
|
|
GST_BUFFER_DURATION (buf) != GST_CLOCK_TIME_NONE) {
|
|
|
|
GST_BUFFER_TIMESTAMP (buf) =
|
2012-06-19 14:36:38 +00:00
|
|
|
priv->last_timestamp_out - GST_BUFFER_DURATION (buf);
|
|
|
|
priv->last_timestamp_out = GST_BUFFER_TIMESTAMP (buf);
|
2012-06-19 14:22:25 +00:00
|
|
|
GST_LOG_OBJECT (dec,
|
|
|
|
"Calculated TS %" GST_TIME_FORMAT " working backwards",
|
2012-06-19 14:36:38 +00:00
|
|
|
GST_TIME_ARGS (priv->last_timestamp_out));
|
2012-06-19 14:22:25 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
res = gst_video_decoder_clip_and_push_buf (dec, buf);
|
2012-03-07 09:18:49 +00:00
|
|
|
} else {
|
|
|
|
gst_buffer_unref (buf);
|
|
|
|
}
|
|
|
|
|
2012-06-19 13:46:44 +00:00
|
|
|
priv->output_queued =
|
|
|
|
g_list_delete_link (priv->output_queued, priv->output_queued);
|
|
|
|
walk = priv->output_queued;
|
2012-03-07 09:18:49 +00:00
|
|
|
}
|
|
|
|
|
2012-06-19 14:22:25 +00:00
|
|
|
done:
|
2012-03-07 09:18:49 +00:00
|
|
|
return res;
|
|
|
|
}
|
|
|
|
|
|
|
|
static GstFlowReturn
|
|
|
|
gst_video_decoder_chain_reverse (GstVideoDecoder * dec, GstBuffer * buf)
|
|
|
|
{
|
|
|
|
GstVideoDecoderPrivate *priv = dec->priv;
|
|
|
|
GstFlowReturn result = GST_FLOW_OK;
|
|
|
|
|
|
|
|
/* if we have a discont, move buffers to the decode list */
|
|
|
|
if (!buf || GST_BUFFER_IS_DISCONT (buf)) {
|
|
|
|
GST_DEBUG_OBJECT (dec, "received discont");
|
2012-06-19 14:22:25 +00:00
|
|
|
|
|
|
|
/* parse and decode stuff in the gather and parse queues */
|
2012-06-19 14:46:05 +00:00
|
|
|
gst_video_decoder_flush_parse (dec, FALSE);
|
2012-03-07 09:18:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
if (G_LIKELY (buf)) {
|
2012-04-24 17:35:24 +00:00
|
|
|
GST_DEBUG_OBJECT (dec, "gathering buffer %p of size %" G_GSIZE_FORMAT ", "
|
2012-06-27 11:48:58 +00:00
|
|
|
"PTS %" GST_TIME_FORMAT ", DTS %" GST_TIME_FORMAT ", dur %"
|
|
|
|
GST_TIME_FORMAT, buf, gst_buffer_get_size (buf),
|
|
|
|
GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
|
|
|
|
GST_TIME_ARGS (GST_BUFFER_DTS (buf)),
|
2012-03-07 09:18:49 +00:00
|
|
|
GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
|
|
|
|
|
|
|
|
/* add buffer to gather queue */
|
|
|
|
priv->gather = g_list_prepend (priv->gather, buf);
|
|
|
|
}
|
|
|
|
|
|
|
|
return result;
|
|
|
|
}
|
|
|
|
|
|
|
|
static GstFlowReturn
|
2012-04-24 17:35:24 +00:00
|
|
|
gst_video_decoder_chain (GstPad * pad, GstObject * parent, GstBuffer * buf)
|
2012-03-07 09:18:49 +00:00
|
|
|
{
|
|
|
|
GstVideoDecoder *decoder;
|
|
|
|
GstFlowReturn ret = GST_FLOW_OK;
|
|
|
|
|
2012-04-24 17:35:24 +00:00
|
|
|
decoder = GST_VIDEO_DECODER (parent);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
2012-07-26 12:28:26 +00:00
|
|
|
if (G_UNLIKELY (decoder->priv->do_caps)) {
|
|
|
|
GstCaps *caps = gst_pad_get_current_caps (decoder->sinkpad);
|
|
|
|
if (caps) {
|
|
|
|
if (!gst_video_decoder_setcaps (decoder, caps)) {
|
|
|
|
gst_caps_unref (caps);
|
|
|
|
goto not_negotiated;
|
|
|
|
}
|
|
|
|
gst_caps_unref (caps);
|
|
|
|
}
|
|
|
|
decoder->priv->do_caps = FALSE;
|
|
|
|
}
|
|
|
|
|
2012-03-07 09:18:49 +00:00
|
|
|
GST_LOG_OBJECT (decoder,
|
2012-06-27 11:48:58 +00:00
|
|
|
"chain PTS %" GST_TIME_FORMAT ", DTS %" GST_TIME_FORMAT " duration %"
|
|
|
|
GST_TIME_FORMAT " size %" G_GSIZE_FORMAT,
|
|
|
|
GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
|
|
|
|
GST_TIME_ARGS (GST_BUFFER_DTS (buf)),
|
2012-04-24 17:35:24 +00:00
|
|
|
GST_TIME_ARGS (GST_BUFFER_DURATION (buf)), gst_buffer_get_size (buf));
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
GST_VIDEO_DECODER_STREAM_LOCK (decoder);
|
|
|
|
|
|
|
|
/* NOTE:
|
|
|
|
* requiring the pad to be negotiated makes it impossible to use
|
|
|
|
* oggdemux or filesrc ! decoder */
|
|
|
|
|
|
|
|
if (decoder->input_segment.format == GST_FORMAT_UNDEFINED) {
|
|
|
|
GstEvent *event;
|
2012-04-24 17:35:24 +00:00
|
|
|
GstSegment *segment = &decoder->input_segment;
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
GST_WARNING_OBJECT (decoder,
|
|
|
|
"Received buffer without a new-segment. "
|
|
|
|
"Assuming timestamps start from 0.");
|
|
|
|
|
2012-04-24 17:35:24 +00:00
|
|
|
gst_segment_init (segment, GST_FORMAT_TIME);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
2012-04-24 17:35:24 +00:00
|
|
|
event = gst_event_new_segment (segment);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
decoder->priv->current_frame_events =
|
|
|
|
g_list_prepend (decoder->priv->current_frame_events, event);
|
|
|
|
}
|
|
|
|
|
|
|
|
if (decoder->input_segment.rate > 0.0)
|
2012-06-19 14:46:05 +00:00
|
|
|
ret = gst_video_decoder_chain_forward (decoder, buf, FALSE);
|
2012-03-07 09:18:49 +00:00
|
|
|
else
|
|
|
|
ret = gst_video_decoder_chain_reverse (decoder, buf);
|
|
|
|
|
|
|
|
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
|
|
|
return ret;
|
2012-07-26 12:28:26 +00:00
|
|
|
|
|
|
|
/* ERRORS */
|
|
|
|
not_negotiated:
|
|
|
|
{
|
|
|
|
GST_ELEMENT_ERROR (decoder, CORE, NEGOTIATION, (NULL),
|
2013-05-09 13:34:10 +00:00
|
|
|
("decoder not initialized"));
|
2012-07-26 12:28:26 +00:00
|
|
|
gst_buffer_unref (buf);
|
|
|
|
return GST_FLOW_NOT_NEGOTIATED;
|
|
|
|
}
|
2012-03-07 09:18:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
static GstStateChangeReturn
|
|
|
|
gst_video_decoder_change_state (GstElement * element, GstStateChange transition)
|
|
|
|
{
|
|
|
|
GstVideoDecoder *decoder;
|
|
|
|
GstVideoDecoderClass *decoder_class;
|
|
|
|
GstStateChangeReturn ret;
|
|
|
|
|
|
|
|
decoder = GST_VIDEO_DECODER (element);
|
|
|
|
decoder_class = GST_VIDEO_DECODER_GET_CLASS (element);
|
|
|
|
|
|
|
|
switch (transition) {
|
|
|
|
case GST_STATE_CHANGE_NULL_TO_READY:
|
|
|
|
/* open device/library if needed */
|
|
|
|
if (decoder_class->open && !decoder_class->open (decoder))
|
|
|
|
goto open_failed;
|
|
|
|
break;
|
|
|
|
case GST_STATE_CHANGE_READY_TO_PAUSED:
|
|
|
|
/* Initialize device/library if needed */
|
|
|
|
if (decoder_class->start && !decoder_class->start (decoder))
|
|
|
|
goto start_failed;
|
2013-07-26 08:22:32 +00:00
|
|
|
GST_VIDEO_DECODER_STREAM_LOCK (decoder);
|
2013-08-15 12:15:05 +00:00
|
|
|
gst_video_decoder_reset (decoder, TRUE, TRUE);
|
2013-07-26 08:22:32 +00:00
|
|
|
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
2012-03-07 09:18:49 +00:00
|
|
|
break;
|
|
|
|
default:
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
ret = GST_ELEMENT_CLASS (parent_class)->change_state (element, transition);
|
|
|
|
|
|
|
|
switch (transition) {
|
|
|
|
case GST_STATE_CHANGE_PAUSED_TO_READY:
|
|
|
|
GST_VIDEO_DECODER_STREAM_LOCK (decoder);
|
2013-08-15 12:15:05 +00:00
|
|
|
gst_video_decoder_reset (decoder, TRUE, TRUE);
|
2012-03-07 09:18:49 +00:00
|
|
|
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
2013-07-26 08:22:32 +00:00
|
|
|
if (decoder_class->stop && !decoder_class->stop (decoder))
|
|
|
|
goto stop_failed;
|
2012-03-07 09:18:49 +00:00
|
|
|
break;
|
|
|
|
case GST_STATE_CHANGE_READY_TO_NULL:
|
|
|
|
/* close device/library if needed */
|
|
|
|
if (decoder_class->close && !decoder_class->close (decoder))
|
|
|
|
goto close_failed;
|
|
|
|
break;
|
|
|
|
default:
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
|
|
|
return ret;
|
|
|
|
|
|
|
|
/* Errors */
|
|
|
|
open_failed:
|
|
|
|
{
|
|
|
|
GST_ELEMENT_ERROR (decoder, LIBRARY, INIT, (NULL),
|
|
|
|
("Failed to open decoder"));
|
|
|
|
return GST_STATE_CHANGE_FAILURE;
|
|
|
|
}
|
|
|
|
|
|
|
|
start_failed:
|
|
|
|
{
|
|
|
|
GST_ELEMENT_ERROR (decoder, LIBRARY, INIT, (NULL),
|
|
|
|
("Failed to start decoder"));
|
|
|
|
return GST_STATE_CHANGE_FAILURE;
|
|
|
|
}
|
|
|
|
|
|
|
|
stop_failed:
|
|
|
|
{
|
|
|
|
GST_ELEMENT_ERROR (decoder, LIBRARY, INIT, (NULL),
|
|
|
|
("Failed to stop decoder"));
|
|
|
|
return GST_STATE_CHANGE_FAILURE;
|
|
|
|
}
|
|
|
|
|
|
|
|
close_failed:
|
|
|
|
{
|
|
|
|
GST_ELEMENT_ERROR (decoder, LIBRARY, INIT, (NULL),
|
|
|
|
("Failed to close decoder"));
|
|
|
|
return GST_STATE_CHANGE_FAILURE;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
static GstVideoCodecFrame *
|
|
|
|
gst_video_decoder_new_frame (GstVideoDecoder * decoder)
|
|
|
|
{
|
|
|
|
GstVideoDecoderPrivate *priv = decoder->priv;
|
|
|
|
GstVideoCodecFrame *frame;
|
|
|
|
|
|
|
|
frame = g_slice_new0 (GstVideoCodecFrame);
|
|
|
|
|
|
|
|
frame->ref_count = 1;
|
|
|
|
|
|
|
|
GST_VIDEO_DECODER_STREAM_LOCK (decoder);
|
|
|
|
frame->system_frame_number = priv->system_frame_number;
|
|
|
|
priv->system_frame_number++;
|
|
|
|
frame->decode_frame_number = priv->decode_frame_number;
|
|
|
|
priv->decode_frame_number++;
|
|
|
|
|
|
|
|
frame->dts = GST_CLOCK_TIME_NONE;
|
|
|
|
frame->pts = GST_CLOCK_TIME_NONE;
|
|
|
|
frame->duration = GST_CLOCK_TIME_NONE;
|
|
|
|
frame->events = priv->current_frame_events;
|
|
|
|
priv->current_frame_events = NULL;
|
|
|
|
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
|
|
|
|
2012-05-01 12:45:46 +00:00
|
|
|
GST_LOG_OBJECT (decoder, "Created new frame %p (sfn:%d)",
|
|
|
|
frame, frame->system_frame_number);
|
|
|
|
|
2012-03-07 09:18:49 +00:00
|
|
|
return frame;
|
|
|
|
}
|
|
|
|
|
|
|
|
static void
|
|
|
|
gst_video_decoder_prepare_finish_frame (GstVideoDecoder *
|
2012-06-12 15:58:05 +00:00
|
|
|
decoder, GstVideoCodecFrame * frame, gboolean dropping)
|
2012-03-07 09:18:49 +00:00
|
|
|
{
|
|
|
|
GstVideoDecoderPrivate *priv = decoder->priv;
|
|
|
|
GList *l, *events = NULL;
|
2013-07-24 07:24:45 +00:00
|
|
|
gboolean sync;
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
#ifndef GST_DISABLE_GST_DEBUG
|
2012-05-20 20:27:42 +00:00
|
|
|
GST_LOG_OBJECT (decoder, "n %d in %" G_GSIZE_FORMAT " out %" G_GSIZE_FORMAT,
|
2012-03-07 09:18:49 +00:00
|
|
|
g_list_length (priv->frames),
|
|
|
|
gst_adapter_available (priv->input_adapter),
|
|
|
|
gst_adapter_available (priv->output_adapter));
|
|
|
|
#endif
|
|
|
|
|
2013-07-24 07:24:45 +00:00
|
|
|
sync = GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame);
|
|
|
|
|
2012-03-07 09:18:49 +00:00
|
|
|
GST_LOG_OBJECT (decoder,
|
2012-06-27 11:48:58 +00:00
|
|
|
"finish frame %p (#%d) sync:%d PTS:%" GST_TIME_FORMAT " DTS:%"
|
2012-06-19 14:42:42 +00:00
|
|
|
GST_TIME_FORMAT,
|
|
|
|
frame, frame->system_frame_number,
|
2013-07-24 07:24:45 +00:00
|
|
|
sync, GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (frame->dts));
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
/* Push all pending events that arrived before this frame */
|
|
|
|
for (l = priv->frames; l; l = l->next) {
|
|
|
|
GstVideoCodecFrame *tmp = l->data;
|
|
|
|
|
|
|
|
if (tmp->events) {
|
2012-05-01 13:35:47 +00:00
|
|
|
events = g_list_concat (events, tmp->events);
|
2012-03-07 09:18:49 +00:00
|
|
|
tmp->events = NULL;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (tmp == frame)
|
|
|
|
break;
|
|
|
|
}
|
|
|
|
|
2013-05-09 13:34:10 +00:00
|
|
|
if (dropping || !decoder->priv->output_state) {
|
|
|
|
/* Push before the next frame that is not dropped */
|
|
|
|
decoder->priv->pending_events =
|
|
|
|
g_list_concat (decoder->priv->pending_events, events);
|
|
|
|
} else {
|
|
|
|
for (l = g_list_last (decoder->priv->pending_events); l;
|
|
|
|
l = g_list_previous (l)) {
|
|
|
|
GST_LOG_OBJECT (decoder, "pushing %s event",
|
|
|
|
GST_EVENT_TYPE_NAME (l->data));
|
|
|
|
gst_video_decoder_push_event (decoder, l->data);
|
|
|
|
}
|
|
|
|
g_list_free (decoder->priv->pending_events);
|
|
|
|
decoder->priv->pending_events = NULL;
|
|
|
|
|
|
|
|
for (l = g_list_last (events); l; l = g_list_previous (l)) {
|
|
|
|
GST_LOG_OBJECT (decoder, "pushing %s event",
|
|
|
|
GST_EVENT_TYPE_NAME (l->data));
|
|
|
|
gst_video_decoder_push_event (decoder, l->data);
|
|
|
|
}
|
|
|
|
g_list_free (events);
|
2012-03-07 09:18:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
/* Check if the data should not be displayed. For example altref/invisible
|
|
|
|
* frame in vp8. In this case we should not update the timestamps. */
|
2012-04-24 17:35:24 +00:00
|
|
|
if (GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY (frame))
|
2012-03-07 09:18:49 +00:00
|
|
|
return;
|
|
|
|
|
2012-06-19 14:42:42 +00:00
|
|
|
/* If the frame is meant to be output but we don't have an output_buffer
|
2012-04-24 17:35:24 +00:00
|
|
|
* we have a problem :) */
|
2013-05-09 08:37:06 +00:00
|
|
|
if (G_UNLIKELY ((frame->output_buffer == NULL) && !dropping))
|
2012-04-24 17:35:24 +00:00
|
|
|
goto no_output_buffer;
|
|
|
|
|
2012-03-07 09:18:49 +00:00
|
|
|
if (GST_CLOCK_TIME_IS_VALID (frame->pts)) {
|
2012-06-19 14:36:38 +00:00
|
|
|
if (frame->pts != priv->base_timestamp) {
|
2012-03-07 09:18:49 +00:00
|
|
|
GST_DEBUG_OBJECT (decoder,
|
|
|
|
"sync timestamp %" GST_TIME_FORMAT " diff %" GST_TIME_FORMAT,
|
|
|
|
GST_TIME_ARGS (frame->pts),
|
|
|
|
GST_TIME_ARGS (frame->pts - decoder->output_segment.start));
|
2012-06-19 14:36:38 +00:00
|
|
|
priv->base_timestamp = frame->pts;
|
|
|
|
priv->base_picture_number = frame->decode_frame_number;
|
2012-03-07 09:18:49 +00:00
|
|
|
}
|
|
|
|
}
|
2012-06-19 14:36:38 +00:00
|
|
|
|
2012-06-19 17:40:29 +00:00
|
|
|
if (frame->duration == GST_CLOCK_TIME_NONE) {
|
2012-03-07 09:18:49 +00:00
|
|
|
frame->duration = gst_video_decoder_get_frame_duration (decoder, frame);
|
2012-06-19 17:40:29 +00:00
|
|
|
GST_LOG_OBJECT (decoder,
|
|
|
|
"Guessing duration %" GST_TIME_FORMAT " for frame...",
|
|
|
|
GST_TIME_ARGS (frame->duration));
|
|
|
|
}
|
2012-06-19 14:36:38 +00:00
|
|
|
|
2012-09-28 11:59:24 +00:00
|
|
|
/* PTS is expected monotonically ascending,
|
|
|
|
* so a good guess is lowest unsent DTS */
|
|
|
|
{
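    /* note: abidata.ABI.ts carries each pending frame's input DTS and
     * abidata.ABI.ts2 its input PTS, as stashed in decode_frame() */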
|
|
|
|
GstClockTime min_ts = GST_CLOCK_TIME_NONE;
|
|
|
|
GstVideoCodecFrame *oframe = NULL;
|
|
|
|
gboolean seen_none = FALSE;
|
|
|
|
|
|
|
|
/* some maintenance regardless */
|
|
|
|
for (l = priv->frames; l; l = l->next) {
|
|
|
|
GstVideoCodecFrame *tmp = l->data;
|
|
|
|
|
|
|
|
if (!GST_CLOCK_TIME_IS_VALID (tmp->abidata.ABI.ts)) {
|
|
|
|
seen_none = TRUE;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!GST_CLOCK_TIME_IS_VALID (min_ts) || tmp->abidata.ABI.ts < min_ts) {
|
|
|
|
min_ts = tmp->abidata.ABI.ts;
|
|
|
|
oframe = tmp;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
/* save a ts if needed */
|
|
|
|
if (oframe && oframe != frame) {
|
|
|
|
oframe->abidata.ABI.ts = frame->abidata.ABI.ts;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* and set if needed;
|
|
|
|
* valid delta means we have reasonable DTS input */
|
2012-10-10 13:04:07 +00:00
|
|
|
    /* also, if we ended up reordered, it means this approach conflicts
|
|
|
|
* with some sparse existing PTS, and so it does not work out */
|
|
|
|
if (!priv->reordered_output &&
|
|
|
|
!GST_CLOCK_TIME_IS_VALID (frame->pts) && !seen_none &&
|
2012-09-28 11:59:24 +00:00
|
|
|
GST_CLOCK_TIME_IS_VALID (priv->pts_delta)) {
|
|
|
|
frame->pts = min_ts + priv->pts_delta;
|
|
|
|
GST_DEBUG_OBJECT (decoder,
|
|
|
|
"no valid PTS, using oldest DTS %" GST_TIME_FORMAT,
|
|
|
|
GST_TIME_ARGS (frame->pts));
|
2012-09-27 09:31:34 +00:00
|
|
|
}
|
2012-10-10 13:04:07 +00:00
|
|
|
|
|
|
|
/* some more maintenance, ts2 holds PTS */
|
|
|
|
min_ts = GST_CLOCK_TIME_NONE;
|
|
|
|
seen_none = FALSE;
|
|
|
|
for (l = priv->frames; l; l = l->next) {
|
|
|
|
GstVideoCodecFrame *tmp = l->data;
|
|
|
|
|
|
|
|
if (!GST_CLOCK_TIME_IS_VALID (tmp->abidata.ABI.ts2)) {
|
|
|
|
seen_none = TRUE;
|
|
|
|
continue;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!GST_CLOCK_TIME_IS_VALID (min_ts) || tmp->abidata.ABI.ts2 < min_ts) {
|
|
|
|
min_ts = tmp->abidata.ABI.ts2;
|
|
|
|
oframe = tmp;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
/* save a ts if needed */
|
|
|
|
if (oframe && oframe != frame) {
|
|
|
|
oframe->abidata.ABI.ts2 = frame->abidata.ABI.ts2;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* if we detected reordered output, then PTS are void,
|
|
|
|
     * no matter how they were obtained (bogus input, subclass, etc.) */
|
|
|
|
if (priv->reordered_output && !seen_none) {
|
|
|
|
GST_DEBUG_OBJECT (decoder, "invaliding PTS");
|
|
|
|
frame->pts = GST_CLOCK_TIME_NONE;
|
|
|
|
}
|
|
|
|
|
|
|
|
if (!GST_CLOCK_TIME_IS_VALID (frame->pts) && !seen_none) {
|
|
|
|
frame->pts = min_ts;
|
|
|
|
GST_DEBUG_OBJECT (decoder,
|
|
|
|
"no valid PTS, using oldest PTS %" GST_TIME_FORMAT,
|
|
|
|
GST_TIME_ARGS (frame->pts));
|
|
|
|
}
|
2012-09-27 09:31:34 +00:00
|
|
|
}
|
|
|
|
|
2012-09-28 11:59:24 +00:00
|
|
|
|
2012-06-19 17:40:29 +00:00
|
|
|
if (frame->pts == GST_CLOCK_TIME_NONE) {
|
2012-06-19 14:36:38 +00:00
|
|
|
/* Last ditch timestamp guess: Just add the duration to the previous
|
|
|
|
* frame */
|
2012-06-19 17:40:29 +00:00
|
|
|
if (priv->last_timestamp_out != GST_CLOCK_TIME_NONE &&
|
2012-06-19 14:36:38 +00:00
|
|
|
frame->duration != GST_CLOCK_TIME_NONE) {
|
|
|
|
frame->pts = priv->last_timestamp_out + frame->duration;
|
|
|
|
GST_LOG_OBJECT (decoder,
|
|
|
|
"Guessing timestamp %" GST_TIME_FORMAT " for frame...",
|
|
|
|
GST_TIME_ARGS (frame->pts));
|
2013-07-24 07:24:45 +00:00
|
|
|
} else if (sync && frame->dts != GST_CLOCK_TIME_NONE) {
|
|
|
|
frame->pts = frame->dts;
|
|
|
|
GST_LOG_OBJECT (decoder,
|
|
|
|
"Setting DTS as PTS %" GST_TIME_FORMAT " for frame...",
|
|
|
|
GST_TIME_ARGS (frame->pts));
|
2012-06-19 14:36:38 +00:00
|
|
|
}
|
2012-03-07 09:18:49 +00:00
|
|
|
}
|
|
|
|
|
2012-06-19 14:36:38 +00:00
|
|
|
if (GST_CLOCK_TIME_IS_VALID (priv->last_timestamp_out)) {
|
|
|
|
if (frame->pts < priv->last_timestamp_out) {
|
2012-03-07 09:18:49 +00:00
|
|
|
GST_WARNING_OBJECT (decoder,
|
|
|
|
"decreasing timestamp (%" GST_TIME_FORMAT " < %"
|
|
|
|
GST_TIME_FORMAT ")",
|
2012-06-19 14:36:38 +00:00
|
|
|
GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (priv->last_timestamp_out));
|
2012-10-10 13:04:07 +00:00
|
|
|
priv->reordered_output = TRUE;
|
2012-03-07 09:18:49 +00:00
|
|
|
}
|
|
|
|
}
|
2012-06-19 14:36:38 +00:00
|
|
|
|
|
|
|
if (GST_CLOCK_TIME_IS_VALID (frame->pts))
|
|
|
|
priv->last_timestamp_out = frame->pts;
|
2012-03-07 09:18:49 +00:00
|
|
|
|
2012-04-24 13:07:31 +00:00
|
|
|
return;
|
2012-04-24 17:35:24 +00:00
|
|
|
|
|
|
|
/* ERRORS */
|
|
|
|
no_output_buffer:
|
|
|
|
{
|
|
|
|
GST_ERROR_OBJECT (decoder, "No buffer to output !");
|
|
|
|
}
|
2012-03-07 09:18:49 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
static void
|
2012-06-19 14:22:25 +00:00
|
|
|
gst_video_decoder_release_frame (GstVideoDecoder * dec,
|
2012-03-07 09:18:49 +00:00
|
|
|
GstVideoCodecFrame * frame)
|
|
|
|
{
|
2012-06-02 13:34:15 +00:00
|
|
|
GList *link;
|
|
|
|
|
2012-05-01 12:46:06 +00:00
|
|
|
/* unref once from the list */
|
2012-06-02 13:34:15 +00:00
|
|
|
link = g_list_find (dec->priv->frames, frame);
|
|
|
|
if (link) {
|
|
|
|
gst_video_codec_frame_unref (frame);
|
|
|
|
dec->priv->frames = g_list_delete_link (dec->priv->frames, link);
|
|
|
|
}
|
2012-03-07 09:18:49 +00:00
|
|
|
|
2012-05-01 12:46:06 +00:00
|
|
|
/* unref because this function takes ownership */
|
2012-03-07 09:18:49 +00:00
|
|
|
gst_video_codec_frame_unref (frame);
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* gst_video_decoder_drop_frame:
|
|
|
|
* @dec: a #GstVideoDecoder
|
|
|
|
* @frame: (transfer full): the #GstVideoCodecFrame to drop
|
|
|
|
*
|
|
|
|
* Similar to gst_video_decoder_finish_frame(), but drops @frame in any
|
|
|
|
* case and posts a QoS message with the frame's details on the bus.
|
|
|
|
 * The frame is then considered finished and released.
|
|
|
|
*
|
|
|
|
* Returns: a #GstFlowReturn, usually GST_FLOW_OK.
|
|
|
|
*/
|
|
|
|
GstFlowReturn
|
|
|
|
gst_video_decoder_drop_frame (GstVideoDecoder * dec, GstVideoCodecFrame * frame)
|
|
|
|
{
|
|
|
|
GstClockTime stream_time, jitter, earliest_time, qostime, timestamp;
|
|
|
|
GstSegment *segment;
|
|
|
|
GstMessage *qos_msg;
|
|
|
|
gdouble proportion;
|
|
|
|
|
2012-05-01 13:35:47 +00:00
|
|
|
GST_LOG_OBJECT (dec, "drop frame %p", frame);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
GST_VIDEO_DECODER_STREAM_LOCK (dec);
|
|
|
|
|
2012-06-12 15:58:05 +00:00
|
|
|
gst_video_decoder_prepare_finish_frame (dec, frame, TRUE);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
GST_DEBUG_OBJECT (dec, "dropping frame %" GST_TIME_FORMAT,
|
|
|
|
GST_TIME_ARGS (frame->pts));
|
|
|
|
|
|
|
|
dec->priv->dropped++;
|
|
|
|
|
|
|
|
/* post QoS message */
|
2012-09-24 09:16:09 +00:00
|
|
|
GST_OBJECT_LOCK (dec);
|
2012-03-07 09:18:49 +00:00
|
|
|
proportion = dec->priv->proportion;
|
2012-09-24 09:16:09 +00:00
|
|
|
earliest_time = dec->priv->earliest_time;
|
|
|
|
GST_OBJECT_UNLOCK (dec);
|
|
|
|
|
|
|
|
timestamp = frame->pts;
|
2012-03-07 09:18:49 +00:00
|
|
|
segment = &dec->output_segment;
|
2013-06-17 06:58:13 +00:00
|
|
|
if (G_UNLIKELY (segment->format == GST_FORMAT_UNDEFINED))
|
|
|
|
segment = &dec->input_segment;
|
2012-03-07 09:18:49 +00:00
|
|
|
stream_time =
|
|
|
|
gst_segment_to_stream_time (segment, GST_FORMAT_TIME, timestamp);
|
|
|
|
qostime = gst_segment_to_running_time (segment, GST_FORMAT_TIME, timestamp);
|
|
|
|
jitter = GST_CLOCK_DIFF (qostime, earliest_time);
|
|
|
|
qos_msg =
|
|
|
|
gst_message_new_qos (GST_OBJECT_CAST (dec), FALSE, qostime, stream_time,
|
|
|
|
timestamp, GST_CLOCK_TIME_NONE);
|
|
|
|
gst_message_set_qos_values (qos_msg, jitter, proportion, 1000000);
|
|
|
|
gst_message_set_qos_stats (qos_msg, GST_FORMAT_BUFFERS,
|
|
|
|
dec->priv->processed, dec->priv->dropped);
|
|
|
|
gst_element_post_message (GST_ELEMENT_CAST (dec), qos_msg);
|
|
|
|
|
|
|
|
/* now free the frame */
|
2012-06-19 14:22:25 +00:00
|
|
|
gst_video_decoder_release_frame (dec, frame);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
GST_VIDEO_DECODER_STREAM_UNLOCK (dec);
|
|
|
|
|
|
|
|
return GST_FLOW_OK;
|
|
|
|
}
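
/* A minimal sketch of how a subclass' @handle_frame typically hands frames
 * back to the base class, assuming the subclass has already called
 * gst_video_decoder_set_output_state() in @set_format; the my_dec_* names
 * are hypothetical:
 *
 *   static GstFlowReturn
 *   my_dec_handle_frame (GstVideoDecoder * dec, GstVideoCodecFrame * frame)
 *   {
 *     GstFlowReturn ret;
 *
 *     ret = gst_video_decoder_allocate_output_frame (dec, frame);
 *     if (ret != GST_FLOW_OK)
 *       return ret;
 *
 *     if (!my_dec_decode_picture (dec, frame->input_buffer,
 *             frame->output_buffer))
 *       return gst_video_decoder_drop_frame (dec, frame);
 *
 *     return gst_video_decoder_finish_frame (dec, frame);
 *   }
 */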
|
|
|
|
|
|
|
|
/**
|
|
|
|
* gst_video_decoder_finish_frame:
|
|
|
|
* @decoder: a #GstVideoDecoder
|
|
|
|
* @frame: (transfer full): a decoded #GstVideoCodecFrame
|
|
|
|
*
|
|
|
|
* @frame should have a valid decoded data buffer, whose metadata fields
|
|
|
|
* are then appropriately set according to frame data and pushed downstream.
|
|
|
|
* If no output data is provided, @frame is considered skipped.
|
|
|
|
* In any case, the frame is considered finished and released.
|
|
|
|
*
|
2012-07-05 12:29:42 +00:00
|
|
|
* After calling this function the output buffer of the frame is to be
|
|
|
|
* considered read-only. This function will also change the metadata
|
|
|
|
* of the buffer.
|
|
|
|
*
|
2012-03-07 09:18:49 +00:00
|
|
|
* Returns: a #GstFlowReturn resulting from sending data downstream
|
|
|
|
*/
|
|
|
|
GstFlowReturn
|
|
|
|
gst_video_decoder_finish_frame (GstVideoDecoder * decoder,
|
|
|
|
GstVideoCodecFrame * frame)
|
|
|
|
{
|
2012-06-19 14:08:57 +00:00
|
|
|
GstFlowReturn ret = GST_FLOW_OK;
|
2012-03-07 09:18:49 +00:00
|
|
|
GstVideoDecoderPrivate *priv = decoder->priv;
|
|
|
|
GstBuffer *output_buffer;
|
|
|
|
|
2012-05-01 13:35:47 +00:00
|
|
|
GST_LOG_OBJECT (decoder, "finish frame %p", frame);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
2012-07-23 09:50:11 +00:00
|
|
|
GST_VIDEO_DECODER_STREAM_LOCK (decoder);
|
|
|
|
|
2012-04-24 20:35:29 +00:00
|
|
|
if (G_UNLIKELY (priv->output_state_changed || (priv->output_state
|
2013-06-30 16:17:15 +00:00
|
|
|
&& gst_pad_check_reconfigure (decoder->srcpad)))) {
|
|
|
|
if (!gst_video_decoder_negotiate (decoder)) {
|
|
|
|
if (GST_PAD_IS_FLUSHING (decoder->srcpad))
|
|
|
|
ret = GST_FLOW_FLUSHING;
|
|
|
|
else
|
|
|
|
ret = GST_FLOW_NOT_NEGOTIATED;
|
|
|
|
goto done;
|
|
|
|
}
|
|
|
|
}
|
2012-03-07 09:18:49 +00:00
|
|
|
|
2012-06-12 15:58:05 +00:00
|
|
|
gst_video_decoder_prepare_finish_frame (decoder, frame, FALSE);
|
2012-03-07 09:18:49 +00:00
|
|
|
priv->processed++;
|
2012-06-19 14:08:57 +00:00
|
|
|
|
2012-08-09 14:02:42 +00:00
|
|
|
if (priv->tags && priv->tags_changed) {
|
|
|
|
gst_video_decoder_push_event (decoder,
|
|
|
|
gst_event_new_tag (gst_tag_list_ref (priv->tags)));
|
|
|
|
priv->tags_changed = FALSE;
|
|
|
|
}
|
|
|
|
|
2012-03-07 09:18:49 +00:00
|
|
|
/* no buffer data means this frame is skipped */
|
|
|
|
if (!frame->output_buffer || GST_VIDEO_CODEC_FRAME_IS_DECODE_ONLY (frame)) {
|
|
|
|
GST_DEBUG_OBJECT (decoder, "skipping frame %" GST_TIME_FORMAT,
|
|
|
|
GST_TIME_ARGS (frame->pts));
|
|
|
|
goto done;
|
|
|
|
}
|
|
|
|
|
2012-07-05 12:29:42 +00:00
|
|
|
output_buffer = frame->output_buffer;
|
2012-07-05 11:38:48 +00:00
|
|
|
|
|
|
|
GST_BUFFER_FLAG_UNSET (output_buffer, GST_BUFFER_FLAG_DELTA_UNIT);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
2012-06-27 11:48:58 +00:00
|
|
|
/* for decoded frames, set both PTS and DTS to the frame's PTS */
|
2012-07-05 11:38:48 +00:00
|
|
|
GST_BUFFER_PTS (output_buffer) = frame->pts;
|
|
|
|
GST_BUFFER_DTS (output_buffer) = frame->pts;
|
|
|
|
GST_BUFFER_DURATION (output_buffer) = frame->duration;
|
2012-06-19 14:08:57 +00:00
|
|
|
|
2012-07-05 11:38:48 +00:00
|
|
|
GST_BUFFER_OFFSET (output_buffer) = GST_BUFFER_OFFSET_NONE;
|
|
|
|
GST_BUFFER_OFFSET_END (output_buffer) = GST_BUFFER_OFFSET_NONE;
|
2012-06-19 14:08:57 +00:00
|
|
|
|
2012-03-07 09:18:49 +00:00
|
|
|
if (priv->discont) {
|
2012-07-05 11:38:48 +00:00
|
|
|
GST_BUFFER_FLAG_SET (output_buffer, GST_BUFFER_FLAG_DISCONT);
|
2012-03-07 09:18:49 +00:00
|
|
|
priv->discont = FALSE;
|
|
|
|
}
|
|
|
|
|
2012-07-05 12:29:42 +00:00
|
|
|
/* Get an additional ref to the buffer, which is going to be pushed
|
|
|
|
* downstream, the original ref is owned by the frame
|
|
|
|
*
|
|
|
|
* FIXME: clip_and_push_buf() changes buffer metadata but the buffer
|
|
|
|
* might have a refcount > 1 */
|
|
|
|
output_buffer = gst_buffer_ref (output_buffer);
|
2013-04-25 14:13:10 +00:00
|
|
|
|
|
|
|
/* Release frame so the buffer is writable when we push it downstream
|
|
|
|
* if possible, i.e. if the subclass does not hold additional references
|
|
|
|
* to the frame
|
|
|
|
*/
|
|
|
|
gst_video_decoder_release_frame (decoder, frame);
|
|
|
|
frame = NULL;
|
|
|
|
|
2012-06-19 14:08:57 +00:00
|
|
|
if (decoder->output_segment.rate < 0.0) {
|
|
|
|
GST_LOG_OBJECT (decoder, "queued frame");
|
|
|
|
priv->output_queued = g_list_prepend (priv->output_queued, output_buffer);
|
|
|
|
} else {
|
|
|
|
ret = gst_video_decoder_clip_and_push_buf (decoder, output_buffer);
|
|
|
|
}
|
|
|
|
|
|
|
|
done:
|
2013-04-25 14:13:10 +00:00
|
|
|
if (frame)
|
|
|
|
gst_video_decoder_release_frame (decoder, frame);
|
2012-06-19 14:08:57 +00:00
|
|
|
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
/* With stream lock, takes ownership of the buffer reference */
|
|
|
|
static GstFlowReturn
|
|
|
|
gst_video_decoder_clip_and_push_buf (GstVideoDecoder * decoder, GstBuffer * buf)
|
|
|
|
{
|
|
|
|
GstFlowReturn ret = GST_FLOW_OK;
|
|
|
|
GstVideoDecoderPrivate *priv = decoder->priv;
|
|
|
|
guint64 start, stop;
|
2012-06-19 14:36:38 +00:00
|
|
|
guint64 cstart, cstop;
|
2012-06-19 14:08:57 +00:00
|
|
|
GstSegment *segment;
|
|
|
|
GstClockTime duration;
|
|
|
|
|
2012-03-07 09:18:49 +00:00
|
|
|
/* Check for clipping */
|
2012-06-27 11:48:58 +00:00
|
|
|
start = GST_BUFFER_PTS (buf);
|
2012-06-19 14:08:57 +00:00
|
|
|
duration = GST_BUFFER_DURATION (buf);
|
|
|
|
|
|
|
|
stop = GST_CLOCK_TIME_NONE;
|
|
|
|
|
|
|
|
if (GST_CLOCK_TIME_IS_VALID (start) && GST_CLOCK_TIME_IS_VALID (duration)) {
|
|
|
|
stop = start + duration;
|
|
|
|
}
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
segment = &decoder->output_segment;
|
2012-06-19 14:36:38 +00:00
|
|
|
if (gst_segment_clip (segment, GST_FORMAT_TIME, start, stop, &cstart, &cstop)) {
|
|
|
|
|
2012-06-27 11:48:58 +00:00
|
|
|
GST_BUFFER_PTS (buf) = cstart;
|
2012-06-19 14:36:38 +00:00
|
|
|
|
|
|
|
if (stop != GST_CLOCK_TIME_NONE)
|
|
|
|
GST_BUFFER_DURATION (buf) = cstop - cstart;
|
|
|
|
|
2012-03-07 09:18:49 +00:00
|
|
|
GST_LOG_OBJECT (decoder,
|
|
|
|
"accepting buffer inside segment: %" GST_TIME_FORMAT " %"
|
|
|
|
GST_TIME_FORMAT " seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
|
|
|
|
" time %" GST_TIME_FORMAT,
|
2012-06-27 11:48:58 +00:00
|
|
|
GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
|
|
|
|
GST_TIME_ARGS (GST_BUFFER_PTS (buf) +
|
2012-06-19 14:08:57 +00:00
|
|
|
GST_BUFFER_DURATION (buf)),
|
2012-03-07 09:18:49 +00:00
|
|
|
GST_TIME_ARGS (segment->start), GST_TIME_ARGS (segment->stop),
|
|
|
|
GST_TIME_ARGS (segment->time));
|
|
|
|
} else {
|
|
|
|
GST_LOG_OBJECT (decoder,
|
|
|
|
"dropping buffer outside segment: %" GST_TIME_FORMAT
|
|
|
|
" %" GST_TIME_FORMAT
|
|
|
|
" seg %" GST_TIME_FORMAT " to %" GST_TIME_FORMAT
|
|
|
|
" time %" GST_TIME_FORMAT,
|
2012-06-19 14:08:57 +00:00
|
|
|
GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
|
2012-03-07 09:18:49 +00:00
|
|
|
GST_TIME_ARGS (segment->start),
|
|
|
|
GST_TIME_ARGS (segment->stop), GST_TIME_ARGS (segment->time));
|
2012-06-19 14:08:57 +00:00
|
|
|
gst_buffer_unref (buf);
|
2012-03-07 09:18:49 +00:00
|
|
|
goto done;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* update rate estimate */
|
2012-06-19 14:08:57 +00:00
|
|
|
priv->bytes_out += gst_buffer_get_size (buf);
|
|
|
|
if (GST_CLOCK_TIME_IS_VALID (duration)) {
|
|
|
|
priv->time += duration;
|
2012-03-07 09:18:49 +00:00
|
|
|
} else {
|
|
|
|
/* FIXME : Use difference between current and previous outgoing
|
|
|
|
* timestamp, and relate to difference between current and previous
|
|
|
|
* bytes */
|
|
|
|
/* better none than nothing valid */
|
|
|
|
priv->time = GST_CLOCK_TIME_NONE;
|
|
|
|
}
|
|
|
|
|
2012-06-19 14:22:25 +00:00
|
|
|
GST_DEBUG_OBJECT (decoder, "pushing buffer %p of size %" G_GSIZE_FORMAT ", "
|
2012-06-27 11:48:58 +00:00
|
|
|
"PTS %" GST_TIME_FORMAT ", dur %" GST_TIME_FORMAT, buf,
|
2012-06-19 14:22:25 +00:00
|
|
|
gst_buffer_get_size (buf),
|
2012-06-27 11:48:58 +00:00
|
|
|
GST_TIME_ARGS (GST_BUFFER_PTS (buf)),
|
2012-06-19 14:08:57 +00:00
|
|
|
GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
|
|
|
|
|
|
|
|
/* we got data, so note things are looking up again, reduce
|
|
|
|
* the error count, if there is one */
|
2012-03-07 09:18:49 +00:00
|
|
|
if (G_UNLIKELY (priv->error_count))
|
2012-11-09 15:46:15 +00:00
|
|
|
priv->error_count = 0;
|
2012-06-19 14:22:25 +00:00
|
|
|
|
|
|
|
ret = gst_pad_push (decoder->srcpad, buf);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
done:
|
|
|
|
return ret;
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* gst_video_decoder_add_to_frame:
|
|
|
|
* @decoder: a #GstVideoDecoder
|
2012-05-22 13:49:58 +00:00
|
|
|
* @n_bytes: the number of bytes to add
|
2012-03-07 09:18:49 +00:00
|
|
|
*
|
|
|
|
 * Removes the next @n_bytes of input data and adds them to the currently parsed frame.
|
|
|
|
*/
|
|
|
|
void
|
|
|
|
gst_video_decoder_add_to_frame (GstVideoDecoder * decoder, int n_bytes)
|
|
|
|
{
|
|
|
|
GstVideoDecoderPrivate *priv = decoder->priv;
|
|
|
|
GstBuffer *buf;
|
|
|
|
|
|
|
|
GST_LOG_OBJECT (decoder, "add %d bytes to frame", n_bytes);
|
|
|
|
|
|
|
|
if (n_bytes == 0)
|
|
|
|
return;
|
|
|
|
|
|
|
|
GST_VIDEO_DECODER_STREAM_LOCK (decoder);
|
|
|
|
if (gst_adapter_available (priv->output_adapter) == 0) {
|
|
|
|
priv->frame_offset =
|
|
|
|
priv->input_offset - gst_adapter_available (priv->input_adapter);
|
|
|
|
}
|
|
|
|
buf = gst_adapter_take_buffer (priv->input_adapter, n_bytes);
|
|
|
|
|
|
|
|
gst_adapter_push (priv->output_adapter, buf);
|
|
|
|
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
|
|
|
}
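
/* A minimal sketch of how a non-packetized subclass' @parse implementation
 * might use gst_video_decoder_add_to_frame() together with
 * gst_video_decoder_have_frame(); my_dec_find_frame_end() is a hypothetical
 * helper that reports how many bytes make up the next complete frame:
 *
 *   static GstFlowReturn
 *   my_dec_parse (GstVideoDecoder * dec, GstVideoCodecFrame * frame,
 *       GstAdapter * adapter, gboolean at_eos)
 *   {
 *     gint size;
 *
 *     if (!my_dec_find_frame_end (dec, adapter, &size))
 *       return GST_VIDEO_DECODER_FLOW_NEED_DATA;
 *
 *     gst_video_decoder_add_to_frame (dec, size);
 *     return gst_video_decoder_have_frame (dec);
 *   }
 */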
|
|
|
|
|
|
|
|
static guint64
|
|
|
|
gst_video_decoder_get_frame_duration (GstVideoDecoder * decoder,
|
|
|
|
GstVideoCodecFrame * frame)
|
|
|
|
{
|
|
|
|
GstVideoCodecState *state = decoder->priv->output_state;
|
|
|
|
|
2012-06-27 14:38:38 +00:00
|
|
|
/* it's possible that we don't have a state yet when we are dropping the
|
|
|
|
* initial buffers */
|
|
|
|
if (state == NULL)
|
|
|
|
return GST_CLOCK_TIME_NONE;
|
|
|
|
|
2012-03-07 09:18:49 +00:00
|
|
|
if (state->info.fps_d == 0 || state->info.fps_n == 0) {
|
|
|
|
return GST_CLOCK_TIME_NONE;
|
|
|
|
}
|
|
|
|
|
2012-04-25 16:21:03 +00:00
|
|
|
/* FIXME: For interlaced frames this needs to take into account
|
|
|
|
* the number of valid fields in the frame
|
|
|
|
*/
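  /* one frame lasts fps_d / fps_n seconds, e.g. 40 ms at 25/1 fps */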
|
2012-03-07 09:18:49 +00:00
|
|
|
|
2012-04-25 16:21:03 +00:00
|
|
|
return gst_util_uint64_scale (GST_SECOND, state->info.fps_d,
|
2012-03-07 09:18:49 +00:00
|
|
|
state->info.fps_n);
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* gst_video_decoder_have_frame:
|
|
|
|
* @decoder: a #GstVideoDecoder
|
|
|
|
*
|
|
|
|
 * Gathers all data collected for the currently parsed frame, sets the corresponding
|
|
|
|
* metadata and passes it along for further processing, i.e. @handle_frame.
|
|
|
|
*
|
|
|
|
* Returns: a #GstFlowReturn
|
|
|
|
*/
|
|
|
|
GstFlowReturn
|
|
|
|
gst_video_decoder_have_frame (GstVideoDecoder * decoder)
|
|
|
|
{
|
2012-06-19 13:28:08 +00:00
|
|
|
GstVideoDecoderPrivate *priv = decoder->priv;
|
2012-03-07 09:18:49 +00:00
|
|
|
GstBuffer *buffer;
|
|
|
|
int n_available;
|
2012-06-27 11:48:58 +00:00
|
|
|
GstClockTime pts, dts, duration;
|
2012-03-07 09:18:49 +00:00
|
|
|
GstFlowReturn ret = GST_FLOW_OK;
|
|
|
|
|
|
|
|
GST_LOG_OBJECT (decoder, "have_frame");
|
|
|
|
|
|
|
|
GST_VIDEO_DECODER_STREAM_LOCK (decoder);
|
|
|
|
|
2012-06-19 13:43:27 +00:00
|
|
|
n_available = gst_adapter_available (priv->output_adapter);
|
2012-03-07 09:18:49 +00:00
|
|
|
if (n_available) {
|
2012-06-19 13:43:27 +00:00
|
|
|
buffer = gst_adapter_take_buffer (priv->output_adapter, n_available);
|
2012-03-07 09:18:49 +00:00
|
|
|
} else {
|
|
|
|
buffer = gst_buffer_new_and_alloc (0);
|
|
|
|
}
|
|
|
|
|
2012-06-19 13:43:27 +00:00
|
|
|
priv->current_frame->input_buffer = buffer;
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
gst_video_decoder_get_timestamp_at_offset (decoder,
|
2012-06-27 11:48:58 +00:00
|
|
|
priv->frame_offset, &pts, &dts, &duration);
|
2012-03-07 09:18:49 +00:00
|
|
|
|
2012-06-27 11:48:58 +00:00
|
|
|
GST_BUFFER_PTS (buffer) = pts;
|
|
|
|
GST_BUFFER_DTS (buffer) = dts;
|
2012-03-07 09:18:49 +00:00
|
|
|
GST_BUFFER_DURATION (buffer) = duration;
|
|
|
|
|
|
|
|
GST_LOG_OBJECT (decoder, "collected frame size %d, "
|
2012-06-27 11:48:58 +00:00
|
|
|
"PTS %" GST_TIME_FORMAT ", DTS %" GST_TIME_FORMAT ", dur %"
|
|
|
|
GST_TIME_FORMAT, n_available, GST_TIME_ARGS (pts), GST_TIME_ARGS (dts),
|
|
|
|
GST_TIME_ARGS (duration));
|
2012-03-07 09:18:49 +00:00
|
|
|
|
2012-06-19 13:28:08 +00:00
|
|
|
/* In reverse playback, just capture and queue frames for later processing */
|
|
|
|
if (decoder->output_segment.rate < 0.0) {
|
|
|
|
priv->parse_gather =
|
|
|
|
g_list_prepend (priv->parse_gather, priv->current_frame);
|
|
|
|
} else {
|
|
|
|
/* Otherwise, decode the frame, which gives away our ref */
|
|
|
|
ret = gst_video_decoder_decode_frame (decoder, priv->current_frame);
|
|
|
|
}
|
|
|
|
/* Current frame is gone now, either way */
|
|
|
|
priv->current_frame = NULL;
|
2012-03-07 09:18:49 +00:00
|
|
|
|
|
|
|
GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
|
|
|
|
|
|
|
|
return ret;
|
|
|
|
}

/* Pass the frame in priv->current_frame through the
 * handle_frame() callback for decoding and passing to gvd_finish_frame(),
 * or dropping by passing to gvd_drop_frame() */
static GstFlowReturn
gst_video_decoder_decode_frame (GstVideoDecoder * decoder,
    GstVideoCodecFrame * frame)
{
  GstVideoDecoderPrivate *priv = decoder->priv;
  GstVideoDecoderClass *decoder_class;
  GstFlowReturn ret = GST_FLOW_OK;

  decoder_class = GST_VIDEO_DECODER_GET_CLASS (decoder);

  /* FIXME : This should only have to be checked once (either the subclass has an
   * implementation, or it doesn't) */
  g_return_val_if_fail (decoder_class->handle_frame != NULL, GST_FLOW_ERROR);

  frame->distance_from_sync = priv->distance_from_sync;
  priv->distance_from_sync++;
  frame->pts = GST_BUFFER_PTS (frame->input_buffer);
  frame->dts = GST_BUFFER_DTS (frame->input_buffer);
  frame->duration = GST_BUFFER_DURATION (frame->input_buffer);

  /* For keyframes, PTS = DTS + constant_offset, usually 0 to 3 frame
   * durations. */
  /* FIXME upstream can be quite wrong about the keyframe aspect,
   * so we could be going off here as well,
   * maybe let subclass decide if it really is/was a keyframe */
  if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame) &&
      GST_CLOCK_TIME_IS_VALID (frame->pts)
      && GST_CLOCK_TIME_IS_VALID (frame->dts)) {
    /* just in case they are not equal as might ideally be,
     * e.g. quicktime has a (positive) delta approach */
    priv->pts_delta = frame->pts - frame->dts;
    GST_DEBUG_OBJECT (decoder, "PTS delta %d ms",
        (gint) (priv->pts_delta / GST_MSECOND));
  }

  frame->abidata.ABI.ts = frame->dts;
  frame->abidata.ABI.ts2 = frame->pts;

  GST_LOG_OBJECT (decoder, "PTS %" GST_TIME_FORMAT ", DTS %" GST_TIME_FORMAT,
      GST_TIME_ARGS (frame->pts), GST_TIME_ARGS (frame->dts));
  GST_LOG_OBJECT (decoder, "dist %d", frame->distance_from_sync);

  gst_video_codec_frame_ref (frame);
  priv->frames = g_list_append (priv->frames, frame);

  if (g_list_length (priv->frames) > 10) {
    GST_DEBUG_OBJECT (decoder, "decoder frame list getting long: %d frames, "
        "possible internal leaking?", g_list_length (priv->frames));
  }

  frame->deadline =
      gst_segment_to_running_time (&decoder->input_segment, GST_FORMAT_TIME,
      frame->pts);

  /* do something with frame */
  ret = decoder_class->handle_frame (decoder, frame);
  if (ret != GST_FLOW_OK)
    GST_DEBUG_OBJECT (decoder, "flow error %s", gst_flow_get_name (ret));

  /* the frame has either been added to parse_gather or sent to
     handle frame so there is no need to unref it */
  return ret;
}

/**
 * gst_video_decoder_get_output_state:
 * @decoder: a #GstVideoDecoder
 *
 * Get the #GstVideoCodecState currently describing the output stream.
 *
 * Returns: (transfer full): #GstVideoCodecState describing format of video data.
 */
GstVideoCodecState *
gst_video_decoder_get_output_state (GstVideoDecoder * decoder)
{
  GstVideoCodecState *state = NULL;

  GST_OBJECT_LOCK (decoder);
  if (decoder->priv->output_state)
    state = gst_video_codec_state_ref (decoder->priv->output_state);
  GST_OBJECT_UNLOCK (decoder);

  return state;
}

/**
 * gst_video_decoder_set_output_state:
 * @decoder: a #GstVideoDecoder
 * @fmt: a #GstVideoFormat
 * @width: The width in pixels
 * @height: The height in pixels
 * @reference: (allow-none) (transfer none): An optional reference #GstVideoCodecState
 *
 * Creates a new #GstVideoCodecState with the specified @fmt, @width and @height
 * as the output state for the decoder.
 * Any previously set output state on @decoder will be replaced by the newly
 * created one.
 *
 * If the subclass wishes to copy over existing fields (like pixel aspect ratio,
 * or framerate) from an existing #GstVideoCodecState, it can be provided as a
 * @reference.
 *
 * If the subclass wishes to override some fields from the output state (like
 * pixel-aspect-ratio or framerate) it can do so on the returned #GstVideoCodecState.
 *
 * The new output state will only take effect (set on pads and buffers) starting
 * from the next call to #gst_video_decoder_finish_frame().
 *
 * Returns: (transfer full): the newly configured output state.
 */
GstVideoCodecState *
gst_video_decoder_set_output_state (GstVideoDecoder * decoder,
    GstVideoFormat fmt, guint width, guint height,
    GstVideoCodecState * reference)
{
  GstVideoDecoderPrivate *priv = decoder->priv;
  GstVideoCodecState *state;

  GST_DEBUG_OBJECT (decoder, "fmt:%d, width:%d, height:%d, reference:%p",
      fmt, width, height, reference);

  /* Create the new output state */
  state = _new_output_state (fmt, width, height, reference);

  GST_VIDEO_DECODER_STREAM_LOCK (decoder);

  GST_OBJECT_LOCK (decoder);
  /* Replace existing output state by new one */
  if (priv->output_state)
    gst_video_codec_state_unref (priv->output_state);
  priv->output_state = gst_video_codec_state_ref (state);

  if (priv->output_state != NULL && priv->output_state->info.fps_n > 0) {
    priv->qos_frame_duration =
        gst_util_uint64_scale (GST_SECOND, priv->output_state->info.fps_d,
        priv->output_state->info.fps_n);
  } else {
    priv->qos_frame_duration = 0;
  }
  priv->output_state_changed = TRUE;
  GST_OBJECT_UNLOCK (decoder);

  GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);

  return state;
}
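
/* Example (illustrative sketch): a subclass usually configures its output
 * from @set_format or @handle_frame once the stream geometry is known;
 * "input_state", "width" and "height" stand for values the subclass already
 * has at that point.
 *
 *   GstVideoCodecState *output_state;
 *
 *   output_state = gst_video_decoder_set_output_state (decoder,
 *       GST_VIDEO_FORMAT_I420, width, height, input_state);
 *   if (output_state->info.fps_n == 0) {
 *     output_state->info.fps_n = 30;
 *     output_state->info.fps_d = 1;
 *   }
 *   gst_video_codec_state_unref (output_state);
 */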

/**
 * gst_video_decoder_get_oldest_frame:
 * @decoder: a #GstVideoDecoder
 *
 * Get the oldest pending unfinished #GstVideoCodecFrame
 *
 * Returns: (transfer full): oldest pending unfinished #GstVideoCodecFrame.
 */
GstVideoCodecFrame *
gst_video_decoder_get_oldest_frame (GstVideoDecoder * decoder)
{
  GstVideoCodecFrame *frame = NULL;

  GST_VIDEO_DECODER_STREAM_LOCK (decoder);
  if (decoder->priv->frames)
    frame = gst_video_codec_frame_ref (decoder->priv->frames->data);
  GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);

  return (GstVideoCodecFrame *) frame;
}

/**
 * gst_video_decoder_get_frame:
 * @decoder: a #GstVideoDecoder
 * @frame_number: system_frame_number of a frame
 *
 * Get a pending unfinished #GstVideoCodecFrame
 *
 * Returns: (transfer full): pending unfinished #GstVideoCodecFrame identified by @frame_number.
 */
GstVideoCodecFrame *
gst_video_decoder_get_frame (GstVideoDecoder * decoder, int frame_number)
{
  GList *g;
  GstVideoCodecFrame *frame = NULL;

  GST_DEBUG_OBJECT (decoder, "frame_number : %d", frame_number);

  GST_VIDEO_DECODER_STREAM_LOCK (decoder);
  for (g = decoder->priv->frames; g; g = g->next) {
    GstVideoCodecFrame *tmp = g->data;

    if (tmp->system_frame_number == frame_number) {
      frame = gst_video_codec_frame_ref (tmp);
      break;
    }
  }
  GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);

  return frame;
}
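
/* Example (illustrative sketch): subclasses that hand input off to an
 * asynchronous or hardware decoder often look the matching
 * #GstVideoCodecFrame up again by its system_frame_number when the decoded
 * picture comes back. "picture_id" stands for whatever identifier the
 * subclass stored when it queued the input.
 *
 *   GstVideoCodecFrame *frame;
 *
 *   frame = gst_video_decoder_get_frame (decoder, picture_id);
 *   if (frame) {
 *     if (gst_video_decoder_allocate_output_frame (decoder, frame) ==
 *         GST_FLOW_OK) {
 *       ... copy the decoded picture into frame->output_buffer ...
 *     }
 *     ret = gst_video_decoder_finish_frame (decoder, frame);
 *   }
 */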

/**
 * gst_video_decoder_get_frames:
 * @decoder: a #GstVideoDecoder
 *
 * Get all pending unfinished #GstVideoCodecFrame
 *
 * Returns: (transfer full) (element-type GstVideoCodecFrame): pending unfinished #GstVideoCodecFrame.
 */
GList *
gst_video_decoder_get_frames (GstVideoDecoder * decoder)
{
  GList *frames;

  GST_VIDEO_DECODER_STREAM_LOCK (decoder);
  frames = g_list_copy (decoder->priv->frames);
  g_list_foreach (frames, (GFunc) gst_video_codec_frame_ref, NULL);
  GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);

  return frames;
}

static gboolean
gst_video_decoder_decide_allocation_default (GstVideoDecoder * decoder,
    GstQuery * query)
{
  GstCaps *outcaps;
  GstBufferPool *pool = NULL;
  guint size, min, max;
  GstAllocator *allocator = NULL;
  GstAllocationParams params;
  GstStructure *config;
  gboolean update_pool, update_allocator;
  GstVideoInfo vinfo;

  gst_query_parse_allocation (query, &outcaps, NULL);
  gst_video_info_init (&vinfo);
  gst_video_info_from_caps (&vinfo, outcaps);

  /* we got configuration from our peer or the decide_allocation method,
   * parse them */
  if (gst_query_get_n_allocation_params (query) > 0) {
    /* try the allocator */
    gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
    update_allocator = TRUE;
  } else {
    allocator = NULL;
    gst_allocation_params_init (&params);
    update_allocator = FALSE;
  }

  if (gst_query_get_n_allocation_pools (query) > 0) {
    gst_query_parse_nth_allocation_pool (query, 0, &pool, &size, &min, &max);
    size = MAX (size, vinfo.size);
    update_pool = TRUE;
  } else {
    pool = NULL;
    size = vinfo.size;
    min = max = 0;

    update_pool = FALSE;
  }

  if (pool == NULL) {
    /* no pool, we can make our own */
    GST_DEBUG_OBJECT (decoder, "no pool, making new pool");
    pool = gst_video_buffer_pool_new ();
  }

  /* now configure */
  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set_params (config, outcaps, size, min, max);
  gst_buffer_pool_config_set_allocator (config, allocator, &params);
  gst_buffer_pool_set_config (pool, config);

  if (update_allocator)
    gst_query_set_nth_allocation_param (query, 0, allocator, &params);
  else
    gst_query_add_allocation_param (query, allocator, &params);
  if (allocator)
    gst_object_unref (allocator);

  if (update_pool)
    gst_query_set_nth_allocation_pool (query, 0, pool, size, min, max);
  else
    gst_query_add_allocation_pool (query, pool, size, min, max);

  if (pool)
    gst_object_unref (pool);

  return TRUE;
}
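
/* Example (illustrative sketch): a subclass that requires #GstVideoMeta on
 * its output buffers could override decide_allocation(), chain up to this
 * default implementation and then enable the meta option on the negotiated
 * pool. "parent_class" is the usual G_DEFINE_TYPE parent pointer of the
 * subclass.
 *
 *   static gboolean
 *   my_dec_decide_allocation (GstVideoDecoder * decoder, GstQuery * query)
 *   {
 *     GstBufferPool *pool;
 *     GstStructure *config;
 *
 *     if (!GST_VIDEO_DECODER_CLASS (parent_class)->decide_allocation (decoder,
 *             query))
 *       return FALSE;
 *
 *     gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
 *     config = gst_buffer_pool_get_config (pool);
 *     gst_buffer_pool_config_add_option (config,
 *         GST_BUFFER_POOL_OPTION_VIDEO_META);
 *     gst_buffer_pool_set_config (pool, config);
 *     gst_object_unref (pool);
 *
 *     return TRUE;
 *   }
 */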

static gboolean
gst_video_decoder_propose_allocation_default (GstVideoDecoder * decoder,
    GstQuery * query)
{
  return TRUE;
}

static gboolean
gst_video_decoder_negotiate_default (GstVideoDecoder * decoder)
{
  GstVideoCodecState *state = decoder->priv->output_state;
  GstVideoDecoderClass *klass;
  GstQuery *query = NULL;
  GstBufferPool *pool = NULL;
  GstAllocator *allocator;
  GstAllocationParams params;
  gboolean ret = TRUE;
  GstVideoCodecFrame *frame;
  GstCaps *prevcaps;

  g_return_val_if_fail (GST_VIDEO_INFO_WIDTH (&state->info) != 0, FALSE);
  g_return_val_if_fail (GST_VIDEO_INFO_HEIGHT (&state->info) != 0, FALSE);

  klass = GST_VIDEO_DECODER_GET_CLASS (decoder);

  GST_DEBUG_OBJECT (decoder, "output_state par %d/%d fps %d/%d",
      state->info.par_n, state->info.par_d,
      state->info.fps_n, state->info.fps_d);

  if (state->caps == NULL)
    state->caps = gst_video_info_to_caps (&state->info);

  GST_DEBUG_OBJECT (decoder, "setting caps %" GST_PTR_FORMAT, state->caps);

  /* Push all pending pre-caps events of the oldest frame before
   * setting caps */
  frame = decoder->priv->frames ? decoder->priv->frames->data : NULL;
  if (frame || decoder->priv->current_frame_events) {
    GList **events, *l;

    if (frame) {
      events = &frame->events;
    } else {
      events = &decoder->priv->current_frame_events;
    }

    for (l = g_list_last (*events); l;) {
      GstEvent *event = GST_EVENT (l->data);
      GList *tmp;

      if (GST_EVENT_TYPE (event) < GST_EVENT_CAPS) {
        gst_video_decoder_push_event (decoder, event);
        tmp = l;
        l = l->prev;
        *events = g_list_delete_link (*events, tmp);
      } else {
        l = l->prev;
      }
    }
  }

  prevcaps = gst_pad_get_current_caps (decoder->srcpad);
  if (!prevcaps || !gst_caps_is_equal (prevcaps, state->caps))
    ret = gst_pad_set_caps (decoder->srcpad, state->caps);
  else
    ret = TRUE;
  if (prevcaps)
    gst_caps_unref (prevcaps);

  if (!ret)
    goto done;
  decoder->priv->output_state_changed = FALSE;

  /* Negotiate pool */
  query = gst_query_new_allocation (state->caps, TRUE);

  if (!gst_pad_peer_query (decoder->srcpad, query)) {
    GST_DEBUG_OBJECT (decoder, "didn't get downstream ALLOCATION hints");
  }

  g_assert (klass->decide_allocation != NULL);
  ret = klass->decide_allocation (decoder, query);

  GST_DEBUG_OBJECT (decoder, "ALLOCATION (%d) params: %" GST_PTR_FORMAT, ret,
      query);

  if (!ret)
    goto no_decide_allocation;

  /* we got configuration from our peer or the decide_allocation method,
   * parse them */
  if (gst_query_get_n_allocation_params (query) > 0) {
    gst_query_parse_nth_allocation_param (query, 0, &allocator, &params);
  } else {
    allocator = NULL;
    gst_allocation_params_init (&params);
  }

  if (gst_query_get_n_allocation_pools (query) > 0)
    gst_query_parse_nth_allocation_pool (query, 0, &pool, NULL, NULL, NULL);
  if (!pool) {
    if (allocator)
      gst_object_unref (allocator);
    ret = FALSE;
    goto no_decide_allocation;
  }

  if (decoder->priv->allocator)
    gst_object_unref (decoder->priv->allocator);
  decoder->priv->allocator = allocator;
  decoder->priv->params = params;

  if (decoder->priv->pool) {
    gst_buffer_pool_set_active (decoder->priv->pool, FALSE);
    gst_object_unref (decoder->priv->pool);
  }
  decoder->priv->pool = pool;

  /* and activate */
  gst_buffer_pool_set_active (pool, TRUE);

done:
  if (query)
    gst_query_unref (query);

  return ret;

  /* Errors */
no_decide_allocation:
  {
    GST_WARNING_OBJECT (decoder, "Subclass failed to decide allocation");
    goto done;
  }
}

/**
 * gst_video_decoder_negotiate:
 * @decoder: a #GstVideoDecoder
 *
 * Negotiate with downstream elements to the currently configured
 * #GstVideoCodecState.
 *
 * Returns: #TRUE if the negotiation succeeded, else #FALSE.
 */
gboolean
gst_video_decoder_negotiate (GstVideoDecoder * decoder)
{
  GstVideoDecoderClass *klass;
  gboolean ret = TRUE;

  g_return_val_if_fail (GST_IS_VIDEO_DECODER (decoder), FALSE);
  g_return_val_if_fail (decoder->priv->output_state, FALSE);

  klass = GST_VIDEO_DECODER_GET_CLASS (decoder);

  GST_VIDEO_DECODER_STREAM_LOCK (decoder);
  if (klass->negotiate)
    ret = klass->negotiate (decoder);
  GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);

  return ret;
}
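
/* Example (illustrative sketch): negotiation normally happens implicitly from
 * gst_video_decoder_allocate_output_frame(), but a subclass may trigger it
 * explicitly right after (re)configuring the output state, e.g. to find out
 * early whether downstream accepts the new caps. "input_state", "width" and
 * "height" are assumed to be known by the subclass.
 *
 *   GstVideoCodecState *state;
 *
 *   state = gst_video_decoder_set_output_state (decoder,
 *       GST_VIDEO_FORMAT_NV12, width, height, input_state);
 *   gst_video_codec_state_unref (state);
 *
 *   if (!gst_video_decoder_negotiate (decoder))
 *     return GST_FLOW_NOT_NEGOTIATED;
 */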

/**
 * gst_video_decoder_allocate_output_buffer:
 * @decoder: a #GstVideoDecoder
 *
 * Helper function that allocates a buffer to hold a video frame for @decoder's
 * current #GstVideoCodecState.
 *
 * You should use gst_video_decoder_allocate_output_frame() instead of this
 * function, if possible at all.
 *
 * Returns: (transfer full): allocated buffer, or NULL if no buffer could be
 *     allocated (e.g. when downstream is flushing or shutting down)
 */
GstBuffer *
gst_video_decoder_allocate_output_buffer (GstVideoDecoder * decoder)
{
  GstFlowReturn flow;
  GstBuffer *buffer = NULL;

  g_return_val_if_fail (decoder->priv->output_state, NULL);

  GST_DEBUG ("alloc src buffer");

  GST_VIDEO_DECODER_STREAM_LOCK (decoder);
  if (G_UNLIKELY (decoder->priv->output_state_changed
          || gst_pad_check_reconfigure (decoder->srcpad))) {
    if (!gst_video_decoder_negotiate (decoder)) {
      GST_DEBUG_OBJECT (decoder, "Failed to negotiate, fallback allocation");
      goto fallback;
    }
  }

  flow = gst_buffer_pool_acquire_buffer (decoder->priv->pool, &buffer, NULL);

  if (flow != GST_FLOW_OK) {
    GST_INFO_OBJECT (decoder, "couldn't allocate output buffer, flow %s",
        gst_flow_get_name (flow));
    goto fallback;
  }
  GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);

  return buffer;

fallback:
  buffer =
      gst_buffer_new_allocate (NULL, decoder->priv->output_state->info.size,
      NULL);

  GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);

  return buffer;
}

/**
 * gst_video_decoder_allocate_output_frame:
 * @decoder: a #GstVideoDecoder
 * @frame: a #GstVideoCodecFrame
 *
 * Helper function that allocates a buffer to hold a video frame for @decoder's
 * current #GstVideoCodecState. Subclass should already have configured video
 * state and set src pad caps.
 *
 * The buffer allocated here is owned by the frame and you should only
 * keep references to the frame, not the buffer.
 *
 * Returns: %GST_FLOW_OK if an output buffer could be allocated
 */
GstFlowReturn
gst_video_decoder_allocate_output_frame (GstVideoDecoder *
    decoder, GstVideoCodecFrame * frame)
{
  GstFlowReturn flow_ret;
  GstVideoCodecState *state;
  int num_bytes;

  g_return_val_if_fail (decoder->priv->output_state, GST_FLOW_NOT_NEGOTIATED);
  g_return_val_if_fail (frame->output_buffer == NULL, GST_FLOW_ERROR);

  GST_VIDEO_DECODER_STREAM_LOCK (decoder);

  state = decoder->priv->output_state;
  if (state == NULL) {
    g_warning ("Output state should be set before allocating frame");
    goto error;
  }
  num_bytes = GST_VIDEO_INFO_SIZE (&state->info);
  if (num_bytes == 0) {
    g_warning ("Frame size should not be 0");
    goto error;
  }

  if (G_UNLIKELY (decoder->priv->output_state_changed
          || gst_pad_check_reconfigure (decoder->srcpad)))
    gst_video_decoder_negotiate (decoder);

  GST_LOG_OBJECT (decoder, "alloc buffer size %d", num_bytes);

  flow_ret = gst_buffer_pool_acquire_buffer (decoder->priv->pool,
      &frame->output_buffer, NULL);

  GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);

  return flow_ret;

error:
  GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
  return GST_FLOW_ERROR;
}
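
/* Example (illustrative sketch): the typical @handle_frame flow around this
 * helper; "my_dec_decode_into" is a hypothetical routine that writes the
 * decoded picture into the mapped output buffer, and "out_info" stands for
 * the #GstVideoInfo of the output state configured earlier with
 * gst_video_decoder_set_output_state().
 *
 *   static GstFlowReturn
 *   my_dec_handle_frame (GstVideoDecoder * decoder, GstVideoCodecFrame * frame)
 *   {
 *     GstVideoFrame vframe;
 *     GstFlowReturn ret;
 *
 *     ret = gst_video_decoder_allocate_output_frame (decoder, frame);
 *     if (ret != GST_FLOW_OK) {
 *       gst_video_codec_frame_unref (frame);
 *       return ret;
 *     }
 *
 *     gst_video_frame_map (&vframe, &out_info, frame->output_buffer,
 *         GST_MAP_WRITE);
 *     my_dec_decode_into (decoder, frame->input_buffer, &vframe);
 *     gst_video_frame_unmap (&vframe);
 *
 *     return gst_video_decoder_finish_frame (decoder, frame);
 *   }
 */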

/**
 * gst_video_decoder_get_max_decode_time:
 * @decoder: a #GstVideoDecoder
 * @frame: a #GstVideoCodecFrame
 *
 * Determines maximum possible decoding time for @frame that will
 * allow it to decode and arrive in time (as determined by QoS events).
 * In particular, a negative result means decoding in time is no longer
 * possible and the frame should therefore be decoded (or dropped) as
 * quickly as possible.
 *
 * Returns: max decoding time.
 */
GstClockTimeDiff
gst_video_decoder_get_max_decode_time (GstVideoDecoder *
    decoder, GstVideoCodecFrame * frame)
{
  GstClockTimeDiff deadline;
  GstClockTime earliest_time;

  GST_OBJECT_LOCK (decoder);
  earliest_time = decoder->priv->earliest_time;
  if (GST_CLOCK_TIME_IS_VALID (earliest_time)
      && GST_CLOCK_TIME_IS_VALID (frame->deadline))
    deadline = GST_CLOCK_DIFF (earliest_time, frame->deadline);
  else
    deadline = G_MAXINT64;

  GST_LOG_OBJECT (decoder, "earliest %" GST_TIME_FORMAT
      ", frame deadline %" GST_TIME_FORMAT ", deadline %" GST_TIME_FORMAT,
      GST_TIME_ARGS (earliest_time), GST_TIME_ARGS (frame->deadline),
      GST_TIME_ARGS (deadline));

  GST_OBJECT_UNLOCK (decoder);

  return deadline;
}
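
/* Example (illustrative sketch): a subclass can consult this from
 * @handle_frame to skip decoding of frames that would arrive too late anyway,
 * e.g. dropping late non-keyframes as part of its QoS handling.
 *
 *   GstClockTimeDiff deadline;
 *
 *   deadline = gst_video_decoder_get_max_decode_time (decoder, frame);
 *   if (deadline < 0 && !GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame)) {
 *     GST_DEBUG_OBJECT (decoder, "frame is too late, dropping");
 *     return gst_video_decoder_drop_frame (decoder, frame);
 *   }
 */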

/**
 * gst_video_decoder_get_qos_proportion:
 * @decoder: a #GstVideoDecoder
 *
 * Returns: The current QoS proportion.
 *
 * Since: 1.0.3
 */
gdouble
gst_video_decoder_get_qos_proportion (GstVideoDecoder * decoder)
{
  gdouble proportion;

  g_return_val_if_fail (GST_IS_VIDEO_DECODER (decoder), 1.0);

  GST_OBJECT_LOCK (decoder);
  proportion = decoder->priv->proportion;
  GST_OBJECT_UNLOCK (decoder);

  return proportion;
}

GstFlowReturn
_gst_video_decoder_error (GstVideoDecoder * dec, gint weight,
    GQuark domain, gint code, gchar * txt, gchar * dbg, const gchar * file,
    const gchar * function, gint line)
{
  if (txt)
    GST_WARNING_OBJECT (dec, "error: %s", txt);
  if (dbg)
    GST_WARNING_OBJECT (dec, "error: %s", dbg);
  dec->priv->error_count += weight;
  dec->priv->discont = TRUE;
  if (dec->priv->max_errors < dec->priv->error_count) {
    gst_element_message_full (GST_ELEMENT (dec), GST_MESSAGE_ERROR,
        domain, code, txt, dbg, file, function, line);
    return GST_FLOW_ERROR;
  } else {
    g_free (txt);
    g_free (dbg);
    return GST_FLOW_OK;
  }
}
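
/* Example (illustrative sketch): subclasses normally report decoding problems
 * through the GST_VIDEO_DECODER_ERROR convenience macro declared in the
 * public header, which ends up in _gst_video_decoder_error() above and only
 * escalates to a fatal element error once the configured maximum error count
 * is exceeded. "my_dec_decode_picture" is a hypothetical subclass routine.
 *
 *   GstFlowReturn ret = GST_FLOW_OK;
 *
 *   if (!my_dec_decode_picture (dec, frame)) {
 *     GST_VIDEO_DECODER_ERROR (dec, 1, STREAM, DECODE,
 *         ("failed to decode picture"), ("corrupted bitstream"), ret);
 *     return ret;
 *   }
 */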

/**
 * gst_video_decoder_set_max_errors:
 * @dec: a #GstVideoDecoder
 * @num: max tolerated errors
 *
 * Sets the number of tolerated decoder errors, where a tolerated one is only
 * warned about, but exceeding the tolerance leads to a fatal error. The
 * default is set to GST_VIDEO_DECODER_MAX_ERRORS.
 */
void
gst_video_decoder_set_max_errors (GstVideoDecoder * dec, gint num)
{
  g_return_if_fail (GST_IS_VIDEO_DECODER (dec));

  dec->priv->max_errors = num;
}

/**
 * gst_video_decoder_get_max_errors:
 * @dec: a #GstVideoDecoder
 *
 * Returns: currently configured decoder tolerated error count.
 */
gint
gst_video_decoder_get_max_errors (GstVideoDecoder * dec)
{
  g_return_val_if_fail (GST_IS_VIDEO_DECODER (dec), 0);

  return dec->priv->max_errors;
}

/**
 * gst_video_decoder_set_packetized:
 * @decoder: a #GstVideoDecoder
 * @packetized: whether the input data should be considered as packetized.
 *
 * Allows baseclass to consider input data as packetized or not. If the
 * input is packetized, then the @parse method will not be called.
 */
void
gst_video_decoder_set_packetized (GstVideoDecoder * decoder,
    gboolean packetized)
{
  decoder->priv->packetized = packetized;
}
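
/* Example (illustrative sketch): a subclass that always receives complete
 * encoded pictures from upstream (one buffer per frame) typically declares
 * this once in its instance init so the base class never calls @parse.
 * "MyDec" and "my_dec_init" are hypothetical subclass names.
 *
 *   static void
 *   my_dec_init (MyDec * self)
 *   {
 *     gst_video_decoder_set_packetized (GST_VIDEO_DECODER (self), TRUE);
 *   }
 */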

/**
 * gst_video_decoder_get_packetized:
 * @decoder: a #GstVideoDecoder
 *
 * Queries whether input data is considered packetized or not by the
 * base class.
 *
 * Returns: TRUE if input data is considered packetized.
 */
gboolean
gst_video_decoder_get_packetized (GstVideoDecoder * decoder)
{
  return decoder->priv->packetized;
}

/**
 * gst_video_decoder_set_estimate_rate:
 * @dec: a #GstVideoDecoder
 * @enabled: whether to enable byte to time conversion
 *
 * Allows the base class to perform estimated byte-to-time conversion.
 */
void
gst_video_decoder_set_estimate_rate (GstVideoDecoder * dec, gboolean enabled)
{
  g_return_if_fail (GST_IS_VIDEO_DECODER (dec));

  dec->priv->do_estimate_rate = enabled;
}

/**
 * gst_video_decoder_get_estimate_rate:
 * @dec: a #GstVideoDecoder
 *
 * Returns: currently configured byte to time conversion setting
 */
gboolean
gst_video_decoder_get_estimate_rate (GstVideoDecoder * dec)
{
  g_return_val_if_fail (GST_IS_VIDEO_DECODER (dec), 0);

  return dec->priv->do_estimate_rate;
}

/**
 * gst_video_decoder_set_latency:
 * @decoder: a #GstVideoDecoder
 * @min_latency: minimum latency
 * @max_latency: maximum latency
 *
 * Lets #GstVideoDecoder sub-classes tell the baseclass what the decoder
 * latency is. Will also post a LATENCY message on the bus so the pipeline
 * can reconfigure its global latency.
 */
void
gst_video_decoder_set_latency (GstVideoDecoder * decoder,
    GstClockTime min_latency, GstClockTime max_latency)
{
  g_return_if_fail (GST_CLOCK_TIME_IS_VALID (min_latency));
  g_return_if_fail (max_latency >= min_latency);

  GST_OBJECT_LOCK (decoder);
  decoder->priv->min_latency = min_latency;
  decoder->priv->max_latency = max_latency;
  GST_OBJECT_UNLOCK (decoder);

  gst_element_post_message (GST_ELEMENT_CAST (decoder),
      gst_message_new_latency (GST_OBJECT_CAST (decoder)));
}
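
/* Example (illustrative sketch): a decoder that holds back a fixed number of
 * reordered frames could derive its latency from the negotiated framerate;
 * "reorder_depth" is a hypothetical subclass field and "state" the current
 * output #GstVideoCodecState.
 *
 *   GstClockTime latency;
 *
 *   latency = gst_util_uint64_scale (reorder_depth * GST_SECOND,
 *       state->info.fps_d, state->info.fps_n);
 *   gst_video_decoder_set_latency (decoder, latency, latency);
 */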

/**
 * gst_video_decoder_get_latency:
 * @decoder: a #GstVideoDecoder
 * @min_latency: (out) (allow-none): address of variable in which to store the
 *     configured minimum latency, or %NULL
 * @max_latency: (out) (allow-none): address of variable in which to store the
 *     configured maximum latency, or %NULL
 *
 * Query the configured decoder latency. Results will be returned via
 * @min_latency and @max_latency.
 */
void
gst_video_decoder_get_latency (GstVideoDecoder * decoder,
    GstClockTime * min_latency, GstClockTime * max_latency)
{
  GST_OBJECT_LOCK (decoder);
  if (min_latency)
    *min_latency = decoder->priv->min_latency;
  if (max_latency)
    *max_latency = decoder->priv->max_latency;
  GST_OBJECT_UNLOCK (decoder);
}

/**
 * gst_video_decoder_merge_tags:
 * @decoder: a #GstVideoDecoder
 * @tags: a #GstTagList to merge
 * @mode: the #GstTagMergeMode to use
 *
 * Adds tags to so-called pending tags, which will be processed
 * before pushing out data downstream.
 *
 * Note that this is provided for convenience, and the subclass is
 * not required to use this and can still do tag handling on its own.
 *
 * MT safe.
 */
void
gst_video_decoder_merge_tags (GstVideoDecoder * decoder,
    const GstTagList * tags, GstTagMergeMode mode)
{
  GstTagList *otags;

  g_return_if_fail (GST_IS_VIDEO_DECODER (decoder));
  g_return_if_fail (tags == NULL || GST_IS_TAG_LIST (tags));

  GST_VIDEO_DECODER_STREAM_LOCK (decoder);
  if (tags)
    GST_DEBUG_OBJECT (decoder, "merging tags %" GST_PTR_FORMAT, tags);
  otags = decoder->priv->tags;
  decoder->priv->tags = gst_tag_list_merge (decoder->priv->tags, tags, mode);
  if (otags)
    gst_tag_list_unref (otags);
  decoder->priv->tags_changed = TRUE;
  GST_VIDEO_DECODER_STREAM_UNLOCK (decoder);
}
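
/* Example (illustrative sketch): a subclass could advertise the codec it
 * decodes once, e.g. from its @set_format implementation.
 *
 *   GstTagList *tags;
 *
 *   tags = gst_tag_list_new (GST_TAG_VIDEO_CODEC, "Theora", NULL);
 *   gst_video_decoder_merge_tags (decoder, tags, GST_TAG_MERGE_REPLACE);
 *   gst_tag_list_unref (tags);
 */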

/**
 * gst_video_decoder_get_buffer_pool:
 * @decoder: a #GstVideoDecoder
 *
 * Returns: (transfer full): the instance of the #GstBufferPool used
 *     by the decoder; unref it after use
 */
GstBufferPool *
gst_video_decoder_get_buffer_pool (GstVideoDecoder * decoder)
{
  g_return_val_if_fail (GST_IS_VIDEO_DECODER (decoder), NULL);

  if (decoder->priv->pool)
    return gst_object_ref (decoder->priv->pool);

  return NULL;
}

/**
 * gst_video_decoder_get_allocator:
 * @decoder: a #GstVideoDecoder
 * @allocator: (out) (allow-none) (transfer full): the #GstAllocator
 *     used
 * @params: (out) (allow-none) (transfer full): the
 *     #GstAllocationParams of @allocator
 *
 * Lets #GstVideoDecoder sub-classes know the memory @allocator
 * used by the base class and its @params.
 *
 * Unref the @allocator after use.
 */
void
gst_video_decoder_get_allocator (GstVideoDecoder * decoder,
    GstAllocator ** allocator, GstAllocationParams * params)
{
  g_return_if_fail (GST_IS_VIDEO_DECODER (decoder));

  if (allocator)
    *allocator = decoder->priv->allocator ?
        gst_object_ref (decoder->priv->allocator) : NULL;

  if (params)
    *params = decoder->priv->params;
}
|