mirror of https://gitlab.freedesktop.org/gstreamer/gstreamer.git
libs/video: Finish porting to 0.11
parent ac27b5bcc8
commit c8561e7e1b

7 changed files with 239 additions and 191 deletions
@@ -1,5 +1,5 @@
 
-SUBDIRS = interfaces signalprocessor codecparsers
+SUBDIRS = interfaces signalprocessor codecparsers video
 
 noinst_HEADERS = gst-i18n-plugin.h gettext.h
 DIST_SUBDIRS = interfaces signalprocessor video basecamerabinsrc codecparsers
@@ -47,6 +47,9 @@ static GstStateChangeReturn gst_base_video_codec_change_state (GstElement *
 
 static GstElementClass *parent_class = NULL;
 
+/* NOTE (Edward): Do not use G_DEFINE_* because we need to have
+ * a GClassInitFunc called with the target class (which the macros
+ * don't handle). */
 static void gst_base_video_codec_class_init (GstBaseVideoCodecClass * klass);
 static void gst_base_video_codec_init (GstBaseVideoCodec * dec,
     GstBaseVideoCodecClass * klass);
@@ -86,6 +89,8 @@ gst_base_video_codec_class_init (GstBaseVideoCodecClass * klass)
   gobject_class = G_OBJECT_CLASS (klass);
   element_class = GST_ELEMENT_CLASS (klass);
 
+  parent_class = g_type_class_peek_parent (klass);
+
   gobject_class->finalize = gst_base_video_codec_finalize;
 
   element_class->change_state = gst_base_video_codec_change_state;
@@ -203,6 +208,9 @@ gst_base_video_codec_new_frame (GstBaseVideoCodec * base_video_codec)
   base_video_codec->system_frame_number++;
   GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_codec);
 
+  GST_LOG_OBJECT (base_video_codec, "Created new frame %p (sfn:%d)",
+      frame, frame->system_frame_number);
+
   return frame;
 }
 
@@ -211,6 +219,8 @@ gst_base_video_codec_free_frame (GstVideoFrameState * frame)
 {
   g_return_if_fail (frame != NULL);
 
+  GST_LOG ("Freeing frame %p (sfn:%d)", frame, frame->system_frame_number);
+
   if (frame->sink_buffer) {
     gst_buffer_unref (frame->sink_buffer);
   }
@@ -28,6 +28,8 @@
 #include <gst/gst.h>
 #include <gst/base/gstadapter.h>
 #include <gst/video/video.h>
+#include <gst/video/gstvideopool.h>
+#include <gst/video/gstmetavideo.h>
 
 G_BEGIN_DECLS
 
@@ -156,7 +158,8 @@ struct _GstBaseVideoCodec
   guint64 system_frame_number;
 
   GList *frames;                /* Protected with OBJECT_LOCK */
-  GstVideoState state;
+  GstVideoState state;          /* Compressed video pad */
+  GstVideoInfo info;            /* Raw video pad */
   GstSegment segment;
 
   gdouble proportion;
@@ -137,7 +137,7 @@ GST_DEBUG_CATEGORY (basevideodecoder_debug);
 
 static void gst_base_video_decoder_finalize (GObject * object);
 
-static gboolean gst_base_video_decoder_sink_setcaps (GstPad * pad,
+static gboolean gst_base_video_decoder_setcaps (GstBaseVideoDecoder * vdec,
     GstCaps * caps);
 static gboolean gst_base_video_decoder_sink_event (GstPad * pad,
     GstEvent * event);
@@ -257,16 +257,15 @@ gst_base_video_decoder_push_src_event (GstBaseVideoDecoder * decoder,
 }
 
 static gboolean
-gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
+gst_base_video_decoder_setcaps (GstBaseVideoDecoder * base_video_decoder,
+    GstCaps * caps)
 {
-  GstBaseVideoDecoder *base_video_decoder;
   GstBaseVideoDecoderClass *base_video_decoder_class;
   GstStructure *structure;
   const GValue *codec_data;
   GstVideoState state;
   gboolean ret = TRUE;
 
-  base_video_decoder = GST_BASE_VIDEO_DECODER (gst_pad_get_parent (pad));
   base_video_decoder_class =
       GST_BASE_VIDEO_DECODER_GET_CLASS (base_video_decoder);
 
@@ -280,36 +279,41 @@ gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
 
   structure = gst_caps_get_structure (caps, 0);
 
-  gst_video_format_parse_caps (caps, NULL, &state.width, &state.height);
+  /* FIXME : Add have_{width_height|framerate|par} fields to
+   * GstVideoState so we can make better decisions
+   */
 
-  /* this one fails if no framerate in caps */
-  if (!gst_video_parse_caps_framerate (caps, &state.fps_n, &state.fps_d)) {
+  gst_structure_get_int (structure, "width", &state.width);
+  gst_structure_get_int (structure, "height", &state.height);
+
+  if (!gst_structure_get_fraction (structure, "framerate", &state.fps_n,
+          &state.fps_d)) {
     state.fps_n = 0;
     state.fps_d = 1;
   }
-  /* but the p-a-r sets 1/1 instead, which is not quite informative ... */
-  if (!gst_structure_has_field (structure, "pixel-aspect-ratio") ||
-      !gst_video_parse_caps_pixel_aspect_ratio (caps,
+
+  if (!gst_structure_get_fraction (structure, "pixel-aspect-ratio",
           &state.par_n, &state.par_d)) {
     state.par_n = 0;
     state.par_d = 1;
   }
 
   state.have_interlaced =
-      gst_video_format_parse_caps_interlaced (caps, &state.interlaced);
+      gst_structure_get_boolean (structure, "interlaced", &state.interlaced);
 
   codec_data = gst_structure_get_value (structure, "codec_data");
   if (codec_data && G_VALUE_TYPE (codec_data) == GST_TYPE_BUFFER) {
-    state.codec_data = GST_BUFFER (gst_value_dup_mini_object (codec_data));
+    state.codec_data = GST_BUFFER (gst_value_get_buffer (codec_data));
   }
 
   if (base_video_decoder_class->set_format) {
+    GST_LOG_OBJECT (base_video_decoder, "Calling ::set_format()");
     ret = base_video_decoder_class->set_format (base_video_decoder, &state);
   }
 
   if (ret) {
-    gst_buffer_replace (&GST_BASE_VIDEO_CODEC (base_video_decoder)->state.
-        codec_data, NULL);
+    gst_buffer_replace (&GST_BASE_VIDEO_CODEC (base_video_decoder)->
+        state.codec_data, NULL);
     gst_caps_replace (&GST_BASE_VIDEO_CODEC (base_video_decoder)->state.caps,
         NULL);
     GST_BASE_VIDEO_CODEC (base_video_decoder)->state = state;
@@ -319,7 +323,6 @@ gst_base_video_decoder_sink_setcaps (GstPad * pad, GstCaps * caps)
   }
 
   GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
-  g_object_unref (base_video_decoder);
 
   return ret;
 }
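
The hunk above drops the 0.10 gst_video_format_parse_caps() helpers in favour of reading the caps fields directly from the GstStructure. A minimal standalone sketch of that pattern follows; the helper name is illustrative and not part of this commit.

#include <gst/gst.h>

/* Sketch only: read caps fields the way the ported setcaps handler above
 * does, with plain GstStructure accessors. */
static void
parse_video_caps_fields (GstCaps * caps)
{
  GstStructure *s = gst_caps_get_structure (caps, 0);
  gint width = 0, height = 0;
  gint fps_n = 0, fps_d = 1;
  gint par_n = 0, par_d = 1;
  gboolean interlaced = FALSE;

  gst_structure_get_int (s, "width", &width);
  gst_structure_get_int (s, "height", &height);

  /* missing fields simply leave the defaults in place */
  if (!gst_structure_get_fraction (s, "framerate", &fps_n, &fps_d)) {
    fps_n = 0;
    fps_d = 1;
  }
  if (!gst_structure_get_fraction (s, "pixel-aspect-ratio", &par_n, &par_d)) {
    par_n = 0;
    par_d = 1;
  }
  gst_structure_get_boolean (s, "interlaced", &interlaced);

  GST_LOG ("%dx%d @ %d/%d fps, PAR %d/%d, interlaced=%d",
      width, height, fps_n, fps_d, par_n, par_d, interlaced);
}
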
@@ -342,6 +345,11 @@ gst_base_video_decoder_finalize (GObject * object)
     base_video_decoder->output_adapter = NULL;
   }
 
+  if (base_video_decoder->pool) {
+    g_object_unref (base_video_decoder->pool);
+    base_video_decoder->pool = NULL;
+  }
+
   G_OBJECT_CLASS (parent_class)->finalize (object);
 }
 
@@ -403,7 +411,7 @@ gst_base_video_decoder_sink_event (GstPad * pad, GstEvent * event)
       GstCaps *caps;
 
       gst_event_parse_caps (event, &caps);
-      ret = gst_base_video_decoder_sink_setcaps (pad, caps);
+      ret = gst_base_video_decoder_setcaps (base_video_decoder, caps);
       gst_event_unref (event);
       break;
     }
@@ -1043,6 +1051,7 @@ gst_base_video_decoder_chain_forward (GstBaseVideoDecoder * base_video_decoder,
   }
 
   do {
+    GST_LOG_OBJECT (base_video_decoder, "Calling ::parse_data()");
     ret = klass->parse_data (base_video_decoder, FALSE);
   } while (ret == GST_FLOW_OK);
 
@@ -1232,17 +1241,15 @@ gst_base_video_decoder_chain (GstPad * pad, GstBuffer * buf)
       GST_FORMAT_UNDEFINED) {
     GstEvent *event;
     GstFlowReturn ret;
+    GstSegment *segment = &GST_BASE_VIDEO_CODEC (base_video_decoder)->segment;
 
     GST_WARNING_OBJECT (base_video_decoder,
         "Received buffer without a new-segment. "
        "Assuming timestamps start from 0.");
 
-    gst_segment_set_newsegment_full (&GST_BASE_VIDEO_CODEC
-        (base_video_decoder)->segment, FALSE, 1.0, 1.0, GST_FORMAT_TIME, 0,
-        GST_CLOCK_TIME_NONE, 0);
+    gst_segment_init (segment, GST_FORMAT_TIME);
 
-    event = gst_event_new_new_segment (FALSE, 1.0, GST_FORMAT_TIME, 0,
-        GST_CLOCK_TIME_NONE, 0);
+    event = gst_event_new_segment (segment);
 
     ret = gst_base_video_decoder_push_src_event (base_video_decoder, event);
     if (!ret) {
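
The chain hunk above replaces gst_segment_set_newsegment_full() / gst_event_new_new_segment() with gst_segment_init() and gst_event_new_segment(). A minimal sketch of that 0.11 sequence, assuming an existing source pad to push on:

#include <gst/gst.h>

/* Sketch only: build a default TIME segment and announce it downstream,
 * as the chain function above now does when a buffer arrives before any
 * segment event. "srcpad" is an assumed, already-created pad. */
static gboolean
push_default_time_segment (GstPad * srcpad)
{
  GstSegment segment;
  GstEvent *event;

  gst_segment_init (&segment, GST_FORMAT_TIME);
  event = gst_event_new_segment (&segment);

  return gst_pad_push_event (srcpad, event);
}
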
@@ -1486,16 +1493,16 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
       tff ^= 1;
     }
     if (tff) {
-      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_TFF);
+      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_FLAG_TFF);
     } else {
-      GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_TFF);
+      GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_FLAG_TFF);
     }
-    GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_RFF);
-    GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
+    GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_FLAG_RFF);
+    GST_BUFFER_FLAG_UNSET (src_buffer, GST_VIDEO_BUFFER_FLAG_ONEFIELD);
     if (frame->n_fields == 3) {
-      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_RFF);
+      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_FLAG_RFF);
     } else if (frame->n_fields == 1) {
-      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_ONEFIELD);
+      GST_BUFFER_FLAG_SET (src_buffer, GST_VIDEO_BUFFER_FLAG_ONEFIELD);
     }
   }
   if (GST_BASE_VIDEO_CODEC (base_video_decoder)->discont) {
@@ -1519,17 +1526,14 @@ gst_base_video_decoder_finish_frame (GstBaseVideoDecoder * base_video_decoder,
     GST_BASE_VIDEO_CODEC (base_video_decoder)->time = GST_CLOCK_TIME_NONE;
   }
 
-  gst_buffer_set_caps (src_buffer,
-      GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder)));
-
   GST_LOG_OBJECT (base_video_decoder, "pushing frame ts %" GST_TIME_FORMAT
       ", duration %" GST_TIME_FORMAT,
       GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (src_buffer)),
       GST_TIME_ARGS (GST_BUFFER_DURATION (src_buffer)));
 
   if (base_video_decoder->sink_clipping) {
-    gint64 start = GST_BUFFER_TIMESTAMP (src_buffer);
-    gint64 stop = GST_BUFFER_TIMESTAMP (src_buffer) +
+    guint64 start = GST_BUFFER_TIMESTAMP (src_buffer);
+    guint64 stop = GST_BUFFER_TIMESTAMP (src_buffer) +
         GST_BUFFER_DURATION (src_buffer);
     GstSegment *segment = &GST_BASE_VIDEO_CODEC (base_video_decoder)->segment;
 
@@ -1762,6 +1766,7 @@ gst_base_video_decoder_have_frame_2 (GstBaseVideoDecoder * base_video_decoder)
       frame->presentation_timestamp);
 
   /* do something with frame */
+  GST_LOG_OBJECT (base_video_decoder, "Calling ::handle_frame()");
   ret = base_video_decoder_class->handle_frame (base_video_decoder, frame);
   if (ret != GST_FLOW_OK) {
     GST_DEBUG_OBJECT (base_video_decoder, "flow error %s",
@@ -1885,12 +1890,22 @@ gst_base_video_decoder_get_frame (GstBaseVideoDecoder * base_video_decoder,
  *
  * Sets src pad caps according to currently configured #GstVideoState.
  *
+ * The #GstVideoInfo and #GstBufferPool will be created and negotiated
+ * according to those values.
+ *
+ * Returns: %TRUE if the format was properly negotiated, else %FALSE.
  */
 gboolean
 gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder)
 {
   GstCaps *caps;
-  GstVideoState *state = &GST_BASE_VIDEO_CODEC (base_video_decoder)->state;
+  GstBaseVideoCodec *codec = GST_BASE_VIDEO_CODEC (base_video_decoder);
+  GstVideoState *state = &codec->state;
+  GstVideoInfo *info = &codec->info;
+  GstQuery *query;
+  GstBufferPool *pool = NULL;
+  GstStructure *config;
+  guint size, min, max, prefix, alignment;
   gboolean ret;
 
   /* minimum sense */
@@ -1900,6 +1915,8 @@ gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder)
 
   GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
 
+  gst_video_info_set_format (info, state->format, state->width, state->height);
+
   /* sanitize */
   if (state->fps_n == 0 || state->fps_d == 0) {
     state->fps_n = 0;
@@ -1910,11 +1927,22 @@ gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder)
     state->par_d = 1;
   }
 
-  caps = gst_video_format_new_caps (state->format,
-      state->width, state->height,
-      state->fps_n, state->fps_d, state->par_n, state->par_d);
-  gst_caps_set_simple (caps, "interlaced",
-      G_TYPE_BOOLEAN, state->interlaced, NULL);
+  info->par_n = state->par_n;
+  info->par_d = state->par_d;
+  info->fps_n = state->fps_n;
+  info->fps_d = state->fps_d;
+
+  if (state->have_interlaced) {
+    if (state->interlaced)
+      GST_VIDEO_INFO_FLAG_SET (info, GST_VIDEO_FLAG_INTERLACED);
+    if (state->top_field_first)
+      GST_VIDEO_INFO_FLAG_SET (info, GST_VIDEO_FLAG_TFF);
+  }
+
+  /* FIXME : Handle chroma site */
+  /* FIXME : Handle colorimetry */
+
+  caps = gst_video_info_to_caps (info);
 
   GST_DEBUG_OBJECT (base_video_decoder, "setting caps %" GST_PTR_FORMAT, caps);
 
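
set_src_caps above now describes the raw output with a GstVideoInfo and derives the caps from it. A small sketch of that pattern with illustrative values; the interlace flag macro is the 0.11-era API used in this tree (later replaced by GstVideoInterlaceMode):

#include <gst/video/video.h>

/* Sketch only: fill a GstVideoInfo and convert it to caps, as the ported
 * decoder above does. The format/size/framerate values are placeholders. */
static GstCaps *
make_raw_video_caps (void)
{
  GstVideoInfo info;

  gst_video_info_set_format (&info, GST_VIDEO_FORMAT_I420, 320, 240);
  info.fps_n = 30;
  info.fps_d = 1;
  info.par_n = 1;
  info.par_d = 1;
  /* 0.11-era flag, mirroring the hunk above */
  GST_VIDEO_INFO_FLAG_SET (&info, GST_VIDEO_FLAG_INTERLACED);

  return gst_video_info_to_caps (&info);
}
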
@@ -1923,9 +1951,50 @@ gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder)
       caps);
   gst_caps_unref (caps);
 
-  /* arrange for derived info */
-  state->bytes_per_picture =
-      gst_video_format_get_size (state->format, state->width, state->height);
+  /* Negotiate pool */
+  query = gst_query_new_allocation (caps, TRUE);
+
+  if (gst_pad_peer_query (codec->srcpad, query)) {
+    GST_DEBUG_OBJECT (codec, "got downstream ALLOCATION hints");
+    /* we got configuration from our peer, parse them */
+    gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
+        &alignment, &pool);
+    size = MAX (size, info->size);
+  } else {
+    GST_DEBUG_OBJECT (codec, "didn't get downstream ALLOCATION hints");
+    size = info->size;
+    min = max = 0;
+    prefix = 0;
+    alignment = 0;
+  }
+
+  if (pool == NULL) {
+    /* we did not get a pool, make one ourselves then */
+    pool = gst_buffer_pool_new ();
+  }
+
+  if (base_video_decoder->pool)
+    gst_object_unref (base_video_decoder->pool);
+  base_video_decoder->pool = pool;
+
+  config = gst_buffer_pool_get_config (pool);
+  gst_buffer_pool_config_set (config, caps, size, min, max, prefix, alignment);
+  state->bytes_per_picture = size;
+
+  /* just set the option, if the pool can support it we will transparently use
+   * it through the video info API. We could also see if the pool support this
+   * option and only activate it then. */
+  gst_buffer_pool_config_add_option (config, GST_BUFFER_POOL_OPTION_VIDEO_META);
+
+  /* check if downstream supports cropping */
+  base_video_decoder->use_cropping =
+      gst_query_has_allocation_meta (query, GST_META_API_VIDEO_CROP);
+
+  gst_buffer_pool_set_config (pool, config);
+  /* and activate */
+  gst_buffer_pool_set_active (pool, TRUE);
+
+  gst_query_unref (query);
 
   GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
 
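
The hunk above replaces the fixed bytes_per_picture computation with ALLOCATION-query and bufferpool negotiation. The sketch below strings together only the calls that appear in that hunk; note that gst_query_parse_allocation_params() and gst_buffer_pool_config_set() are the transitional 0.11 API and were reworked again before GStreamer 1.0. The pad, caps and picture-size parameters are assumptions for illustration.

#include <gst/gst.h>

/* Sketch of the 0.11-era ALLOCATION negotiation done above. */
static GstBufferPool *
negotiate_pool (GstPad * srcpad, GstCaps * caps, guint picture_size)
{
  GstQuery *query;
  GstBufferPool *pool = NULL;
  GstStructure *config;
  guint size, min, max, prefix, alignment;

  query = gst_query_new_allocation (caps, TRUE);

  if (gst_pad_peer_query (srcpad, query)) {
    /* downstream replied: honour its pool and sizing hints */
    gst_query_parse_allocation_params (query, &size, &min, &max, &prefix,
        &alignment, &pool);
    size = MAX (size, picture_size);
  } else {
    size = picture_size;
    min = max = 0;
    prefix = 0;
    alignment = 0;
  }

  if (pool == NULL)
    pool = gst_buffer_pool_new ();

  config = gst_buffer_pool_get_config (pool);
  gst_buffer_pool_config_set (config, caps, size, min, max, prefix, alignment);
  gst_buffer_pool_set_config (pool, config);
  gst_buffer_pool_set_active (pool, TRUE);

  gst_query_unref (query);

  return pool;
}
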
@@ -1936,9 +2005,8 @@ gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder * base_video_decoder)
  * gst_base_video_decoder_alloc_src_buffer:
  * @base_video_decoder: a #GstBaseVideoDecoder
  *
- * Helper function that uses gst_pad_alloc_buffer_and_set_caps
- * to allocate a buffer to hold a video frame for @base_video_decoder's
- * current #GstVideoState.
+ * Helper function that returns a buffer from the decoders' configured
+ * #GstBufferPool.
  *
  * Returns: allocated buffer
  */
@@ -1946,29 +2014,14 @@ GstBuffer *
 gst_base_video_decoder_alloc_src_buffer (GstBaseVideoDecoder *
     base_video_decoder)
 {
-  GstBuffer *buffer;
-  GstFlowReturn flow_ret;
-  GstVideoState *state = &GST_BASE_VIDEO_CODEC (base_video_decoder)->state;
-  int num_bytes = state->bytes_per_picture;
-
-  GST_DEBUG ("alloc src buffer caps=%" GST_PTR_FORMAT,
-      GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder)));
+  GstBuffer *buffer = NULL;
 
   GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
 
-  flow_ret =
-      gst_pad_alloc_buffer_and_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD
-      (base_video_decoder), GST_BUFFER_OFFSET_NONE, num_bytes,
-      GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder)),
-      &buffer);
-
-  if (flow_ret != GST_FLOW_OK) {
-    buffer = gst_buffer_new_and_alloc (num_bytes);
-    gst_buffer_set_caps (buffer,
-        GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder)));
-  }
+  gst_buffer_pool_acquire_buffer (base_video_decoder->pool, &buffer, NULL);
 
   GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_decoder);
 
   return buffer;
 }
 
@@ -1989,21 +2042,14 @@ gst_base_video_decoder_alloc_src_frame (GstBaseVideoDecoder *
     base_video_decoder, GstVideoFrameState * frame)
 {
   GstFlowReturn flow_ret;
-  GstVideoState *state = &GST_BASE_VIDEO_CODEC (base_video_decoder)->state;
-  int num_bytes = state->bytes_per_picture;
 
-  g_return_val_if_fail (state->bytes_per_picture != 0, GST_FLOW_ERROR);
-  g_return_val_if_fail (GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD
-          (base_video_decoder)) != NULL, GST_FLOW_ERROR);
+  GST_LOG_OBJECT (base_video_decoder, "alloc buffer");
 
-  GST_LOG_OBJECT (base_video_decoder, "alloc buffer size %d", num_bytes);
   GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_decoder);
 
   flow_ret =
-      gst_pad_alloc_buffer_and_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD
-      (base_video_decoder), GST_BUFFER_OFFSET_NONE, num_bytes,
-      GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_decoder)),
-      &frame->src_buffer);
+      gst_buffer_pool_acquire_buffer (base_video_decoder->pool,
+      &frame->src_buffer, NULL);
 
   if (flow_ret != GST_FLOW_OK) {
     GST_WARNING_OBJECT (base_video_decoder, "failed to get buffer %s",
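
Both allocation helpers above now take their output buffers from the negotiated GstBufferPool instead of gst_pad_alloc_buffer_and_set_caps(). A minimal sketch of that call, assuming "pool" is the already-activated pool stored on the element:

#include <gst/gst.h>

/* Sketch only: acquire one output buffer from an active pool. */
static GstBuffer *
acquire_output_buffer (GstBufferPool * pool)
{
  GstBuffer *buffer = NULL;
  GstFlowReturn ret;

  ret = gst_buffer_pool_acquire_buffer (pool, &buffer, NULL);
  if (ret != GST_FLOW_OK)
    return NULL;

  return buffer;
}
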
@@ -95,7 +95,7 @@ GstFlowReturn _gst_base_video_decoder_error (GstBaseVideoDecoder *dec, gint weig
  * enclosed in parentheses)
  * @ret: variable to receive return value
  *
- * Utility function that audio decoder elements can use in case they encountered
+ * Utility function that video decoder elements can use in case they encountered
  * a data processing error that may be fatal for the current "data unit" but
  * need not prevent subsequent decoding. Such errors are counted and if there
  * are too many, as configured in the context's max_errors, the pipeline will
@@ -104,7 +104,7 @@ GstFlowReturn _gst_base_video_decoder_error (GstBaseVideoDecoder *dec, gint weig
  * is logged. In either case, @ret is set to the proper value to
  * return to upstream/caller (indicating either GST_FLOW_ERROR or GST_FLOW_OK).
  */
-#define GST_BASE_AUDIO_DECODER_ERROR(el, w, domain, code, text, debug, ret) \
+#define GST_BASE_VIDEO_DECODER_ERROR(el, w, domain, code, text, debug, ret) \
 G_STMT_START { \
   gchar *__txt = _gst_element_error_printf text; \
   gchar *__dbg = _gst_element_error_printf debug; \
@@ -182,12 +182,18 @@ struct _GstBaseVideoDecoder
   int reorder_depth;
   int distance_from_sync;
 
+  /* Raw video bufferpool */
+  GstBufferPool *pool;
+  /* Indicates whether downstream can handle
+   * GST_META_API_VIDEO_CROP */
+  gboolean use_cropping;
+
   /* FIXME before moving to base */
   void *padding[GST_PADDING_LARGE];
 };
 
 /**
- * GstBaseAudioDecoderClass:
+ * GstBaseVideoDecoderClass:
  * @start: Optional.
  *                  Called when the element starts processing.
  *                  Allows opening external resources.
@@ -115,9 +115,8 @@ GST_DEBUG_CATEGORY (basevideoencoder_debug);
 
 static void gst_base_video_encoder_finalize (GObject * object);
 
-static gboolean gst_base_video_encoder_sink_setcaps (GstPad * pad,
-    GstCaps * caps);
-static GstCaps *gst_base_video_encoder_sink_getcaps (GstPad * pad);
+static GstCaps *gst_base_video_encoder_sink_getcaps (GstPad * pad,
+    GstCaps * filter);
 static gboolean gst_base_video_encoder_src_event (GstPad * pad,
     GstEvent * event);
 static gboolean gst_base_video_encoder_sink_event (GstPad * pad,
@@ -131,30 +130,10 @@ static const GstQueryType *gst_base_video_encoder_get_query_types (GstPad *
 static gboolean gst_base_video_encoder_src_query (GstPad * pad,
     GstQuery * query);
 
-static void
-_do_init (GType object_type)
-{
-  const GInterfaceInfo preset_interface_info = {
-    NULL,                       /* interface_init */
-    NULL,                       /* interface_finalize */
-    NULL                        /* interface_data */
-  };
-
-  g_type_add_interface_static (object_type, GST_TYPE_PRESET,
-      &preset_interface_info);
-}
-
-GST_BOILERPLATE_FULL (GstBaseVideoEncoder, gst_base_video_encoder,
-    GstBaseVideoCodec, GST_TYPE_BASE_VIDEO_CODEC, _do_init);
-
-static void
-gst_base_video_encoder_base_init (gpointer g_class)
-{
-  GST_DEBUG_CATEGORY_INIT (basevideoencoder_debug, "basevideoencoder", 0,
-      "Base Video Encoder");
-
-}
+#define gst_base_video_encoder_parent_class parent_class
+G_DEFINE_TYPE_WITH_CODE (GstBaseVideoEncoder, gst_base_video_encoder,
+    GST_TYPE_BASE_VIDEO_CODEC, G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL);
+    );
 
 static void
 gst_base_video_encoder_class_init (GstBaseVideoEncoderClass * klass)
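
The hunk above replaces GST_BOILERPLATE_FULL and the manual _do_init() interface registration with GLib's G_DEFINE_TYPE_WITH_CODE / G_IMPLEMENT_INTERFACE. A generic sketch of that pattern with a hypothetical GstMyEncoder type (not part of this commit):

#include <gst/gst.h>

/* Hypothetical element type used only to illustrate the boilerplate. */
typedef struct _GstMyEncoder
{
  GstElement parent;
} GstMyEncoder;

typedef struct _GstMyEncoderClass
{
  GstElementClass parent_class;
} GstMyEncoderClass;

/* Lets existing code keep referring to the generated parent-class pointer
 * simply as "parent_class", as the patch does for the encoder. */
#define gst_my_encoder_parent_class parent_class

/* Registers the type and the GstPreset interface; this replaces the manual
 * g_type_add_interface_static() call from the old _do_init(). */
G_DEFINE_TYPE_WITH_CODE (GstMyEncoder, gst_my_encoder, GST_TYPE_ELEMENT,
    G_IMPLEMENT_INTERFACE (GST_TYPE_PRESET, NULL));

static void
gst_my_encoder_class_init (GstMyEncoderClass * klass)
{
  /* class setup would go here */
}

static void
gst_my_encoder_init (GstMyEncoder * self)
{
  /* instance setup would go here */
}
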
@@ -162,6 +141,9 @@ gst_base_video_encoder_class_init (GstBaseVideoEncoderClass * klass)
   GObjectClass *gobject_class;
   GstElementClass *gstelement_class;
 
+  GST_DEBUG_CATEGORY_INIT (basevideoencoder_debug, "basevideoencoder", 0,
+      "Base Video Encoder");
+
   gobject_class = G_OBJECT_CLASS (klass);
   gstelement_class = GST_ELEMENT_CLASS (klass);
 
@@ -198,8 +180,7 @@ gst_base_video_encoder_reset (GstBaseVideoEncoder * base_video_encoder)
 }
 
 static void
-gst_base_video_encoder_init (GstBaseVideoEncoder * base_video_encoder,
-    GstBaseVideoEncoderClass * klass)
+gst_base_video_encoder_init (GstBaseVideoEncoder * base_video_encoder)
 {
   GstPad *pad;
 
@@ -211,8 +192,6 @@ gst_base_video_encoder_init (GstBaseVideoEncoder * base_video_encoder,
       GST_DEBUG_FUNCPTR (gst_base_video_encoder_chain));
   gst_pad_set_event_function (pad,
       GST_DEBUG_FUNCPTR (gst_base_video_encoder_sink_event));
-  gst_pad_set_setcaps_function (pad,
-      GST_DEBUG_FUNCPTR (gst_base_video_encoder_sink_setcaps));
   gst_pad_set_getcaps_function (pad,
       GST_DEBUG_FUNCPTR (gst_base_video_encoder_sink_getcaps));
 
@@ -269,70 +248,71 @@ gst_base_video_encoder_drain (GstBaseVideoEncoder * enc)
 }
 
 static gboolean
-gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
+gst_base_video_encoder_sink_setcaps (GstBaseVideoEncoder * base_video_encoder,
+    GstCaps * caps)
 {
-  GstBaseVideoEncoder *base_video_encoder;
   GstBaseVideoEncoderClass *base_video_encoder_class;
-  GstStructure *structure;
+  GstBaseVideoCodec *codec = GST_BASE_VIDEO_CODEC (base_video_encoder);
+  GstVideoInfo *info, tmp_info;
   GstVideoState *state, tmp_state;
   gboolean ret;
-  gboolean changed = FALSE;
+  gboolean changed = TRUE;
+
+  GST_DEBUG_OBJECT (base_video_encoder, "setcaps %" GST_PTR_FORMAT, caps);
 
-  base_video_encoder = GST_BASE_VIDEO_ENCODER (gst_pad_get_parent (pad));
   base_video_encoder_class =
       GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);
 
   /* subclass should do something here ... */
   g_return_val_if_fail (base_video_encoder_class->set_format != NULL, FALSE);
 
-  GST_DEBUG_OBJECT (base_video_encoder, "setcaps %" GST_PTR_FORMAT, caps);
-
   GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);
 
-  state = &GST_BASE_VIDEO_CODEC (base_video_encoder)->state;
+  /* Get GstVideoInfo from upstream caps */
+  info = &codec->info;
+  if (!gst_video_info_from_caps (&tmp_info, caps))
+    goto exit;
+
+  state = &codec->state;
   memset (&tmp_state, 0, sizeof (tmp_state));
 
   tmp_state.caps = gst_caps_ref (caps);
-  structure = gst_caps_get_structure (caps, 0);
 
-  ret =
-      gst_video_format_parse_caps (caps, &tmp_state.format, &tmp_state.width,
-      &tmp_state.height);
-  if (!ret)
-    goto exit;
-
-  changed = (tmp_state.format != state->format
-      || tmp_state.width != state->width || tmp_state.height != state->height);
-
-  if (!gst_video_parse_caps_framerate (caps, &tmp_state.fps_n,
-          &tmp_state.fps_d)) {
-    tmp_state.fps_n = 0;
-    tmp_state.fps_d = 1;
+  /* Check if input caps changed */
+  if (info->finfo) {
+    /* Check if anything changed */
+    changed = GST_VIDEO_INFO_FORMAT (&tmp_info) != GST_VIDEO_INFO_FORMAT (info);
+    changed |= GST_VIDEO_INFO_FLAGS (&tmp_info) != GST_VIDEO_INFO_FLAGS (info);
+    changed |= GST_VIDEO_INFO_WIDTH (&tmp_info) != GST_VIDEO_INFO_WIDTH (info);
+    changed |=
+        GST_VIDEO_INFO_HEIGHT (&tmp_info) != GST_VIDEO_INFO_HEIGHT (info);
+    changed |= GST_VIDEO_INFO_SIZE (&tmp_info) != GST_VIDEO_INFO_SIZE (info);
+    changed |= GST_VIDEO_INFO_VIEWS (&tmp_info) != GST_VIDEO_INFO_VIEWS (info);
+    changed |= GST_VIDEO_INFO_FPS_N (&tmp_info) != GST_VIDEO_INFO_FPS_N (info);
+    changed |= GST_VIDEO_INFO_FPS_D (&tmp_info) != GST_VIDEO_INFO_FPS_D (info);
+    changed |= GST_VIDEO_INFO_PAR_N (&tmp_info) != GST_VIDEO_INFO_PAR_N (info);
+    changed |= GST_VIDEO_INFO_PAR_D (&tmp_info) != GST_VIDEO_INFO_PAR_D (info);
   }
-  changed = changed || (tmp_state.fps_n != state->fps_n
-      || tmp_state.fps_d != state->fps_d);
 
-  if (!gst_video_parse_caps_pixel_aspect_ratio (caps, &tmp_state.par_n,
-          &tmp_state.par_d)) {
-    tmp_state.par_n = 1;
-    tmp_state.par_d = 1;
-  }
-  changed = changed || (tmp_state.par_n != state->par_n
-      || tmp_state.par_d != state->par_d);
-
-  tmp_state.have_interlaced =
-      gst_structure_get_boolean (structure, "interlaced",
-      &tmp_state.interlaced);
-  changed = changed || (tmp_state.have_interlaced != state->have_interlaced
-      || tmp_state.interlaced != state->interlaced);
-
-  tmp_state.bytes_per_picture =
-      gst_video_format_get_size (tmp_state.format, tmp_state.width,
-      tmp_state.height);
-  tmp_state.clean_width = tmp_state.width;
-  tmp_state.clean_height = tmp_state.height;
+  /* Copy over info from input GstVideoInfo into output GstVideoFrameState */
+  tmp_state.format = GST_VIDEO_INFO_FORMAT (&tmp_info);
+  tmp_state.bytes_per_picture = tmp_info.size;
+  tmp_state.width = tmp_info.width;
+  tmp_state.height = tmp_info.height;
+  tmp_state.fps_n = tmp_info.fps_n;
+  tmp_state.fps_d = tmp_info.fps_d;
+  tmp_state.par_n = tmp_info.par_n;
+  tmp_state.par_d = tmp_info.par_d;
+  tmp_state.clean_width = tmp_info.width;
+  tmp_state.clean_height = tmp_info.height;
   tmp_state.clean_offset_left = 0;
   tmp_state.clean_offset_top = 0;
+  /* FIXME (Edward): We need flags in GstVideoInfo to know whether
+   * interlaced field was present in input caps */
+  tmp_state.have_interlaced = tmp_state.interlaced =
+      GST_VIDEO_INFO_FLAG_IS_SET (&tmp_info, GST_VIDEO_FLAG_INTERLACED);
+  tmp_state.top_field_first =
+      GST_VIDEO_INFO_FLAG_IS_SET (&tmp_info, GST_VIDEO_FLAG_TFF);
+
   if (changed) {
     /* arrange draining pending frames */
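
The encoder's setcaps above now parses the incoming caps into a GstVideoInfo and compares it field by field against the previous one. A hedged sketch of that idea with a hypothetical helper; the real code above also compares flags, size and views and handles the no-previous-info case separately:

#include <gst/video/video.h>

/* Sketch only: parse caps into out_info and report whether the basic video
 * parameters differ from old_info. */
static gboolean
video_info_changed (const GstVideoInfo * old_info, GstCaps * caps,
    GstVideoInfo * out_info)
{
  gboolean changed;

  if (!gst_video_info_from_caps (out_info, caps))
    return FALSE;               /* unparsable caps: caller should error out */

  changed = GST_VIDEO_INFO_FORMAT (out_info) != GST_VIDEO_INFO_FORMAT (old_info);
  changed |= GST_VIDEO_INFO_WIDTH (out_info) != GST_VIDEO_INFO_WIDTH (old_info);
  changed |= GST_VIDEO_INFO_HEIGHT (out_info) != GST_VIDEO_INFO_HEIGHT (old_info);
  changed |= GST_VIDEO_INFO_FPS_N (out_info) != GST_VIDEO_INFO_FPS_N (old_info);
  changed |= GST_VIDEO_INFO_FPS_D (out_info) != GST_VIDEO_INFO_FPS_D (old_info);
  changed |= GST_VIDEO_INFO_PAR_N (out_info) != GST_VIDEO_INFO_PAR_N (old_info);
  changed |= GST_VIDEO_INFO_PAR_D (out_info) != GST_VIDEO_INFO_PAR_D (old_info);

  return changed;
}
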
@@ -341,10 +321,11 @@ gst_base_video_encoder_sink_setcaps (GstPad * pad, GstCaps * caps)
     /* and subclass should be ready to configure format at any time around */
     if (base_video_encoder_class->set_format)
       ret =
-          base_video_encoder_class->set_format (base_video_encoder, &tmp_state);
+          base_video_encoder_class->set_format (base_video_encoder, &tmp_info);
     if (ret) {
       gst_caps_replace (&state->caps, NULL);
       *state = tmp_state;
+      *info = tmp_info;
     }
   } else {
     /* no need to stir things up */
@@ -362,13 +343,11 @@ exit:
         caps);
   }
 
-  g_object_unref (base_video_encoder);
-
   return ret;
 }
 
 static GstCaps *
-gst_base_video_encoder_sink_getcaps (GstPad * pad)
+gst_base_video_encoder_sink_getcaps (GstPad * pad, GstCaps * filter)
 {
   GstBaseVideoEncoder *base_video_encoder;
   const GstCaps *templ_caps;
@@ -409,7 +388,7 @@ gst_base_video_encoder_sink_getcaps (GstPad * pad)
       const GValue *val;
       GstStructure *s;
 
-      s = gst_structure_id_empty_new (q_name);
+      s = gst_structure_new_id_empty (q_name);
       if ((val = gst_structure_get_value (allowed_s, "width")))
         gst_structure_set_value (s, "width", val);
       if ((val = gst_structure_get_value (allowed_s, "height")))
@@ -423,9 +402,20 @@ gst_base_video_encoder_sink_getcaps (GstPad * pad)
     }
   }
 
+  GST_LOG_OBJECT (base_video_encoder, "filtered caps (first) %" GST_PTR_FORMAT,
+      filter_caps);
+
   fcaps = gst_caps_intersect (filter_caps, templ_caps);
   gst_caps_unref (filter_caps);
 
+  if (filter) {
+    GST_LOG_OBJECT (base_video_encoder, "intersecting with %" GST_PTR_FORMAT,
+        filter);
+    filter_caps = gst_caps_intersect (fcaps, filter);
+    gst_caps_unref (fcaps);
+    fcaps = filter_caps;
+  }
+
 done:
 
   gst_caps_replace (&allowed, NULL);
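
sink_getcaps above now receives the filter caps that 0.11 passes to getcaps handlers and intersects its result with them. A small sketch of just that final step (hypothetical helper; it takes ownership of the caps it is given):

#include <gst/gst.h>

/* Sketch only: apply an optional getcaps filter to an already-computed
 * result, as the hunk above does at the end of sink_getcaps. */
static GstCaps *
apply_getcaps_filter (GstCaps * result, GstCaps * filter)
{
  if (filter) {
    GstCaps *tmp = gst_caps_intersect (result, filter);

    gst_caps_unref (result);
    result = tmp;
  }
  return result;
}
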
@@ -455,6 +445,15 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder,
       GST_BASE_VIDEO_ENCODER_GET_CLASS (base_video_encoder);
 
   switch (GST_EVENT_TYPE (event)) {
+    case GST_EVENT_CAPS:
+    {
+      GstCaps *caps;
+
+      gst_event_parse_caps (event, &caps);
+      ret = gst_base_video_encoder_sink_setcaps (base_video_encoder, caps);
+      gst_event_unref (event);
+    }
+      break;
     case GST_EVENT_EOS:
     {
       GstFlowReturn flow_ret;
@@ -472,27 +471,20 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder,
       GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);
       break;
     }
-    case GST_EVENT_NEWSEGMENT:
+    case GST_EVENT_SEGMENT:
     {
-      gboolean update;
-      double rate;
-      double applied_rate;
-      GstFormat format;
-      gint64 start;
-      gint64 stop;
-      gint64 position;
+      const GstSegment *segment;
 
       GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);
-      gst_event_parse_new_segment_full (event, &update, &rate, &applied_rate,
-          &format, &start, &stop, &position);
+      gst_event_parse_segment (event, &segment);
 
       GST_DEBUG_OBJECT (base_video_encoder, "newseg rate %g, applied rate %g, "
           "format %d, start = %" GST_TIME_FORMAT ", stop = %" GST_TIME_FORMAT
-          ", pos = %" GST_TIME_FORMAT, rate, applied_rate, format,
-          GST_TIME_ARGS (start), GST_TIME_ARGS (stop),
-          GST_TIME_ARGS (position));
+          ", pos = %" GST_TIME_FORMAT, segment->rate, segment->applied_rate,
+          segment->format, GST_TIME_ARGS (segment->start),
+          GST_TIME_ARGS (segment->stop), GST_TIME_ARGS (segment->position));
 
-      if (format != GST_FORMAT_TIME) {
+      if (segment->format != GST_FORMAT_TIME) {
         GST_DEBUG_OBJECT (base_video_encoder, "received non TIME newsegment");
         GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);
         break;
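
The SEGMENT handling above parses the whole GstSegment out of the event instead of the individual new-segment values. A minimal sketch of that pattern, assuming a TIME-format target segment owned by the caller:

#include <gst/gst.h>

/* Sketch only: in 0.11 the event carries a complete GstSegment. */
static void
handle_segment_event (GstEvent * event, GstSegment * target)
{
  const GstSegment *segment;

  gst_event_parse_segment (event, &segment);

  if (segment->format == GST_FORMAT_TIME)
    gst_segment_copy_into (segment, target);
  else
    GST_DEBUG ("ignoring non-TIME segment");
}
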
@@ -500,9 +492,8 @@ gst_base_video_encoder_sink_eventfunc (GstBaseVideoEncoder * base_video_encoder,
 
       base_video_encoder->a.at_eos = FALSE;
 
-      gst_segment_set_newsegment_full (&GST_BASE_VIDEO_CODEC
-          (base_video_encoder)->segment, update, rate, applied_rate, format,
-          start, stop, position);
+      gst_segment_copy_into (segment, &GST_BASE_VIDEO_CODEC
+          (base_video_encoder)->segment);
       GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_encoder);
       break;
     }
@@ -715,14 +706,9 @@ gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf)
 
   GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_encoder);
 
-  if (!GST_PAD_CAPS (pad)) {
-    ret = GST_FLOW_NOT_NEGOTIATED;
-    goto done;
-  }
-
   GST_LOG_OBJECT (base_video_encoder,
       "received buffer of size %d with ts %" GST_TIME_FORMAT
-      ", duration %" GST_TIME_FORMAT, GST_BUFFER_SIZE (buf),
+      ", duration %" GST_TIME_FORMAT, gst_buffer_get_size (buf),
       GST_TIME_ARGS (GST_BUFFER_TIMESTAMP (buf)),
       GST_TIME_ARGS (GST_BUFFER_DURATION (buf)));
 
@@ -732,10 +718,10 @@ gst_base_video_encoder_chain (GstPad * pad, GstBuffer * buf)
   }
 
   if (base_video_encoder->sink_clipping) {
-    gint64 start = GST_BUFFER_TIMESTAMP (buf);
-    gint64 stop = start + GST_BUFFER_DURATION (buf);
-    gint64 clip_start;
-    gint64 clip_stop;
+    guint64 start = GST_BUFFER_TIMESTAMP (buf);
+    guint64 stop = start + GST_BUFFER_DURATION (buf);
+    guint64 clip_start;
+    guint64 clip_stop;
 
     if (!gst_segment_clip (&GST_BASE_VIDEO_CODEC (base_video_encoder)->segment,
             GST_FORMAT_TIME, start, stop, &clip_start, &clip_stop)) {
@@ -901,11 +887,11 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
       base_video_encoder->force_keyunit_event = NULL;
     } else {
       ev = gst_event_new_custom (GST_EVENT_CUSTOM_DOWNSTREAM,
-          gst_structure_new ("GstForceKeyUnit", NULL));
+          gst_structure_new_empty ("GstForceKeyUnit"));
     }
     GST_OBJECT_UNLOCK (base_video_encoder);
 
-    gst_structure_set (ev->structure,
+    gst_structure_set (gst_event_writable_structure (ev),
         "timestamp", G_TYPE_UINT64, frame->presentation_timestamp,
         "stream-time", G_TYPE_UINT64, stream_time,
         "running-time", G_TYPE_UINT64, running_time, NULL);
@@ -946,7 +932,7 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
 
   /* update rate estimate */
   GST_BASE_VIDEO_CODEC (base_video_encoder)->bytes +=
-      GST_BUFFER_SIZE (frame->src_buffer);
+      gst_buffer_get_size (frame->src_buffer);
   if (GST_CLOCK_TIME_IS_VALID (frame->presentation_duration)) {
     GST_BASE_VIDEO_CODEC (base_video_encoder)->time +=
         frame->presentation_duration;
@@ -961,9 +947,6 @@ gst_base_video_encoder_finish_frame (GstBaseVideoEncoder * base_video_encoder,
     GST_BASE_VIDEO_CODEC (base_video_encoder)->discont = FALSE;
   }
 
-  gst_buffer_set_caps (GST_BUFFER (frame->src_buffer),
-      GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder)));
-
   if (base_video_encoder_class->shape_output) {
     ret = base_video_encoder_class->shape_output (base_video_encoder, frame);
   } else {
@@ -116,7 +116,7 @@ struct _GstBaseVideoEncoder
  *                  Allows closing external resources.
  * @set_format: Optional.
  *                  Notifies subclass of incoming data format.
- *                  GstVideoState fields have already been
+ *                  GstVideoInfo fields have already been
  *                  set according to provided caps.
  * @handle_frame: Provides input frame to subclass.
  * @finish: Optional.
@@ -147,7 +147,7 @@ struct _GstBaseVideoEncoderClass
   gboolean      (*stop) (GstBaseVideoEncoder *coder);
 
   gboolean      (*set_format) (GstBaseVideoEncoder *coder,
-                               GstVideoState *state);
+                               GstVideoInfo *info);
 
   GstFlowReturn (*handle_frame) (GstBaseVideoEncoder *coder,
                                  GstVideoFrameState *frame);
 