Mirror of https://gitlab.freedesktop.org/gstreamer/gstreamer.git, synced 2024-12-18 14:26:43 +00:00
Remove mentions of ffmpegcolorspace now that it's in gst-plugins-base
Original commit message from CVS:

        * HACKING:
        * ext/ffmpeg/gstffmpegcolorspace.c:
          Remove mentions of ffmpegcolorspace now that it's in gst-plugins-base
        * ext/ffmpeg/Makefile.am:
          Link to gstreamer libraries
        * ext/ffmpeg/gstffmpegcodecmap.c: (gst_ffmpeg_caps_to_pixfmt):
        * ext/ffmpeg/gstffmpegdec.c: (gst_ffmpegdec_init),
          (gst_ffmpegdec_close), (gst_ffmpegdec_setcaps),
          (gst_ffmpegdec_negotiate), (gst_ffmpegdec_frame):
          Convert to fractional framerate
This commit is contained in:
parent 93242ea4c3
commit 602e47bf4a

6 changed files with 64 additions and 521 deletions
ChangeLog (15 changed lines)

@@ -1,3 +1,18 @@
+2005-11-23  Jan Schmidt <thaytan@mad.scientist.com>
+
+        * HACKING:
+        * ext/ffmpeg/gstffmpegcolorspace.c:
+        Remove mentions of ffmpegcolorspace now that it's in
+        gst-plugins-base
+
+        * ext/ffmpeg/Makefile.am:
+        Link to gstreamer libraries
+        * ext/ffmpeg/gstffmpegcodecmap.c: (gst_ffmpeg_caps_to_pixfmt):
+        * ext/ffmpeg/gstffmpegdec.c: (gst_ffmpegdec_init),
+        (gst_ffmpegdec_close), (gst_ffmpegdec_setcaps),
+        (gst_ffmpegdec_negotiate), (gst_ffmpegdec_frame):
+        Convert to fractional framerate
+
 2005-11-22  Andy Wingo <wingo@pobox.com>
 
         * ext/ffmpeg/gstffmpegdec.c (gst_ffmpegdec_sink_event): Run
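As a side note on what "Convert to fractional framerate" means in practice: GStreamer caps can carry the frame rate as an exact integer fraction instead of a rounded double. The following standalone sketch is illustrative only (it is not part of this commit; the 320x240 size and the NTSC rate are made-up values) and uses the video/x-raw-yuv media type that this tree targets:

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstCaps *caps;
  gchar *str;

  gst_init (&argc, &argv);

  /* 30000/1001 (NTSC) is exact as a fraction, but only ~29.97 as a double */
  caps = gst_caps_new_simple ("video/x-raw-yuv",
      "width", G_TYPE_INT, 320,
      "height", G_TYPE_INT, 240,
      "framerate", GST_TYPE_FRACTION, 30000, 1001, NULL);

  str = gst_caps_to_string (caps);
  g_print ("%s\n", str);

  g_free (str);
  gst_caps_unref (caps);        /* 0.10-era API; older trees used gst_caps_free() */

  return 0;
}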
HACKING (17 changed lines)

@@ -195,19 +195,10 @@ Some notes on how ffmpeg wrapping inside GStreamer currently works:
 * gstffmpeg{dec,enc,demux,mux}.c are wrappers for specific element types from
   their ffmpeg counterpart. If you want to wrap a new type of element in
   wrapper file.
 
-* gstffmpegcolorspace.c is a wrapper for one specific function in ffmpeg:
-  colorspace conversion. This works different from the previously mentioned
-  ones, and we'll come to that in the next item. If you want to wrap one
-  specific function, then that, too, belongs in a new wrapper file.
-
-* the important difference between all those is that the colorspace element
-  contains one element, so there is a 1<->1 mapping. This makes for a fairly
-  basic element implementation. gstffmpegcolorspace.c, therefore, doesn't
-  differ much from other colorspace elements. The ffmpeg element types,
-  however, define a whole *list* of elements (in GStreamer, each decoder etc.
-  needs to be its own element). We use a set of tricks for that to keep
-  coding simple: codec mapping and dynamic type creation.
+  The ffmpeg element types, define a whole *list* of elements (in
+  GStreamer, each decoder etc. needs to be its own element).
+  We use a set of tricks for that to keep coding simple: codec
+  mapping and dynamic type creation.
 
 * ffmpeg uses CODEC_ID_* enumerations for their codecs. GStreamer uses caps,
   which consists of a mimetype and a defined set of properties. In ffmpeg,
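The codec mapping and dynamic type creation mentioned in the HACKING text above are what let one wrapper file produce many elements. As a rough, hypothetical sketch of the dynamic-type idea only (this is not the actual gst-ffmpeg registration code, which also installs pad templates, element details and chain/negotiation functions per codec), registering one element type per codec name from a plugin_init could look like this:

#include <gst/gst.h>

/* Hypothetical per-codec class/instance init hooks; a real wrapper fills
 * these in with pad templates, element details and data-flow functions. */
static void
codec_class_init (gpointer klass, gpointer class_data)
{
}

static void
codec_instance_init (GTypeInstance * instance, gpointer klass)
{
}

static gboolean
register_one_decoder (GstPlugin * plugin, const gchar * codec_name)
{
  GTypeInfo info = {
    sizeof (GstElementClass), NULL, NULL,
    codec_class_init, NULL, NULL,
    sizeof (GstElement), 0,
    codec_instance_init, NULL
  };
  gchar *type_name;
  GType type;
  gboolean ret = TRUE;

  /* one element type per codec, e.g. "ffdec_mpeg4", "ffdec_h263", ... */
  type_name = g_strdup_printf ("ffdec_%s", codec_name);

  if (g_type_from_name (type_name) == 0) {
    type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &info, 0);
    ret = gst_element_register (plugin, type_name, GST_RANK_MARGINAL, type);
  }

  g_free (type_name);
  return ret;
}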
ext/ffmpeg/Makefile.am

@@ -7,17 +7,16 @@ libgstffmpeg_la_SOURCES = gstffmpeg.c \
 #       gstffmpegdemux.c \
 #       gstffmpegmux.c \
 #       gstffmpegprotocol.c \
-#       gstffmpegcolorspace.c \
 #       gstffmpegscale.c \
 #       gstffmpegdeinterlace.c
 
-libgstffmpeg_la_CFLAGS = $(GST_CFLAGS) \
+libgstffmpeg_la_CFLAGS = $(GST_CFLAGS) $(GST_PLUGINS_BASE_CFLAGS) \
        -I $(top_srcdir)/gst-libs/ext/ffmpeg/libavformat \
        -I $(top_srcdir)/gst-libs/ext/ffmpeg/libavutil \
        -I $(top_srcdir)/gst-libs/ext/ffmpeg/libavcodec
 libgstffmpeg_la_LIBADD = \
-       $(top_builddir)/gst-libs/ext/ffmpeg/libavformat/libavformat.la
-
+       $(top_builddir)/gst-libs/ext/ffmpeg/libavformat/libavformat.la \
+       $(GST_LIBS)
 libgstffmpeg_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
 
 noinst_HEADERS = \
ext/ffmpeg/gstffmpegcodecmap.c

@@ -86,17 +86,14 @@ gst_ffmpeg_set_palette (GstCaps *caps, AVCodecContext *context)
     gst_caps_new_simple (mimetype, \
         "width", G_TYPE_INT, context->width, \
         "height", G_TYPE_INT, context->height, \
-        "framerate", G_TYPE_DOUBLE, (double) 1. * \
-        context->time_base.den / \
-        context->time_base.num, \
-        __VA_ARGS__, NULL) \
+        "framerate", GST_TYPE_FRACTION, context->time_base.den, \
+        context->time_base.num, __VA_ARGS__, NULL) \
     : \
     gst_caps_new_simple (mimetype, \
         "width", GST_TYPE_INT_RANGE, 16, 4096, \
         "height", GST_TYPE_INT_RANGE, 16, 4096, \
-        "framerate", GST_TYPE_DOUBLE_RANGE, (double) 1., \
-        G_MAXDOUBLE, \
-        __VA_ARGS__, NULL)
+        "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, \
+        G_MAXINT, 1, __VA_ARGS__, NULL)
 
 /* same for audio - now with channels/sample rate
  */
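Purely for illustration, the two branches of the rewritten caps macro above behave roughly like the following helper (the video/mpeg media type, the 352x288 size and the fps arguments are placeholders, not what the codecmap actually passes): with a filled-in context the caps carry one exact fraction, otherwise a fraction range from 0/1 up to G_MAXINT/1.

#include <gst/gst.h>

/* Illustrative stand-in for the macro's two branches, not the real code. */
static GstCaps *
make_video_caps (gboolean have_context, gint fps_n, gint fps_d)
{
  if (have_context)
    return gst_caps_new_simple ("video/mpeg",
        "width", G_TYPE_INT, 352,
        "height", G_TYPE_INT, 288,
        "framerate", GST_TYPE_FRACTION, fps_n, fps_d, NULL);

  return gst_caps_new_simple ("video/mpeg",
      "width", GST_TYPE_INT_RANGE, 16, 4096,
      "height", GST_TYPE_INT_RANGE, 16, 4096,
      "framerate", GST_TYPE_FRACTION_RANGE, 0, 1, G_MAXINT, 1, NULL);
}

int
main (int argc, char *argv[])
{
  GstCaps *caps;
  gchar *s;

  gst_init (&argc, &argv);
  caps = make_video_caps (FALSE, 25, 1);
  s = gst_caps_to_string (caps);
  g_print ("%s\n", s);
  g_free (s);
  gst_caps_unref (caps);
  return 0;
}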
@@ -1102,7 +1099,7 @@ gst_ffmpeg_caps_to_pixfmt (const GstCaps * caps,
     AVCodecContext * context, gboolean raw)
 {
   GstStructure *structure;
-  gdouble fps;
+  const GValue *fps;
 
   g_return_if_fail (gst_caps_get_size (caps) == 1);
   structure = gst_caps_get_structure (caps, 0);
@@ -1111,23 +1108,16 @@ gst_ffmpeg_caps_to_pixfmt (const GstCaps * caps,
   gst_structure_get_int (structure, "height", &context->height);
   gst_structure_get_int (structure, "bpp", &context->bits_per_sample);
 
-  if (gst_structure_get_double (structure, "framerate", &fps)) {
-    GValue dfps = { 0 };
-    GValue framerate = { 0 };
-
-    /* convert double to fraction for the framerate */
-    g_value_init (&dfps, G_TYPE_DOUBLE);
-    g_value_init (&framerate, GST_TYPE_FRACTION);
-    g_value_set_double (&dfps, fps);
-    g_value_transform (&dfps, &framerate);
-
-    /* somehow these seem mixed up.. */
-    context->time_base.den = gst_value_get_fraction_numerator (&framerate);
-    context->time_base.num = gst_value_get_fraction_denominator (&framerate);
-
-    GST_DEBUG ("setting framerate %d/%d = %lf",
-        context->time_base.den, context->time_base.num, fps);
-  }
+  fps = gst_structure_get_value (structure, "framerate");
+  g_return_if_fail (fps != NULL && GST_VALUE_HOLDS_FRACTION (fps));
+
+  /* somehow these seem mixed up.. */
+  context->time_base.den = gst_value_get_fraction_numerator (fps);
+  context->time_base.num = gst_value_get_fraction_denominator (fps);
+
+  GST_DEBUG ("setting framerate %d/%d = %lf",
+      context->time_base.den, context->time_base.num,
+      1. * context->time_base.den / context->time_base.num);
 
   if (!raw)
     return;
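A note on the "somehow these seem mixed up" comment kept in the new code: AVCodecContext's time_base is the duration of one frame in seconds (a rational), i.e. the reciprocal of the frame rate, so the caps numerator ends up in time_base.den and the denominator in time_base.num. A standalone, illustrative-only sketch of that relationship (it deliberately avoids the real ffmpeg headers and uses a stand-in struct):

#include <stdio.h>

struct rational { int num, den; };        /* stand-in for AVRational */

int
main (void)
{
  int fps_n = 30000, fps_d = 1001;        /* 29.97 fps */
  struct rational time_base;

  time_base.num = fps_d;                  /* seconds per frame ...   */
  time_base.den = fps_n;                  /* ... = fps_d / fps_n     */

  printf ("framerate %d/%d -> time_base %d/%d (%.6f s per frame)\n",
      fps_n, fps_d, time_base.num, time_base.den,
      (double) time_base.num / time_base.den);
  return 0;
}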
ext/ffmpeg/gstffmpegcolorspace.c (file deleted)

@@ -1,459 +0,0 @@
/* GStreamer
 * Copyright (C) <1999> Erik Walthinsen <omega@cse.ogi.edu>
 * This file:
 * Copyright (C) 2003 Ronald Bultje <rbultje@ronald.bitfreak.net>
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Library General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Library General Public License for more details.
 *
 * You should have received a copy of the GNU Library General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
 */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include <gst/gst.h>
#ifdef HAVE_FFMPEG_UNINSTALLED
#include <avcodec.h>
#else
#include <ffmpeg/avcodec.h>
#endif

#include "gstffmpeg.h"
#include "gstffmpegcodecmap.h"

#define GST_TYPE_FFMPEGCSP \
  (gst_ffmpegcsp_get_type())
#define GST_FFMPEGCSP(obj) \
  (G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_FFMPEGCSP,GstFFMpegCsp))
#define GST_FFMPEGCSP_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_FFMPEGCSP,GstFFMpegCsp))
#define GST_IS_FFMPEGCSP(obj) \
  (G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_FFMPEGCSP))
#define GST_IS_FFMPEGCSP_CLASS(obj) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_FFMPEGCSP))

typedef struct _GstFFMpegCsp GstFFMpegCsp;
typedef struct _GstFFMpegCspClass GstFFMpegCspClass;

struct _GstFFMpegCsp
{
  GstElement element;

  GstPad *sinkpad, *srcpad;

  gint width, height;
  gfloat fps;
  enum PixelFormat from_pixfmt, to_pixfmt;
  AVPicture from_frame, to_frame;
  AVPaletteControl *palette;
  GstCaps *sinkcaps;
};

struct _GstFFMpegCspClass
{
  GstElementClass parent_class;
};

/* elementfactory information */
static GstElementDetails ffmpegcsp_details = {
  "FFMPEG Colorspace converter",
  "Filter/Converter/Video",
  "Converts video from one colorspace to another",
  "Ronald Bultje <rbultje@ronald.bitfreak.net>",
};

/* Stereo signals and args */
enum
{
  /* FILL ME */
  LAST_SIGNAL
};

enum
{
  ARG_0,
};

static GType gst_ffmpegcsp_get_type (void);

static void gst_ffmpegcsp_base_init (GstFFMpegCspClass * klass);
static void gst_ffmpegcsp_class_init (GstFFMpegCspClass * klass);
static void gst_ffmpegcsp_init (GstFFMpegCsp * space);

static void gst_ffmpegcsp_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_ffmpegcsp_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec);

static GstPadLinkReturn
gst_ffmpegcsp_pad_link (GstPad * pad, const GstCaps * caps);

static void gst_ffmpegcsp_chain (GstPad * pad, GstData * data);
static GstStateChangeReturn gst_ffmpegcsp_change_state (GstElement * element,
    GstStateChange transition);

static GstPadTemplate *srctempl, *sinktempl;
static GstElementClass *parent_class = NULL;

/*static guint gst_ffmpegcsp_signals[LAST_SIGNAL] = { 0 }; */


static GstCaps *
gst_ffmpegcsp_caps_remove_format_info (GstCaps * caps)
{
  int i;
  GstStructure *structure;
  GstCaps *rgbcaps;

  for (i = 0; i < gst_caps_get_size (caps); i++) {
    structure = gst_caps_get_structure (caps, i);

    gst_structure_set_name (structure, "video/x-raw-yuv");
    gst_structure_remove_field (structure, "format");
    gst_structure_remove_field (structure, "endianness");
    gst_structure_remove_field (structure, "depth");
    gst_structure_remove_field (structure, "bpp");
    gst_structure_remove_field (structure, "red_mask");
    gst_structure_remove_field (structure, "green_mask");
    gst_structure_remove_field (structure, "blue_mask");
  }

  gst_caps_do_simplify (caps);
  rgbcaps = gst_caps_copy (caps);

  for (i = 0; i < gst_caps_get_size (rgbcaps); i++) {
    structure = gst_caps_get_structure (rgbcaps, i);

    gst_structure_set_name (structure, "video/x-raw-rgb");
  }

  gst_caps_append (caps, rgbcaps);

  return caps;
}

static GstCaps *
gst_ffmpegcsp_getcaps (GstPad * pad)
{
  GstFFMpegCsp *space;
  GstCaps *othercaps;
  GstCaps *caps;
  GstPad *otherpad;

  space = GST_FFMPEGCSP (gst_pad_get_parent (pad));

  otherpad = (pad == space->srcpad) ? space->sinkpad : space->srcpad;

  othercaps = gst_pad_get_allowed_caps (otherpad);

  othercaps = gst_ffmpegcsp_caps_remove_format_info (othercaps);

  caps = gst_caps_intersect (othercaps, gst_pad_get_pad_template_caps (pad));
  gst_caps_free (othercaps);

  return caps;
}

static GstPadLinkReturn
gst_ffmpegcsp_pad_link (GstPad * pad, const GstCaps * caps)
{
  GstStructure *structure;
  AVCodecContext *ctx;
  GstFFMpegCsp *space;
  const GstCaps *othercaps;
  GstPad *otherpad;
  GstPadLinkReturn ret;
  int height, width;
  double framerate;
  const GValue *par = NULL;

  space = GST_FFMPEGCSP (gst_pad_get_parent (pad));

  GST_DEBUG_OBJECT (space, "pad_link on %s:%s with caps %" GST_PTR_FORMAT,
      GST_DEBUG_PAD_NAME (pad), caps);

  otherpad = (pad == space->srcpad) ? space->sinkpad : space->srcpad;

  structure = gst_caps_get_structure (caps, 0);
  gst_structure_get_int (structure, "width", &width);
  gst_structure_get_int (structure, "height", &height);
  gst_structure_get_double (structure, "framerate", &framerate);
  par = gst_structure_get_value (structure, "pixel-aspect-ratio");

  /* FIXME attempt and/or check for passthru */

  /* loop over all possibilities and select the first one we can convert and
   * is accepted by the peer */
  ctx = avcodec_alloc_context ();

  ctx->width = width;
  ctx->height = height;
  ctx->pix_fmt = PIX_FMT_NB;
  gst_ffmpeg_caps_with_codectype (CODEC_TYPE_VIDEO, caps, ctx);
  if (ctx->pix_fmt == PIX_FMT_NB) {
    av_free (ctx);

    /* we disable ourself here */
    if (pad == space->srcpad) {
      space->to_pixfmt = PIX_FMT_NB;
    } else {
      space->from_pixfmt = PIX_FMT_NB;
    }

    return GST_PAD_LINK_REFUSED;
  }

  /* set the size on the otherpad */
  othercaps = gst_pad_get_negotiated_caps (otherpad);
  if (othercaps) {
    GstCaps *caps = gst_caps_copy (othercaps);

    gst_caps_set_simple (caps,
        "width", G_TYPE_INT, width,
        "height", G_TYPE_INT, height,
        "framerate", G_TYPE_DOUBLE, framerate, NULL);
    if (par) {
      gst_caps_set_simple (caps,
          "pixel-aspect-ratio", GST_TYPE_FRACTION,
          gst_value_get_fraction_numerator (par),
          gst_value_get_fraction_denominator (par), NULL);
    }
    ret = gst_pad_try_set_caps (otherpad, caps);
    gst_caps_free (caps);
    if (GST_PAD_LINK_FAILED (ret)) {
      return ret;
    }
  }

  if (pad == space->srcpad) {
    space->to_pixfmt = ctx->pix_fmt;
  } else {
    space->from_pixfmt = ctx->pix_fmt;

    /* palette */
    if (space->palette)
      av_free (space->palette);
    space->palette = ctx->palctrl;
  }
  av_free (ctx);

  space->width = width;
  space->height = height;

  return GST_PAD_LINK_OK;
}

static GType
gst_ffmpegcsp_get_type (void)
{
  static GType ffmpegcsp_type = 0;

  if (!ffmpegcsp_type) {
    static const GTypeInfo ffmpegcsp_info = {
      sizeof (GstFFMpegCspClass),
      (GBaseInitFunc) gst_ffmpegcsp_base_init,
      NULL,
      (GClassInitFunc) gst_ffmpegcsp_class_init,
      NULL,
      NULL,
      sizeof (GstFFMpegCsp),
      0,
      (GInstanceInitFunc) gst_ffmpegcsp_init,
    };

    ffmpegcsp_type = g_type_register_static (GST_TYPE_ELEMENT,
        "GstFFMpegCsp", &ffmpegcsp_info, 0);
  }

  return ffmpegcsp_type;
}

static void
gst_ffmpegcsp_base_init (GstFFMpegCspClass * klass)
{
  GstElementClass *element_class = GST_ELEMENT_CLASS (klass);

  gst_element_class_add_pad_template (element_class, srctempl);
  gst_element_class_add_pad_template (element_class, sinktempl);
  gst_element_class_set_details (element_class, &ffmpegcsp_details);
}

static void
gst_ffmpegcsp_class_init (GstFFMpegCspClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *gstelement_class;

  gobject_class = (GObjectClass *) klass;
  gstelement_class = (GstElementClass *) klass;

  parent_class = g_type_class_ref (GST_TYPE_ELEMENT);

  gobject_class->set_property = gst_ffmpegcsp_set_property;
  gobject_class->get_property = gst_ffmpegcsp_get_property;

  gstelement_class->change_state = gst_ffmpegcsp_change_state;
}

static void
gst_ffmpegcsp_init (GstFFMpegCsp * space)
{
  space->sinkpad = gst_pad_new_from_template (sinktempl, "sink");
  gst_pad_set_link_function (space->sinkpad, gst_ffmpegcsp_pad_link);
  gst_pad_set_getcaps_function (space->sinkpad, gst_ffmpegcsp_getcaps);
  gst_pad_set_chain_function (space->sinkpad, gst_ffmpegcsp_chain);
  gst_element_add_pad (GST_ELEMENT (space), space->sinkpad);

  space->srcpad = gst_pad_new_from_template (srctempl, "src");
  gst_element_add_pad (GST_ELEMENT (space), space->srcpad);
  gst_pad_set_link_function (space->srcpad, gst_ffmpegcsp_pad_link);
  gst_pad_set_getcaps_function (space->srcpad, gst_ffmpegcsp_getcaps);

  space->from_pixfmt = space->to_pixfmt = PIX_FMT_NB;
  space->palette = NULL;
}

static void
gst_ffmpegcsp_chain (GstPad * pad, GstData * data)
{
  GstBuffer *inbuf = GST_BUFFER (data);
  GstFFMpegCsp *space;
  GstBuffer *outbuf = NULL;

  g_return_if_fail (pad != NULL);
  g_return_if_fail (GST_IS_PAD (pad));
  g_return_if_fail (inbuf != NULL);

  space = GST_FFMPEGCSP (gst_pad_get_parent (pad));

  g_return_if_fail (space != NULL);
  g_return_if_fail (GST_IS_FFMPEGCSP (space));

  if (!GST_PAD_IS_USABLE (space->srcpad)) {
    gst_buffer_unref (inbuf);
    return;
  }

  if (space->from_pixfmt == PIX_FMT_NB || space->to_pixfmt == PIX_FMT_NB) {
    GST_ELEMENT_ERROR (space, CORE, NOT_IMPLEMENTED, (NULL),
        ("attempting to convert colorspaces between unknown formats"));
    gst_buffer_unref (inbuf);
    return;
  }

  if (space->from_pixfmt == space->to_pixfmt) {
    outbuf = inbuf;
  } else {
#define ROUND_UP_4(x) (((x) + 3) & ~3)
    guint size = avpicture_get_size (space->to_pixfmt,
        ROUND_UP_4 (space->width), ROUND_UP_4 (space->height));

    outbuf = gst_pad_alloc_buffer (space->srcpad, GST_BUFFER_OFFSET_NONE, size);

    /* convert */
    gst_ffmpeg_avpicture_fill (&space->from_frame,
        GST_BUFFER_DATA (inbuf),
        space->from_pixfmt, space->width, space->height);
    if (space->palette)
      space->from_frame.data[1] = (uint8_t *) space->palette;
    gst_ffmpeg_avpicture_fill (&space->to_frame,
        GST_BUFFER_DATA (outbuf),
        space->to_pixfmt, space->width, space->height);
    img_convert (&space->to_frame, space->to_pixfmt,
        &space->from_frame, space->from_pixfmt,
        space->width, space->height);

    GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (inbuf);
    GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (inbuf);

    gst_buffer_unref (inbuf);
  }

  gst_pad_push (space->srcpad, GST_DATA (outbuf));
}

static GstStateChangeReturn
gst_ffmpegcsp_change_state (GstElement * element, GstStateChange transition)
{
  GstFFMpegCsp *space;

  space = GST_FFMPEGCSP (element);

  switch (transition) {
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      if (space->palette)
        av_free (space->palette);
      space->palette = NULL;
      break;
  }

  if (parent_class->change_state)
    return parent_class->change_state (element, transition);

  return GST_STATE_CHANGE_SUCCESS;
}

static void
gst_ffmpegcsp_set_property (GObject * object,
    guint prop_id, const GValue * value, GParamSpec * pspec)
{
  GstFFMpegCsp *space;

  /* it's not null if we got it, but it might not be ours */
  g_return_if_fail (GST_IS_FFMPEGCSP (object));
  space = GST_FFMPEGCSP (object);

  switch (prop_id) {
    default:
      break;
  }
}

static void
gst_ffmpegcsp_get_property (GObject * object,
    guint prop_id, GValue * value, GParamSpec * pspec)
{
  GstFFMpegCsp *space;

  /* it's not null if we got it, but it might not be ours */
  g_return_if_fail (GST_IS_FFMPEGCSP (object));
  space = GST_FFMPEGCSP (object);

  switch (prop_id) {
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}

gboolean
gst_ffmpegcsp_register (GstPlugin * plugin)
{
  GstCaps *caps;

  /* template caps */
  caps = gst_ffmpeg_codectype_to_caps (CODEC_TYPE_VIDEO, NULL);

  /* build templates */
  srctempl = gst_pad_template_new ("src",
      GST_PAD_SRC, GST_PAD_ALWAYS, gst_caps_copy (caps));

  /* the sink template will do palette handling as well... */
  sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, caps);

  return gst_element_register (plugin, "ffcolorspace",
      GST_RANK_NONE, GST_TYPE_FFMPEGCSP);
}

ext/ffmpeg/gstffmpegdec.c

@@ -56,7 +56,9 @@ struct _GstFFMpegDec
   struct
   {
     gint width, height;
-    gdouble fps, old_fps;
+    gint fps_n, fps_d;
+    gint old_fps_n, old_fps_d;
 
     enum PixelFormat pix_fmt;
   } video;
   struct
@@ -289,8 +291,8 @@ gst_ffmpegdec_init (GstFFMpegDec * ffmpegdec)
 
   ffmpegdec->last_buffer = NULL;
 
-  ffmpegdec->format.video.fps = -1.0;
-  ffmpegdec->format.video.old_fps = -1.0;
+  ffmpegdec->format.video.fps_n = -1;
+  ffmpegdec->format.video.old_fps_n = -1;
 }
 
 static void
@@ -402,8 +404,8 @@ gst_ffmpegdec_close (GstFFMpegDec * ffmpegdec)
     ffmpegdec->pctx = NULL;
   }
 
-  ffmpegdec->format.video.fps = -1.0;
-  ffmpegdec->format.video.old_fps = -1.0;
+  ffmpegdec->format.video.fps_n = -1;
+  ffmpegdec->format.video.old_fps_n = -1;
 }
 
 static gboolean
@@ -466,6 +468,7 @@ gst_ffmpegdec_setcaps (GstPad * pad, GstCaps * caps)
       (GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));
   GstStructure *structure;
   const GValue *par;
+  const GValue *fps;
 
   GST_DEBUG ("setcaps called");
 
@@ -498,13 +501,12 @@ gst_ffmpegdec_setcaps (GstPad * pad, GstCaps * caps)
     gst_value_init_and_copy (ffmpegdec->par, par);
   }
 
-  if (gst_structure_has_field (structure, "framerate")) {
-    ffmpegdec->format.video.old_fps = ffmpegdec->format.video.fps;
-    gst_structure_get_double (structure, "framerate",
-        &ffmpegdec->format.video.fps);
+  fps = gst_structure_get_value (structure, "framerate");
+  if (fps != NULL && GST_VALUE_HOLDS_FRACTION (fps)) {
+    ffmpegdec->format.video.fps_n = gst_value_get_fraction_numerator (fps);
+    ffmpegdec->format.video.fps_d = gst_value_get_fraction_denominator (fps);
   } else {
-    ffmpegdec->format.video.old_fps = ffmpegdec->format.video.fps;
-    ffmpegdec->format.video.fps = -1.0;
+    ffmpegdec->format.video.fps_n = -1;
   }
 
   /* do *not* draw edges */
@@ -635,16 +637,19 @@ gst_ffmpegdec_negotiate (GstFFMpegDec * ffmpegdec)
     case CODEC_TYPE_VIDEO:
       if (ffmpegdec->format.video.width == ffmpegdec->context->width &&
           ffmpegdec->format.video.height == ffmpegdec->context->height &&
-          ffmpegdec->format.video.fps == ffmpegdec->format.video.old_fps &&
+          ffmpegdec->format.video.fps_n == ffmpegdec->format.video.old_fps_n &&
+          ffmpegdec->format.video.fps_d == ffmpegdec->format.video.old_fps_d &&
           ffmpegdec->format.video.pix_fmt == ffmpegdec->context->pix_fmt)
         return TRUE;
-      GST_DEBUG ("Renegotiating video from %dx%d@%0.2ffps to %dx%d@%0.2ffps",
+      GST_DEBUG ("Renegotiating video from %dx%d@ %d/%d fps to %dx%d@ %d/%d fps",
           ffmpegdec->format.video.width, ffmpegdec->format.video.height,
-          ffmpegdec->format.video.old_fps, ffmpegdec->context->width,
-          ffmpegdec->context->height, ffmpegdec->format.video.old_fps);
+          ffmpegdec->format.video.old_fps_n, ffmpegdec->format.video.old_fps_n,
+          ffmpegdec->context->width, ffmpegdec->context->height,
+          ffmpegdec->format.video.fps_n, ffmpegdec->format.video.fps_d);
       ffmpegdec->format.video.width = ffmpegdec->context->width;
       ffmpegdec->format.video.height = ffmpegdec->context->height;
-      ffmpegdec->format.video.old_fps = ffmpegdec->format.video.fps;
+      ffmpegdec->format.video.old_fps_n = ffmpegdec->format.video.fps_n;
+      ffmpegdec->format.video.old_fps_d = ffmpegdec->format.video.fps_d;
       ffmpegdec->format.video.pix_fmt = ffmpegdec->context->pix_fmt;
       break;
     case CODEC_TYPE_AUDIO:
@@ -667,9 +672,10 @@ gst_ffmpegdec_negotiate (GstFFMpegDec * ffmpegdec)
 
   if (caps) {
     /* If a demuxer provided a framerate then use it (#313970) */
-    if (ffmpegdec->format.video.fps != -1.0) {
+    if (ffmpegdec->format.video.fps_n != -1) {
       gst_structure_set (gst_caps_get_structure (caps, 0), "framerate",
-          G_TYPE_DOUBLE, ffmpegdec->format.video.fps, NULL);
+          GST_TYPE_FRACTION, ffmpegdec->format.video.fps_n,
+          ffmpegdec->format.video.fps_d, NULL);
     }
 
     /* Add pixel-aspect-ratio if we have it. Prefer
@@ -835,15 +841,16 @@ gst_ffmpegdec_frame (GstFFMpegDec * ffmpegdec,
 
       /* If we have used the framerate from the demuxer then
       * also use the demuxer's timestamp information (#317596) */
-      if (ffmpegdec->format.video.fps != -1.0 && inbuf != NULL) {
+      if (ffmpegdec->format.video.fps_n != -1 && inbuf != NULL) {
         gst_buffer_stamp (outbuf, inbuf);
       } else {
         GST_BUFFER_TIMESTAMP (outbuf) = ffmpegdec->next_ts;
         if (ffmpegdec->context->time_base.num != 0 &&
             ffmpegdec->context->time_base.den != 0) {
-          GST_BUFFER_DURATION (outbuf) = GST_SECOND *
-              ffmpegdec->context->time_base.num /
-              ffmpegdec->context->time_base.den;
+          GST_BUFFER_DURATION (outbuf) =
+              gst_util_clock_time_scale (GST_SECOND,
+              ffmpegdec->context->time_base.num,
+              ffmpegdec->context->time_base.den);
 
           /* Take repeat_pict into account */
           GST_BUFFER_DURATION (outbuf) += GST_BUFFER_DURATION (outbuf)
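For reference, the duration arithmetic in this last hunk written out as a standalone, illustrative-only sketch: one frame lasts time_base.num/time_base.den seconds, i.e. GST_SECOND * num / den nanoseconds. The repeat_pict handling is shown truncated above; the factor of repeat_pict/2 extra frame periods used below is an assumption about the surrounding code, not something visible in this hunk.

#include <glib.h>

#define NSEC_PER_SEC G_GUINT64_CONSTANT (1000000000)    /* stand-in for GST_SECOND */

/* Illustrative only: frame duration in nanoseconds from a codec time base. */
static guint64
frame_duration_ns (gint tb_num, gint tb_den, gint repeat_pict)
{
  guint64 dur;

  if (tb_num == 0 || tb_den == 0)
    return 0;                                           /* frame rate unknown */

  dur = NSEC_PER_SEC * (guint64) tb_num / tb_den;       /* one frame period */

  /* assumed repeat_pict handling: each unit adds half a frame period */
  dur += dur * repeat_pict / 2;

  return dur;
}

int
main (void)
{
  /* 29.97 fps -> time_base 1001/30000 -> ~33366667 ns per frame */
  g_print ("%" G_GUINT64_FORMAT " ns\n", frame_duration_ns (1001, 30000, 0));
  return 0;
}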