Merge CAPS branch

Original commit message from CVS:
Merge CAPS branch
David Schleef 2003-12-22 01:46:58 +00:00
parent 4ee3b353e9
commit 5153d56e2c
7 changed files with 669 additions and 985 deletions
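The merge tracks GStreamer core's CAPS-branch rework across the plugin: pad link functions now receive a fixed const GstCaps *, caps fields are read through GstStructure accessors, gst_pad_template_new () drops its trailing NULL, gst_caps_append () appends in place, and the colorspace element's macros are renamed from GST_FFMPEGCSP to GST_FFMPEG_CSP. A minimal sketch of the new link-handler shape, assuming the post-merge API shown in the hunks below plus the stock gst_structure_get_int () accessor (element and handler names are illustrative, not from this commit):

#include <gst/gst.h>

/* Hypothetical link handler, post-merge style: caps arrive fixed and const,
 * and fields are read from the first GstStructure. */
static GstPadLinkReturn
my_element_link (GstPad *pad, const GstCaps *caps)
{
  GstStructure *structure = gst_caps_get_structure (caps, 0);
  gint width, height;
  gdouble fps;

  if (!gst_structure_get_int (structure, "width", &width) ||
      !gst_structure_get_int (structure, "height", &height))
    return GST_PAD_LINK_REFUSED;

  /* framerate is a double now; default to 0 when absent, as the
   * colorspace element below does */
  if (!gst_structure_get_double (structure, "framerate", &fps))
    fps = 0;

  GST_DEBUG ("linked %s at %dx%d, %f fps",
      GST_PAD_NAME (pad), width, height, fps);

  return GST_PAD_LINK_OK;
}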

File diff suppressed because it is too large.


@@ -49,7 +49,7 @@ gst_ffmpeg_codectype_to_caps (enum CodecType codec_type,
*/
enum CodecID
gst_ffmpeg_caps_to_codecid (GstCaps *caps,
gst_ffmpeg_caps_to_codecid (const GstCaps *caps,
AVCodecContext *context);
/* caps_to_codectype () transforms a GstCaps that belongs to
@@ -58,7 +58,7 @@ gst_ffmpeg_caps_to_codecid (GstCaps *caps,
void
gst_ffmpeg_caps_to_codectype (enum CodecType type,
GstCaps *caps,
const GstCaps *caps,
AVCodecContext *context);
/* _formatid_to_caps () is meant for muxers/demuxers, it
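Both helpers here now take const GstCaps *, so a link handler can hand its caps straight to the codec mapper. A rough sketch of the call pattern the muxer uses later in this commit (the wrapper name and the AVStream parameter are illustrative; the plugin's own headers are assumed):

/* Sketch: map fixed caps onto a stream's codec context via the const-ified
 * helper; refuse the link when no codec id matches. */
static GstPadLinkReturn
setup_stream_from_caps (const GstCaps *caps, AVStream *st)
{
  if (gst_ffmpeg_caps_to_codecid (caps, &st->codec) == CODEC_ID_NONE)
    return GST_PAD_LINK_REFUSED;

  return GST_PAD_LINK_OK;
}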


@@ -32,35 +32,38 @@
#include "gstffmpegcodecmap.h"
#define GST_TYPE_FFMPEGCSP \
GST_DEBUG_CATEGORY_STATIC (debug_ffmpeg_csp);
#define GST_CAT_DEFAULT debug_ffmpeg_csp
#define GST_TYPE_FFMPEG_CSP \
(gst_ffmpegcsp_get_type())
#define GST_FFMPEGCSP(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_FFMPEGCSP,GstFFMpegCsp))
#define GST_FFMPEGCSP_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_FFMPEGCSP,GstFFMpegCsp))
#define GST_IS_FFMPEGCSP(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_FFMPEGCSP))
#define GST_IS_FFMPEGCSP_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_FFMPEGCSP))
#define GST_FFMPEG_CSP(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_FFMPEG_CSP,GstFFMpegCsp))
#define GST_FFMPEG_CSP_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_FFMPEG_CSP,GstFFMpegCsp))
#define GST_IS_FFMPEG_CSP(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_FFMPEG_CSP))
#define GST_IS_FFMPEG_CSP_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_FFMPEG_CSP))
typedef struct _GstFFMpegCsp GstFFMpegCsp;
typedef struct _GstFFMpegCspClass GstFFMpegCspClass;
struct _GstFFMpegCsp {
GstElement element;
GstElement element;
GstPad *sinkpad, *srcpad;
GstPad * sinkpad;
GstPad * srcpad;
gboolean need_caps_nego;
gint width, height;
gfloat fps;
enum PixelFormat
from_pixfmt,
to_pixfmt;
AVFrame *from_frame,
*to_frame;
GstCaps *sinkcaps;
GstBufferPool *pool;
gint width;
gint height;
gdouble fps;
enum PixelFormat from_pixfmt;
enum PixelFormat to_pixfmt;
AVFrame * from_frame;
AVFrame * to_frame;
};
struct _GstFFMpegCspClass {
@@ -88,176 +91,92 @@ enum {
static GType gst_ffmpegcsp_get_type (void);
static void gst_ffmpegcsp_base_init (GstFFMpegCspClass *klass);
static void gst_ffmpegcsp_class_init (GstFFMpegCspClass *klass);
static void gst_ffmpegcsp_init (GstFFMpegCsp *space);
static void gst_ffmpegcsp_set_property (GObject *object,
guint prop_id,
const GValue *value,
GParamSpec *pspec);
static void gst_ffmpegcsp_get_property (GObject *object,
guint prop_id,
GValue *value,
GParamSpec *pspec);
static void gst_ffmpegcsp_base_init (gpointer g_class);
static void gst_ffmpegcsp_class_init (gpointer g_class, gpointer class_data);
static void gst_ffmpegcsp_init (GTypeInstance *instance, gpointer g_class);
static GstPadLinkReturn
gst_ffmpegcsp_sinkconnect (GstPad *pad,
GstCaps *caps);
gst_ffmpegcsp_connect (GstPad *pad,
const GstCaps *caps);
static GstPadLinkReturn
gst_ffmpegcsp_srcconnect (GstPad *pad,
GstCaps *caps);
static GstPadLinkReturn
gst_ffmpegcsp_srcconnect_func (GstPad *pad,
GstCaps *caps,
gboolean newcaps);
gst_ffmpegcsp_try_connect (GstPad *pad,
AVCodecContext *ctx,
double fps);
static void gst_ffmpegcsp_chain (GstPad *pad,
GstData *data);
static void gst_ffmpegcsp_chain (GstPad *pad,
GstData *data);
static GstElementStateReturn
gst_ffmpegcsp_change_state (GstElement *element);
gst_ffmpegcsp_change_state (GstElement *element);
static GstPadTemplate *srctempl, *sinktempl;
static GstElementClass *parent_class = NULL;
/*static guint gst_ffmpegcsp_signals[LAST_SIGNAL] = { 0 }; */
static GstBufferPool *
ffmpegcsp_get_bufferpool (GstPad *pad)
{
GstFFMpegCsp *space;
space = GST_FFMPEGCSP (gst_pad_get_parent (pad));
if (space->from_pixfmt == space->to_pixfmt &&
space->from_pixfmt != PIX_FMT_NB) {
return gst_pad_get_bufferpool (space->srcpad);
}
return NULL;
}
static GstCaps *
gst_ffmpegcsp_getcaps (GstPad *pad,
GstCaps *caps)
{
GstFFMpegCsp *space;
GstCaps *result;
GstCaps *peercaps;
GstCaps *ourcaps;
space = GST_FFMPEGCSP (gst_pad_get_parent (pad));
/* we can do everything our peer can... */
peercaps = gst_caps_copy (gst_pad_get_allowed_caps (space->srcpad));
/* and our own template of course */
ourcaps = gst_caps_copy (gst_pad_get_pad_template_caps (pad));
/* merge them together, we prefer the peercaps first */
result = gst_caps_prepend (ourcaps, peercaps);
return result;
}
/* does caps nego on a pad */
static GstPadLinkReturn
gst_ffmpegcsp_srcconnect_func (GstPad *pad,
GstCaps *caps,
gboolean newcaps)
gst_ffmpegcsp_try_connect (GstPad *pad, AVCodecContext *ctx, double fps)
{
AVCodecContext *ctx;
gint i, ret;
GstFFMpegCsp *space;
GstCaps *peercaps;
GstCaps *ourcaps;
space = GST_FFMPEGCSP (gst_pad_get_parent (pad));
/* we cannot operate if we didn't get src caps */
if (!(ourcaps = space->sinkcaps)) {
if (newcaps) {
gst_pad_recalc_allowed_caps (space->sinkpad);
}
return GST_PAD_LINK_DELAYED;
}
/* first see if we can do the format natively by filtering the peer caps
* with our incomming caps */
if ((peercaps = gst_caps_intersect (caps, ourcaps)) != NULL) {
/* see if the peer likes it too, it should as the caps say so.. */
if (gst_pad_try_set_caps (space->srcpad, peercaps) > 0) {
space->from_pixfmt = space->to_pixfmt = -1;
return GST_PAD_LINK_DONE;
}
}
/* then see what the peer has that matches the size */
peercaps = gst_caps_intersect (caps,
gst_caps_append (
GST_CAPS_NEW (
"ffmpegcsp_filter",
"video/x-raw-yuv",
"width", GST_PROPS_INT (space->width),
"height", GST_PROPS_INT (space->height),
"framerate", GST_PROPS_FLOAT (space->fps)
), GST_CAPS_NEW (
"ffmpegcsp_filter",
"video/x-raw-rgb",
"width", GST_PROPS_INT (space->width),
"height", GST_PROPS_INT (space->height),
"framerate", GST_PROPS_FLOAT (space->fps)
)));
/* we are looping over the caps, so we have to get rid of the lists */
peercaps = gst_caps_normalize (peercaps);
gboolean try_all = (ctx->pix_fmt != PIX_FMT_NB);
GstCaps *caps;
space = GST_FFMPEG_CSP (gst_pad_get_parent (pad));
/* loop over all possibilities and select the first one we can convert and
* is accepted by the peer */
ctx = avcodec_alloc_context ();
while (peercaps) {
ctx->width = space->width;
ctx->height = space->height;
ctx->pix_fmt = PIX_FMT_NB;
gst_ffmpeg_caps_to_codectype (CODEC_TYPE_VIDEO, peercaps, ctx);
if (ctx->pix_fmt != PIX_FMT_NB) {
GstCaps *one = gst_caps_copy_1 (peercaps);
if (gst_pad_try_set_caps (space->srcpad, one) > 0) {
space->to_pixfmt = ctx->pix_fmt;
gst_caps_unref (one);
av_free (ctx);
if (space->from_frame)
av_free (space->from_frame);
if (space->to_frame)
av_free (space->to_frame);
space->from_frame = avcodec_alloc_frame ();
space->to_frame = avcodec_alloc_frame ();
return GST_PAD_LINK_DONE;
}
gst_caps_unref (one);
}
peercaps = peercaps->next;
}
av_free (ctx);
/* we disable ourself here */
space->from_pixfmt = space->to_pixfmt = PIX_FMT_NB;
caps = gst_ffmpeg_codectype_to_caps (CODEC_TYPE_VIDEO, ctx);
for (i = 0; i < gst_caps_get_size (caps); i++) {
GstStructure *structure = gst_caps_get_structure (caps, i);
GstCaps *setcaps;
return GST_PAD_LINK_REFUSED;
if (fps > 0)
gst_structure_set (structure, "framerate", G_TYPE_DOUBLE, fps, NULL);
setcaps = gst_caps_new_full (gst_structure_copy (structure), NULL);
ret = gst_pad_try_set_caps (pad, setcaps);
gst_caps_free (setcaps);
if (ret >= 0) {
if (ctx->pix_fmt == PIX_FMT_NB)
gst_ffmpeg_caps_to_codectype (CODEC_TYPE_VIDEO, caps, ctx);
gst_caps_free (caps);
return ret;
}
}
if (try_all) {
ctx->pix_fmt = PIX_FMT_NB;
return gst_ffmpegcsp_try_connect (pad, ctx, fps);
} else {
return GST_PAD_LINK_REFUSED;
}
}
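The removed srcconnect_func () and the new try_connect () are interleaved in the hunk above, which makes it hard to follow. A condensed sketch of the new negotiation loop, assembled from the added lines (the PIX_FMT_NB retry and the parent-element lookup are omitted, and a caps free is added on the refusal path):

/* Sketch: enumerate the raw-video caps ffmpeg can handle for ctx, pin the
 * framerate, and offer each structure to the peer until one is accepted. */
static GstPadLinkReturn
try_connect_sketch (GstPad *pad, AVCodecContext *ctx, double fps)
{
  GstCaps *caps = gst_ffmpeg_codectype_to_caps (CODEC_TYPE_VIDEO, ctx);
  gint i, ret;

  for (i = 0; i < gst_caps_get_size (caps); i++) {
    GstStructure *structure = gst_caps_get_structure (caps, i);
    GstCaps *setcaps;

    if (fps > 0)
      gst_structure_set (structure, "framerate", G_TYPE_DOUBLE, fps, NULL);

    setcaps = gst_caps_new_full (gst_structure_copy (structure), NULL);
    ret = gst_pad_try_set_caps (pad, setcaps);
    gst_caps_free (setcaps);

    if (ret >= 0) {
      /* note which pixel format the accepted caps map to */
      if (ctx->pix_fmt == PIX_FMT_NB)
        gst_ffmpeg_caps_to_codectype (CODEC_TYPE_VIDEO, caps, ctx);
      gst_caps_free (caps);
      return ret;
    }
  }

  gst_caps_free (caps);
  return GST_PAD_LINK_REFUSED;
}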
static GstPadLinkReturn
gst_ffmpegcsp_sinkconnect (GstPad *pad,
GstCaps *caps)
gst_ffmpegcsp_connect (GstPad *pad, const GstCaps *caps)
{
AVCodecContext *ctx;
GstFFMpegCsp *space;
GstPad *peer;
gdouble fps;
enum PixelFormat pixfmt;
GstPad *other;
enum PixelFormat *format, *other_format;
space = GST_FFMPEGCSP (gst_pad_get_parent (pad));
space = GST_FFMPEG_CSP (gst_pad_get_parent (pad));
if (!GST_CAPS_IS_FIXED (caps)) {
return GST_PAD_LINK_DELAYED;
if (space->sinkpad == pad) {
other = space->srcpad;
format = &space->from_pixfmt;
other_format = &space->to_pixfmt;
} else if (space->srcpad == pad) {
other = space->sinkpad;
format = &space->to_pixfmt;
other_format = &space->from_pixfmt;
} else {
g_assert_not_reached ();
return GST_PAD_LINK_REFUSED;
}
ctx = avcodec_alloc_context ();
ctx->width = 0;
ctx->height = 0;
@@ -265,42 +184,39 @@ gst_ffmpegcsp_sinkconnect (GstPad *pad,
gst_ffmpeg_caps_to_codectype (CODEC_TYPE_VIDEO, caps, ctx);
if (!ctx->width || !ctx->height || ctx->pix_fmt == PIX_FMT_NB) {
av_free (ctx);
return GST_PAD_LINK_REFUSED;
}
gst_caps_get_float (caps, "framerate", &space->fps);
if (!gst_structure_get_double (gst_caps_get_structure (caps, 0),
"framerate", &fps))
fps = 0;
pixfmt = ctx->pix_fmt;
if (*other_format == PIX_FMT_NB ||
space->width != ctx->width ||
space->height != ctx->height ||
space->fps != fps) {
GST_DEBUG_OBJECT (space, "Need caps nego on pad %s for size %dx%d",
GST_PAD_NAME (other), ctx->width, ctx->height);
/* ctx->pix_fmt is set to preferred format */
if (gst_ffmpegcsp_try_connect (space->sinkpad, ctx, fps) <= 0) {
av_free (ctx);
return GST_PAD_LINK_REFUSED;
}
*other_format = ctx->pix_fmt;
}
space->width = ctx->width;
space->height = ctx->height;
space->from_pixfmt = ctx->pix_fmt;
space->fps = fps;
*format = pixfmt;
av_free (ctx);
GST_INFO ( "size: %dx%d", space->width, space->height);
space->sinkcaps = caps;
if ((peer = gst_pad_get_peer (pad)) != NULL) {
GstPadLinkReturn ret;
ret = gst_ffmpegcsp_srcconnect_func (pad,
gst_pad_get_caps (GST_PAD_PEER (space->srcpad)),
FALSE);
if (ret <= 0) {
space->sinkcaps = NULL;
return ret;
}
return GST_PAD_LINK_DONE;
}
return GST_PAD_LINK_OK;
}
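gst_ffmpegcsp_connect () now serves both pads and works on the fixed caps directly: fill an AVCodecContext from the caps, read the framerate from the first structure as a double (the removed code used gst_caps_get_float), then renegotiate the opposite pad with the try_connect above. A condensed sketch of that first stage, pulled from the added lines (the wrapper function is mine):

/* Sketch: validate fixed raw-video caps and extract size, pixel format
 * and framerate, post-merge style. ctx comes from avcodec_alloc_context (). */
static GstPadLinkReturn
parse_video_caps (const GstCaps *caps, AVCodecContext *ctx, gdouble *fps)
{
  ctx->width = 0;
  ctx->height = 0;
  ctx->pix_fmt = PIX_FMT_NB;
  gst_ffmpeg_caps_to_codectype (CODEC_TYPE_VIDEO, caps, ctx);

  /* refuse caps that do not fully describe raw video */
  if (!ctx->width || !ctx->height || ctx->pix_fmt == PIX_FMT_NB)
    return GST_PAD_LINK_REFUSED;

  /* framerate is a double read from the first structure now */
  if (!gst_structure_get_double (gst_caps_get_structure (caps, 0),
          "framerate", fps))
    *fps = 0;

  return GST_PAD_LINK_OK;
}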
static GstPadLinkReturn
gst_ffmpegcsp_srcconnect (GstPad *pad,
GstCaps *caps)
{
return gst_ffmpegcsp_srcconnect_func (pad, caps, TRUE);
}
static GType
gst_ffmpegcsp_get_type (void)
{
@@ -309,66 +225,79 @@ gst_ffmpegcsp_get_type (void)
if (!ffmpegcsp_type) {
static const GTypeInfo ffmpegcsp_info = {
sizeof (GstFFMpegCspClass),
(GBaseInitFunc) gst_ffmpegcsp_base_init,
gst_ffmpegcsp_base_init,
NULL,
(GClassInitFunc) gst_ffmpegcsp_class_init,
gst_ffmpegcsp_class_init,
NULL,
NULL,
sizeof (GstFFMpegCsp),
0,
(GInstanceInitFunc) gst_ffmpegcsp_init,
gst_ffmpegcsp_init,
};
ffmpegcsp_type = g_type_register_static (GST_TYPE_ELEMENT,
"GstFFMpegColorspace",
&ffmpegcsp_info, 0);
GST_DEBUG_CATEGORY_INIT (debug_ffmpeg_csp, "ffcolorspace", 0, "FFMpeg colorspace converter");
}
return ffmpegcsp_type;
}
static void
gst_ffmpegcsp_base_init (GstFFMpegCspClass *klass)
gst_ffmpegcsp_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstCaps *caps, *capscopy;
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
/* template caps */
caps = gst_ffmpeg_codectype_to_caps (CODEC_TYPE_VIDEO, NULL);
capscopy = gst_caps_copy (caps);
/* build templates */
gst_element_class_add_pad_template (element_class,
gst_pad_template_new ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
caps));
gst_element_class_add_pad_template (element_class,
gst_pad_template_new ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
capscopy));
gst_element_class_add_pad_template (element_class, srctempl);
gst_element_class_add_pad_template (element_class, sinktempl);
gst_element_class_set_details (element_class, &ffmpegcsp_details);
}
static void
gst_ffmpegcsp_class_init (GstFFMpegCspClass *klass)
gst_ffmpegcsp_class_init (gpointer g_class, gpointer class_data)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstElementClass *gstelement_class = GST_ELEMENT_CLASS (g_class);
gobject_class = (GObjectClass*) klass;
gstelement_class = (GstElementClass*) klass;
parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
gobject_class->set_property = gst_ffmpegcsp_set_property;
gobject_class->get_property = gst_ffmpegcsp_get_property;
parent_class = g_type_class_peek_parent (g_class);
gstelement_class->change_state = gst_ffmpegcsp_change_state;
}
static void
gst_ffmpegcsp_init (GstFFMpegCsp *space)
gst_ffmpegcsp_init (GTypeInstance *instance, gpointer g_class)
{
space->sinkpad = gst_pad_new_from_template (sinktempl, "sink");
gst_pad_set_link_function (space->sinkpad, gst_ffmpegcsp_sinkconnect);
gst_pad_set_getcaps_function (space->sinkpad, gst_ffmpegcsp_getcaps);
gst_pad_set_bufferpool_function (space->sinkpad, ffmpegcsp_get_bufferpool);
GstFFMpegCsp *space = GST_FFMPEG_CSP (instance);
space->sinkpad = gst_pad_new_from_template (
gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (space), "sink"),
"sink");
gst_pad_set_link_function (space->sinkpad, gst_ffmpegcsp_connect);
gst_pad_set_chain_function (space->sinkpad,gst_ffmpegcsp_chain);
gst_element_add_pad (GST_ELEMENT(space), space->sinkpad);
space->srcpad = gst_pad_new_from_template (srctempl, "src");
space->srcpad = gst_pad_new_from_template (
gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (space), "src"),
"src");
gst_element_add_pad (GST_ELEMENT (space), space->srcpad);
gst_pad_set_link_function (space->srcpad, gst_ffmpegcsp_srcconnect);
gst_pad_set_link_function (space->srcpad, gst_ffmpegcsp_connect);
space->pool = NULL;
space->from_pixfmt = space->to_pixfmt = PIX_FMT_NB;
space->from_frame = space->to_frame = NULL;
}
@@ -377,18 +306,18 @@ static void
gst_ffmpegcsp_chain (GstPad *pad,
GstData *data)
{
GstBuffer *inbuf = GST_BUFFER (data);
GstFFMpegCsp *space;
GstBuffer *inbuf = GST_BUFFER (data);
GstBuffer *outbuf = NULL;
g_return_if_fail (pad != NULL);
g_return_if_fail (GST_IS_PAD (pad));
g_return_if_fail (inbuf != NULL);
space = GST_FFMPEGCSP (gst_pad_get_parent (pad));
space = GST_FFMPEG_CSP (gst_pad_get_parent (pad));
g_return_if_fail (space != NULL);
g_return_if_fail (GST_IS_FFMPEGCSP (space));
g_return_if_fail (GST_IS_FFMPEG_CSP (space));
if (space->from_pixfmt == PIX_FMT_NB ||
space->to_pixfmt == PIX_FMT_NB) {
@@ -399,16 +328,11 @@ gst_ffmpegcsp_chain (GstPad *pad,
if (space->from_pixfmt == space->to_pixfmt) {
outbuf = inbuf;
} else {
if (space->pool) {
outbuf = gst_buffer_new_from_pool (space->pool, 0, 0);
}
if (!outbuf) {
guint size = avpicture_get_size (space->to_pixfmt,
space->width,
space->height);
outbuf = gst_buffer_new_and_alloc (size);
}
guint size = avpicture_get_size (space->to_pixfmt,
space->width,
space->height);
/* use bufferpools here */
outbuf = gst_buffer_new_and_alloc (size);
/* convert */
avpicture_fill ((AVPicture *) space->from_frame, GST_BUFFER_DATA (inbuf),
@@ -419,8 +343,7 @@ gst_ffmpegcsp_chain (GstPad *pad,
(AVPicture *) space->from_frame, space->from_pixfmt,
space->width, space->height);
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (inbuf);
GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (inbuf);
gst_buffer_stamp (outbuf, inbuf);
gst_buffer_unref (inbuf);
}
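Two cleanups in the chain function are visible above: the bufferpool path is gone, so the output buffer is sized with avpicture_get_size () and allocated directly, and the hand-rolled timestamp/duration copy becomes a single gst_buffer_stamp (). A condensed fragment built from these hunks (the pixel-conversion call itself is only partially visible in the diff and is elided here):

/* Sketch of the post-merge output path in gst_ffmpegcsp_chain (). */
guint size = avpicture_get_size (space->to_pixfmt,
    space->width, space->height);
GstBuffer *outbuf = gst_buffer_new_and_alloc (size);

avpicture_fill ((AVPicture *) space->from_frame, GST_BUFFER_DATA (inbuf),
    space->from_pixfmt, space->width, space->height);
/* ... colorspace conversion from from_frame into outbuf goes here ... */

gst_buffer_stamp (outbuf, inbuf);   /* copies timestamp and duration */
gst_buffer_unref (inbuf);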
@@ -433,14 +356,12 @@ gst_ffmpegcsp_change_state (GstElement *element)
{
GstFFMpegCsp *space;
space = GST_FFMPEGCSP (element);
space = GST_FFMPEG_CSP (element);
switch (GST_STATE_TRANSITION (element)) {
case GST_STATE_PAUSED_TO_PLAYING:
space->pool = gst_pad_get_bufferpool (space->srcpad);
case GST_STATE_READY_TO_PAUSED:
space->need_caps_nego = TRUE;
break;
case GST_STATE_PLAYING_TO_PAUSED:
space->pool = NULL;
case GST_STATE_PAUSED_TO_READY:
if (space->from_frame)
av_free (space->from_frame);
@@ -457,62 +378,9 @@ gst_ffmpegcsp_change_state (GstElement *element)
return GST_STATE_SUCCESS;
}
static void
gst_ffmpegcsp_set_property (GObject *object,
guint prop_id,
const GValue *value,
GParamSpec *pspec)
{
GstFFMpegCsp *space;
/* it's not null if we got it, but it might not be ours */
g_return_if_fail (GST_IS_FFMPEGCSP (object));
space = GST_FFMPEGCSP (object);
switch (prop_id) {
default:
break;
}
}
static void
gst_ffmpegcsp_get_property (GObject *object,
guint prop_id,
GValue *value,
GParamSpec *pspec)
{
GstFFMpegCsp *space;
/* it's not null if we got it, but it might not be ours */
g_return_if_fail (GST_IS_FFMPEGCSP (object));
space = GST_FFMPEGCSP (object);
switch (prop_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
gboolean
gst_ffmpegcsp_register (GstPlugin *plugin)
{
GstCaps *caps;
/* template caps */
caps = gst_ffmpeg_codectype_to_caps (CODEC_TYPE_VIDEO, NULL);
/* build templates */
srctempl = gst_pad_template_new ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
caps, NULL);
gst_caps_ref (caps); /* FIXME: pad_template_new refs the caps, doesn't it? */
sinktempl = gst_pad_template_new ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
caps, NULL);
return gst_element_register (plugin, "ffcolorspace",
GST_RANK_NONE, GST_TYPE_FFMPEGCSP);
GST_RANK_NONE, GST_TYPE_FFMPEG_CSP);
}


@@ -92,7 +92,7 @@ static void gst_ffmpegdec_init (GstFFMpegDec *ffmpegdec);
static void gst_ffmpegdec_dispose (GObject *object);
static GstPadLinkReturn gst_ffmpegdec_connect (GstPad *pad,
GstCaps *caps);
const GstCaps *caps);
static void gst_ffmpegdec_chain (GstPad *pad,
GstData *data);
@@ -141,9 +141,9 @@ gst_ffmpegdec_base_init (GstFFMpegDecClass *klass)
/* pad templates */
sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK,
GST_PAD_ALWAYS, params->sinkcaps, NULL);
GST_PAD_ALWAYS, params->sinkcaps);
srctempl = gst_pad_template_new ("src", GST_PAD_SRC,
GST_PAD_ALWAYS, params->srccaps, NULL);
GST_PAD_ALWAYS, params->srccaps);
gst_element_class_add_pad_template (element_class, srctempl);
gst_element_class_add_pad_template (element_class, sinktempl);
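Every base_init in this commit drops the trailing NULL from gst_pad_template_new (), which on the CAPS branch takes a single caps argument rather than a NULL-terminated list. Before and after, as in the decoder hunk above:

/* pre-merge: NULL-terminated varargs */
sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK,
    GST_PAD_ALWAYS, params->sinkcaps, NULL);

/* post-merge: a single caps argument */
sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK,
    GST_PAD_ALWAYS, params->sinkcaps);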
@@ -206,15 +206,11 @@ gst_ffmpegdec_dispose (GObject *object)
static GstPadLinkReturn
gst_ffmpegdec_connect (GstPad *pad,
GstCaps *caps)
const GstCaps *caps)
{
GstFFMpegDec *ffmpegdec = (GstFFMpegDec *)(gst_pad_get_parent (pad));
GstFFMpegDecClass *oclass = (GstFFMpegDecClass*)(G_OBJECT_GET_CLASS (ffmpegdec));
/* we want fixed caps */
if (!GST_CAPS_IS_FIXED (caps))
return GST_PAD_LINK_DELAYED;
/* close old session */
if (ffmpegdec->opened) {
avcodec_close (ffmpegdec->context);


@@ -133,15 +133,15 @@ gst_ffmpegdemux_base_init (GstFFMpegDemuxClass *klass)
sinktempl = gst_pad_template_new ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
params->sinkcaps, NULL);
params->sinkcaps);
videosrctempl = gst_pad_template_new ("video_%02d",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
params->videosrccaps, NULL);
params->videosrccaps);
audiosrctempl = gst_pad_template_new ("audio_%02d",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
params->audiosrccaps, NULL);
params->audiosrccaps);
gst_element_class_add_pad_template (element_class, videosrctempl);
gst_element_class_add_pad_template (element_class, audiosrctempl);
@@ -217,12 +217,8 @@ gst_ffmpegdemux_type_find (GstTypeFind *tf, gpointer priv)
res = in_plugin->read_probe (&probe_data);
res = res * GST_TYPE_FIND_MAXIMUM / AVPROBE_SCORE_MAX;
if (res > 0) {
GstCaps *caps = params->sinkcaps;
/* make sure we still hold a refcount to this caps */
gst_caps_ref (caps);
gst_type_find_suggest (tf, res, caps);
}
if (res > 0)
gst_type_find_suggest (tf, res, params->sinkcaps);
}
}
@@ -386,6 +382,11 @@ gst_ffmpegdemux_change_state (GstElement *element)
gboolean
gst_ffmpegdemux_register (GstPlugin *plugin)
{
GType type;
AVInputFormat *in_plugin;
GstFFMpegDemuxClassParams *params;
AVCodec *in_codec;
gchar **extensions;
GTypeInfo typeinfo = {
sizeof(GstFFMpegDemuxClass),
(GBaseInitFunc)gst_ffmpegdemux_base_init,
@@ -397,11 +398,7 @@ gst_ffmpegdemux_register (GstPlugin *plugin)
0,
(GInstanceInitFunc)gst_ffmpegdemux_init,
};
GType type;
AVInputFormat *in_plugin;
GstFFMpegDemuxClassParams *params;
AVCodec *in_codec;
gchar **extensions;
GstCaps *any_caps = gst_caps_new_any ();
in_plugin = first_iformat;
@@ -430,13 +427,13 @@ gst_ffmpegdemux_register (GstPlugin *plugin)
}
switch (in_codec->type) {
case CODEC_TYPE_VIDEO:
videosrccaps = gst_caps_append (videosrccaps, temp);
gst_caps_append (videosrccaps, temp);
break;
case CODEC_TYPE_AUDIO:
audiosrccaps = gst_caps_append (audiosrccaps, temp);
gst_caps_append (audiosrccaps, temp);
break;
default:
gst_caps_unref (temp);
gst_caps_free (temp);
break;
}
}
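Two related API shifts appear in the register functions: gst_caps_append () now appends into its first argument in place (its return value is gone), and unused caps are released with gst_caps_free () rather than gst_caps_unref (). Side by side, as in the hunk above:

/* appending: pre-merge the result had to be reassigned, post-merge the
 * structures of temp are appended to videosrccaps in place */
videosrccaps = gst_caps_append (videosrccaps, temp);   /* old */
gst_caps_append (videosrccaps, temp);                  /* new */

/* dropping caps that were not used: refcounting replaced by gst_caps_free () */
gst_caps_unref (temp);                                 /* old */
gst_caps_free (temp);                                  /* new */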
@@ -480,13 +477,14 @@ gst_ffmpegdemux_register (GstPlugin *plugin)
if (!gst_element_register (plugin, type_name, GST_RANK_MARGINAL, type) ||
!gst_type_find_register (plugin, typefind_name, GST_RANK_MARGINAL,
gst_ffmpegdemux_type_find,
extensions, GST_CAPS_ANY, params))
extensions, any_caps, params))
return FALSE;
g_strfreev (extensions);
next:
in_plugin = in_plugin->next;
}
gst_caps_free (any_caps);
g_hash_table_remove (global_plugins, GINT_TO_POINTER (0));
return TRUE;


@@ -123,7 +123,7 @@ static void gst_ffmpegenc_init (GstFFMpegEnc *ffmpegenc);
static void gst_ffmpegenc_dispose (GObject *object);
static GstPadLinkReturn
gst_ffmpegenc_connect (GstPad *pad, GstCaps *caps);
gst_ffmpegenc_connect (GstPad *pad, const GstCaps *caps);
static void gst_ffmpegenc_chain_video (GstPad *pad, GstData *_data);
static void gst_ffmpegenc_chain_audio (GstPad *pad, GstData *_data);
@@ -175,9 +175,9 @@ gst_ffmpegenc_base_init (GstFFMpegEncClass *klass)
/* pad templates */
sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK,
GST_PAD_ALWAYS, params->sinkcaps, NULL);
GST_PAD_ALWAYS, params->sinkcaps);
srctempl = gst_pad_template_new ("src", GST_PAD_SRC,
GST_PAD_ALWAYS, params->srccaps, NULL);
GST_PAD_ALWAYS, params->srccaps);
gst_element_class_add_pad_template (element_class, srctempl);
gst_element_class_add_pad_template (element_class, sinktempl);
@@ -282,15 +282,13 @@ gst_ffmpegenc_dispose (GObject *object)
static GstPadLinkReturn
gst_ffmpegenc_connect (GstPad *pad,
GstCaps *caps)
const GstCaps *caps)
{
GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *) gst_pad_get_parent (pad);
GstFFMpegEncClass *oclass = (GstFFMpegEncClass*)(G_OBJECT_GET_CLASS(ffmpegenc));
GstCaps *ret_caps;
GstCaps *other_caps;
GstPadLinkReturn ret;
if (!GST_CAPS_IS_FIXED (caps))
return GST_PAD_LINK_DELAYED;
enum PixelFormat pix_fmt;
GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *) gst_pad_get_parent (pad);
GstFFMpegEncClass *oclass = (GstFFMpegEncClass *) G_OBJECT_GET_CLASS(ffmpegenc);
/* close old session */
if (ffmpegenc->opened) {
@@ -315,47 +313,37 @@ gst_ffmpegenc_connect (GstPad *pad,
/* no edges */
ffmpegenc->context->flags |= CODEC_FLAG_EMU_EDGE;
for (ret_caps = caps; ret_caps != NULL; ret_caps = ret_caps->next) {
enum PixelFormat pix_fmt;
/* fetch pix_fmt and so on */
gst_ffmpeg_caps_to_codectype (oclass->in_plugin->type,
caps, ffmpegenc->context);
/* fetch pix_fmt and so on */
gst_ffmpeg_caps_to_codectype (oclass->in_plugin->type,
caps, ffmpegenc->context);
pix_fmt = ffmpegenc->context->pix_fmt;
pix_fmt = ffmpegenc->context->pix_fmt;
/* open codec */
if (avcodec_open (ffmpegenc->context, oclass->in_plugin) < 0) {
GST_DEBUG ("ffenc_%s: Failed to open FFMPEG codec",
oclass->in_plugin->name);
continue;
}
/* is the colourspace correct? */
if (pix_fmt != ffmpegenc->context->pix_fmt) {
avcodec_close (ffmpegenc->context);
GST_DEBUG ("ffenc_%s: AV wants different colourspace (%d given, %d wanted)",
oclass->in_plugin->name, pix_fmt, ffmpegenc->context->pix_fmt);
continue;
}
break;
/* open codec */
if (avcodec_open (ffmpegenc->context, oclass->in_plugin) < 0) {
GST_DEBUG ("ffenc_%s: Failed to open FFMPEG codec",
oclass->in_plugin->name);
return GST_PAD_LINK_REFUSED;
}
if (ret_caps == NULL) {
/* is the colourspace correct? */
if (pix_fmt != ffmpegenc->context->pix_fmt) {
avcodec_close (ffmpegenc->context);
GST_DEBUG ("ffenc_%s: AV wants different colourspace (%d given, %d wanted)",
oclass->in_plugin->name, pix_fmt, ffmpegenc->context->pix_fmt);
return GST_PAD_LINK_REFUSED;
}
/* try to set this caps on the other side */
ret_caps = gst_ffmpeg_codecid_to_caps (oclass->in_plugin->id,
other_caps = gst_ffmpeg_codecid_to_caps (oclass->in_plugin->id,
ffmpegenc->context);
if (!ret_caps) {
if (!other_caps) {
avcodec_close (ffmpegenc->context);
GST_DEBUG ("Unsupported codec - no caps found");
return GST_PAD_LINK_REFUSED;
}
if ((ret = gst_pad_try_set_caps (ffmpegenc->srcpad, ret_caps)) <= 0) {
if ((ret = gst_pad_try_set_caps (ffmpegenc->srcpad, other_caps)) <= 0) {
avcodec_close (ffmpegenc->context);
GST_DEBUG ("Failed to set caps on next element for ffmpeg encoder (%s)",
oclass->in_plugin->name);


@@ -96,7 +96,7 @@ static void gst_ffmpegmux_dispose (GObject *object);
static GstPadLinkReturn
gst_ffmpegmux_connect (GstPad *pad,
GstCaps *caps);
const GstCaps *caps);
static GstPad * gst_ffmpegmux_request_new_pad (GstElement *element,
GstPadTemplate *templ,
const gchar *name);
@@ -138,15 +138,15 @@ gst_ffmpegmux_base_init (GstFFMpegMuxClass *klass)
/* pad templates */
srctempl = gst_pad_template_new ("sink", GST_PAD_SRC,
GST_PAD_ALWAYS,
params->srccaps, NULL);
params->srccaps);
audiosinktempl = gst_pad_template_new ("audio_%d",
GST_PAD_SINK,
GST_PAD_REQUEST,
params->audiosinkcaps, NULL);
params->audiosinkcaps);
videosinktempl = gst_pad_template_new ("video_%d",
GST_PAD_SINK,
GST_PAD_REQUEST,
params->videosinkcaps, NULL);
params->videosinkcaps);
gst_element_class_add_pad_template (element_class, srctempl);
gst_element_class_add_pad_template (element_class, videosinktempl);
@@ -272,7 +272,7 @@ gst_ffmpegmux_request_new_pad (GstElement *element,
static GstPadLinkReturn
gst_ffmpegmux_connect (GstPad *pad,
GstCaps *caps)
const GstCaps *caps)
{
GstFFMpegMux *ffmpegmux = (GstFFMpegMux *)(gst_pad_get_parent (pad));
gint i;
@@ -281,9 +281,6 @@ gst_ffmpegmux_connect (GstPad *pad,
g_return_val_if_fail (ffmpegmux->opened == FALSE,
GST_PAD_LINK_REFUSED);
if (!GST_CAPS_IS_FIXED (caps))
return GST_PAD_LINK_DELAYED;
for (i = 0; i < ffmpegmux->context->nb_streams; i++) {
if (pad == ffmpegmux->sinkpads[i]) {
break;
@@ -297,12 +294,10 @@ gst_ffmpegmux_connect (GstPad *pad,
/* for the format-specific guesses, we'll go to
* our famous codec mapper */
for ( ; caps != NULL; caps = caps->next) {
if (gst_ffmpeg_caps_to_codecid (caps,
&st->codec) != CODEC_ID_NONE) {
ffmpegmux->eos[i] = FALSE;
return GST_PAD_LINK_OK;
}
if (gst_ffmpeg_caps_to_codecid (caps,
&st->codec) != CODEC_ID_NONE) {
ffmpegmux->eos[i] = FALSE;
return GST_PAD_LINK_OK;
}
return GST_PAD_LINK_REFUSED;
@@ -483,13 +478,13 @@ gst_ffmpegmux_register (GstPlugin *plugin)
}
switch (in_codec->type) {
case CODEC_TYPE_VIDEO:
videosinkcaps = gst_caps_append (videosinkcaps, temp);
gst_caps_append (videosinkcaps, temp);
break;
case CODEC_TYPE_AUDIO:
audiosinkcaps = gst_caps_append (audiosinkcaps, temp);
gst_caps_append (audiosinkcaps, temp);
break;
default:
gst_caps_unref (temp);
gst_caps_free (temp);
break;
}
}