Original commit message from CVS:
indent
This commit is contained in:
Thomas Vander Stichele 2004-03-29 16:39:18 +00:00
parent d5cfdabbd8
commit 7383aa1baa
9 changed files with 891 additions and 984 deletions

View file

@ -1,3 +1,7 @@
2004-03-29 Thomas Vander Stichele <thomas at apestaart dot org>
* ext/ffmpeg/*.c: indent
2004-03-25 Ronald Bultje <rbultje@ronald.bitfreak.net>
* ext/ffmpeg/gstffmpegdec.c: (gst_ffmpegdec_base_init):

View file

@ -33,16 +33,16 @@
#include <ffmpeg/avformat.h>
#endif
extern gboolean gst_ffmpegdemux_register (GstPlugin *plugin);
extern gboolean gst_ffmpegdec_register (GstPlugin *plugin);
extern gboolean gst_ffmpegenc_register (GstPlugin *plugin);
extern gboolean gst_ffmpegmux_register (GstPlugin *plugin);
extern gboolean gst_ffmpegcsp_register (GstPlugin *plugin);
extern gboolean gst_ffmpegdemux_register (GstPlugin * plugin);
extern gboolean gst_ffmpegdec_register (GstPlugin * plugin);
extern gboolean gst_ffmpegenc_register (GstPlugin * plugin);
extern gboolean gst_ffmpegmux_register (GstPlugin * plugin);
extern gboolean gst_ffmpegcsp_register (GstPlugin * plugin);
extern URLProtocol gstreamer_protocol;
static gboolean
plugin_init (GstPlugin *plugin)
plugin_init (GstPlugin * plugin)
{
if (!gst_library_load ("gstbytestream"))
return FALSE;
@ -54,7 +54,7 @@ plugin_init (GstPlugin *plugin)
gst_ffmpegenc_register (plugin);
gst_ffmpegdec_register (plugin);
gst_ffmpegdemux_register (plugin);
/*gst_ffmpegmux_register (plugin);*/
/*gst_ffmpegmux_register (plugin); */
gst_ffmpegcsp_register (plugin);
register_protocol (&gstreamer_protocol);
@ -63,14 +63,9 @@ plugin_init (GstPlugin *plugin)
return TRUE;
}
GST_PLUGIN_DEFINE (
GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"ffmpeg",
"All FFMPEG codecs",
plugin_init,
FFMPEG_VERSION,
"LGPL",
"FFMpeg",
"http://ffmpeg.sourceforge.net/"
)
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"ffmpeg",
"All FFMPEG codecs",
plugin_init,
FFMPEG_VERSION, "LGPL", "FFMpeg", "http://ffmpeg.sourceforge.net/")

File diff suppressed because it is too large Load diff

View file

@ -46,22 +46,21 @@
typedef struct _GstFFMpegCsp GstFFMpegCsp;
typedef struct _GstFFMpegCspClass GstFFMpegCspClass;
struct _GstFFMpegCsp {
GstElement element;
struct _GstFFMpegCsp
{
GstElement element;
GstPad *sinkpad, *srcpad;
GstPad *sinkpad, *srcpad;
gint width, height;
gfloat fps;
enum PixelFormat
from_pixfmt,
to_pixfmt;
AVFrame *from_frame,
*to_frame;
GstCaps *sinkcaps;
gint width, height;
gfloat fps;
enum PixelFormat from_pixfmt, to_pixfmt;
AVFrame *from_frame, *to_frame;
GstCaps *sinkcaps;
};
struct _GstFFMpegCspClass {
struct _GstFFMpegCspClass
{
GstElementClass parent_class;
};
@ -75,72 +74,68 @@ static GstElementDetails ffmpegcsp_details = {
/* Stereo signals and args */
enum {
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum {
enum
{
ARG_0,
};
static GType gst_ffmpegcsp_get_type (void);
static GType gst_ffmpegcsp_get_type (void);
static void gst_ffmpegcsp_base_init (GstFFMpegCspClass *klass);
static void gst_ffmpegcsp_class_init (GstFFMpegCspClass *klass);
static void gst_ffmpegcsp_init (GstFFMpegCsp *space);
static void gst_ffmpegcsp_base_init (GstFFMpegCspClass * klass);
static void gst_ffmpegcsp_class_init (GstFFMpegCspClass * klass);
static void gst_ffmpegcsp_init (GstFFMpegCsp * space);
static void gst_ffmpegcsp_set_property (GObject *object,
guint prop_id,
const GValue *value,
GParamSpec *pspec);
static void gst_ffmpegcsp_get_property (GObject *object,
guint prop_id,
GValue *value,
GParamSpec *pspec);
static void gst_ffmpegcsp_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_ffmpegcsp_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
static GstPadLinkReturn
gst_ffmpegcsp_pad_link (GstPad *pad,
const GstCaps *caps);
gst_ffmpegcsp_pad_link (GstPad * pad, const GstCaps * caps);
static void gst_ffmpegcsp_chain (GstPad *pad,
GstData *data);
static GstElementStateReturn
gst_ffmpegcsp_change_state (GstElement *element);
static void gst_ffmpegcsp_chain (GstPad * pad, GstData * data);
static GstElementStateReturn gst_ffmpegcsp_change_state (GstElement * element);
static GstPadTemplate *srctempl, *sinktempl;
static GstElementClass *parent_class = NULL;
/*static guint gst_ffmpegcsp_signals[LAST_SIGNAL] = { 0 }; */
static GstCaps *
gst_ffmpegcsp_caps_remove_format_info (GstCaps *caps)
gst_ffmpegcsp_caps_remove_format_info (GstCaps * caps)
{
int i;
GstStructure *structure;
GstCaps *rgbcaps;
for(i=0;i<gst_caps_get_size (caps);i++){
for (i = 0; i < gst_caps_get_size (caps); i++) {
structure = gst_caps_get_structure (caps, i);
gst_structure_set_name (structure,"video/x-raw-yuv");
gst_structure_remove_field (structure,"format");
gst_structure_remove_field (structure,"endianness");
gst_structure_remove_field (structure,"depth");
gst_structure_remove_field (structure,"bpp");
gst_structure_remove_field (structure,"red_mask");
gst_structure_remove_field (structure,"green_mask");
gst_structure_remove_field (structure,"blue_mask");
gst_structure_set_name (structure, "video/x-raw-yuv");
gst_structure_remove_field (structure, "format");
gst_structure_remove_field (structure, "endianness");
gst_structure_remove_field (structure, "depth");
gst_structure_remove_field (structure, "bpp");
gst_structure_remove_field (structure, "red_mask");
gst_structure_remove_field (structure, "green_mask");
gst_structure_remove_field (structure, "blue_mask");
}
rgbcaps = gst_caps_simplify (caps);
gst_caps_free (caps);
caps = gst_caps_copy (rgbcaps);
for(i=0;i<gst_caps_get_size (rgbcaps);i++){
for (i = 0; i < gst_caps_get_size (rgbcaps); i++) {
structure = gst_caps_get_structure (rgbcaps, i);
gst_structure_set_name (structure,"video/x-raw-rgb");
gst_structure_set_name (structure, "video/x-raw-rgb");
}
gst_caps_append (caps, rgbcaps);
@ -149,13 +144,13 @@ gst_ffmpegcsp_caps_remove_format_info (GstCaps *caps)
}
static GstCaps *
gst_ffmpegcsp_getcaps (GstPad *pad)
gst_ffmpegcsp_getcaps (GstPad * pad)
{
GstFFMpegCsp *space;
GstCaps *othercaps;
GstCaps *caps;
GstPad *otherpad;
space = GST_FFMPEGCSP (gst_pad_get_parent (pad));
otherpad = (pad == space->srcpad) ? space->sinkpad : space->srcpad;
@ -171,8 +166,7 @@ gst_ffmpegcsp_getcaps (GstPad *pad)
}
static GstPadLinkReturn
gst_ffmpegcsp_pad_link (GstPad *pad,
const GstCaps *caps)
gst_ffmpegcsp_pad_link (GstPad * pad, const GstCaps * caps)
{
GstStructure *structure;
AVCodecContext *ctx;
@ -223,8 +217,7 @@ gst_ffmpegcsp_pad_link (GstPad *pad,
gst_caps_set_simple (caps,
"width", G_TYPE_INT, width,
"height", G_TYPE_INT, height,
"framerate", G_TYPE_DOUBLE, framerate,
NULL);
"framerate", G_TYPE_DOUBLE, framerate, NULL);
ret = gst_pad_try_set_caps (otherpad, caps);
if (GST_PAD_LINK_FAILED (ret)) {
return ret;
@ -270,15 +263,14 @@ gst_ffmpegcsp_get_type (void)
};
ffmpegcsp_type = g_type_register_static (GST_TYPE_ELEMENT,
"GstFFMpegCsp",
&ffmpegcsp_info, 0);
"GstFFMpegCsp", &ffmpegcsp_info, 0);
}
return ffmpegcsp_type;
}
static void
gst_ffmpegcsp_base_init (GstFFMpegCspClass *klass)
gst_ffmpegcsp_base_init (GstFFMpegCspClass * klass)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
@ -288,13 +280,13 @@ gst_ffmpegcsp_base_init (GstFFMpegCspClass *klass)
}
static void
gst_ffmpegcsp_class_init (GstFFMpegCspClass *klass)
gst_ffmpegcsp_class_init (GstFFMpegCspClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
gobject_class = (GObjectClass*) klass;
gstelement_class = (GstElementClass*) klass;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
@ -305,13 +297,13 @@ gst_ffmpegcsp_class_init (GstFFMpegCspClass *klass)
}
static void
gst_ffmpegcsp_init (GstFFMpegCsp *space)
gst_ffmpegcsp_init (GstFFMpegCsp * space)
{
space->sinkpad = gst_pad_new_from_template (sinktempl, "sink");
gst_pad_set_link_function (space->sinkpad, gst_ffmpegcsp_pad_link);
gst_pad_set_getcaps_function (space->sinkpad, gst_ffmpegcsp_getcaps);
gst_pad_set_chain_function (space->sinkpad,gst_ffmpegcsp_chain);
gst_element_add_pad (GST_ELEMENT(space), space->sinkpad);
gst_pad_set_chain_function (space->sinkpad, gst_ffmpegcsp_chain);
gst_element_add_pad (GST_ELEMENT (space), space->sinkpad);
space->srcpad = gst_pad_new_from_template (srctempl, "src");
gst_element_add_pad (GST_ELEMENT (space), space->srcpad);
@ -323,8 +315,7 @@ gst_ffmpegcsp_init (GstFFMpegCsp *space)
}
static void
gst_ffmpegcsp_chain (GstPad *pad,
GstData *data)
gst_ffmpegcsp_chain (GstPad * pad, GstData * data)
{
GstBuffer *inbuf = GST_BUFFER (data);
GstFFMpegCsp *space;
@ -335,14 +326,13 @@ gst_ffmpegcsp_chain (GstPad *pad,
g_return_if_fail (inbuf != NULL);
space = GST_FFMPEGCSP (gst_pad_get_parent (pad));
g_return_if_fail (space != NULL);
g_return_if_fail (GST_IS_FFMPEGCSP (space));
if (space->from_pixfmt == PIX_FMT_NB ||
space->to_pixfmt == PIX_FMT_NB) {
if (space->from_pixfmt == PIX_FMT_NB || space->to_pixfmt == PIX_FMT_NB) {
GST_ELEMENT_ERROR (space, CORE, NOT_IMPLEMENTED, NULL,
("attempting to convert colorspaces between unknown formats"));
("attempting to convert colorspaces between unknown formats"));
gst_buffer_unref (inbuf);
return;
}
@ -352,19 +342,19 @@ gst_ffmpegcsp_chain (GstPad *pad,
} else {
/* use bufferpool here */
guint size = avpicture_get_size (space->to_pixfmt,
space->width,
space->height);
outbuf = gst_pad_alloc_buffer (space->srcpad,
GST_BUFFER_OFFSET_NONE, size);
space->width,
space->height);
outbuf = gst_pad_alloc_buffer (space->srcpad, GST_BUFFER_OFFSET_NONE, size);
/* convert */
avpicture_fill ((AVPicture *) space->from_frame, GST_BUFFER_DATA (inbuf),
space->from_pixfmt, space->width, space->height);
space->from_pixfmt, space->width, space->height);
avpicture_fill ((AVPicture *) space->to_frame, GST_BUFFER_DATA (outbuf),
space->to_pixfmt, space->width, space->height);
space->to_pixfmt, space->width, space->height);
img_convert ((AVPicture *) space->to_frame, space->to_pixfmt,
(AVPicture *) space->from_frame, space->from_pixfmt,
space->width, space->height);
(AVPicture *) space->from_frame, space->from_pixfmt,
space->width, space->height);
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (inbuf);
GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (inbuf);
@ -376,7 +366,7 @@ gst_ffmpegcsp_chain (GstPad *pad,
}
static GstElementStateReturn
gst_ffmpegcsp_change_state (GstElement *element)
gst_ffmpegcsp_change_state (GstElement * element)
{
GstFFMpegCsp *space;
@ -400,10 +390,8 @@ gst_ffmpegcsp_change_state (GstElement *element)
}
static void
gst_ffmpegcsp_set_property (GObject *object,
guint prop_id,
const GValue *value,
GParamSpec *pspec)
gst_ffmpegcsp_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
{
GstFFMpegCsp *space;
@ -418,10 +406,8 @@ gst_ffmpegcsp_set_property (GObject *object,
}
static void
gst_ffmpegcsp_get_property (GObject *object,
guint prop_id,
GValue *value,
GParamSpec *pspec)
gst_ffmpegcsp_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
{
GstFFMpegCsp *space;
@ -437,7 +423,7 @@ gst_ffmpegcsp_get_property (GObject *object,
}
gboolean
gst_ffmpegcsp_register (GstPlugin *plugin)
gst_ffmpegcsp_register (GstPlugin * plugin)
{
GstCaps *caps;
@ -445,15 +431,10 @@ gst_ffmpegcsp_register (GstPlugin *plugin)
caps = gst_ffmpeg_codectype_to_caps (CODEC_TYPE_VIDEO, NULL);
/* build templates */
srctempl = gst_pad_template_new ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
gst_caps_copy (caps));
sinktempl = gst_pad_template_new ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
caps);
srctempl = gst_pad_template_new ("src",
GST_PAD_SRC, GST_PAD_ALWAYS, gst_caps_copy (caps));
sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK, GST_PAD_ALWAYS, caps);
return gst_element_register (plugin, "ffcolorspace",
GST_RANK_NONE, GST_TYPE_FFMPEGCSP);
GST_RANK_NONE, GST_TYPE_FFMPEGCSP);
}

View file

@ -36,7 +36,8 @@
typedef struct _GstFFMpegDec GstFFMpegDec;
struct _GstFFMpegDec {
struct _GstFFMpegDec
{
GstElement element;
/* We need to keep track of our pads, so we do so here. */
@ -50,7 +51,8 @@ struct _GstFFMpegDec {
typedef struct _GstFFMpegDecClass GstFFMpegDecClass;
struct _GstFFMpegDecClass {
struct _GstFFMpegDecClass
{
GstElementClass parent_class;
AVCodec *in_plugin;
@ -59,7 +61,8 @@ struct _GstFFMpegDecClass {
typedef struct _GstFFMpegDecClassParams GstFFMpegDecClassParams;
struct _GstFFMpegDecClassParams {
struct _GstFFMpegDecClassParams
{
AVCodec *in_plugin;
GstCaps *srccaps, *sinkcaps;
};
@ -75,12 +78,14 @@ struct _GstFFMpegDecClassParams {
#define GST_IS_FFMPEGDEC_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_FFMPEGDEC))
enum {
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum {
enum
{
ARG_0,
/* FILL ME */
};
@ -88,25 +93,23 @@ enum {
static GHashTable *global_plugins;
/* A number of function prototypes are given so we can refer to them later. */
static void gst_ffmpegdec_base_init (GstFFMpegDecClass *klass);
static void gst_ffmpegdec_class_init (GstFFMpegDecClass *klass);
static void gst_ffmpegdec_init (GstFFMpegDec *ffmpegdec);
static void gst_ffmpegdec_dispose (GObject *object);
static void gst_ffmpegdec_base_init (GstFFMpegDecClass * klass);
static void gst_ffmpegdec_class_init (GstFFMpegDecClass * klass);
static void gst_ffmpegdec_init (GstFFMpegDec * ffmpegdec);
static void gst_ffmpegdec_dispose (GObject * object);
static GstPadLinkReturn gst_ffmpegdec_connect (GstPad *pad,
const GstCaps *caps);
static void gst_ffmpegdec_chain (GstPad *pad,
GstData *data);
static GstPadLinkReturn gst_ffmpegdec_connect (GstPad * pad,
const GstCaps * caps);
static void gst_ffmpegdec_chain (GstPad * pad, GstData * data);
static GstElementStateReturn
gst_ffmpegdec_change_state (GstElement *element);
static GstElementStateReturn gst_ffmpegdec_change_state (GstElement * element);
#if 0
/* some sort of bufferpool handling, but different */
static int gst_ffmpegdec_get_buffer (AVCodecContext *context,
AVFrame *picture);
static void gst_ffmpegdec_release_buffer (AVCodecContext *context,
AVFrame *picture);
static int gst_ffmpegdec_get_buffer (AVCodecContext * context,
AVFrame * picture);
static void gst_ffmpegdec_release_buffer (AVCodecContext * context,
AVFrame * picture);
#endif
static GstElementClass *parent_class = NULL;
@ -114,7 +117,7 @@ static GstElementClass *parent_class = NULL;
/*static guint gst_ffmpegdec_signals[LAST_SIGNAL] = { 0 }; */
static void
gst_ffmpegdec_base_init (GstFFMpegDecClass *klass)
gst_ffmpegdec_base_init (GstFFMpegDecClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
@ -123,22 +126,20 @@ gst_ffmpegdec_base_init (GstFFMpegDecClass *klass)
GstPadTemplate *sinktempl, *srctempl;
params = g_hash_table_lookup (global_plugins,
GINT_TO_POINTER (G_OBJECT_CLASS_TYPE (gobject_class)));
GINT_TO_POINTER (G_OBJECT_CLASS_TYPE (gobject_class)));
if (!params)
params = g_hash_table_lookup (global_plugins,
GINT_TO_POINTER (0));
params = g_hash_table_lookup (global_plugins, GINT_TO_POINTER (0));
g_assert (params);
/* construct the element details struct */
details.longname = g_strdup_printf("FFMPEG %s decoder",
params->in_plugin->name);
details.klass = g_strdup_printf("Codec/%s/Decoder",
(params->in_plugin->type == CODEC_TYPE_VIDEO) ?
"Video" : "Audio");
details.description = g_strdup_printf("FFMPEG %s decoder",
params->in_plugin->name);
details.longname = g_strdup_printf ("FFMPEG %s decoder",
params->in_plugin->name);
details.klass = g_strdup_printf ("Codec/%s/Decoder",
(params->in_plugin->type == CODEC_TYPE_VIDEO) ? "Video" : "Audio");
details.description = g_strdup_printf ("FFMPEG %s decoder",
params->in_plugin->name);
details.author = "Wim Taymans <wim.taymans@chello.be>, "
"Ronald Bultje <rbultje@ronald.bitfreak.net>";
"Ronald Bultje <rbultje@ronald.bitfreak.net>";
gst_element_class_set_details (element_class, &details);
g_free (details.longname);
g_free (details.klass);
@ -146,9 +147,9 @@ gst_ffmpegdec_base_init (GstFFMpegDecClass *klass)
/* pad templates */
sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK,
GST_PAD_ALWAYS, params->sinkcaps);
GST_PAD_ALWAYS, params->sinkcaps);
srctempl = gst_pad_template_new ("src", GST_PAD_SRC,
GST_PAD_ALWAYS, params->srccaps);
GST_PAD_ALWAYS, params->srccaps);
gst_element_class_add_pad_template (element_class, srctempl);
gst_element_class_add_pad_template (element_class, sinktempl);
@ -159,13 +160,13 @@ gst_ffmpegdec_base_init (GstFFMpegDecClass *klass)
}
static void
gst_ffmpegdec_class_init (GstFFMpegDecClass *klass)
gst_ffmpegdec_class_init (GstFFMpegDecClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
gobject_class = (GObjectClass*)klass;
gstelement_class = (GstElementClass*)klass;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
parent_class = g_type_class_peek_parent (klass);
@ -174,9 +175,10 @@ gst_ffmpegdec_class_init (GstFFMpegDecClass *klass)
}
static void
gst_ffmpegdec_init (GstFFMpegDec *ffmpegdec)
gst_ffmpegdec_init (GstFFMpegDec * ffmpegdec)
{
GstFFMpegDecClass *oclass = (GstFFMpegDecClass*)(G_OBJECT_GET_CLASS (ffmpegdec));
GstFFMpegDecClass *oclass =
(GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));
/* setup pads */
ffmpegdec->sinkpad = gst_pad_new_from_template (oclass->sinktempl, "sink");
@ -189,14 +191,14 @@ gst_ffmpegdec_init (GstFFMpegDec *ffmpegdec)
gst_element_add_pad (GST_ELEMENT (ffmpegdec), ffmpegdec->srcpad);
/* some ffmpeg data */
ffmpegdec->context = avcodec_alloc_context();
ffmpegdec->picture = avcodec_alloc_frame();
ffmpegdec->context = avcodec_alloc_context ();
ffmpegdec->picture = avcodec_alloc_frame ();
ffmpegdec->opened = FALSE;
}
static void
gst_ffmpegdec_dispose (GObject *object)
gst_ffmpegdec_dispose (GObject * object)
{
GstFFMpegDec *ffmpegdec = (GstFFMpegDec *) object;
@ -210,11 +212,11 @@ gst_ffmpegdec_dispose (GObject *object)
}
static GstPadLinkReturn
gst_ffmpegdec_connect (GstPad *pad,
const GstCaps *caps)
gst_ffmpegdec_connect (GstPad * pad, const GstCaps * caps)
{
GstFFMpegDec *ffmpegdec = (GstFFMpegDec *)(gst_pad_get_parent (pad));
GstFFMpegDecClass *oclass = (GstFFMpegDecClass*)(G_OBJECT_GET_CLASS (ffmpegdec));
GstFFMpegDec *ffmpegdec = (GstFFMpegDec *) (gst_pad_get_parent (pad));
GstFFMpegDecClass *oclass =
(GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));
/* close old session */
if (ffmpegdec->opened) {
@ -233,13 +235,13 @@ gst_ffmpegdec_connect (GstPad *pad,
/* get size and so */
gst_ffmpeg_caps_to_codectype (oclass->in_plugin->type,
caps, ffmpegdec->context);
caps, ffmpegdec->context);
/* we dont send complete frames - FIXME: we need a 'framed' property
* in caps */
if (oclass->in_plugin->capabilities & CODEC_CAP_TRUNCATED &&
(ffmpegdec->context->codec_id == CODEC_ID_MPEG1VIDEO ||
ffmpegdec->context->codec_id == CODEC_ID_MPEG2VIDEO))
ffmpegdec->context->codec_id == CODEC_ID_MPEG2VIDEO))
ffmpegdec->context->flags |= CODEC_FLAG_TRUNCATED;
/* do *not* draw edges */
@ -251,7 +253,7 @@ gst_ffmpegdec_connect (GstPad *pad,
if (avcodec_open (ffmpegdec->context, oclass->in_plugin) < 0) {
avcodec_close (ffmpegdec->context);
GST_DEBUG ("ffdec_%s: Failed to open FFMPEG codec",
oclass->in_plugin->name);
oclass->in_plugin->name);
return GST_PAD_LINK_REFUSED;
}
@ -263,8 +265,7 @@ gst_ffmpegdec_connect (GstPad *pad,
#if 0
static int
gst_ffmpegdec_get_buffer (AVCodecContext *context,
AVFrame *picture)
gst_ffmpegdec_get_buffer (AVCodecContext * context, AVFrame * picture)
{
GstBuffer *buf = NULL;
gulong bufsize = 0;
@ -272,12 +273,10 @@ gst_ffmpegdec_get_buffer (AVCodecContext *context,
switch (context->codec_type) {
case CODEC_TYPE_VIDEO:
bufsize = avpicture_get_size (context->pix_fmt,
context->width,
context->height);
context->width, context->height);
buf = gst_buffer_new_and_alloc (bufsize);
avpicture_fill ((AVPicture *) picture, GST_BUFFER_DATA (buf),
context->pix_fmt,
context->width, context->height);
context->pix_fmt, context->width, context->height);
break;
case CODEC_TYPE_AUDIO:
@ -301,15 +300,15 @@ gst_ffmpegdec_get_buffer (AVCodecContext *context,
}
static void
gst_ffmpegdec_release_buffer (AVCodecContext *context,
AVFrame *picture)
gst_ffmpegdec_release_buffer (AVCodecContext * context, AVFrame * picture)
{
gint i;
GstBuffer *buf = GST_BUFFER (picture->base[0]);
gst_buffer_unref (buf);
/* zero out the reference in ffmpeg */
for (i=0;i<4;i++) {
for (i = 0; i < 4; i++) {
picture->data[i] = NULL;
picture->linesize[i] = 0;
}
@ -317,21 +316,21 @@ gst_ffmpegdec_release_buffer (AVCodecContext *context,
#endif
static void
gst_ffmpegdec_chain (GstPad *pad,
GstData *_data)
gst_ffmpegdec_chain (GstPad * pad, GstData * _data)
{
GstBuffer *inbuf = GST_BUFFER (_data);
GstBuffer *outbuf = NULL;
GstFFMpegDec *ffmpegdec = (GstFFMpegDec *)(gst_pad_get_parent (pad));
GstFFMpegDecClass *oclass = (GstFFMpegDecClass*)(G_OBJECT_GET_CLASS (ffmpegdec));
GstFFMpegDec *ffmpegdec = (GstFFMpegDec *) (gst_pad_get_parent (pad));
GstFFMpegDecClass *oclass =
(GstFFMpegDecClass *) (G_OBJECT_GET_CLASS (ffmpegdec));
guchar *data;
gint size, len = 0;
gint have_data;
if (!ffmpegdec->opened) {
GST_ELEMENT_ERROR (ffmpegdec, CORE, NEGOTIATION, (NULL),
("ffdec_%s: input format was not set before data start",
oclass->in_plugin->name));
("ffdec_%s: input format was not set before data start",
oclass->in_plugin->name));
return;
}
@ -348,38 +347,35 @@ gst_ffmpegdec_chain (GstPad *pad,
switch (oclass->in_plugin->type) {
case CODEC_TYPE_VIDEO:
/* workarounds, functions write to buffers:
* libavcodec/svq1.c:svq1_decode_frame writes to the given buffer.
/* workarounds, functions write to buffers:
* libavcodec/svq1.c:svq1_decode_frame writes to the given buffer.
* libavcodec/svq3.c:svq3_decode_slice_header too.
* ffmpeg devs know about it and will fix it (they said). */
if (oclass->in_plugin->id == CODEC_ID_SVQ1 ||
if (oclass->in_plugin->id == CODEC_ID_SVQ1 ||
oclass->in_plugin->id == CODEC_ID_SVQ3) {
inbuf = gst_buffer_copy_on_write(inbuf);
data = GST_BUFFER_DATA (inbuf);
size = GST_BUFFER_SIZE (inbuf);
}
inbuf = gst_buffer_copy_on_write (inbuf);
data = GST_BUFFER_DATA (inbuf);
size = GST_BUFFER_SIZE (inbuf);
}
len = avcodec_decode_video (ffmpegdec->context,
ffmpegdec->picture,
&have_data,
data, size);
ffmpegdec->picture, &have_data, data, size);
if (have_data) {
/* libavcodec constantly crashes on stupid buffer allocation
* errors inside. This drives me crazy, so we let it allocate
* it's own buffers and copy to our own buffer afterwards... */
AVPicture pic;
gint size = avpicture_get_size (ffmpegdec->context->pix_fmt,
ffmpegdec->context->width,
ffmpegdec->context->height);
ffmpegdec->context->width,
ffmpegdec->context->height);
outbuf = gst_buffer_new_and_alloc (size);
avpicture_fill (&pic, GST_BUFFER_DATA (outbuf),
ffmpegdec->context->pix_fmt,
ffmpegdec->context->width,
ffmpegdec->context->height);
ffmpegdec->context->pix_fmt,
ffmpegdec->context->width, ffmpegdec->context->height);
img_convert (&pic, ffmpegdec->context->pix_fmt,
(AVPicture *) ffmpegdec->picture,
ffmpegdec->context->pix_fmt,
ffmpegdec->context->width,
ffmpegdec->context->height);
(AVPicture *) ffmpegdec->picture,
ffmpegdec->context->pix_fmt,
ffmpegdec->context->width, ffmpegdec->context->height);
/* this isn't necessarily true, but it's better than nothing */
GST_BUFFER_DURATION (outbuf) = GST_BUFFER_DURATION (inbuf);
@ -389,40 +385,38 @@ gst_ffmpegdec_chain (GstPad *pad,
case CODEC_TYPE_AUDIO:
outbuf = gst_buffer_new_and_alloc (AVCODEC_MAX_AUDIO_FRAME_SIZE);
len = avcodec_decode_audio (ffmpegdec->context,
(int16_t *) GST_BUFFER_DATA (outbuf),
&have_data,
data, size);
(int16_t *) GST_BUFFER_DATA (outbuf), &have_data, data, size);
if (have_data) {
GST_BUFFER_SIZE (outbuf) = have_data;
GST_BUFFER_DURATION (outbuf) = (have_data * GST_SECOND) /
(ffmpegdec->context->channels *
ffmpegdec->context->sample_rate);
(ffmpegdec->context->channels * ffmpegdec->context->sample_rate);
} else {
gst_buffer_unref (outbuf);
}
}
break;
default:
g_assert(0);
g_assert (0);
break;
}
if (len < 0) {
GST_ERROR_OBJECT (ffmpegdec, "ffdec_%s: decoding error",
oclass->in_plugin->name);
oclass->in_plugin->name);
break;
}
if (have_data) {
if (!GST_PAD_CAPS (ffmpegdec->srcpad)) {
GstCaps *caps;
caps = gst_ffmpeg_codectype_to_caps (oclass->in_plugin->type,
ffmpegdec->context);
ffmpegdec->context);
if (caps == NULL ||
!gst_pad_set_explicit_caps (ffmpegdec->srcpad, caps)) {
GST_ELEMENT_ERROR (ffmpegdec, CORE, NEGOTIATION, (NULL),
("Failed to link ffmpeg decoder (%s) to next element",
oclass->in_plugin->name));
("Failed to link ffmpeg decoder (%s) to next element",
oclass->in_plugin->name));
return;
}
}
@ -430,7 +424,7 @@ gst_ffmpegdec_chain (GstPad *pad,
GST_BUFFER_TIMESTAMP (outbuf) = GST_BUFFER_TIMESTAMP (inbuf);
gst_pad_push (ffmpegdec->srcpad, GST_DATA (outbuf));
}
}
size -= len;
data += len;
@ -440,7 +434,7 @@ gst_ffmpegdec_chain (GstPad *pad,
}
static GstElementStateReturn
gst_ffmpegdec_change_state (GstElement *element)
gst_ffmpegdec_change_state (GstElement * element)
{
GstFFMpegDec *ffmpegdec = (GstFFMpegDec *) element;
gint transition = GST_STATE_TRANSITION (element);
@ -461,22 +455,22 @@ gst_ffmpegdec_change_state (GstElement *element)
}
gboolean
gst_ffmpegdec_register (GstPlugin *plugin)
gst_ffmpegdec_register (GstPlugin * plugin)
{
GTypeInfo typeinfo = {
sizeof(GstFFMpegDecClass),
(GBaseInitFunc)gst_ffmpegdec_base_init,
sizeof (GstFFMpegDecClass),
(GBaseInitFunc) gst_ffmpegdec_base_init,
NULL,
(GClassInitFunc)gst_ffmpegdec_class_init,
(GClassInitFunc) gst_ffmpegdec_class_init,
NULL,
NULL,
sizeof(GstFFMpegDec),
sizeof (GstFFMpegDec),
0,
(GInstanceInitFunc)gst_ffmpegdec_init,
(GInstanceInitFunc) gst_ffmpegdec_init,
};
GType type;
AVCodec *in_plugin;
in_plugin = first_avcodec;
global_plugins = g_hash_table_new (NULL, NULL);
@ -488,8 +482,8 @@ gst_ffmpegdec_register (GstPlugin *plugin)
/* no quasi-codecs, please */
if (in_plugin->id == CODEC_ID_RAWVIDEO ||
(in_plugin->id >= CODEC_ID_PCM_S16LE &&
in_plugin->id <= CODEC_ID_PCM_ALAW)) {
(in_plugin->id >= CODEC_ID_PCM_S16LE &&
in_plugin->id <= CODEC_ID_PCM_ALAW)) {
goto next;
}
@ -500,16 +494,16 @@ gst_ffmpegdec_register (GstPlugin *plugin)
/* first make sure we've got a supported type */
sinkcaps = gst_ffmpeg_codecid_to_caps (in_plugin->id, NULL, FALSE);
srccaps = gst_ffmpeg_codectype_to_caps (in_plugin->type, NULL);
srccaps = gst_ffmpeg_codectype_to_caps (in_plugin->type, NULL);
if (!sinkcaps || !srccaps)
goto next;
/* construct the type */
type_name = g_strdup_printf("ffdec_%s", in_plugin->name);
type_name = g_strdup_printf ("ffdec_%s", in_plugin->name);
/* if it's already registered, drop it */
if (g_type_from_name(type_name)) {
g_free(type_name);
if (g_type_from_name (type_name)) {
g_free (type_name);
goto next;
}
@ -517,28 +511,26 @@ gst_ffmpegdec_register (GstPlugin *plugin)
params->in_plugin = in_plugin;
params->srccaps = srccaps;
params->sinkcaps = sinkcaps;
g_hash_table_insert (global_plugins,
GINT_TO_POINTER (0),
(gpointer) params);
g_hash_table_insert (global_plugins,
GINT_TO_POINTER (0), (gpointer) params);
/* create the gtype now
* (Ronald) MPEG-4 gets a higher priority because it has been well-
* tested and by far outperforms divxdec/xviddec - so we prefer it. */
type = g_type_register_static(GST_TYPE_ELEMENT, type_name , &typeinfo, 0);
type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &typeinfo, 0);
if (!gst_element_register (plugin, type_name,
(in_plugin->id == CODEC_ID_MPEG4) ?
GST_RANK_PRIMARY : GST_RANK_MARGINAL, type)) {
(in_plugin->id == CODEC_ID_MPEG4) ?
GST_RANK_PRIMARY : GST_RANK_MARGINAL, type)) {
g_free (type_name);
return FALSE;
}
g_free (type_name);
g_hash_table_insert (global_plugins,
GINT_TO_POINTER (type),
(gpointer) params);
g_hash_table_insert (global_plugins,
GINT_TO_POINTER (type), (gpointer) params);
next:
next:
in_plugin = in_plugin->next;
}
g_hash_table_remove (global_plugins, GINT_TO_POINTER (0));

View file

@ -36,35 +36,38 @@
typedef struct _GstFFMpegDemux GstFFMpegDemux;
struct _GstFFMpegDemux {
GstElement element;
struct _GstFFMpegDemux
{
GstElement element;
/* We need to keep track of our pads, so we do so here. */
GstPad *sinkpad;
GstPad *sinkpad;
AVFormatContext *context;
gboolean opened;
AVFormatContext *context;
gboolean opened;
GstPad *srcpads[MAX_STREAMS];
gboolean handled[MAX_STREAMS];
guint64 last_ts[MAX_STREAMS];
gint videopads, audiopads;
GstPad *srcpads[MAX_STREAMS];
gboolean handled[MAX_STREAMS];
guint64 last_ts[MAX_STREAMS];
gint videopads, audiopads;
};
typedef struct _GstFFMpegDemuxClassParams {
AVInputFormat *in_plugin;
GstCaps *sinkcaps, *videosrccaps, *audiosrccaps;
typedef struct _GstFFMpegDemuxClassParams
{
AVInputFormat *in_plugin;
GstCaps *sinkcaps, *videosrccaps, *audiosrccaps;
} GstFFMpegDemuxClassParams;
typedef struct _GstFFMpegDemuxClass GstFFMpegDemuxClass;
struct _GstFFMpegDemuxClass {
GstElementClass parent_class;
struct _GstFFMpegDemuxClass
{
GstElementClass parent_class;
AVInputFormat *in_plugin;
GstPadTemplate *sinktempl;
GstPadTemplate *videosrctempl;
GstPadTemplate *audiosrctempl;
AVInputFormat *in_plugin;
GstPadTemplate *sinktempl;
GstPadTemplate *videosrctempl;
GstPadTemplate *audiosrctempl;
};
#define GST_TYPE_FFMPEGDEC \
@ -78,12 +81,14 @@ struct _GstFFMpegDemuxClass {
#define GST_IS_FFMPEGDEC_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_FFMPEGDEC))
enum {
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum {
enum
{
ARG_0,
/* FILL ME */
};
@ -91,14 +96,14 @@ enum {
static GHashTable *global_plugins;
/* A number of function prototypes are given so we can refer to them later. */
static void gst_ffmpegdemux_class_init (GstFFMpegDemuxClass *klass);
static void gst_ffmpegdemux_base_init (GstFFMpegDemuxClass *klass);
static void gst_ffmpegdemux_init (GstFFMpegDemux *demux);
static void gst_ffmpegdemux_class_init (GstFFMpegDemuxClass * klass);
static void gst_ffmpegdemux_base_init (GstFFMpegDemuxClass * klass);
static void gst_ffmpegdemux_init (GstFFMpegDemux * demux);
static void gst_ffmpegdemux_loop (GstElement *element);
static void gst_ffmpegdemux_loop (GstElement * element);
static GstElementStateReturn
gst_ffmpegdemux_change_state (GstElement *element);
gst_ffmpegdemux_change_state (GstElement * element);
static GstElementClass *parent_class = NULL;
@ -138,7 +143,7 @@ gst_ffmpegdemux_averror (gint av_errno)
}
static void
gst_ffmpegdemux_base_init (GstFFMpegDemuxClass *klass)
gst_ffmpegdemux_base_init (GstFFMpegDemuxClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
@ -147,37 +152,30 @@ gst_ffmpegdemux_base_init (GstFFMpegDemuxClass *klass)
GstPadTemplate *sinktempl, *audiosrctempl, *videosrctempl;
params = g_hash_table_lookup (global_plugins,
GINT_TO_POINTER (G_OBJECT_CLASS_TYPE (gobject_class)));
GINT_TO_POINTER (G_OBJECT_CLASS_TYPE (gobject_class)));
if (!params)
params = g_hash_table_lookup (global_plugins,
GINT_TO_POINTER (0));
params = g_hash_table_lookup (global_plugins, GINT_TO_POINTER (0));
g_assert (params);
/* construct the element details struct */
details.longname = g_strdup_printf("FFMPEG %s demuxer",
params->in_plugin->long_name);
details.longname = g_strdup_printf ("FFMPEG %s demuxer",
params->in_plugin->long_name);
details.klass = "Codec/Demuxer";
details.description = g_strdup_printf("FFMPEG %s decoder",
params->in_plugin->long_name);
details.description = g_strdup_printf ("FFMPEG %s decoder",
params->in_plugin->long_name);
details.author = "Wim Taymans <wim.taymans@chello.be>, "
"Ronald Bultje <rbultje@ronald.bitfreak.net>";
"Ronald Bultje <rbultje@ronald.bitfreak.net>";
gst_element_class_set_details (element_class, &details);
g_free (details.longname);
g_free (details.description);
/* pad templates */
sinktempl = gst_pad_template_new ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
params->sinkcaps);
GST_PAD_SINK, GST_PAD_ALWAYS, params->sinkcaps);
videosrctempl = gst_pad_template_new ("video_%02d",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
params->videosrccaps);
GST_PAD_SRC, GST_PAD_SOMETIMES, params->videosrccaps);
audiosrctempl = gst_pad_template_new ("audio_%02d",
GST_PAD_SRC,
GST_PAD_SOMETIMES,
params->audiosrccaps);
GST_PAD_SRC, GST_PAD_SOMETIMES, params->audiosrccaps);
gst_element_class_add_pad_template (element_class, videosrctempl);
gst_element_class_add_pad_template (element_class, audiosrctempl);
@ -190,30 +188,29 @@ gst_ffmpegdemux_base_init (GstFFMpegDemuxClass *klass)
}
static void
gst_ffmpegdemux_class_init (GstFFMpegDemuxClass *klass)
gst_ffmpegdemux_class_init (GstFFMpegDemuxClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
gobject_class = (GObjectClass*)klass;
gstelement_class = (GstElementClass*)klass;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
parent_class = g_type_class_ref(GST_TYPE_ELEMENT);
parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
gstelement_class->change_state = gst_ffmpegdemux_change_state;
}
static void
gst_ffmpegdemux_init (GstFFMpegDemux *demux)
gst_ffmpegdemux_init (GstFFMpegDemux * demux)
{
GstFFMpegDemuxClass *oclass = (GstFFMpegDemuxClass *) (G_OBJECT_GET_CLASS (demux));
GstFFMpegDemuxClass *oclass =
(GstFFMpegDemuxClass *) (G_OBJECT_GET_CLASS (demux));
gint n;
demux->sinkpad = gst_pad_new_from_template (oclass->sinktempl,
"sink");
demux->sinkpad = gst_pad_new_from_template (oclass->sinktempl, "sink");
gst_element_add_pad (GST_ELEMENT (demux), demux->sinkpad);
gst_element_set_loop_function (GST_ELEMENT (demux),
gst_ffmpegdemux_loop);
gst_element_set_loop_function (GST_ELEMENT (demux), gst_ffmpegdemux_loop);
demux->opened = FALSE;
demux->context = NULL;
@ -228,7 +225,7 @@ gst_ffmpegdemux_init (GstFFMpegDemux *demux)
}
static void
gst_ffmpegdemux_close (GstFFMpegDemux *demux)
gst_ffmpegdemux_close (GstFFMpegDemux * demux)
{
gint n;
@ -255,7 +252,7 @@ gst_ffmpegdemux_close (GstFFMpegDemux *demux)
}
static AVStream *
gst_ffmpegdemux_stream_from_pad (GstPad *pad)
gst_ffmpegdemux_stream_from_pad (GstPad * pad)
{
GstFFMpegDemux *demux = (GstFFMpegDemux *) gst_pad_get_parent (pad);
AVStream *stream = NULL;
@ -272,19 +269,18 @@ gst_ffmpegdemux_stream_from_pad (GstPad *pad)
}
static const GstEventMask *
gst_ffmpegdemux_src_event_mask (GstPad *pad)
gst_ffmpegdemux_src_event_mask (GstPad * pad)
{
static const GstEventMask masks[] = {
{ GST_EVENT_SEEK, GST_SEEK_METHOD_SET | GST_SEEK_FLAG_KEY_UNIT },
{ 0, }
{GST_EVENT_SEEK, GST_SEEK_METHOD_SET | GST_SEEK_FLAG_KEY_UNIT},
{0,}
};
return masks;
}
static gboolean
gst_ffmpegdemux_src_event (GstPad *pad,
GstEvent *event)
gst_ffmpegdemux_src_event (GstPad * pad, GstEvent * event)
{
GstFFMpegDemux *demux = (GstFFMpegDemux *) gst_pad_get_parent (pad);
AVStream *stream = gst_ffmpegdemux_stream_from_pad (pad);
@ -304,14 +300,15 @@ gst_ffmpegdemux_src_event (GstPad *pad,
break;
} else {
GstFormat fmt = GST_FORMAT_TIME;
if (!(res = gst_pad_convert (pad, GST_FORMAT_DEFAULT, offset,
&fmt, &offset)))
&fmt, &offset)))
break;
}
/* fall-through */
case GST_FORMAT_TIME:
if (av_seek_frame (demux->context, stream->index,
offset / (GST_SECOND / AV_TIME_BASE)))
offset / (GST_SECOND / AV_TIME_BASE)))
res = FALSE;
break;
default:
@ -328,7 +325,7 @@ gst_ffmpegdemux_src_event (GstPad *pad,
}
static const GstFormat *
gst_ffmpegdemux_src_format_list (GstPad *pad)
gst_ffmpegdemux_src_format_list (GstPad * pad)
{
AVStream *stream = gst_ffmpegdemux_stream_from_pad (pad);
static const GstFormat src_v_formats[] = {
@ -336,31 +333,27 @@ gst_ffmpegdemux_src_format_list (GstPad *pad)
GST_FORMAT_DEFAULT,
0
}, src_a_formats[] = {
GST_FORMAT_TIME,
0
};
GST_FORMAT_TIME, 0};
return (stream->codec.codec_type == CODEC_TYPE_VIDEO) ?
src_v_formats : src_a_formats;
src_v_formats : src_a_formats;
}
static const GstQueryType *
gst_ffmpegdemux_src_query_list (GstPad *pad)
gst_ffmpegdemux_src_query_list (GstPad * pad)
{
static const GstQueryType src_types[] = {
GST_QUERY_TOTAL,
GST_QUERY_POSITION,
0
};
return src_types;
}
static gboolean
gst_ffmpegdemux_src_query (GstPad *pad,
GstQueryType type,
GstFormat *fmt,
gint64 *value)
gst_ffmpegdemux_src_query (GstPad * pad,
GstQueryType type, GstFormat * fmt, gint64 * value)
{
GstFFMpegDemux *demux = (GstFFMpegDemux *) gst_pad_get_parent (pad);
AVStream *stream = gst_ffmpegdemux_stream_from_pad (pad);
@ -368,7 +361,7 @@ gst_ffmpegdemux_src_query (GstPad *pad,
gint n;
if (!stream || (*fmt == GST_FORMAT_DEFAULT &&
stream->codec.codec_type != CODEC_TYPE_VIDEO))
stream->codec.codec_type != CODEC_TYPE_VIDEO))
return FALSE;
switch (type) {
@ -381,7 +374,7 @@ gst_ffmpegdemux_src_query (GstPad *pad,
if (stream->codec_info_nb_frames) {
*value = stream->codec_info_nb_frames;
break;
} /* else fall-through */
} /* else fall-through */
default:
res = FALSE;
break;
@ -394,8 +387,7 @@ gst_ffmpegdemux_src_query (GstPad *pad,
break;
case GST_FORMAT_DEFAULT:
res = gst_pad_convert (pad, GST_FORMAT_TIME,
demux->last_ts[stream->index],
fmt, value);
demux->last_ts[stream->index], fmt, value);
break;
default:
res = FALSE;
@ -411,11 +403,9 @@ gst_ffmpegdemux_src_query (GstPad *pad,
}
static gboolean
gst_ffmpegdemux_src_convert (GstPad *pad,
GstFormat src_fmt,
gint64 src_value,
GstFormat *dest_fmt,
gint64 *dest_value)
gst_ffmpegdemux_src_convert (GstPad * pad,
GstFormat src_fmt,
gint64 src_value, GstFormat * dest_fmt, gint64 * dest_value)
{
GstFFMpegDemux *demux = (GstFFMpegDemux *) gst_pad_get_parent (pad);
AVStream *stream = gst_ffmpegdemux_stream_from_pad (pad);
@ -429,7 +419,7 @@ gst_ffmpegdemux_src_convert (GstPad *pad,
switch (*dest_fmt) {
case GST_FORMAT_DEFAULT:
*dest_value = src_value * stream->r_frame_rate /
(GST_SECOND * stream->r_frame_rate_base);
(GST_SECOND * stream->r_frame_rate_base);
break;
default:
res = FALSE;
@ -440,7 +430,7 @@ gst_ffmpegdemux_src_convert (GstPad *pad,
switch (*dest_fmt) {
case GST_FORMAT_TIME:
*dest_value = src_value * GST_SECOND * stream->r_frame_rate_base /
stream->r_frame_rate;
stream->r_frame_rate;
break;
default:
res = FALSE;
@ -456,16 +446,16 @@ gst_ffmpegdemux_src_convert (GstPad *pad,
}
static gboolean
gst_ffmpegdemux_add (GstFFMpegDemux *demux,
AVStream *stream)
gst_ffmpegdemux_add (GstFFMpegDemux * demux, AVStream * stream)
{
GstFFMpegDemuxClass *oclass = (GstFFMpegDemuxClass *) G_OBJECT_GET_CLASS (demux);
GstFFMpegDemuxClass *oclass =
(GstFFMpegDemuxClass *) G_OBJECT_GET_CLASS (demux);
GstPadTemplate *templ = NULL;
GstPad *pad;
GstCaps *caps;
gint num;
gchar *padname;
switch (stream->codec.codec_type) {
case CODEC_TYPE_VIDEO:
templ = oclass->videosrctempl;
@ -499,7 +489,8 @@ gst_ffmpegdemux_add (GstFFMpegDemux *demux,
demux->srcpads[stream->index] = pad;
/* get caps that belongs to this stream */
caps = gst_ffmpeg_codecid_to_caps (stream->codec.codec_id, &stream->codec, TRUE);
caps =
gst_ffmpeg_codecid_to_caps (stream->codec.codec_id, &stream->codec, TRUE);
gst_pad_set_explicit_caps (pad, caps);
gst_element_add_pad (GST_ELEMENT (demux), pad);
@ -508,9 +499,10 @@ gst_ffmpegdemux_add (GstFFMpegDemux *demux,
}
static gboolean
gst_ffmpegdemux_open (GstFFMpegDemux *demux)
gst_ffmpegdemux_open (GstFFMpegDemux * demux)
{
GstFFMpegDemuxClass *oclass = (GstFFMpegDemuxClass *) G_OBJECT_GET_CLASS (demux);
GstFFMpegDemuxClass *oclass =
(GstFFMpegDemuxClass *) G_OBJECT_GET_CLASS (demux);
gchar *location;
gint res;
@ -520,11 +512,11 @@ gst_ffmpegdemux_open (GstFFMpegDemux *demux)
/* open via our input protocol hack */
location = g_strdup_printf ("gstreamer://%p", demux->sinkpad);
res = av_open_input_file (&demux->context, location,
oclass->in_plugin, 0, NULL);
oclass->in_plugin, 0, NULL);
g_free (location);
if (res < 0) {
GST_ELEMENT_ERROR (demux, LIBRARY, FAILED, (NULL),
(gst_ffmpegdemux_averror (res)));
(gst_ffmpegdemux_averror (res)));
return FALSE;
}
@ -542,13 +534,13 @@ gst_ffmpegdemux_open (GstFFMpegDemux *demux)
#define GST_FFMPEG_TYPE_FIND_SIZE 4096
static void
gst_ffmpegdemux_type_find (GstTypeFind *tf, gpointer priv)
gst_ffmpegdemux_type_find (GstTypeFind * tf, gpointer priv)
{
guint8 *data;
GstFFMpegDemuxClassParams *params = (GstFFMpegDemuxClassParams *) priv;
AVInputFormat *in_plugin = params->in_plugin;
gint res = 0;
if (in_plugin->read_probe &&
(data = gst_type_find_peek (tf, 0, GST_FFMPEG_TYPE_FIND_SIZE)) != NULL) {
AVProbeData probe_data;
@ -559,15 +551,15 @@ gst_ffmpegdemux_type_find (GstTypeFind *tf, gpointer priv)
res = in_plugin->read_probe (&probe_data);
res = res * GST_TYPE_FIND_MAXIMUM / AVPROBE_SCORE_MAX;
if (res > 0)
if (res > 0)
gst_type_find_suggest (tf, res, params->sinkcaps);
}
}
static void
gst_ffmpegdemux_loop (GstElement *element)
gst_ffmpegdemux_loop (GstElement * element)
{
GstFFMpegDemux *demux = (GstFFMpegDemux *)(element);
GstFFMpegDemux *demux = (GstFFMpegDemux *) (element);
gint res;
AVPacket pkt;
GstPad *pad;
@ -586,7 +578,7 @@ gst_ffmpegdemux_loop (GstElement *element)
gst_ffmpegdemux_close (demux);
} else {
GST_ELEMENT_ERROR (demux, LIBRARY, FAILED, (NULL),
(gst_ffmpegdemux_averror (res)));
(gst_ffmpegdemux_averror (res)));
}
return;
}
@ -610,7 +602,7 @@ gst_ffmpegdemux_loop (GstElement *element)
if (pkt.pts != AV_NOPTS_VALUE && demux->context->pts_den)
GST_BUFFER_TIMESTAMP (outbuf) = (double) pkt.pts * GST_SECOND *
demux->context->pts_num / demux->context->pts_den;
demux->context->pts_num / demux->context->pts_den;
if (pkt.flags & PKT_FLAG_KEY) {
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_KEY_UNIT);
@ -623,9 +615,9 @@ gst_ffmpegdemux_loop (GstElement *element)
}
static GstElementStateReturn
gst_ffmpegdemux_change_state (GstElement *element)
gst_ffmpegdemux_change_state (GstElement * element)
{
GstFFMpegDemux *demux = (GstFFMpegDemux *)(element);
GstFFMpegDemux *demux = (GstFFMpegDemux *) (element);
gint transition = GST_STATE_TRANSITION (element);
switch (transition) {
@ -641,7 +633,7 @@ gst_ffmpegdemux_change_state (GstElement *element)
}
gboolean
gst_ffmpegdemux_register (GstPlugin *plugin)
gst_ffmpegdemux_register (GstPlugin * plugin)
{
GType type;
AVInputFormat *in_plugin;
@ -649,17 +641,17 @@ gst_ffmpegdemux_register (GstPlugin *plugin)
AVCodec *in_codec;
gchar **extensions;
GTypeInfo typeinfo = {
sizeof(GstFFMpegDemuxClass),
(GBaseInitFunc)gst_ffmpegdemux_base_init,
sizeof (GstFFMpegDemuxClass),
(GBaseInitFunc) gst_ffmpegdemux_base_init,
NULL,
(GClassInitFunc)gst_ffmpegdemux_class_init,
(GClassInitFunc) gst_ffmpegdemux_class_init,
NULL,
NULL,
sizeof(GstFFMpegDemux),
sizeof (GstFFMpegDemux),
0,
(GInstanceInitFunc)gst_ffmpegdemux_init,
(GInstanceInitFunc) gst_ffmpegdemux_init,
};
in_plugin = first_iformat;
global_plugins = g_hash_table_new (NULL, NULL);
@ -680,7 +672,8 @@ gst_ffmpegdemux_register (GstPlugin *plugin)
p = name = g_strdup (in_plugin->name);
while (*p) {
if (*p == '.' || *p == ',') *p = '_';
if (*p == '.' || *p == ',')
*p = '_';
p++;
}
@ -694,9 +687,9 @@ gst_ffmpegdemux_register (GstPlugin *plugin)
* when we open the stream */
audiosrccaps = gst_caps_new_empty ();
videosrccaps = gst_caps_new_empty ();
for (in_codec = first_avcodec; in_codec != NULL;
in_codec = in_codec->next) {
for (in_codec = first_avcodec; in_codec != NULL; in_codec = in_codec->next) {
GstCaps *temp = gst_ffmpeg_codecid_to_caps (in_codec->id, NULL, TRUE);
if (!temp) {
continue;
}
@ -714,7 +707,7 @@ gst_ffmpegdemux_register (GstPlugin *plugin)
}
/* construct the type */
type_name = g_strdup_printf("ffdemux_%s", name);
type_name = g_strdup_printf ("ffdemux_%s", name);
/* if it's already registered, drop it */
if (g_type_from_name (type_name)) {
@ -722,8 +715,8 @@ gst_ffmpegdemux_register (GstPlugin *plugin)
goto next;
}
typefind_name = g_strdup_printf("fftype_%s", name);
typefind_name = g_strdup_printf ("fftype_%s", name);
/* create a cache for these properties */
params = g_new0 (GstFFMpegDemuxClassParams, 1);
params->in_plugin = in_plugin;
@ -731,16 +724,14 @@ gst_ffmpegdemux_register (GstPlugin *plugin)
params->videosrccaps = videosrccaps;
params->audiosrccaps = audiosrccaps;
g_hash_table_insert (global_plugins,
GINT_TO_POINTER (0),
(gpointer) params);
g_hash_table_insert (global_plugins,
GINT_TO_POINTER (0), (gpointer) params);
/* create the type now */
type = g_type_register_static (GST_TYPE_ELEMENT, type_name , &typeinfo, 0);
type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &typeinfo, 0);
g_hash_table_insert (global_plugins,
GINT_TO_POINTER (type),
(gpointer) params);
g_hash_table_insert (global_plugins,
GINT_TO_POINTER (type), (gpointer) params);
if (in_plugin->extensions)
extensions = g_strsplit (in_plugin->extensions, " ", 0);
@ -749,8 +740,7 @@ gst_ffmpegdemux_register (GstPlugin *plugin)
if (!gst_element_register (plugin, type_name, GST_RANK_MARGINAL, type) ||
!gst_type_find_register (plugin, typefind_name, GST_RANK_MARGINAL,
gst_ffmpegdemux_type_find,
extensions, sinkcaps, params)) {
gst_ffmpegdemux_type_find, extensions, sinkcaps, params)) {
g_warning ("Register of type ffdemux_%s failed", name);
return FALSE;
}
@ -758,7 +748,7 @@ gst_ffmpegdemux_register (GstPlugin *plugin)
if (extensions)
g_strfreev (extensions);
next:
next:
g_free (name);
in_plugin = in_plugin->next;
}

View file

@ -36,7 +36,8 @@
typedef struct _GstFFMpegEnc GstFFMpegEnc;
struct _GstFFMpegEnc {
struct _GstFFMpegEnc
{
GstElement element;
/* We need to keep track of our pads, so we do so here. */
@ -57,14 +58,16 @@ struct _GstFFMpegEnc {
typedef struct _GstFFMpegEncClass GstFFMpegEncClass;
struct _GstFFMpegEncClass {
struct _GstFFMpegEncClass
{
GstElementClass parent_class;
AVCodec *in_plugin;
GstPadTemplate *srctempl, *sinktempl;
};
typedef struct {
typedef struct
{
AVCodec *in_plugin;
GstCaps *srccaps, *sinkcaps;
} GstFFMpegEncClassParams;
@ -82,18 +85,20 @@ typedef struct {
#define VIDEO_BUFFER_SIZE (1024*1024)
enum {
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum {
enum
{
ARG_0,
ARG_BIT_RATE,
ARG_GOP_SIZE,
ARG_ME_METHOD,
ARG_BUFSIZE
/* FILL ME */
/* FILL ME */
};
#define GST_TYPE_ME_METHOD (gst_ffmpegenc_me_method_get_type())
@ -102,16 +107,17 @@ gst_ffmpegenc_me_method_get_type (void)
{
static GType ffmpegenc_me_method_type = 0;
static GEnumValue ffmpegenc_me_methods[] = {
{ ME_ZERO, "0", "zero" },
{ ME_FULL, "1", "full" },
{ ME_LOG, "2", "logarithmic" },
{ ME_PHODS, "3", "phods" },
{ ME_EPZS, "4", "epzs" },
{ ME_X1 , "5", "x1" },
{ 0, NULL, NULL },
{ME_ZERO, "0", "zero"},
{ME_FULL, "1", "full"},
{ME_LOG, "2", "logarithmic"},
{ME_PHODS, "3", "phods"},
{ME_EPZS, "4", "epzs"},
{ME_X1, "5", "x1"},
{0, NULL, NULL},
};
if (!ffmpegenc_me_method_type) {
ffmpegenc_me_method_type = g_enum_register_static ("GstFFMpegEncMeMethod", ffmpegenc_me_methods);
ffmpegenc_me_method_type =
g_enum_register_static ("GstFFMpegEncMeMethod", ffmpegenc_me_methods);
}
return ffmpegenc_me_method_type;
}
@ -119,34 +125,29 @@ gst_ffmpegenc_me_method_get_type (void)
static GHashTable *enc_global_plugins;
/* A number of functon prototypes are given so we can refer to them later. */
static void gst_ffmpegenc_class_init (GstFFMpegEncClass *klass);
static void gst_ffmpegenc_base_init (GstFFMpegEncClass *klass);
static void gst_ffmpegenc_init (GstFFMpegEnc *ffmpegenc);
static void gst_ffmpegenc_dispose (GObject *object);
static void gst_ffmpegenc_class_init (GstFFMpegEncClass * klass);
static void gst_ffmpegenc_base_init (GstFFMpegEncClass * klass);
static void gst_ffmpegenc_init (GstFFMpegEnc * ffmpegenc);
static void gst_ffmpegenc_dispose (GObject * object);
static GstPadLinkReturn
gst_ffmpegenc_connect (GstPad *pad, const GstCaps *caps);
static void gst_ffmpegenc_chain_video (GstPad *pad, GstData *_data);
static void gst_ffmpegenc_chain_audio (GstPad *pad, GstData *_data);
gst_ffmpegenc_connect (GstPad * pad, const GstCaps * caps);
static void gst_ffmpegenc_chain_video (GstPad * pad, GstData * _data);
static void gst_ffmpegenc_chain_audio (GstPad * pad, GstData * _data);
static void gst_ffmpegenc_set_property (GObject *object,
guint prop_id,
const GValue *value,
GParamSpec *pspec);
static void gst_ffmpegenc_get_property (GObject *object,
guint prop_id,
GValue *value,
GParamSpec *pspec);
static void gst_ffmpegenc_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec);
static void gst_ffmpegenc_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec);
static GstElementStateReturn
gst_ffmpegenc_change_state (GstElement *element);
static GstElementStateReturn gst_ffmpegenc_change_state (GstElement * element);
static GstElementClass *parent_class = NULL;
/*static guint gst_ffmpegenc_signals[LAST_SIGNAL] = { 0 }; */
static void
gst_ffmpegenc_base_init (GstFFMpegEncClass *klass)
gst_ffmpegenc_base_init (GstFFMpegEncClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
@ -155,24 +156,22 @@ gst_ffmpegenc_base_init (GstFFMpegEncClass *klass)
GstPadTemplate *srctempl, *sinktempl;
params = g_hash_table_lookup (enc_global_plugins,
GINT_TO_POINTER (G_OBJECT_CLASS_TYPE (gobject_class)));
GINT_TO_POINTER (G_OBJECT_CLASS_TYPE (gobject_class)));
/* HACK: if we don't have a GType yet, our params are stored at position 0 */
if (!params) {
params = g_hash_table_lookup (enc_global_plugins,
GINT_TO_POINTER (0));
params = g_hash_table_lookup (enc_global_plugins, GINT_TO_POINTER (0));
}
g_assert (params);
/* construct the element details struct */
details.longname = g_strdup_printf("FFMPEG %s encoder",
params->in_plugin->name);
details.klass = g_strdup_printf("Codec/%s/Encoder",
(params->in_plugin->type == CODEC_TYPE_VIDEO) ?
"Video" : "Audio");
details.description = g_strdup_printf("FFMPEG %s encoder",
params->in_plugin->name);
details.longname = g_strdup_printf ("FFMPEG %s encoder",
params->in_plugin->name);
details.klass = g_strdup_printf ("Codec/%s/Encoder",
(params->in_plugin->type == CODEC_TYPE_VIDEO) ? "Video" : "Audio");
details.description = g_strdup_printf ("FFMPEG %s encoder",
params->in_plugin->name);
details.author = "Wim Taymans <wim.taymans@chello.be>, "
"Ronald Bultje <rbultje@ronald.bitfreak.net>";
"Ronald Bultje <rbultje@ronald.bitfreak.net>";
gst_element_class_set_details (element_class, &details);
g_free (details.longname);
g_free (details.klass);
@ -180,9 +179,9 @@ gst_ffmpegenc_base_init (GstFFMpegEncClass *klass)
/* pad templates */
sinktempl = gst_pad_template_new ("sink", GST_PAD_SINK,
GST_PAD_ALWAYS, params->sinkcaps);
GST_PAD_ALWAYS, params->sinkcaps);
srctempl = gst_pad_template_new ("src", GST_PAD_SRC,
GST_PAD_ALWAYS, params->srccaps);
GST_PAD_ALWAYS, params->srccaps);
gst_element_class_add_pad_template (element_class, srctempl);
gst_element_class_add_pad_template (element_class, sinktempl);
@ -193,39 +192,35 @@ gst_ffmpegenc_base_init (GstFFMpegEncClass *klass)
}
static void
gst_ffmpegenc_class_init (GstFFMpegEncClass *klass)
gst_ffmpegenc_class_init (GstFFMpegEncClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
gobject_class = (GObjectClass*)klass;
gstelement_class = (GstElementClass*)klass;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
parent_class = g_type_class_ref(GST_TYPE_ELEMENT);
parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
if (klass->in_plugin->type == CODEC_TYPE_VIDEO) {
g_object_class_install_property(G_OBJECT_CLASS (klass), ARG_BIT_RATE,
g_param_spec_ulong ("bitrate","Bit Rate",
"Target Video Bitrate",
0, G_MAXULONG, 300000, G_PARAM_READWRITE));
g_object_class_install_property(G_OBJECT_CLASS (klass), ARG_GOP_SIZE,
g_param_spec_int ("gop_size","GOP Size",
"Number of frames within one GOP",
0, G_MAXINT, 15, G_PARAM_READWRITE));
g_object_class_install_property(G_OBJECT_CLASS (klass), ARG_ME_METHOD,
g_param_spec_enum ("me_method","ME Method",
"Motion Estimation Method",
GST_TYPE_ME_METHOD, ME_LOG, G_PARAM_READWRITE));
g_object_class_install_property(G_OBJECT_CLASS(klass), ARG_BUFSIZE,
g_param_spec_ulong("buffer_size", "Buffer Size",
"Size of the video buffers",
0,G_MAXULONG,0,G_PARAM_READWRITE));
}
else if (klass->in_plugin->type == CODEC_TYPE_AUDIO) {
g_object_class_install_property(G_OBJECT_CLASS (klass), ARG_BIT_RATE,
g_param_spec_ulong ("bitrate","Bit Rate",
"Target Audio Bitrate",
0, G_MAXULONG, 128000, G_PARAM_READWRITE));
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BIT_RATE,
g_param_spec_ulong ("bitrate", "Bit Rate",
"Target Video Bitrate", 0, G_MAXULONG, 300000, G_PARAM_READWRITE));
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_GOP_SIZE,
g_param_spec_int ("gop_size", "GOP Size",
"Number of frames within one GOP",
0, G_MAXINT, 15, G_PARAM_READWRITE));
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_ME_METHOD,
g_param_spec_enum ("me_method", "ME Method",
"Motion Estimation Method",
GST_TYPE_ME_METHOD, ME_LOG, G_PARAM_READWRITE));
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BUFSIZE,
g_param_spec_ulong ("buffer_size", "Buffer Size",
"Size of the video buffers", 0, G_MAXULONG, 0, G_PARAM_READWRITE));
} else if (klass->in_plugin->type == CODEC_TYPE_AUDIO) {
g_object_class_install_property (G_OBJECT_CLASS (klass), ARG_BIT_RATE,
g_param_spec_ulong ("bitrate", "Bit Rate",
"Target Audio Bitrate", 0, G_MAXULONG, 128000, G_PARAM_READWRITE));
}
gobject_class->set_property = gst_ffmpegenc_set_property;
@ -237,9 +232,10 @@ gst_ffmpegenc_class_init (GstFFMpegEncClass *klass)
}
static void
gst_ffmpegenc_init(GstFFMpegEnc *ffmpegenc)
gst_ffmpegenc_init (GstFFMpegEnc * ffmpegenc)
{
GstFFMpegEncClass *oclass = (GstFFMpegEncClass*)(G_OBJECT_GET_CLASS (ffmpegenc));
GstFFMpegEncClass *oclass =
(GstFFMpegEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
/* setup pads */
ffmpegenc->sinkpad = gst_pad_new_from_template (oclass->sinktempl, "sink");
@ -251,8 +247,8 @@ gst_ffmpegenc_init(GstFFMpegEnc *ffmpegenc)
gst_element_add_pad (GST_ELEMENT (ffmpegenc), ffmpegenc->srcpad);
/* ffmpeg objects */
ffmpegenc->context = avcodec_alloc_context();
ffmpegenc->picture = avcodec_alloc_frame();
ffmpegenc->context = avcodec_alloc_context ();
ffmpegenc->picture = avcodec_alloc_frame ();
ffmpegenc->opened = FALSE;
ffmpegenc->cache = NULL;
@ -270,7 +266,7 @@ gst_ffmpegenc_init(GstFFMpegEnc *ffmpegenc)
}
static void
gst_ffmpegenc_dispose (GObject *object)
gst_ffmpegenc_dispose (GObject * object)
{
GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *) object;
@ -286,15 +282,15 @@ gst_ffmpegenc_dispose (GObject *object)
}
static GstPadLinkReturn
gst_ffmpegenc_connect (GstPad *pad,
const GstCaps *caps)
gst_ffmpegenc_connect (GstPad * pad, const GstCaps * caps)
{
GstCaps *other_caps;
GstCaps *allowed_caps;
GstCaps *icaps;
enum PixelFormat pix_fmt;
GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *) gst_pad_get_parent (pad);
GstFFMpegEncClass *oclass = (GstFFMpegEncClass *) G_OBJECT_GET_CLASS(ffmpegenc);
GstFFMpegEncClass *oclass =
(GstFFMpegEncClass *) G_OBJECT_GET_CLASS (ffmpegenc);
/* close old session */
if (ffmpegenc->opened) {
@ -321,7 +317,7 @@ gst_ffmpegenc_connect (GstPad *pad,
/* fetch pix_fmt and so on */
gst_ffmpeg_caps_to_codectype (oclass->in_plugin->type,
caps, ffmpegenc->context);
caps, ffmpegenc->context);
pix_fmt = ffmpegenc->context->pix_fmt;
@ -329,7 +325,7 @@ gst_ffmpegenc_connect (GstPad *pad,
if (avcodec_open (ffmpegenc->context, oclass->in_plugin) < 0) {
avcodec_close (ffmpegenc->context);
GST_DEBUG ("ffenc_%s: Failed to open FFMPEG codec",
oclass->in_plugin->name);
oclass->in_plugin->name);
return GST_PAD_LINK_REFUSED;
}
@ -337,13 +333,13 @@ gst_ffmpegenc_connect (GstPad *pad,
if (pix_fmt != ffmpegenc->context->pix_fmt) {
avcodec_close (ffmpegenc->context);
GST_DEBUG ("ffenc_%s: AV wants different colourspace (%d given, %d wanted)",
oclass->in_plugin->name, pix_fmt, ffmpegenc->context->pix_fmt);
oclass->in_plugin->name, pix_fmt, ffmpegenc->context->pix_fmt);
return GST_PAD_LINK_REFUSED;
}
/* try to set this caps on the other side */
other_caps = gst_ffmpeg_codecid_to_caps (oclass->in_plugin->id,
ffmpegenc->context, TRUE);
ffmpegenc->context, TRUE);
if (!other_caps) {
avcodec_close (ffmpegenc->context);
GST_DEBUG ("Unsupported codec - no caps found");
@ -363,7 +359,9 @@ gst_ffmpegenc_connect (GstPad *pad,
if (gst_caps_get_size (icaps) > 1) {
GstCaps *newcaps;
newcaps = gst_caps_new_full (gst_structure_copy (gst_caps_get_structure (icaps, 0)), NULL);
newcaps =
gst_caps_new_full (gst_structure_copy (gst_caps_get_structure (icaps,
0)), NULL);
gst_caps_free (icaps);
icaps = newcaps;
}
@ -384,32 +382,29 @@ gst_ffmpegenc_connect (GstPad *pad,
}
static void
gst_ffmpegenc_chain_video (GstPad *pad,
GstData *_data)
gst_ffmpegenc_chain_video (GstPad * pad, GstData * _data)
{
GstBuffer *inbuf = GST_BUFFER (_data);
GstBuffer *outbuf = NULL;
GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *)(gst_pad_get_parent (pad));
GstFFMpegEncClass *oclass = (GstFFMpegEncClass*)(G_OBJECT_GET_CLASS(ffmpegenc));
GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *) (gst_pad_get_parent (pad));
GstFFMpegEncClass *oclass =
(GstFFMpegEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
gint ret_size = 0;
/* FIXME: events (discont (flush!) and eos (close down) etc.) */
outbuf = gst_buffer_new_and_alloc (ffmpegenc->buffer_size);
avpicture_fill ((AVPicture *) ffmpegenc->picture,
GST_BUFFER_DATA (inbuf),
ffmpegenc->context->pix_fmt,
ffmpegenc->context->width,
ffmpegenc->context->height);
GST_BUFFER_DATA (inbuf),
ffmpegenc->context->pix_fmt,
ffmpegenc->context->width, ffmpegenc->context->height);
ffmpegenc->picture->pts = GST_BUFFER_TIMESTAMP (inbuf) / 1000;
ret_size = avcodec_encode_video (ffmpegenc->context,
GST_BUFFER_DATA (outbuf),
GST_BUFFER_MAXSIZE (outbuf),
ffmpegenc->picture);
GST_BUFFER_DATA (outbuf),
GST_BUFFER_MAXSIZE (outbuf), ffmpegenc->picture);
if (ret_size < 0) {
g_warning("ffenc_%s: failed to encode buffer",
oclass->in_plugin->name);
g_warning ("ffenc_%s: failed to encode buffer", oclass->in_plugin->name);
gst_buffer_unref (inbuf);
return;
}
@ -423,13 +418,13 @@ gst_ffmpegenc_chain_video (GstPad *pad,
}
static void
gst_ffmpegenc_chain_audio (GstPad *pad,
GstData *_data)
gst_ffmpegenc_chain_audio (GstPad * pad, GstData * _data)
{
GstBuffer *inbuf = GST_BUFFER (_data);
GstBuffer *outbuf = NULL, *subbuf;
GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *)(gst_pad_get_parent (pad));
GstFFMpegEncClass *oclass = (GstFFMpegEncClass*)(G_OBJECT_GET_CLASS(ffmpegenc));
GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *) (gst_pad_get_parent (pad));
GstFFMpegEncClass *oclass =
(GstFFMpegEncClass *) (G_OBJECT_GET_CLASS (ffmpegenc));
gint size, ret_size = 0, in_size, frame_size;
size = GST_BUFFER_SIZE (inbuf);
@ -437,7 +432,7 @@ gst_ffmpegenc_chain_audio (GstPad *pad,
/* FIXME: events (discont (flush!) and eos (close down) etc.) */
frame_size = ffmpegenc->context->frame_size * 2 *
ffmpegenc->context->channels;
ffmpegenc->context->channels;
in_size = size;
if (ffmpegenc->cache)
in_size += GST_BUFFER_SIZE (ffmpegenc->cache);
@ -445,7 +440,7 @@ gst_ffmpegenc_chain_audio (GstPad *pad,
while (1) {
/* do we have enough data for one frame? */
if (in_size / (2 * ffmpegenc->context->channels) <
ffmpegenc->context->frame_size) {
ffmpegenc->context->frame_size) {
if (in_size > size) {
/* this is panic! we got a buffer, but still don't have enough
* data. Merge them and retry in the next cycle... */
@ -455,17 +450,18 @@ gst_ffmpegenc_chain_audio (GstPad *pad,
ffmpegenc->cache = inbuf;
} else if (in_size > 0) {
ffmpegenc->cache = gst_buffer_create_sub (inbuf, size - in_size,
in_size);
in_size);
GST_BUFFER_DURATION (ffmpegenc->cache) =
GST_BUFFER_DURATION (inbuf) * GST_BUFFER_SIZE (ffmpegenc->cache) / size;
GST_BUFFER_DURATION (inbuf) * GST_BUFFER_SIZE (ffmpegenc->cache) /
size;
GST_BUFFER_TIMESTAMP (ffmpegenc->cache) =
GST_BUFFER_TIMESTAMP (inbuf) + (GST_BUFFER_DURATION (inbuf) *
(size - in_size) / size);
GST_BUFFER_TIMESTAMP (inbuf) +
(GST_BUFFER_DURATION (inbuf) * (size - in_size) / size);
gst_buffer_unref (inbuf);
} else {
gst_buffer_unref (inbuf);
}
return;
}
@ -474,28 +470,26 @@ gst_ffmpegenc_chain_audio (GstPad *pad,
/* merge */
subbuf = gst_buffer_create_sub (inbuf, 0, frame_size - (in_size - size));
GST_BUFFER_DURATION (subbuf) =
GST_BUFFER_DURATION (inbuf) * GST_BUFFER_SIZE (subbuf) / size;
GST_BUFFER_DURATION (inbuf) * GST_BUFFER_SIZE (subbuf) / size;
subbuf = gst_buffer_merge (ffmpegenc->cache, subbuf);
ffmpegenc->cache = NULL;
} else {
subbuf = gst_buffer_create_sub (inbuf, size - in_size, frame_size);
GST_BUFFER_DURATION (subbuf) =
GST_BUFFER_DURATION (inbuf) * GST_BUFFER_SIZE (subbuf) / size;
GST_BUFFER_DURATION (inbuf) * GST_BUFFER_SIZE (subbuf) / size;
GST_BUFFER_TIMESTAMP (subbuf) =
GST_BUFFER_TIMESTAMP (inbuf) + (GST_BUFFER_DURATION (inbuf) *
(size - in_size) / size);
GST_BUFFER_TIMESTAMP (inbuf) + (GST_BUFFER_DURATION (inbuf) *
(size - in_size) / size);
}
outbuf = gst_buffer_new_and_alloc (GST_BUFFER_SIZE (inbuf));
ret_size = avcodec_encode_audio (ffmpegenc->context,
GST_BUFFER_DATA (outbuf),
GST_BUFFER_MAXSIZE (outbuf),
(const short int *)
GST_BUFFER_DATA (subbuf));
GST_BUFFER_DATA (outbuf),
GST_BUFFER_MAXSIZE (outbuf), (const short int *)
GST_BUFFER_DATA (subbuf));
if (ret_size < 0) {
g_warning("ffenc_%s: failed to encode buffer",
oclass->in_plugin->name);
g_warning ("ffenc_%s: failed to encode buffer", oclass->in_plugin->name);
gst_buffer_unref (inbuf);
gst_buffer_unref (outbuf);
gst_buffer_unref (subbuf);
@ -513,15 +507,13 @@ gst_ffmpegenc_chain_audio (GstPad *pad,
}
static void
gst_ffmpegenc_set_property (GObject *object,
guint prop_id,
const GValue *value,
GParamSpec *pspec)
gst_ffmpegenc_set_property (GObject * object,
guint prop_id, const GValue * value, GParamSpec * pspec)
{
GstFFMpegEnc *ffmpegenc;
/* Get a pointer of the right type. */
ffmpegenc = (GstFFMpegEnc *)(object);
ffmpegenc = (GstFFMpegEnc *) (object);
/* Check the argument id to see which argument we're setting. */
switch (prop_id) {
@ -535,7 +527,7 @@ gst_ffmpegenc_set_property (GObject *object,
ffmpegenc->me_method = g_value_get_enum (value);
break;
case ARG_BUFSIZE:
ffmpegenc->buffer_size = g_value_get_ulong(value);
ffmpegenc->buffer_size = g_value_get_ulong (value);
break;
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
@ -545,15 +537,13 @@ gst_ffmpegenc_set_property (GObject *object,
/* The set function is simply the inverse of the get fuction. */
static void
gst_ffmpegenc_get_property (GObject *object,
guint prop_id,
GValue *value,
GParamSpec *pspec)
gst_ffmpegenc_get_property (GObject * object,
guint prop_id, GValue * value, GParamSpec * pspec)
{
GstFFMpegEnc *ffmpegenc;
/* It's not null if we got it, but it might not be ours */
ffmpegenc = (GstFFMpegEnc *)(object);
ffmpegenc = (GstFFMpegEnc *) (object);
switch (prop_id) {
case ARG_BIT_RATE:
@ -575,7 +565,7 @@ gst_ffmpegenc_get_property (GObject *object,
}
static GstElementStateReturn
gst_ffmpegenc_change_state (GstElement *element)
gst_ffmpegenc_change_state (GstElement * element)
{
GstFFMpegEnc *ffmpegenc = (GstFFMpegEnc *) element;
gint transition = GST_STATE_TRANSITION (element);
@ -600,22 +590,22 @@ gst_ffmpegenc_change_state (GstElement *element)
}
gboolean
gst_ffmpegenc_register (GstPlugin *plugin)
gst_ffmpegenc_register (GstPlugin * plugin)
{
GTypeInfo typeinfo = {
sizeof(GstFFMpegEncClass),
(GBaseInitFunc)gst_ffmpegenc_base_init,
sizeof (GstFFMpegEncClass),
(GBaseInitFunc) gst_ffmpegenc_base_init,
NULL,
(GClassInitFunc)gst_ffmpegenc_class_init,
(GClassInitFunc) gst_ffmpegenc_class_init,
NULL,
NULL,
sizeof(GstFFMpegEnc),
sizeof (GstFFMpegEnc),
0,
(GInstanceInitFunc)gst_ffmpegenc_init,
(GInstanceInitFunc) gst_ffmpegenc_init,
};
GType type;
AVCodec *in_plugin;
in_plugin = first_avcodec;
enc_global_plugins = g_hash_table_new (NULL, NULL);
@ -627,8 +617,8 @@ gst_ffmpegenc_register (GstPlugin *plugin)
/* no quasi codecs, please */
if (in_plugin->id == CODEC_ID_RAWVIDEO ||
(in_plugin->id >= CODEC_ID_PCM_S16LE &&
in_plugin->id <= CODEC_ID_PCM_ALAW)) {
(in_plugin->id >= CODEC_ID_PCM_S16LE &&
in_plugin->id <= CODEC_ID_PCM_ALAW)) {
goto next;
}
@ -639,16 +629,16 @@ gst_ffmpegenc_register (GstPlugin *plugin)
/* first make sure we've got a supported type */
srccaps = gst_ffmpeg_codecid_to_caps (in_plugin->id, NULL, TRUE);
sinkcaps = gst_ffmpeg_codectype_to_caps (in_plugin->type, NULL);
sinkcaps = gst_ffmpeg_codectype_to_caps (in_plugin->type, NULL);
if (!sinkcaps || !srccaps)
goto next;
/* construct the type */
type_name = g_strdup_printf("ffenc_%s", in_plugin->name);
type_name = g_strdup_printf ("ffenc_%s", in_plugin->name);
/* if it's already registered, drop it */
if (g_type_from_name(type_name)) {
g_free(type_name);
if (g_type_from_name (type_name)) {
g_free (type_name);
goto next;
}
@ -657,12 +647,11 @@ gst_ffmpegenc_register (GstPlugin *plugin)
params->srccaps = srccaps;
params->sinkcaps = sinkcaps;
g_hash_table_insert (enc_global_plugins,
GINT_TO_POINTER (0),
(gpointer) params);
g_hash_table_insert (enc_global_plugins,
GINT_TO_POINTER (0), (gpointer) params);
/* create the glib type now */
type = g_type_register_static(GST_TYPE_ELEMENT, type_name , &typeinfo, 0);
type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &typeinfo, 0);
if (!gst_element_register (plugin, type_name, GST_RANK_NONE, type)) {
g_free (type_name);
return FALSE;
@ -670,11 +659,10 @@ gst_ffmpegenc_register (GstPlugin *plugin)
g_free (type_name);
g_hash_table_insert (enc_global_plugins,
GINT_TO_POINTER (type),
(gpointer) params);
g_hash_table_insert (enc_global_plugins,
GINT_TO_POINTER (type), (gpointer) params);
next:
next:
in_plugin = in_plugin->next;
}
g_hash_table_remove (enc_global_plugins, GINT_TO_POINTER (0));

View file

@ -34,32 +34,35 @@
typedef struct _GstFFMpegMux GstFFMpegMux;
struct _GstFFMpegMux {
GstElement element;
struct _GstFFMpegMux
{
GstElement element;
/* We need to keep track of our pads, so we do so here. */
GstPad *srcpad;
GstPad *srcpad;
AVFormatContext *context;
gboolean opened;
AVFormatContext *context;
gboolean opened;
GstPad *sinkpads[MAX_STREAMS];
gint videopads, audiopads;
GstBuffer *bufferqueue[MAX_STREAMS];
gboolean eos[MAX_STREAMS];
GstPad *sinkpads[MAX_STREAMS];
gint videopads, audiopads;
GstBuffer *bufferqueue[MAX_STREAMS];
gboolean eos[MAX_STREAMS];
};
typedef struct _GstFFMpegMuxClassParams {
AVOutputFormat *in_plugin;
GstCaps *srccaps, *videosinkcaps, *audiosinkcaps;
typedef struct _GstFFMpegMuxClassParams
{
AVOutputFormat *in_plugin;
GstCaps *srccaps, *videosinkcaps, *audiosinkcaps;
} GstFFMpegMuxClassParams;
typedef struct _GstFFMpegMuxClass GstFFMpegMuxClass;
struct _GstFFMpegMuxClass {
GstElementClass parent_class;
struct _GstFFMpegMuxClass
{
GstElementClass parent_class;
AVOutputFormat *in_plugin;
AVOutputFormat *in_plugin;
};
#define GST_TYPE_FFMPEGMUX \
@ -73,12 +76,14 @@ struct _GstFFMpegMuxClass {
#define GST_IS_FFMPEGMUX_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_FFMPEGMUX))
enum {
enum
{
/* FILL ME */
LAST_SIGNAL
};
enum {
enum
{
ARG_0,
/* FILL ME */
};
@ -86,28 +91,25 @@ enum {
static GHashTable *global_plugins;
/* A number of functon prototypes are given so we can refer to them later. */
static void gst_ffmpegmux_class_init (GstFFMpegMuxClass *klass);
static void gst_ffmpegmux_base_init (GstFFMpegMuxClass *klass);
static void gst_ffmpegmux_init (GstFFMpegMux *ffmpegmux);
static void gst_ffmpegmux_dispose (GObject *object);
static void gst_ffmpegmux_class_init (GstFFMpegMuxClass * klass);
static void gst_ffmpegmux_base_init (GstFFMpegMuxClass * klass);
static void gst_ffmpegmux_init (GstFFMpegMux * ffmpegmux);
static void gst_ffmpegmux_dispose (GObject * object);
static GstPadLinkReturn
gst_ffmpegmux_connect (GstPad *pad,
const GstCaps *caps);
static GstPad * gst_ffmpegmux_request_new_pad (GstElement *element,
GstPadTemplate *templ,
const gchar *name);
static void gst_ffmpegmux_loop (GstElement *element);
gst_ffmpegmux_connect (GstPad * pad, const GstCaps * caps);
static GstPad *gst_ffmpegmux_request_new_pad (GstElement * element,
GstPadTemplate * templ, const gchar * name);
static void gst_ffmpegmux_loop (GstElement * element);
static GstElementStateReturn
gst_ffmpegmux_change_state (GstElement *element);
static GstElementStateReturn gst_ffmpegmux_change_state (GstElement * element);
static GstElementClass *parent_class = NULL;
/*static guint gst_ffmpegmux_signals[LAST_SIGNAL] = { 0 }; */
static void
gst_ffmpegmux_base_init (GstFFMpegMuxClass *klass)
gst_ffmpegmux_base_init (GstFFMpegMuxClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
@ -116,20 +118,19 @@ gst_ffmpegmux_base_init (GstFFMpegMuxClass *klass)
GstPadTemplate *videosinktempl, *audiosinktempl, *srctempl;
params = g_hash_table_lookup (global_plugins,
GINT_TO_POINTER (G_OBJECT_CLASS_TYPE (gobject_class)));
GINT_TO_POINTER (G_OBJECT_CLASS_TYPE (gobject_class)));
if (!params)
params = g_hash_table_lookup (global_plugins,
GINT_TO_POINTER (0));
params = g_hash_table_lookup (global_plugins, GINT_TO_POINTER (0));
g_assert (params);
/* construct the element details struct */
details.longname = g_strdup_printf ("FFMPEG %s Muxer",
params->in_plugin->name);
params->in_plugin->name);
details.klass = g_strdup ("Codec/Muxer");
details.description = g_strdup_printf ("FFMPEG %s Muxer",
params->in_plugin->name);
params->in_plugin->name);
details.author = "Wim Taymans <wim.taymans@chello.be>, "
"Ronald Bultje <rbultje@ronald.bitfreak.net>";
"Ronald Bultje <rbultje@ronald.bitfreak.net>";
gst_element_class_set_details (element_class, &details);
g_free (details.longname);
g_free (details.klass);
@ -137,16 +138,11 @@ gst_ffmpegmux_base_init (GstFFMpegMuxClass *klass)
/* pad templates */
srctempl = gst_pad_template_new ("src", GST_PAD_SRC,
GST_PAD_ALWAYS,
params->srccaps);
GST_PAD_ALWAYS, params->srccaps);
audiosinktempl = gst_pad_template_new ("audio_%d",
GST_PAD_SINK,
GST_PAD_REQUEST,
params->audiosinkcaps);
GST_PAD_SINK, GST_PAD_REQUEST, params->audiosinkcaps);
videosinktempl = gst_pad_template_new ("video_%d",
GST_PAD_SINK,
GST_PAD_REQUEST,
params->videosinkcaps);
GST_PAD_SINK, GST_PAD_REQUEST, params->videosinkcaps);
gst_element_class_add_pad_template (element_class, srctempl);
gst_element_class_add_pad_template (element_class, videosinktempl);
@ -156,15 +152,15 @@ gst_ffmpegmux_base_init (GstFFMpegMuxClass *klass)
}
static void
gst_ffmpegmux_class_init (GstFFMpegMuxClass *klass)
gst_ffmpegmux_class_init (GstFFMpegMuxClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
gobject_class = (GObjectClass*)klass;
gstelement_class = (GstElementClass*)klass;
gobject_class = (GObjectClass *) klass;
gstelement_class = (GstElementClass *) klass;
parent_class = g_type_class_ref(GST_TYPE_ELEMENT);
parent_class = g_type_class_ref (GST_TYPE_ELEMENT);
gstelement_class->request_new_pad = gst_ffmpegmux_request_new_pad;
gstelement_class->change_state = gst_ffmpegmux_change_state;
@ -172,24 +168,22 @@ gst_ffmpegmux_class_init (GstFFMpegMuxClass *klass)
}
static void
gst_ffmpegmux_init(GstFFMpegMux *ffmpegmux)
gst_ffmpegmux_init (GstFFMpegMux * ffmpegmux)
{
GstElementClass *klass = GST_ELEMENT_GET_CLASS (ffmpegmux);
GstFFMpegMuxClass *oclass = (GstFFMpegMuxClass*) klass;
GstFFMpegMuxClass *oclass = (GstFFMpegMuxClass *) klass;
GstPadTemplate *templ = gst_element_class_get_pad_template (klass, "src");
ffmpegmux->srcpad = gst_pad_new_from_template (templ, "src");
gst_element_set_loop_function (GST_ELEMENT (ffmpegmux),
gst_ffmpegmux_loop);
gst_element_add_pad (GST_ELEMENT (ffmpegmux),
ffmpegmux->srcpad);
gst_element_set_loop_function (GST_ELEMENT (ffmpegmux), gst_ffmpegmux_loop);
gst_element_add_pad (GST_ELEMENT (ffmpegmux), ffmpegmux->srcpad);
ffmpegmux->context = g_new0 (AVFormatContext, 1);
ffmpegmux->context->oformat = oclass->in_plugin;
ffmpegmux->context->nb_streams = 0;
snprintf (ffmpegmux->context->filename,
sizeof (ffmpegmux->context->filename),
"gstreamer://%p", ffmpegmux->srcpad);
sizeof (ffmpegmux->context->filename),
"gstreamer://%p", ffmpegmux->srcpad);
ffmpegmux->opened = FALSE;
ffmpegmux->videopads = 0;
@ -197,7 +191,7 @@ gst_ffmpegmux_init(GstFFMpegMux *ffmpegmux)
}
static void
gst_ffmpegmux_dispose (GObject *object)
gst_ffmpegmux_dispose (GObject * object)
{
GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) object;
@ -210,13 +204,12 @@ gst_ffmpegmux_dispose (GObject *object)
}
static GstPad *
gst_ffmpegmux_request_new_pad (GstElement *element,
GstPadTemplate *templ,
const gchar *name)
gst_ffmpegmux_request_new_pad (GstElement * element,
GstPadTemplate * templ, const gchar * name)
{
GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) element;
GstElementClass *klass = GST_ELEMENT_GET_CLASS (element);
GstFFMpegMuxClass *oclass = (GstFFMpegMuxClass*) klass;
GstFFMpegMuxClass *oclass = (GstFFMpegMuxClass *) klass;
gchar *padname;
GstPad *pad;
AVStream *st;
@ -229,18 +222,16 @@ gst_ffmpegmux_request_new_pad (GstElement *element,
/* figure out a name that *we* like */
if (templ == gst_element_class_get_pad_template (klass, "video_%d")) {
padname = g_strdup_printf ("video_%d",
ffmpegmux->videopads++);
padname = g_strdup_printf ("video_%d", ffmpegmux->videopads++);
type = CODEC_TYPE_VIDEO;
bitrate = 64 * 1024;
framesize = 1152;
} else if (templ == gst_element_class_get_pad_template (klass, "audio_%d")) {
padname = g_strdup_printf ("audio_%d",
ffmpegmux->audiopads++);
padname = g_strdup_printf ("audio_%d", ffmpegmux->audiopads++);
type = CODEC_TYPE_AUDIO;
bitrate = 285 * 1024;
} else {
g_warning("ffmux: unknown pad template!");
g_warning ("ffmux: unknown pad template!");
return NULL;
}
@ -254,30 +245,29 @@ gst_ffmpegmux_request_new_pad (GstElement *element,
/* AVStream needs to be created */
st = av_new_stream (ffmpegmux->context, padnum);
st->codec.codec_type = type;
st->codec.codec_id = CODEC_ID_NONE; /* this is a check afterwards */
st->stream_copy = 1; /* we're not the actual encoder */
st->codec.codec_id = CODEC_ID_NONE; /* this is a check afterwards */
st->stream_copy = 1; /* we're not the actual encoder */
st->codec.bit_rate = bitrate;
st->codec.frame_size = framesize;
/* we fill in codec during capsnego */
/* we love debug output (c) (tm) (r) */
GST_DEBUG ("Created %s pad for ffmux_%s element",
padname, oclass->in_plugin->name);
padname, oclass->in_plugin->name);
g_free (padname);
return pad;
}
static GstPadLinkReturn
gst_ffmpegmux_connect (GstPad *pad,
const GstCaps *caps)
gst_ffmpegmux_connect (GstPad * pad, const GstCaps * caps)
{
GstFFMpegMux *ffmpegmux = (GstFFMpegMux *)(gst_pad_get_parent (pad));
GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) (gst_pad_get_parent (pad));
gint i;
AVStream *st;
/*g_return_val_if_fail (ffmpegmux->opened == FALSE,
GST_PAD_LINK_REFUSED);*/
GST_PAD_LINK_REFUSED); */
for (i = 0; i < ffmpegmux->context->nb_streams; i++) {
if (pad == ffmpegmux->sinkpads[i]) {
@ -292,17 +282,16 @@ gst_ffmpegmux_connect (GstPad *pad,
/* for the format-specific guesses, we'll go to
* our famous codec mapper */
if (gst_ffmpeg_caps_to_codecid (caps,
&st->codec) != CODEC_ID_NONE) {
if (gst_ffmpeg_caps_to_codecid (caps, &st->codec) != CODEC_ID_NONE) {
ffmpegmux->eos[i] = FALSE;
return GST_PAD_LINK_OK;
}
}
return GST_PAD_LINK_REFUSED;
}
static void
gst_ffmpegmux_loop (GstElement *element)
gst_ffmpegmux_loop (GstElement * element)
{
GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) element;
gint i, bufnum;
@ -313,25 +302,24 @@ gst_ffmpegmux_loop (GstElement *element)
/* check for "pull'ability" */
while (pad != NULL &&
GST_PAD_IS_USABLE (pad) &&
ffmpegmux->eos[i] == FALSE &&
ffmpegmux->bufferqueue[i] == NULL) {
GST_PAD_IS_USABLE (pad) &&
ffmpegmux->eos[i] == FALSE && ffmpegmux->bufferqueue[i] == NULL) {
GstData *data;
/* we can pull a buffer! */
data = gst_pad_pull (pad);
if (GST_IS_EVENT (data)) {
GstEvent *event = GST_EVENT (data);
GstEvent *event = GST_EVENT (data);
switch (GST_EVENT_TYPE (event)) {
switch (GST_EVENT_TYPE (event)) {
case GST_EVENT_EOS:
/* flag EOS on this stream */
ffmpegmux->eos[i] = TRUE;
gst_event_unref (event);
break;
default:
gst_pad_event_default (pad, event);
break;
break;
default:
gst_pad_event_default (pad, event);
break;
}
} else {
ffmpegmux->bufferqueue[i] = GST_BUFFER (data);
@ -346,27 +334,26 @@ gst_ffmpegmux_loop (GstElement *element)
for (i = 0; i < ffmpegmux->context->nb_streams; i++) {
AVStream *st = ffmpegmux->context->streams[i];
/* check whether the pad has successfully completed capsnego */
/* check whether the pad has successfully completed capsnego */
if (st->codec.codec_id == CODEC_ID_NONE) {
GST_ELEMENT_ERROR (element, CORE, NEGOTIATION, (NULL),
("no caps set on stream %d (%s)", i,
(st->codec.codec_type == CODEC_TYPE_VIDEO) ?
"video" : "audio"));
("no caps set on stream %d (%s)", i,
(st->codec.codec_type == CODEC_TYPE_VIDEO) ?
"video" : "audio"));
return;
}
}
if (url_fopen (&ffmpegmux->context->pb,
ffmpegmux->context->filename,
URL_WRONLY) < 0) {
ffmpegmux->context->filename, URL_WRONLY) < 0) {
GST_ELEMENT_ERROR (element, LIBRARY, TOO_LAZY, (NULL),
("Failed to open stream context in ffmux"));
("Failed to open stream context in ffmux"));
return;
}
if (av_set_parameters (ffmpegmux->context, NULL)) {
GST_ELEMENT_ERROR (element, LIBRARY, INIT, (NULL),
("Failed to initialize muxer"));
("Failed to initialize muxer"));
return;
}
@ -394,7 +381,7 @@ gst_ffmpegmux_loop (GstElement *element)
/* if we do have one, only use this one if it's older */
if (GST_BUFFER_TIMESTAMP (ffmpegmux->bufferqueue[i]) <
GST_BUFFER_TIMESTAMP (ffmpegmux->bufferqueue[bufnum])) {
GST_BUFFER_TIMESTAMP (ffmpegmux->bufferqueue[bufnum])) {
bufnum = i;
}
}
@ -411,10 +398,10 @@ gst_ffmpegmux_loop (GstElement *element)
ffmpegmux->context->streams[bufnum]->codec.frame_number++;
/* set time */
ffmpegmux->context->streams[bufnum]->pts.val = (GST_BUFFER_TIMESTAMP (buf) * 90) / 1000000;
av_write_frame (ffmpegmux->context, bufnum,
GST_BUFFER_DATA (buf),
GST_BUFFER_SIZE (buf));
ffmpegmux->context->streams[bufnum]->pts.val =
(GST_BUFFER_TIMESTAMP (buf) * 90) / 1000000;
av_write_frame (ffmpegmux->context, bufnum, GST_BUFFER_DATA (buf),
GST_BUFFER_SIZE (buf));
//ffmpegmux->context->streams[bufnum]->codec.real_pict_num++;
gst_buffer_unref (buf);
} else {
@ -427,9 +414,9 @@ gst_ffmpegmux_loop (GstElement *element)
}
static GstElementStateReturn
gst_ffmpegmux_change_state (GstElement *element)
gst_ffmpegmux_change_state (GstElement * element)
{
GstFFMpegMux *ffmpegmux = (GstFFMpegMux *)(element);
GstFFMpegMux *ffmpegmux = (GstFFMpegMux *) (element);
gint transition = GST_STATE_TRANSITION (element);
switch (transition) {
@ -449,24 +436,24 @@ gst_ffmpegmux_change_state (GstElement *element)
gboolean
gst_ffmpegmux_register (GstPlugin *plugin)
gst_ffmpegmux_register (GstPlugin * plugin)
{
GTypeInfo typeinfo = {
sizeof(GstFFMpegMuxClass),
(GBaseInitFunc)gst_ffmpegmux_base_init,
sizeof (GstFFMpegMuxClass),
(GBaseInitFunc) gst_ffmpegmux_base_init,
NULL,
(GClassInitFunc)gst_ffmpegmux_class_init,
(GClassInitFunc) gst_ffmpegmux_class_init,
NULL,
NULL,
sizeof(GstFFMpegMux),
sizeof (GstFFMpegMux),
0,
(GInstanceInitFunc)gst_ffmpegmux_init,
(GInstanceInitFunc) gst_ffmpegmux_init,
};
GType type;
AVOutputFormat *in_plugin;
GstFFMpegMuxClassParams *params;
AVCodec *in_codec;
in_plugin = first_oformat;
global_plugins = g_hash_table_new (NULL, NULL);
@ -486,9 +473,9 @@ gst_ffmpegmux_register (GstPlugin *plugin)
* when we open the stream */
audiosinkcaps = gst_caps_new_empty ();
videosinkcaps = gst_caps_new_empty ();
for (in_codec = first_avcodec; in_codec != NULL;
in_codec = in_codec->next) {
for (in_codec = first_avcodec; in_codec != NULL; in_codec = in_codec->next) {
GstCaps *temp = gst_ffmpeg_codecid_to_caps (in_codec->id, NULL, TRUE);
if (!temp) {
continue;
}
@ -506,18 +493,19 @@ gst_ffmpegmux_register (GstPlugin *plugin)
}
/* construct the type */
type_name = g_strdup_printf("ffmux_%s", in_plugin->name);
type_name = g_strdup_printf ("ffmux_%s", in_plugin->name);
p = type_name;
while (*p) {
if (*p == '.') *p = '_';
if (*p == '.')
*p = '_';
p++;
}
/* if it's already registered, drop it */
if (g_type_from_name(type_name)) {
g_free(type_name);
if (g_type_from_name (type_name)) {
g_free (type_name);
goto next;
}
@ -528,20 +516,18 @@ gst_ffmpegmux_register (GstPlugin *plugin)
params->videosinkcaps = videosinkcaps;
params->audiosinkcaps = audiosinkcaps;
g_hash_table_insert (global_plugins,
GINT_TO_POINTER (0),
(gpointer) params);
g_hash_table_insert (global_plugins,
GINT_TO_POINTER (0), (gpointer) params);
/* create the type now */
type = g_type_register_static(GST_TYPE_ELEMENT, type_name , &typeinfo, 0);
type = g_type_register_static (GST_TYPE_ELEMENT, type_name, &typeinfo, 0);
if (!gst_element_register (plugin, type_name, GST_RANK_NONE, type))
return FALSE;
g_hash_table_insert (global_plugins,
GINT_TO_POINTER (type),
(gpointer) params);
g_hash_table_insert (global_plugins,
GINT_TO_POINTER (type), (gpointer) params);
next:
next:
in_plugin = in_plugin->next;
}
g_hash_table_remove (global_plugins, GINT_TO_POINTER (0));

View file

@ -34,18 +34,17 @@
typedef struct _GstProtocolInfo GstProtocolInfo;
struct _GstProtocolInfo {
GstPad *pad;
struct _GstProtocolInfo
{
GstPad *pad;
int flags;
int flags;
GstByteStream *bs;
gboolean eos;
gboolean eos;
};
static int
gst_ffmpegdata_open (URLContext *h,
const char *filename,
int flags)
static int
gst_ffmpegdata_open (URLContext * h, const char *filename, int flags)
{
GstProtocolInfo *info;
GstPad *pad;
@ -54,8 +53,7 @@ gst_ffmpegdata_open (URLContext *h,
info->flags = flags;
/* we don't support R/W together */
if (flags != URL_RDONLY &&
flags != URL_WRONLY) {
if (flags != URL_RDONLY && flags != URL_WRONLY) {
g_warning ("Only read-only or write-only are supported");
return -EINVAL;
}
@ -87,10 +85,8 @@ gst_ffmpegdata_open (URLContext *h,
return 0;
}
static int
gst_ffmpegdata_read (URLContext *h,
unsigned char *buf,
int size)
static int
gst_ffmpegdata_read (URLContext * h, unsigned char *buf, int size)
{
GstByteStream *bs;
guint32 total, request;
@ -144,7 +140,7 @@ gst_ffmpegdata_read (URLContext *h,
}
}
} while (!info->eos && total != request);
memcpy (buf, data, total);
gst_bytestream_flush (bs, total);
@ -152,9 +148,7 @@ gst_ffmpegdata_read (URLContext *h,
}
static int
gst_ffmpegdata_write (URLContext *h,
unsigned char *buf,
int size)
gst_ffmpegdata_write (URLContext * h, unsigned char *buf, int size)
{
GstProtocolInfo *info;
GstBuffer *outbuf;
@ -174,9 +168,7 @@ gst_ffmpegdata_write (URLContext *h,
}
static offset_t
gst_ffmpegdata_seek (URLContext *h,
offset_t pos,
int whence)
gst_ffmpegdata_seek (URLContext * h, offset_t pos, int whence)
{
GstSeekType seek_type = 0;
GstProtocolInfo *info;
@ -216,16 +208,17 @@ gst_ffmpegdata_seek (URLContext *h,
}
static int
gst_ffmpegdata_close (URLContext *h)
gst_ffmpegdata_close (URLContext * h)
{
GstProtocolInfo *info;
info = (GstProtocolInfo *) h->priv_data;
switch (info->flags) {
case URL_WRONLY: {
case URL_WRONLY:{
/* send EOS - that closes down the stream */
GstEvent *event = gst_event_new (GST_EVENT_EOS);
gst_pad_push (info->pad, GST_DATA (event));
}
break;
@ -243,11 +236,10 @@ gst_ffmpegdata_close (URLContext *h)
}
URLProtocol gstreamer_protocol = {
.name = "gstreamer",
.url_open = gst_ffmpegdata_open,
.url_read = gst_ffmpegdata_read,
.name = "gstreamer",
.url_open = gst_ffmpegdata_open,
.url_read = gst_ffmpegdata_read,
.url_write = gst_ffmpegdata_write,
.url_seek = gst_ffmpegdata_seek,
.url_seek = gst_ffmpegdata_seek,
.url_close = gst_ffmpegdata_close,
};