schro: Move schro plugin from Schroedinger

Previous history is in the Schroedinger repository.  The plugin depends
on, and is an example of using, the GstBaseVideo* base classes.

The code was reindented, and an #ifdef HAVE_ENCODER block was removed.
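A quick way to exercise the new elements (a hypothetical pipeline, assuming
the plugin and the schroedinger-1.0 library are installed) is to round-trip
test video through the encoder and decoder with GStreamer 0.10's gst-launch:

    gst-launch-0.10 videotestsrc num-buffers=50 ! schroenc ! schrodec ! fakesink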
David Schleef 2009-05-15 16:12:37 -07:00
parent 4ec34e83d5
commit 0f1c5c1b68
9 changed files with 2345 additions and 0 deletions

@@ -566,6 +566,12 @@ AG_GST_CHECK_FEATURE(PANGO, [Pango font rendering], pango, [
AG_GST_PKG_CHECK_MODULES(PANGO, pango pangoft2)
])
dnl *** schroedinger ***
translit(dnm, m, l) AM_CONDITIONAL(USE_SCHRO, true)
AG_GST_CHECK_FEATURE(SCHRO, [Schroedinger video codec], schro, [
AG_GST_PKG_CHECK_MODULES(SCHRO, schroedinger-1.0 >= 1.0.7)
])
dnl *** theora ***
translit(dnm, m, l) AM_CONDITIONAL(USE_THEORA, true)
AG_GST_CHECK_FEATURE(THEORA, [Xiph Theora video codec], theora, [
@@ -619,6 +625,7 @@ AM_CONDITIONAL(USE_GIO, false)
AM_CONDITIONAL(USE_LIBVISUAL, false)
AM_CONDITIONAL(USE_OGG, false)
AM_CONDITIONAL(USE_PANGO, false)
AM_CONDITIONAL(USE_SCHRO, false)
AM_CONDITIONAL(USE_THEORA, false)
AM_CONDITIONAL(USE_VORBIS, false)
@@ -735,6 +742,7 @@ ext/gio/Makefile
ext/libvisual/Makefile
ext/ogg/Makefile
ext/pango/Makefile
ext/schroedinger/Makefile
ext/theora/Makefile
ext/vorbis/Makefile
gst-libs/Makefile

@@ -52,6 +52,12 @@ else
THEORA_DIR=
endif
if USE_SCHRO
SCHRO_DIR=schroedinger
else
SCHRO_DIR=
endif
SUBDIRS = \
$(ALSA_DIR) \
$(CDPARANOIA_DIR) \
@@ -60,6 +66,7 @@ SUBDIRS = \
$(LIBVISUAL_DIR) \
$(OGG_DIR) \
$(PANGO_DIR) \
$(SCHRO_DIR) \
$(THEORA_DIR) \
$(VORBIS_DIR)
@@ -71,5 +78,6 @@ DIST_SUBDIRS = \
libvisual \
ogg \
pango \
schroedinger \
theora \
vorbis

@@ -0,0 +1,24 @@
plugin_LTLIBRARIES = libgstschro.la
noinst_HEADERS = \
gstschroutils.h
libgstschro_la_SOURCES = \
gstschro.c \
gstschrodec.c \
gstschroenc.c \
gstschroparse.c \
gstschroutils.c
libgstschro_la_CFLAGS = \
$(GST_PLUGINS_BASE_CFLAGS) \
$(GST_CFLAGS) \
$(SCHRO_CFLAGS)
libgstschro_la_LIBADD = \
$(top_builddir)/gst-libs/gst/video/libgstvideo-$(GST_MAJORMINOR).la \
$(GST_LIBS) \
$(SCHRO_LIBS)
libgstschro_la_LDFLAGS = $(GST_PLUGIN_LDFLAGS)
libgstschro_la_LIBTOOLFLAGS = --tag=disable-static

@@ -0,0 +1,54 @@
/* GStreamer
* Copyright (C) 2005 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <schroedinger/schro.h>
GType gst_schro_enc_get_type (void);
GType gst_schro_dec_get_type (void);
GType gst_schro_parse_get_type (void);
GST_DEBUG_CATEGORY (schro_debug);
#define GST_CAT_DEFAULT schro_debug
static gboolean
plugin_init (GstPlugin * plugin)
{
schro_init ();
GST_DEBUG_CATEGORY_INIT (schro_debug, "schro", 0, "Schroedinger");
gst_element_register (plugin, "schrodec", GST_RANK_PRIMARY,
gst_schro_dec_get_type ());
gst_element_register (plugin, "schroparse", GST_RANK_NONE,
gst_schro_parse_get_type ());
gst_element_register (plugin, "schroenc", GST_RANK_PRIMARY,
gst_schro_enc_get_type ());
return TRUE;
}
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR,
"schro",
"Schro plugins",
plugin_init, VERSION, "LGPL", "schroedinger", GST_PACKAGE_ORIGIN)

@@ -0,0 +1,722 @@
/* Schrodinger
* Copyright (C) 2006 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include <gst/video/video.h>
#include <gst/video/gstbasevideodecoder.h>
#include <string.h>
#include <schroedinger/schro.h>
#include <math.h>
#include "gstschroutils.h"
#include <schroedinger/schroparse.h>
GST_DEBUG_CATEGORY_EXTERN (schro_debug);
#define GST_CAT_DEFAULT schro_debug
#define GST_TYPE_SCHRO_DEC \
(gst_schro_dec_get_type())
#define GST_SCHRO_DEC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SCHRO_DEC,GstSchroDec))
#define GST_SCHRO_DEC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SCHRO_DEC,GstSchroDecClass))
#define GST_IS_SCHRO_DEC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SCHRO_DEC))
#define GST_IS_SCHRO_DEC_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SCHRO_DEC))
typedef struct _GstSchroDec GstSchroDec;
typedef struct _GstSchroDecClass GstSchroDecClass;
struct _GstSchroDec
{
GstBaseVideoDecoder base_video_decoder;
SchroDecoder *decoder;
GstBuffer *seq_header_buffer;
};
struct _GstSchroDecClass
{
GstBaseVideoDecoder base_video_decoder_class;
};
/* GstSchroDec signals and args */
enum
{
LAST_SIGNAL
};
enum
{
ARG_0
};
static void gst_schro_dec_finalize (GObject * object);
static void gst_schro_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_schro_dec_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static gboolean gst_schro_dec_sink_query (GstPad * pad, GstQuery * query);
static gboolean gst_schro_dec_start (GstBaseVideoDecoder * dec);
static gboolean gst_schro_dec_stop (GstBaseVideoDecoder * dec);
static gboolean gst_schro_dec_reset (GstBaseVideoDecoder * dec);
static GstFlowReturn gst_schro_dec_parse_data (GstBaseVideoDecoder *
base_video_decoder, gboolean at_eos);
static int gst_schro_dec_scan_for_sync (GstBaseVideoDecoder *
base_video_decoder, gboolean at_eos, int offset, int n);
static GstFlowReturn gst_schro_dec_handle_frame (GstBaseVideoDecoder * decoder,
GstVideoFrame * frame);
static GstFlowReturn gst_schro_dec_finish (GstBaseVideoDecoder *
base_video_decoder, GstVideoFrame * frame);
static void gst_schrodec_send_tags (GstSchroDec * schro_dec);
static GstStaticPadTemplate gst_schro_dec_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-dirac")
);
static GstStaticPadTemplate gst_schro_dec_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ I420, YUY2, AYUV }"))
);
GST_BOILERPLATE (GstSchroDec, gst_schro_dec, GstBaseVideoDecoder,
GST_TYPE_BASE_VIDEO_DECODER);
static void
gst_schro_dec_base_init (gpointer g_class)
{
static GstElementDetails compress_details =
GST_ELEMENT_DETAILS ("Dirac Decoder",
"Codec/Decoder/Video",
"Decode Dirac streams",
"David Schleef <ds@schleef.org>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_schro_dec_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_schro_dec_sink_template));
gst_element_class_set_details (element_class, &compress_details);
}
static void
gst_schro_dec_class_init (GstSchroDecClass * klass)
{
GObjectClass *gobject_class;
GstBaseVideoDecoderClass *base_video_decoder_class;
GstElementClass *element_class;
gobject_class = G_OBJECT_CLASS (klass);
element_class = GST_ELEMENT_CLASS (klass);
base_video_decoder_class = GST_BASE_VIDEO_DECODER_CLASS (klass);
gobject_class->set_property = gst_schro_dec_set_property;
gobject_class->get_property = gst_schro_dec_get_property;
gobject_class->finalize = gst_schro_dec_finalize;
base_video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_schro_dec_start);
base_video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_schro_dec_stop);
base_video_decoder_class->reset = GST_DEBUG_FUNCPTR (gst_schro_dec_reset);
base_video_decoder_class->parse_data =
GST_DEBUG_FUNCPTR (gst_schro_dec_parse_data);
base_video_decoder_class->scan_for_sync =
GST_DEBUG_FUNCPTR (gst_schro_dec_scan_for_sync);
base_video_decoder_class->handle_frame =
GST_DEBUG_FUNCPTR (gst_schro_dec_handle_frame);
base_video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_schro_dec_finish);
}
static void
gst_schro_dec_init (GstSchroDec * schro_dec, GstSchroDecClass * klass)
{
GST_DEBUG ("gst_schro_dec_init");
gst_pad_set_query_function (GST_BASE_VIDEO_CODEC_SINK_PAD (schro_dec),
gst_schro_dec_sink_query);
schro_dec->decoder = schro_decoder_new ();
}
#define OGG_DIRAC_GRANULE_SHIFT 22
#define OGG_DIRAC_GRANULE_LOW_MASK ((1ULL<<OGG_DIRAC_GRANULE_SHIFT)-1)
static gint64
granulepos_to_frame (gint64 granulepos)
{
guint64 pt;
int dist_h;
int dist_l;
int dist;
int delay;
guint64 dt;
if (granulepos == -1)
return -1;
pt = ((granulepos >> 22) + (granulepos & OGG_DIRAC_GRANULE_LOW_MASK)) >> 9;
dist_h = (granulepos >> 22) & 0xff;
dist_l = granulepos & 0xff;
dist = (dist_h << 8) | dist_l;
delay = (granulepos >> 9) & 0x1fff;
dt = pt - delay;
return pt >> 1;
}
static gboolean
gst_schro_dec_sink_convert (GstPad * pad,
GstFormat src_format, gint64 src_value,
GstFormat * dest_format, gint64 * dest_value)
{
gboolean res = TRUE;
GstSchroDec *dec;
GstVideoState *state;
if (src_format == *dest_format) {
*dest_value = src_value;
return TRUE;
}
dec = GST_SCHRO_DEC (gst_pad_get_parent (pad));
/* FIXME: check if we are in a decoding state */
state = gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER (dec));
res = FALSE;
if (src_format == GST_FORMAT_DEFAULT && *dest_format == GST_FORMAT_TIME) {
if (state->fps_d != 0) {
*dest_value = gst_util_uint64_scale (granulepos_to_frame (src_value),
state->fps_d * GST_SECOND, state->fps_n);
res = TRUE;
} else {
res = FALSE;
}
}
gst_object_unref (dec);
return res;
}
static gboolean
gst_schro_dec_sink_query (GstPad * pad, GstQuery * query)
{
GstSchroDec *dec;
gboolean res = FALSE;
dec = GST_SCHRO_DEC (gst_pad_get_parent (pad));
switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONVERT:
{
GstFormat src_fmt, dest_fmt;
gint64 src_val, dest_val;
gst_query_parse_convert (query, &src_fmt, &src_val, &dest_fmt, &dest_val);
res = gst_schro_dec_sink_convert (pad, src_fmt, src_val, &dest_fmt,
&dest_val);
if (!res)
goto error;
gst_query_set_convert (query, src_fmt, src_val, dest_fmt, dest_val);
break;
}
default:
res = gst_pad_query_default (pad, query);
break;
}
done:
gst_object_unref (dec);
return res;
error:
GST_DEBUG_OBJECT (dec, "query failed");
goto done;
}
static gboolean
gst_schro_dec_start (GstBaseVideoDecoder * dec)
{
if (dec->codec_data) {
GST_DEBUG_OBJECT (dec, "codec data!");
}
return TRUE;
}
static gboolean
gst_schro_dec_stop (GstBaseVideoDecoder * dec)
{
return TRUE;
}
static gboolean
gst_schro_dec_reset (GstBaseVideoDecoder * dec)
{
GstSchroDec *schro_dec;
schro_dec = GST_SCHRO_DEC (dec);
GST_DEBUG ("reset");
if (schro_dec->decoder) {
schro_decoder_reset (schro_dec->decoder);
}
return TRUE;
}
static void
gst_schro_dec_finalize (GObject * object)
{
GstSchroDec *schro_dec;
g_return_if_fail (GST_IS_SCHRO_DEC (object));
schro_dec = GST_SCHRO_DEC (object);
if (schro_dec->decoder) {
schro_decoder_free (schro_dec->decoder);
schro_dec->decoder = NULL;
}
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static void
gst_schro_dec_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstSchroDec *src;
g_return_if_fail (GST_IS_SCHRO_DEC (object));
src = GST_SCHRO_DEC (object);
GST_DEBUG ("gst_schro_dec_set_property");
switch (prop_id) {
default:
break;
}
}
static void
gst_schro_dec_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstSchroDec *src;
g_return_if_fail (GST_IS_SCHRO_DEC (object));
src = GST_SCHRO_DEC (object);
switch (prop_id) {
default:
G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
break;
}
}
static void
parse_sequence_header (GstSchroDec * schro_dec, guint8 * data, int size)
{
SchroVideoFormat video_format;
int ret;
GstVideoState *state;
GST_DEBUG_OBJECT (schro_dec, "parse_sequence_header size=%d", size);
state = gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER (schro_dec));
schro_dec->seq_header_buffer = gst_buffer_new_and_alloc (size);
memcpy (GST_BUFFER_DATA (schro_dec->seq_header_buffer), data, size);
ret = schro_parse_decode_sequence_header (data + 13, size - 13,
&video_format);
if (ret) {
if (video_format.chroma_format == SCHRO_CHROMA_444) {
state->format = GST_VIDEO_FORMAT_AYUV;
} else if (video_format.chroma_format == SCHRO_CHROMA_422) {
state->format = GST_VIDEO_FORMAT_YUY2;
} else if (video_format.chroma_format == SCHRO_CHROMA_420) {
state->format = GST_VIDEO_FORMAT_I420;
}
state->fps_n = video_format.frame_rate_numerator;
state->fps_d = video_format.frame_rate_denominator;
GST_DEBUG_OBJECT (schro_dec, "Frame rate is %d/%d", state->fps_n,
state->fps_d);
state->width = video_format.width;
state->height = video_format.height;
GST_DEBUG ("Frame dimensions are %d x %d\n", state->width, state->height);
state->clean_width = video_format.clean_width;
state->clean_height = video_format.clean_height;
state->clean_offset_left = video_format.left_offset;
state->clean_offset_top = video_format.top_offset;
state->par_n = video_format.aspect_ratio_numerator;
state->par_d = video_format.aspect_ratio_denominator;
GST_DEBUG ("Pixel aspect ratio is %d/%d", state->par_n, state->par_d);
/* FIXME state points to what is actually in the decoder */
//gst_base_video_decoder_set_state (GST_BASE_VIDEO_DECODER (schro_dec),
// state);
} else {
GST_WARNING ("Failed to get frame rate from sequence header");
}
gst_schrodec_send_tags (schro_dec);
}
static GstFlowReturn
gst_schro_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
gboolean at_eos)
{
GstSchroDec *schro_decoder;
unsigned char header[SCHRO_PARSE_HEADER_SIZE];
int next;
int prev;
int parse_code;
GST_DEBUG_OBJECT (base_video_decoder, "parse_data");
schro_decoder = GST_SCHRO_DEC (base_video_decoder);
if (gst_adapter_available (base_video_decoder->input_adapter) <
SCHRO_PARSE_HEADER_SIZE) {
return GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA;
}
GST_DEBUG ("available %d",
gst_adapter_available (base_video_decoder->input_adapter));
gst_adapter_copy (base_video_decoder->input_adapter, header, 0,
SCHRO_PARSE_HEADER_SIZE);
parse_code = header[4];
next = GST_READ_UINT32_BE (header + 5);
prev = GST_READ_UINT32_BE (header + 9);
GST_DEBUG ("%08x %02x %08x %08x",
GST_READ_UINT32_BE (header), parse_code, next, prev);
if (memcmp (header, "BBCD", 4) != 0 ||
(next & 0xf0000000) || (prev & 0xf0000000)) {
gst_base_video_decoder_lost_sync (base_video_decoder);
return GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA;
}
if (SCHRO_PARSE_CODE_IS_END_OF_SEQUENCE (parse_code)) {
GstVideoFrame *frame;
if (next != 0 && next != SCHRO_PARSE_HEADER_SIZE) {
GST_WARNING ("next is not 0 or 13 in EOS packet (%d)", next);
}
gst_base_video_decoder_add_to_frame (base_video_decoder,
SCHRO_PARSE_HEADER_SIZE);
frame = base_video_decoder->current_frame;
frame->is_eos = TRUE;
SCHRO_DEBUG ("eos");
return gst_base_video_decoder_have_frame (base_video_decoder);
}
if (gst_adapter_available (base_video_decoder->input_adapter) < next) {
return GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA;
}
if (SCHRO_PARSE_CODE_IS_SEQ_HEADER (parse_code)) {
guint8 *data;
data = g_malloc (next);
gst_adapter_copy (base_video_decoder->input_adapter, data, 0, next);
parse_sequence_header (schro_decoder, data, next);
gst_base_video_decoder_set_sync_point (base_video_decoder);
if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_sink_timestamp)) {
base_video_decoder->current_frame->presentation_timestamp =
base_video_decoder->last_sink_timestamp;
GST_DEBUG ("got timestamp %lld", base_video_decoder->last_sink_timestamp);
} else if (base_video_decoder->last_sink_offset_end != -1) {
GstVideoState *state;
#if 0
/* FIXME perhaps should use this to determine if the granulepos
* is valid */
{
guint64 pt;
int dist_h;
int dist_l;
int dist;
int delay;
guint64 dt;
gint64 granulepos = base_video_decoder->last_sink_offset_end;
pt = ((granulepos >> 22) +
(granulepos & OGG_DIRAC_GRANULE_LOW_MASK)) >> 9;
dist_h = (granulepos >> 22) & 0xff;
dist_l = granulepos & 0xff;
dist = (dist_h << 8) | dist_l;
delay = (granulepos >> 9) & 0x1fff;
dt = pt - delay;
GST_DEBUG ("gp pt %lld dist %d delay %d dt %lld", pt, dist, delay, dt);
}
#endif
state =
gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER
(schro_decoder));
base_video_decoder->current_frame->presentation_timestamp =
gst_util_uint64_scale (granulepos_to_frame
(base_video_decoder->last_sink_offset_end), state->fps_d * GST_SECOND,
state->fps_n);
} else {
base_video_decoder->current_frame->presentation_timestamp = -1;
}
g_free (data);
}
if (schro_decoder->seq_header_buffer == NULL) {
gst_adapter_flush (base_video_decoder->input_adapter, next);
return GST_FLOW_OK;
}
if (SCHRO_PARSE_CODE_IS_PICTURE (parse_code)) {
GstVideoFrame *frame;
guint8 tmp[4];
frame = base_video_decoder->current_frame;
gst_adapter_copy (base_video_decoder->input_adapter, tmp,
SCHRO_PARSE_HEADER_SIZE, 4);
frame->presentation_frame_number = GST_READ_UINT32_BE (tmp);
gst_base_video_decoder_add_to_frame (base_video_decoder, next);
return gst_base_video_decoder_have_frame (base_video_decoder);
} else {
gst_base_video_decoder_add_to_frame (base_video_decoder, next);
}
return GST_FLOW_OK;
}
static int
gst_schro_dec_scan_for_sync (GstBaseVideoDecoder * base_video_decoder,
gboolean at_eos, int offset, int n)
{
GstAdapter *adapter = base_video_decoder->input_adapter;
int n_available;
n_available = gst_adapter_available (adapter) - offset;
if (n_available < 4) {
if (at_eos) {
return n_available;
} else {
return 0;
}
}
n_available -= 3;
return gst_adapter_masked_scan_uint32 (adapter, 0x42424344, 0xffffffff,
offset, MIN (n, n_available - 3));
}
static void
gst_schrodec_send_tags (GstSchroDec * schro_dec)
{
GstTagList *list;
list = gst_tag_list_new ();
gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
GST_TAG_VIDEO_CODEC, "Dirac", NULL);
gst_element_found_tags_for_pad (GST_ELEMENT_CAST (schro_dec),
GST_BASE_VIDEO_CODEC_SRC_PAD (schro_dec), list);
}
static GstFlowReturn
gst_schro_dec_process (GstSchroDec * schro_dec, gboolean eos)
{
gboolean go;
GstFlowReturn ret;
ret = GST_FLOW_OK;
go = TRUE;
while (go) {
int it;
it = schro_decoder_autoparse_wait (schro_dec->decoder);
switch (it) {
case SCHRO_DECODER_FIRST_ACCESS_UNIT:
break;
case SCHRO_DECODER_NEED_BITS:
GST_DEBUG ("need bits");
go = 0;
break;
case SCHRO_DECODER_NEED_FRAME:
{
GstBuffer *outbuf;
GstVideoState *state;
SchroFrame *schro_frame;
GstFlowReturn flow_ret;
int size;
GST_DEBUG ("need frame");
state =
gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER
(schro_dec));
size =
gst_video_format_get_size (state->format, state->width,
state->height);
flow_ret =
gst_pad_alloc_buffer_and_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD
(schro_dec), GST_BUFFER_OFFSET_NONE, size,
GST_PAD_CAPS (GST_BASE_VIDEO_CODEC_SRC_PAD (schro_dec)), &outbuf);
if (flow_ret != GST_FLOW_OK) {
go = FALSE;
ret = flow_ret;
break;
}
schro_frame = gst_schro_buffer_wrap (outbuf,
state->format, state->width, state->height);
schro_decoder_add_output_picture (schro_dec->decoder, schro_frame);
break;
}
case SCHRO_DECODER_OK:
{
SchroFrame *schro_frame;
SchroTag *tag;
GstVideoFrame *frame;
GST_DEBUG ("got frame");
tag = schro_decoder_get_picture_tag (schro_dec->decoder);
schro_frame = schro_decoder_pull (schro_dec->decoder);
frame = tag->value;
if (schro_frame) {
if (schro_frame->priv) {
GstFlowReturn flow_ret;
frame->src_buffer = gst_buffer_ref (GST_BUFFER (schro_frame->priv));
flow_ret =
gst_base_video_decoder_finish_frame (GST_BASE_VIDEO_DECODER
(schro_dec), frame);
if (flow_ret != GST_FLOW_OK) {
GST_DEBUG ("finish frame returned %d", flow_ret);
return flow_ret;
}
} else {
GST_DEBUG ("skipped frame");
}
schro_frame_unref (schro_frame);
}
if (!eos) {
go = FALSE;
}
}
break;
case SCHRO_DECODER_EOS:
GST_DEBUG ("eos");
go = FALSE;
break;
case SCHRO_DECODER_ERROR:
go = FALSE;
GST_DEBUG ("codec error");
ret = GST_FLOW_ERROR;
break;
}
}
return ret;
}
GstFlowReturn
gst_schro_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
GstVideoFrame * frame)
{
GstSchroDec *schro_dec;
int schro_ret;
SchroBuffer *input_buffer;
GstVideoState *state;
schro_dec = GST_SCHRO_DEC (base_video_decoder);
GST_DEBUG ("handle frame");
state = gst_base_video_decoder_get_state (base_video_decoder);
gst_base_video_decoder_set_src_caps (base_video_decoder);
input_buffer = gst_schro_wrap_gst_buffer (frame->sink_buffer);
frame->sink_buffer = NULL;
input_buffer->tag = schro_tag_new (frame, NULL);
schro_ret = schro_decoder_autoparse_push (schro_dec->decoder, input_buffer);
return gst_schro_dec_process (schro_dec, FALSE);
}
GstFlowReturn
gst_schro_dec_finish (GstBaseVideoDecoder * base_video_decoder,
GstVideoFrame * frame)
{
GstSchroDec *schro_dec;
schro_dec = GST_SCHRO_DEC (base_video_decoder);
GST_DEBUG ("finish");
gst_base_video_decoder_set_src_caps (base_video_decoder);
schro_decoder_autoparse_push_end_of_sequence (schro_dec->decoder);
return gst_schro_dec_process (schro_dec, TRUE);
}

@@ -0,0 +1,736 @@
/* Schrodinger
* Copyright (C) 2006 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/video/video.h>
#include <gst/video/gstbasevideoencoder.h>
#include <string.h>
#include <schroedinger/schro.h>
#include <schroedinger/schrobitstream.h>
#include <schroedinger/schrovirtframe.h>
#include <math.h>
#include "gstschroutils.h"
GST_DEBUG_CATEGORY_EXTERN (schro_debug);
#define GST_CAT_DEFAULT schro_debug
#define GST_TYPE_SCHRO_ENC \
(gst_schro_enc_get_type())
#define GST_SCHRO_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SCHRO_ENC,GstSchroEnc))
#define GST_SCHRO_ENC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SCHRO_ENC,GstSchroEncClass))
#define GST_IS_SCHRO_ENC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SCHRO_ENC))
#define GST_IS_SCHRO_ENC_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SCHRO_ENC))
typedef struct _GstSchroEnc GstSchroEnc;
typedef struct _GstSchroEncClass GstSchroEncClass;
typedef enum
{
GST_SCHRO_ENC_OUTPUT_OGG,
GST_SCHRO_ENC_OUTPUT_QUICKTIME,
GST_SCHRO_ENC_OUTPUT_AVI,
GST_SCHRO_ENC_OUTPUT_MPEG_TS,
GST_SCHRO_ENC_OUTPUT_MP4
} GstSchroEncOutputType;
struct _GstSchroEnc
{
GstBaseVideoEncoder base_encoder;
GstPad *sinkpad;
GstPad *srcpad;
/* video properties */
GstSchroEncOutputType output_format;
/* state */
SchroEncoder *encoder;
SchroVideoFormat *video_format;
GstVideoFrame *eos_frame;
GstBuffer *seq_header_buffer;
guint64 last_granulepos;
};
struct _GstSchroEncClass
{
GstBaseVideoEncoderClass parent_class;
};
enum
{
LAST_SIGNAL
};
enum
{
ARG_0
};
static void gst_schro_enc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec);
static void gst_schro_enc_get_property (GObject * object, guint prop_id,
GValue * value, GParamSpec * pspec);
static GstFlowReturn gst_schro_enc_process (GstSchroEnc * schro_enc);
static gboolean gst_schro_enc_set_format (GstBaseVideoEncoder *
base_video_encoder, GstVideoState * state);
static gboolean gst_schro_enc_start (GstBaseVideoEncoder * base_video_encoder);
static gboolean gst_schro_enc_stop (GstBaseVideoEncoder * base_video_encoder);
static gboolean gst_schro_enc_finish (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame);
static gboolean gst_schro_enc_handle_frame (GstBaseVideoEncoder *
base_video_encoder, GstVideoFrame * frame);
static GstFlowReturn gst_schro_enc_shape_output (GstBaseVideoEncoder *
base_video_encoder, GstVideoFrame * frame);
static GstCaps *gst_schro_enc_get_caps (GstBaseVideoEncoder *
base_video_encoder);
static GstStaticPadTemplate gst_schro_enc_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV ("{ I420, YV12, YUY2, UYVY, AYUV }"))
);
static GstStaticPadTemplate gst_schro_enc_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-dirac;video/x-qt-part;video/x-mp4-part")
);
GST_BOILERPLATE (GstSchroEnc, gst_schro_enc, GstBaseVideoEncoder,
GST_TYPE_BASE_VIDEO_ENCODER);
static void
gst_schro_enc_base_init (gpointer g_class)
{
static GstElementDetails schro_enc_details =
GST_ELEMENT_DETAILS ("Dirac Encoder",
"Codec/Encoder/Video",
"Encode raw video into Dirac stream",
"David Schleef <ds@schleef.org>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_schro_enc_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_schro_enc_sink_template));
gst_element_class_set_details (element_class, &schro_enc_details);
}
static void
gst_schro_enc_class_init (GstSchroEncClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *gstelement_class;
GstBaseVideoEncoderClass *basevideocoder_class;
int i;
gobject_class = G_OBJECT_CLASS (klass);
gstelement_class = GST_ELEMENT_CLASS (klass);
basevideocoder_class = GST_BASE_VIDEO_ENCODER_CLASS (klass);
gobject_class->set_property = gst_schro_enc_set_property;
gobject_class->get_property = gst_schro_enc_get_property;
for (i = 0; i < schro_encoder_get_n_settings (); i++) {
const SchroEncoderSetting *setting;
setting = schro_encoder_get_setting_info (i);
switch (setting->type) {
case SCHRO_ENCODER_SETTING_TYPE_BOOLEAN:
g_object_class_install_property (gobject_class, i + 1,
g_param_spec_boolean (setting->name, setting->name, setting->name,
setting->default_value, G_PARAM_READWRITE));
break;
case SCHRO_ENCODER_SETTING_TYPE_INT:
g_object_class_install_property (gobject_class, i + 1,
g_param_spec_int (setting->name, setting->name, setting->name,
setting->min, setting->max, setting->default_value,
G_PARAM_READWRITE));
break;
case SCHRO_ENCODER_SETTING_TYPE_ENUM:
g_object_class_install_property (gobject_class, i + 1,
g_param_spec_int (setting->name, setting->name, setting->name,
setting->min, setting->max, setting->default_value,
G_PARAM_READWRITE));
break;
case SCHRO_ENCODER_SETTING_TYPE_DOUBLE:
g_object_class_install_property (gobject_class, i + 1,
g_param_spec_double (setting->name, setting->name, setting->name,
setting->min, setting->max, setting->default_value,
G_PARAM_READWRITE));
break;
default:
break;
}
}
basevideocoder_class->set_format =
GST_DEBUG_FUNCPTR (gst_schro_enc_set_format);
basevideocoder_class->start = GST_DEBUG_FUNCPTR (gst_schro_enc_start);
basevideocoder_class->stop = GST_DEBUG_FUNCPTR (gst_schro_enc_stop);
basevideocoder_class->finish = GST_DEBUG_FUNCPTR (gst_schro_enc_finish);
basevideocoder_class->handle_frame =
GST_DEBUG_FUNCPTR (gst_schro_enc_handle_frame);
basevideocoder_class->shape_output =
GST_DEBUG_FUNCPTR (gst_schro_enc_shape_output);
basevideocoder_class->get_caps = GST_DEBUG_FUNCPTR (gst_schro_enc_get_caps);
}
static void
gst_schro_enc_init (GstSchroEnc * schro_enc, GstSchroEncClass * klass)
{
GST_DEBUG ("gst_schro_enc_init");
/* Normally, we'd create the encoder in ->start(), but we use the
* encoder to store object properties. So it needs to be created
* here. */
schro_enc->encoder = schro_encoder_new ();
schro_encoder_set_packet_assembly (schro_enc->encoder, TRUE);
schro_enc->video_format = schro_encoder_get_video_format (schro_enc->encoder);
}
static gboolean
gst_schro_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
GstVideoState * state)
{
GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder);
schro_video_format_set_std_video_format (schro_enc->video_format,
SCHRO_VIDEO_FORMAT_CUSTOM);
switch (state->format) {
case GST_VIDEO_FORMAT_I420:
case GST_VIDEO_FORMAT_YV12:
schro_enc->video_format->chroma_format = SCHRO_CHROMA_420;
break;
case GST_VIDEO_FORMAT_YUY2:
case GST_VIDEO_FORMAT_UYVY:
schro_enc->video_format->chroma_format = SCHRO_CHROMA_422;
break;
case GST_VIDEO_FORMAT_AYUV:
schro_enc->video_format->chroma_format = SCHRO_CHROMA_444;
break;
case GST_VIDEO_FORMAT_ARGB:
schro_enc->video_format->chroma_format = SCHRO_CHROMA_420;
break;
default:
g_assert_not_reached ();
}
schro_enc->video_format->frame_rate_numerator = state->fps_n;
schro_enc->video_format->frame_rate_denominator = state->fps_d;
schro_enc->video_format->width = state->width;
schro_enc->video_format->height = state->height;
schro_enc->video_format->clean_width = state->clean_width;
schro_enc->video_format->clean_height = state->clean_height;
schro_enc->video_format->left_offset = state->clean_offset_left;
schro_enc->video_format->top_offset = state->clean_offset_top;
schro_enc->video_format->aspect_ratio_numerator = state->par_n;
schro_enc->video_format->aspect_ratio_denominator = state->par_d;
schro_video_format_set_std_signal_range (schro_enc->video_format,
SCHRO_SIGNAL_RANGE_8BIT_VIDEO);
schro_video_format_set_std_colour_spec (schro_enc->video_format,
SCHRO_COLOUR_SPEC_HDTV);
schro_encoder_set_video_format (schro_enc->encoder, schro_enc->video_format);
schro_encoder_start (schro_enc->encoder);
schro_enc->seq_header_buffer =
gst_schro_wrap_schro_buffer (schro_encoder_encode_sequence_header
(schro_enc->encoder));
return TRUE;
}
static void
gst_schro_enc_set_property (GObject * object, guint prop_id,
const GValue * value, GParamSpec * pspec)
{
GstSchroEnc *src;
g_return_if_fail (GST_IS_SCHRO_ENC (object));
src = GST_SCHRO_ENC (object);
GST_DEBUG ("gst_schro_enc_set_property");
if (prop_id >= 1) {
const SchroEncoderSetting *setting;
setting = schro_encoder_get_setting_info (prop_id - 1);
switch (G_VALUE_TYPE (value)) {
case G_TYPE_DOUBLE:
schro_encoder_setting_set_double (src->encoder, setting->name,
g_value_get_double (value));
break;
case G_TYPE_INT:
schro_encoder_setting_set_double (src->encoder, setting->name,
g_value_get_int (value));
break;
case G_TYPE_BOOLEAN:
schro_encoder_setting_set_double (src->encoder, setting->name,
g_value_get_boolean (value));
break;
}
}
}
static void
gst_schro_enc_get_property (GObject * object, guint prop_id, GValue * value,
GParamSpec * pspec)
{
GstSchroEnc *src;
g_return_if_fail (GST_IS_SCHRO_ENC (object));
src = GST_SCHRO_ENC (object);
if (prop_id >= 1) {
const SchroEncoderSetting *setting;
setting = schro_encoder_get_setting_info (prop_id - 1);
switch (G_VALUE_TYPE (value)) {
case G_TYPE_DOUBLE:
g_value_set_double (value,
schro_encoder_setting_get_double (src->encoder, setting->name));
break;
case G_TYPE_INT:
g_value_set_int (value,
schro_encoder_setting_get_double (src->encoder, setting->name));
break;
case G_TYPE_BOOLEAN:
g_value_set_boolean (value,
schro_encoder_setting_get_double (src->encoder, setting->name));
break;
}
}
}
/*
* start is called once the input format is known. This function
* must decide on an output format and negotiate it.
*/
static gboolean
gst_schro_enc_start (GstBaseVideoEncoder * base_video_encoder)
{
GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder);
GstCaps *caps;
GstStructure *structure;
GST_DEBUG ("set_output_caps");
caps =
gst_pad_get_allowed_caps (GST_BASE_VIDEO_CODEC_SRC_PAD
(base_video_encoder));
if (gst_caps_is_empty (caps)) {
gst_caps_unref (caps);
return FALSE;
}
structure = gst_caps_get_structure (caps, 0);
if (gst_structure_has_name (structure, "video/x-dirac")) {
schro_enc->output_format = GST_SCHRO_ENC_OUTPUT_OGG;
} else if (gst_structure_has_name (structure, "video/x-qt-part")) {
schro_enc->output_format = GST_SCHRO_ENC_OUTPUT_QUICKTIME;
} else if (gst_structure_has_name (structure, "video/x-avi-part")) {
schro_enc->output_format = GST_SCHRO_ENC_OUTPUT_AVI;
} else if (gst_structure_has_name (structure, "video/x-mp4-part")) {
schro_enc->output_format = GST_SCHRO_ENC_OUTPUT_MP4;
} else {
return FALSE;
}
gst_base_video_encoder_set_latency_fields (base_video_encoder,
2 * (int) schro_encoder_setting_get_double (schro_enc->encoder,
"queue_depth"));
gst_caps_unref (caps);
return TRUE;
}
static gboolean
gst_schro_enc_stop (GstBaseVideoEncoder * base_video_encoder)
{
GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder);
if (schro_enc->encoder) {
schro_encoder_free (schro_enc->encoder);
schro_enc->encoder = NULL;
}
return TRUE;
}
static gboolean
gst_schro_enc_finish (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
{
GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder);
GST_DEBUG ("finish");
schro_enc->eos_frame = frame;
schro_encoder_end_of_stream (schro_enc->encoder);
gst_schro_enc_process (schro_enc);
return TRUE;
}
static gboolean
gst_schro_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
{
GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder);
SchroFrame *schro_frame;
GstFlowReturn ret;
const GstVideoState *state;
state = gst_base_video_encoder_get_state (base_video_encoder);
schro_frame = gst_schro_buffer_wrap (frame->sink_buffer,
state->format, state->width, state->height);
GST_DEBUG ("pushing frame %p", frame);
schro_encoder_push_frame_full (schro_enc->encoder, schro_frame, frame);
ret = gst_schro_enc_process (schro_enc);
return ret;
}
#if 0
static void
gst_caps_add_streamheader (GstCaps * caps, GList * list)
{
GValue array = { 0 };
GValue value = { 0 };
GstBuffer *buf;
GList *g;
g_value_init (&array, GST_TYPE_ARRAY);
for (g = g_list_first (list); g; g = g_list_next (list)) {
g_value_init (&value, GST_TYPE_BUFFER);
buf = gst_buffer_copy (GST_BUFFER (g->data));
gst_value_set_buffer (&value, buf);
gst_buffer_unref (buf);
gst_value_array_append_value (&array, &value);
g_value_unset (&value);
}
gst_structure_set_value (gst_caps_get_structure (caps, 0),
"streamheader", &array);
g_value_unset (&array);
}
#endif
static GstCaps *
gst_schro_enc_get_caps (GstBaseVideoEncoder * base_video_encoder)
{
GstCaps *caps;
const GstVideoState *state;
GstSchroEnc *schro_enc;
schro_enc = GST_SCHRO_ENC (base_video_encoder);
state = gst_base_video_encoder_get_state (base_video_encoder);
if (schro_enc->output_format == GST_SCHRO_ENC_OUTPUT_OGG) {
caps = gst_caps_new_simple ("video/x-dirac",
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
GST_BUFFER_FLAG_SET (schro_enc->seq_header_buffer, GST_BUFFER_FLAG_IN_CAPS);
{
GValue array = { 0 };
GValue value = { 0 };
GstBuffer *buf;
int size;
g_value_init (&array, GST_TYPE_ARRAY);
g_value_init (&value, GST_TYPE_BUFFER);
size = GST_BUFFER_SIZE (schro_enc->seq_header_buffer);
buf = gst_buffer_new_and_alloc (size + SCHRO_PARSE_HEADER_SIZE);
memcpy (GST_BUFFER_DATA (buf),
GST_BUFFER_DATA (schro_enc->seq_header_buffer), size);
GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 0, 0x42424344);
GST_WRITE_UINT8 (GST_BUFFER_DATA (buf) + size + 4,
SCHRO_PARSE_CODE_END_OF_SEQUENCE);
GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 5, 0);
GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 9, size);
gst_value_set_buffer (&value, buf);
gst_buffer_unref (buf);
gst_value_array_append_value (&array, &value);
gst_structure_set_value (gst_caps_get_structure (caps, 0),
"streamheader", &array);
g_value_unset (&value);
g_value_unset (&array);
}
} else if (schro_enc->output_format == GST_SCHRO_ENC_OUTPUT_QUICKTIME) {
caps = gst_caps_new_simple ("video/x-qt-part",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
} else if (schro_enc->output_format == GST_SCHRO_ENC_OUTPUT_AVI) {
caps = gst_caps_new_simple ("video/x-avi-part",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
} else if (schro_enc->output_format == GST_SCHRO_ENC_OUTPUT_MPEG_TS) {
caps = gst_caps_new_simple ("video/x-mpegts-part",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
} else if (schro_enc->output_format == GST_SCHRO_ENC_OUTPUT_MP4) {
caps = gst_caps_new_simple ("video/x-mp4-part",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
} else {
g_assert_not_reached ();
}
return caps;
}
static GstFlowReturn
gst_schro_enc_shape_output_ogg (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
{
GstSchroEnc *schro_enc;
int dpn;
int delay;
int dist;
int pt;
int dt;
guint64 granulepos_hi;
guint64 granulepos_low;
GstBuffer *buf = frame->src_buffer;
schro_enc = GST_SCHRO_ENC (base_video_encoder);
dpn = frame->decode_frame_number;
pt = frame->presentation_frame_number * 2;
dt = frame->decode_frame_number * 2;
delay = pt - dt;
dist = frame->distance_from_sync;
GST_DEBUG ("sys %d dpn %d pt %d dt %d delay %d dist %d",
(int) frame->system_frame_number,
(int) frame->decode_frame_number, pt, dt, delay, dist);
granulepos_hi = (((uint64_t) pt - delay) << 9) | ((dist >> 8));
granulepos_low = (delay << 9) | (dist & 0xff);
GST_DEBUG ("granulepos %lld:%lld", granulepos_hi, granulepos_low);
if (frame->is_eos) {
GST_BUFFER_OFFSET_END (buf) = schro_enc->last_granulepos;
} else {
schro_enc->last_granulepos = (granulepos_hi << 22) | (granulepos_low);
GST_BUFFER_OFFSET_END (buf) = schro_enc->last_granulepos;
}
gst_buffer_set_caps (buf, base_video_encoder->caps);
return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
}
static GstFlowReturn
gst_schro_enc_shape_output_quicktime (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
{
GstBuffer *buf = frame->src_buffer;
const GstVideoState *state;
state = gst_base_video_encoder_get_state (base_video_encoder);
GST_BUFFER_TIMESTAMP (buf) = gst_video_state_get_timestamp (state,
frame->presentation_frame_number);
GST_BUFFER_DURATION (buf) = gst_video_state_get_timestamp (state,
frame->presentation_frame_number + 1) - GST_BUFFER_TIMESTAMP (buf);
GST_BUFFER_OFFSET_END (buf) = gst_video_state_get_timestamp (state,
frame->system_frame_number);
GST_BUFFER_OFFSET (buf) = GST_CLOCK_TIME_NONE;
if (frame->is_sync_point &&
frame->presentation_frame_number == frame->system_frame_number) {
GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
} else {
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
}
gst_buffer_set_caps (buf, base_video_encoder->caps);
return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
}
static GstFlowReturn
gst_schro_enc_shape_output_mp4 (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
{
GstBuffer *buf = frame->src_buffer;
const GstVideoState *state;
state = gst_base_video_encoder_get_state (base_video_encoder);
GST_BUFFER_TIMESTAMP (buf) = gst_video_state_get_timestamp (state,
frame->presentation_frame_number);
GST_BUFFER_DURATION (buf) = gst_video_state_get_timestamp (state,
frame->presentation_frame_number + 1) - GST_BUFFER_TIMESTAMP (buf);
GST_BUFFER_OFFSET_END (buf) = gst_video_state_get_timestamp (state,
frame->decode_frame_number);
GST_BUFFER_OFFSET (buf) = GST_CLOCK_TIME_NONE;
GST_BUFFER_OFFSET_END (buf) = gst_video_state_get_timestamp (state,
frame->system_frame_number);
if (frame->is_sync_point &&
frame->presentation_frame_number == frame->system_frame_number) {
GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
} else {
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
}
gst_buffer_set_caps (buf, base_video_encoder->caps);
return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf);
}
static GstFlowReturn
gst_schro_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
GstVideoFrame * frame)
{
GstSchroEnc *schro_enc;
schro_enc = GST_SCHRO_ENC (base_video_encoder);
switch (schro_enc->output_format) {
case GST_SCHRO_ENC_OUTPUT_OGG:
return gst_schro_enc_shape_output_ogg (base_video_encoder, frame);
case GST_SCHRO_ENC_OUTPUT_QUICKTIME:
return gst_schro_enc_shape_output_quicktime (base_video_encoder, frame);
case GST_SCHRO_ENC_OUTPUT_MP4:
return gst_schro_enc_shape_output_mp4 (base_video_encoder, frame);
default:
g_assert_not_reached ();
break;
}
return GST_FLOW_ERROR;
}
static GstFlowReturn
gst_schro_enc_process (GstSchroEnc * schro_enc)
{
SchroBuffer *encoded_buffer;
GstVideoFrame *frame;
GstFlowReturn ret;
int presentation_frame;
void *voidptr;
GstBaseVideoEncoder *base_video_encoder = GST_BASE_VIDEO_ENCODER (schro_enc);
GST_DEBUG ("process");
while (1) {
switch (schro_encoder_wait (schro_enc->encoder)) {
case SCHRO_STATE_NEED_FRAME:
return GST_FLOW_OK;
case SCHRO_STATE_END_OF_STREAM:
GST_DEBUG ("EOS");
return GST_FLOW_OK;
case SCHRO_STATE_HAVE_BUFFER:
voidptr = NULL;
encoded_buffer = schro_encoder_pull_full (schro_enc->encoder,
&presentation_frame, &voidptr);
frame = voidptr;
if (encoded_buffer == NULL) {
GST_DEBUG ("encoder_pull returned NULL");
/* FIXME This shouldn't happen */
return GST_FLOW_ERROR;
}
if (voidptr == NULL) {
GST_DEBUG ("got eos");
frame = schro_enc->eos_frame;
}
if (SCHRO_PARSE_CODE_IS_SEQ_HEADER (encoded_buffer->data[4])) {
frame->is_sync_point = TRUE;
}
frame->src_buffer = gst_schro_wrap_schro_buffer (encoded_buffer);
ret = gst_base_video_encoder_finish_frame (base_video_encoder, frame);
if (ret != GST_FLOW_OK) {
GST_DEBUG ("pad_push returned %d", ret);
return ret;
}
break;
case SCHRO_STATE_AGAIN:
break;
}
}
return GST_FLOW_OK;
}

@@ -0,0 +1,610 @@
/* Schrodinger
* Copyright (C) 2006 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include <gst/video/video.h>
#include <gst/video/gstbasevideoparse.h>
#include <string.h>
#include <schroedinger/schro.h>
#include <liboil/liboil.h>
#include <math.h>
#include <schroedinger/schroparse.h>
GST_DEBUG_CATEGORY_EXTERN (schro_debug);
#define GST_CAT_DEFAULT schro_debug
#define GST_TYPE_SCHRO_PARSE \
(gst_schro_parse_get_type())
#define GST_SCHRO_PARSE(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_SCHRO_PARSE,GstSchroParse))
#define GST_SCHRO_PARSE_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_SCHRO_PARSE,GstSchroParseClass))
#define GST_IS_SCHRO_PARSE(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_SCHRO_PARSE))
#define GST_IS_SCHRO_PARSE_CLASS(obj) \
(G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_SCHRO_PARSE))
typedef struct _GstSchroParse GstSchroParse;
typedef struct _GstSchroParseClass GstSchroParseClass;
typedef enum
{
GST_SCHRO_PARSE_OUTPUT_OGG,
GST_SCHRO_PARSE_OUTPUT_QUICKTIME,
GST_SCHRO_PARSE_OUTPUT_AVI,
GST_SCHRO_PARSE_OUTPUT_MPEG_TS,
GST_SCHRO_PARSE_OUTPUT_MP4
} GstSchroParseOutputType;
struct _GstSchroParse
{
GstBaseVideoParse base_video_parse;
GstPad *sinkpad, *srcpad;
GstSchroParseOutputType output_format;
GstBuffer *seq_header_buffer;
/* state */
gboolean have_picture;
int buf_picture_number;
int seq_hdr_picture_number;
int picture_number;
guint64 last_granulepos;
int bytes_per_picture;
};
struct _GstSchroParseClass
{
GstBaseVideoParseClass base_video_parse_class;
};
/* GstSchroParse signals and args */
enum
{
LAST_SIGNAL
};
enum
{
ARG_0
};
static void gst_schro_parse_finalize (GObject * object);
static gboolean gst_schro_parse_start (GstBaseVideoParse * base_video_parse);
static gboolean gst_schro_parse_stop (GstBaseVideoParse * base_video_parse);
static gboolean gst_schro_parse_reset (GstBaseVideoParse * base_video_parse);
static int gst_schro_parse_scan_for_sync (GstAdapter * adapter,
gboolean at_eos, int offset, int n);
static gboolean gst_schro_parse_parse_data (GstBaseVideoParse *
base_video_parse, gboolean at_eos);
static gboolean gst_schro_parse_shape_output (GstBaseVideoParse *
base_video_parse, GstVideoFrame * frame);
static GstCaps *gst_schro_parse_get_caps (GstBaseVideoParse * base_video_parse);
static GstStaticPadTemplate gst_schro_parse_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK,
GST_PAD_ALWAYS,
GST_STATIC_CAPS ("video/x-dirac")
);
static GstStaticPadTemplate gst_schro_parse_src_template =
GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC,
GST_PAD_ALWAYS,
GST_STATIC_CAPS
("video/x-dirac;video/x-qt-part;video/x-avi-part;video/x-mp4-part")
);
GST_BOILERPLATE (GstSchroParse, gst_schro_parse, GstBaseVideoParse,
GST_TYPE_BASE_VIDEO_PARSE);
static void
gst_schro_parse_base_init (gpointer g_class)
{
static GstElementDetails compress_details =
GST_ELEMENT_DETAILS ("Dirac Parser",
"Codec/Parser/Video",
"Parse Dirac streams",
"David Schleef <ds@schleef.org>");
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_schro_parse_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_schro_parse_sink_template));
gst_element_class_set_details (element_class, &compress_details);
}
static void
gst_schro_parse_class_init (GstSchroParseClass * klass)
{
GObjectClass *gobject_class;
GstElementClass *element_class;
GstBaseVideoParseClass *base_video_parse_class;
gobject_class = G_OBJECT_CLASS (klass);
element_class = GST_ELEMENT_CLASS (klass);
base_video_parse_class = GST_BASE_VIDEO_PARSE_CLASS (klass);
gobject_class->finalize = gst_schro_parse_finalize;
base_video_parse_class->start = GST_DEBUG_FUNCPTR (gst_schro_parse_start);
base_video_parse_class->stop = GST_DEBUG_FUNCPTR (gst_schro_parse_stop);
base_video_parse_class->reset = GST_DEBUG_FUNCPTR (gst_schro_parse_reset);
base_video_parse_class->parse_data =
GST_DEBUG_FUNCPTR (gst_schro_parse_parse_data);
base_video_parse_class->shape_output =
GST_DEBUG_FUNCPTR (gst_schro_parse_shape_output);
base_video_parse_class->scan_for_sync =
GST_DEBUG_FUNCPTR (gst_schro_parse_scan_for_sync);
base_video_parse_class->get_caps =
GST_DEBUG_FUNCPTR (gst_schro_parse_get_caps);
}
static void
gst_schro_parse_init (GstSchroParse * schro_parse, GstSchroParseClass * klass)
{
GstBaseVideoParse *base_video_parse = GST_BASE_VIDEO_PARSE (schro_parse);
GST_DEBUG ("gst_schro_parse_init");
schro_parse->output_format = GST_SCHRO_PARSE_OUTPUT_OGG;
base_video_parse->reorder_depth = 2;
}
static gboolean
gst_schro_parse_reset (GstBaseVideoParse * base_video_parse)
{
GstSchroParse *schro_parse;
schro_parse = GST_SCHRO_PARSE (base_video_parse);
GST_DEBUG ("reset");
return TRUE;
}
static void
gst_schro_parse_finalize (GObject * object)
{
GstSchroParse *schro_parse;
g_return_if_fail (GST_IS_SCHRO_PARSE (object));
schro_parse = GST_SCHRO_PARSE (object);
G_OBJECT_CLASS (parent_class)->finalize (object);
}
static gboolean
gst_schro_parse_start (GstBaseVideoParse * base_video_parse)
{
GstSchroParse *schro_parse = GST_SCHRO_PARSE (base_video_parse);
GstCaps *caps;
GstStructure *structure;
GST_DEBUG ("start");
caps =
gst_pad_get_allowed_caps (GST_BASE_VIDEO_CODEC_SRC_PAD
(base_video_parse));
if (gst_caps_is_empty (caps)) {
gst_caps_unref (caps);
return FALSE;
}
structure = gst_caps_get_structure (caps, 0);
if (gst_structure_has_name (structure, "video/x-dirac")) {
schro_parse->output_format = GST_SCHRO_PARSE_OUTPUT_OGG;
} else if (gst_structure_has_name (structure, "video/x-qt-part")) {
schro_parse->output_format = GST_SCHRO_PARSE_OUTPUT_QUICKTIME;
} else if (gst_structure_has_name (structure, "video/x-avi-part")) {
schro_parse->output_format = GST_SCHRO_PARSE_OUTPUT_AVI;
} else if (gst_structure_has_name (structure, "video/x-mpegts-part")) {
schro_parse->output_format = GST_SCHRO_PARSE_OUTPUT_MPEG_TS;
} else if (gst_structure_has_name (structure, "video/x-mp4-part")) {
schro_parse->output_format = GST_SCHRO_PARSE_OUTPUT_MP4;
} else {
return FALSE;
}
gst_caps_unref (caps);
return TRUE;
}
static gboolean
gst_schro_parse_stop (GstBaseVideoParse * base_video_parse)
{
return TRUE;
}
static void
parse_sequence_header (GstSchroParse * schro_parse, guint8 * data, int size)
{
SchroVideoFormat video_format;
int ret;
GstVideoState *state;
GST_DEBUG ("parse_sequence_header size=%d", size);
state = gst_base_video_parse_get_state (GST_BASE_VIDEO_PARSE (schro_parse));
schro_parse->seq_header_buffer = gst_buffer_new_and_alloc (size);
memcpy (GST_BUFFER_DATA (schro_parse->seq_header_buffer), data, size);
ret = schro_parse_decode_sequence_header (data + 13, size - 13,
&video_format);
if (ret) {
state->fps_n = video_format.frame_rate_numerator;
state->fps_d = video_format.frame_rate_denominator;
GST_DEBUG ("Frame rate is %d/%d", state->fps_n, state->fps_d);
state->width = video_format.width;
state->height = video_format.height;
GST_DEBUG ("Frame dimensions are %d x %d\n", state->width, state->height);
state->clean_width = video_format.clean_width;
state->clean_height = video_format.clean_height;
state->clean_offset_left = video_format.left_offset;
state->clean_offset_top = video_format.top_offset;
state->par_n = video_format.aspect_ratio_numerator;
state->par_d = video_format.aspect_ratio_denominator;
GST_DEBUG ("Pixel aspect ratio is %d/%d", state->par_n, state->par_d);
gst_base_video_parse_set_state (GST_BASE_VIDEO_PARSE (schro_parse), state);
} else {
GST_WARNING ("Failed to get frame rate from sequence header");
}
}
static int
gst_schro_parse_scan_for_sync (GstAdapter * adapter, gboolean at_eos,
int offset, int n)
{
int n_available = gst_adapter_available (adapter) - offset;
if (n_available < 4) {
if (at_eos) {
return n_available;
} else {
return 0;
}
}
n_available -= 3;
return gst_adapter_masked_scan_uint32 (adapter, 0x42424344, 0xffffffff,
offset, MIN (n, n_available - 3));
}
static GstFlowReturn
gst_schro_parse_parse_data (GstBaseVideoParse * base_video_parse,
gboolean at_eos)
{
GstSchroParse *schro_parse;
unsigned char header[SCHRO_PARSE_HEADER_SIZE];
int next;
int prev;
int parse_code;
GST_DEBUG ("parse_data");
schro_parse = GST_SCHRO_PARSE (base_video_parse);
if (gst_adapter_available (base_video_parse->input_adapter) <
SCHRO_PARSE_HEADER_SIZE) {
return GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA;
}
GST_DEBUG ("available %d",
gst_adapter_available (base_video_parse->input_adapter));
gst_adapter_copy (base_video_parse->input_adapter, header, 0,
SCHRO_PARSE_HEADER_SIZE);
parse_code = header[4];
next = GST_READ_UINT32_BE (header + 5);
prev = GST_READ_UINT32_BE (header + 9);
GST_DEBUG ("%08x %02x %08x %08x",
GST_READ_UINT32_BE (header), parse_code, next, prev);
if (memcmp (header, "BBCD", 4) != 0 ||
(next & 0xf0000000) || (prev & 0xf0000000)) {
gst_base_video_parse_lost_sync (base_video_parse);
return GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA;
}
if (SCHRO_PARSE_CODE_IS_END_OF_SEQUENCE (parse_code)) {
GstVideoFrame *frame;
if (next != 0 && next != SCHRO_PARSE_HEADER_SIZE) {
GST_WARNING ("next is not 0 or 13 in EOS packet (%d)", next);
}
gst_base_video_parse_add_to_frame (base_video_parse,
SCHRO_PARSE_HEADER_SIZE);
frame = gst_base_video_parse_get_frame (base_video_parse);
frame->is_eos = TRUE;
SCHRO_DEBUG ("eos");
return gst_base_video_parse_finish_frame (base_video_parse);
}
if (gst_adapter_available (base_video_parse->input_adapter) < next) {
return GST_BASE_VIDEO_PARSE_FLOW_NEED_DATA;
}
if (SCHRO_PARSE_CODE_IS_SEQ_HEADER (parse_code)) {
guint8 *data;
data = g_malloc (next);
gst_adapter_copy (base_video_parse->input_adapter, data, 0, next);
parse_sequence_header (schro_parse, data, next);
base_video_parse->current_frame->is_sync_point = TRUE;
g_free (data);
}
if (schro_parse->seq_header_buffer == NULL) {
gst_adapter_flush (base_video_parse->input_adapter, next);
return GST_FLOW_OK;
}
if (SCHRO_PARSE_CODE_IS_PICTURE (parse_code)) {
GstVideoFrame *frame;
guint8 tmp[4];
frame = gst_base_video_parse_get_frame (base_video_parse);
#if 0
if (GST_CLOCK_TIME_IS_VALID (GST_BUFFER_TIMESTAMP (buf))) {
frame->presentation_timestamp = GST_BUFFER_TIMESTAMP (buf);
}
#endif
gst_adapter_copy (base_video_parse->input_adapter, tmp,
SCHRO_PARSE_HEADER_SIZE, 4);
frame->presentation_frame_number = GST_READ_UINT32_BE (tmp);
gst_base_video_parse_add_to_frame (base_video_parse, next);
return gst_base_video_parse_finish_frame (base_video_parse);
} else {
gst_base_video_parse_add_to_frame (base_video_parse, next);
}
return GST_FLOW_OK;
}
static GstFlowReturn
gst_schro_parse_shape_output_ogg (GstBaseVideoParse * base_video_parse,
GstVideoFrame * frame)
{
GstSchroParse *schro_parse;
int dpn;
int delay;
int dist;
int pt;
int dt;
guint64 granulepos_hi;
guint64 granulepos_low;
GstBuffer *buf = frame->src_buffer;
schro_parse = GST_SCHRO_PARSE (base_video_parse);
dpn = frame->decode_frame_number;
pt = frame->presentation_frame_number * 2;
dt = frame->decode_frame_number * 2;
delay = pt - dt;
dist = frame->distance_from_sync;
GST_DEBUG ("sys %d dpn %d pt %d dt %d delay %d dist %d",
(int) frame->system_frame_number,
(int) frame->decode_frame_number, pt, dt, delay, dist);
granulepos_hi = (((guint64) pt - delay) << 9) | ((dist >> 8));
granulepos_low = (delay << 9) | (dist & 0xff);
GST_DEBUG ("granulepos %lld:%lld", granulepos_hi, granulepos_low);
if (frame->is_eos) {
GST_BUFFER_OFFSET_END (buf) = schro_parse->last_granulepos;
} else {
schro_parse->last_granulepos = (granulepos_hi << 22) | (granulepos_low);
GST_BUFFER_OFFSET_END (buf) = schro_parse->last_granulepos;
}
return gst_base_video_parse_push (base_video_parse, buf);
}
static GstFlowReturn
gst_schro_parse_shape_output_quicktime (GstBaseVideoParse * base_video_parse,
GstVideoFrame * frame)
{
GstBuffer *buf = frame->src_buffer;
const GstVideoState *state;
state = gst_base_video_parse_get_state (base_video_parse);
GST_BUFFER_OFFSET_END (buf) = gst_video_state_get_timestamp (state,
frame->system_frame_number);
if (frame->is_sync_point &&
frame->presentation_frame_number == frame->system_frame_number) {
GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
GST_DEBUG ("sync point");
} else {
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
}
return gst_base_video_parse_push (base_video_parse, buf);
}
static GstFlowReturn
gst_schro_parse_shape_output_mpeg_ts (GstBaseVideoParse * base_video_parse,
GstVideoFrame * frame)
{
GstBuffer *buf = frame->src_buffer;
const GstVideoState *state;
state = gst_base_video_parse_get_state (base_video_parse);
return gst_base_video_parse_push (base_video_parse, buf);
}
static GstFlowReturn
gst_schro_parse_shape_output (GstBaseVideoParse * base_video_parse,
GstVideoFrame * frame)
{
GstSchroParse *schro_parse;
schro_parse = GST_SCHRO_PARSE (base_video_parse);
switch (schro_parse->output_format) {
case GST_SCHRO_PARSE_OUTPUT_OGG:
return gst_schro_parse_shape_output_ogg (base_video_parse, frame);
case GST_SCHRO_PARSE_OUTPUT_QUICKTIME:
return gst_schro_parse_shape_output_quicktime (base_video_parse, frame);
case GST_SCHRO_PARSE_OUTPUT_MPEG_TS:
return gst_schro_parse_shape_output_mpeg_ts (base_video_parse, frame);
default:
break;
}
return GST_FLOW_ERROR;
}
static GstCaps *
gst_schro_parse_get_caps (GstBaseVideoParse * base_video_parse)
{
GstCaps *caps;
GstVideoState *state;
GstSchroParse *schro_parse;
schro_parse = GST_SCHRO_PARSE (base_video_parse);
state = gst_base_video_parse_get_state (base_video_parse);
if (schro_parse->output_format == GST_SCHRO_PARSE_OUTPUT_OGG) {
caps = gst_caps_new_simple ("video/x-dirac",
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
GST_BUFFER_FLAG_SET (schro_parse->seq_header_buffer,
GST_BUFFER_FLAG_IN_CAPS);
{
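/* Build the streamheader buffer: the raw sequence header followed by a
 * synthesized end-of-sequence parse-info header whose previous-offset
 * field points back to the start of the sequence header. */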
GValue array = { 0 };
GValue value = { 0 };
GstBuffer *buf;
int size;
g_value_init (&array, GST_TYPE_ARRAY);
g_value_init (&value, GST_TYPE_BUFFER);
size = GST_BUFFER_SIZE (schro_parse->seq_header_buffer);
buf = gst_buffer_new_and_alloc (size + SCHRO_PARSE_HEADER_SIZE);
memcpy (GST_BUFFER_DATA (buf),
GST_BUFFER_DATA (schro_parse->seq_header_buffer), size);
GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 0, 0x42424344);
GST_WRITE_UINT8 (GST_BUFFER_DATA (buf) + size + 4,
SCHRO_PARSE_CODE_END_OF_SEQUENCE);
GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 5, 0);
GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 9, size);
gst_value_set_buffer (&value, buf);
gst_buffer_unref (buf);
gst_value_array_append_value (&array, &value);
gst_structure_set_value (gst_caps_get_structure (caps, 0),
"streamheader", &array);
g_value_unset (&value);
g_value_unset (&array);
}
} else if (schro_parse->output_format == GST_SCHRO_PARSE_OUTPUT_QUICKTIME) {
caps = gst_caps_new_simple ("video/x-qt-part",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
} else if (schro_parse->output_format == GST_SCHRO_PARSE_OUTPUT_AVI) {
caps = gst_caps_new_simple ("video/x-avi-part",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
} else if (schro_parse->output_format == GST_SCHRO_PARSE_OUTPUT_MPEG_TS) {
caps = gst_caps_new_simple ("video/x-mpegts-part",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
} else if (schro_parse->output_format == GST_SCHRO_PARSE_OUTPUT_MP4) {
caps = gst_caps_new_simple ("video/x-mp4-part",
"format", GST_TYPE_FOURCC, GST_MAKE_FOURCC ('d', 'r', 'a', 'c'),
"width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height,
"framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, state->par_n,
state->par_d, NULL);
} else {
g_assert_not_reached ();
}
return caps;
}

View file

@ -0,0 +1,149 @@
/* Schrodinger
* Copyright (C) 2008 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
//#define SCHRO_ENABLE_UNSTABLE_API
#include <gst/gst.h>
#include <gst/video/video.h>
#include <schroedinger/schro.h>
#include <schroedinger/schrobitstream.h>
#include <schroedinger/schrovirtframe.h>
#include <math.h>
#include <string.h>
GST_DEBUG_CATEGORY_EXTERN (schro_debug);
#define GST_CAT_DEFAULT schro_debug
static void
gst_schro_frame_free (SchroFrame * frame, void *priv)
{
gst_buffer_unref (GST_BUFFER (priv));
}
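/* Wrap the contents of a GstBuffer in a SchroFrame without copying; the
 * frame's free callback drops the reference to the buffer, so this
 * consumes the caller's reference. */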
SchroFrame *
gst_schro_buffer_wrap (GstBuffer * buf, GstVideoFormat format, int width,
int height)
{
SchroFrame *frame;
switch (format) {
case GST_VIDEO_FORMAT_I420:
frame =
schro_frame_new_from_data_I420 (GST_BUFFER_DATA (buf), width, height);
break;
case GST_VIDEO_FORMAT_YV12:
frame =
schro_frame_new_from_data_YV12 (GST_BUFFER_DATA (buf), width, height);
break;
case GST_VIDEO_FORMAT_YUY2:
frame =
schro_frame_new_from_data_YUY2 (GST_BUFFER_DATA (buf), width, height);
break;
case GST_VIDEO_FORMAT_UYVY:
frame =
schro_frame_new_from_data_UYVY (GST_BUFFER_DATA (buf), width, height);
break;
case GST_VIDEO_FORMAT_AYUV:
frame =
schro_frame_new_from_data_AYUV (GST_BUFFER_DATA (buf), width, height);
break;
#if 0
case GST_VIDEO_FORMAT_ARGB:
{
SchroFrame *rgbframe =
schro_frame_new_from_data_AYUV (GST_BUFFER_DATA (buf), width, height);
SchroFrame *vframe1;
SchroFrame *vframe2;
SchroFrame *vframe3;
vframe1 = schro_virt_frame_new_unpack (rgbframe);
vframe2 = schro_virt_frame_new_color_matrix (vframe1);
vframe3 =
schro_virt_frame_new_subsample (vframe2, SCHRO_FRAME_FORMAT_U8_420);
frame = schro_frame_new_and_alloc (NULL, SCHRO_FRAME_FORMAT_U8_420,
width, height);
schro_virt_frame_render (vframe3, frame);
schro_frame_unref (vframe3);
}
break;
#endif
default:
g_assert_not_reached ();
}
schro_frame_set_free_callback (frame, gst_schro_frame_free, buf);
return frame;
}
#ifdef GST_BUFFER_FREE_FUNC
static void
schro_buf_free_func (gpointer priv)
{
SchroBuffer *buffer = (SchroBuffer *) priv;
schro_buffer_unref (buffer);
}
#endif
/* takes the reference */
GstBuffer *
gst_schro_wrap_schro_buffer (SchroBuffer * buffer)
{
GstBuffer *gstbuf;
#ifdef GST_BUFFER_FREE_FUNC
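/* Zero-copy: point the GstBuffer at the SchroBuffer's data and release the
 * SchroBuffer from the buffer's free function. */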
gstbuf = gst_buffer_new ();
GST_BUFFER_DATA (gstbuf) = buffer->data;
GST_BUFFER_SIZE (gstbuf) = buffer->length;
GST_BUFFER_MALLOCDATA (gstbuf) = (void *) buffer;
GST_BUFFER_FREE_FUNC (gstbuf) = schro_buf_free_func;
#else
/* No buffer free-function support: copy the data and drop the SchroBuffer
 * reference that was handed to us, so it is not leaked. */
gstbuf = gst_buffer_new_and_alloc (buffer->length);
memcpy (GST_BUFFER_DATA (gstbuf), buffer->data, buffer->length);
schro_buffer_unref (buffer);
#endif
return gstbuf;
}
static void
gst_schro_buffer_free (SchroBuffer * buffer, void *priv)
{
gst_buffer_unref (GST_BUFFER (priv));
}
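/* Takes the reference: wrap the contents of a GstBuffer in a SchroBuffer
 * without copying; the SchroBuffer's free callback drops the reference to
 * the GstBuffer. */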
SchroBuffer *
gst_schro_wrap_gst_buffer (GstBuffer * buffer)
{
SchroBuffer *schrobuf;
schrobuf = schro_buffer_new_with_data (GST_BUFFER_DATA (buffer),
GST_BUFFER_SIZE (buffer));
schrobuf->free = gst_schro_buffer_free;
schrobuf->priv = buffer;
return schrobuf;
}

View file

@ -0,0 +1,34 @@
/* Schrodinger
* Copyright (C) 2008 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_SCHRO_UTILS_H_
#define _GST_SCHRO_UTILS_H_
#include <gst/gst.h>
#include <gst/video/video.h>
#include <schroedinger/schro.h>
SchroFrame *
gst_schro_buffer_wrap (GstBuffer *buf, GstVideoFormat format, int width,
int height);
GstBuffer * gst_schro_wrap_schro_buffer (SchroBuffer *buffer);
SchroBuffer * gst_schro_wrap_gst_buffer (GstBuffer *buffer);
#endif