schroedinger: Port to -base video classes

Conflicts:

	ext/schroedinger/Makefile.am
	ext/schroedinger/gstschrodec.c
	ext/schroedinger/gstschroenc.c

Back to 0.10 state, needs porting again.
Authored by Edward Hervey on 2012-04-24 11:08:58 +02:00, committed by Sebastian Dröge
parent 1d3996325a
commit ee7afc71f8
6 changed files with 335 additions and 323 deletions
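
For context: GstVideoDecoder is the (still unstable, hence -DGST_USE_UNSTABLE_API) video decoder base class from gst-plugins-base that this commit ports the element onto, replacing the GstBaseVideoDecoder class from the gst-plugins-bad gst-libs. Below is a minimal sketch of how such a subclass is wired up in 0.10; the GstToyDec type, the stub bodies and the 13-byte threshold are purely illustrative, while the vfunc names and signatures (start, parse, handle_frame) are the ones that appear in the diff that follows.

/* Illustrative sketch only -- not part of this commit. */
#define GST_USE_UNSTABLE_API
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include <gst/video/video.h>
#include <gst/video/gstvideodecoder.h>

typedef struct _GstToyDec
{
  GstVideoDecoder parent;               /* base instance must come first */
} GstToyDec;

typedef struct _GstToyDecClass
{
  GstVideoDecoderClass parent_class;
} GstToyDecClass;

/* 0.10-style boilerplate, same pattern the ported element uses below */
GST_BOILERPLATE (GstToyDec, gst_toy_dec, GstVideoDecoder,
    GST_TYPE_VIDEO_DECODER);

static gboolean
gst_toy_dec_start (GstVideoDecoder * dec)
{
  /* allocate per-stream resources here */
  return TRUE;
}

static GstFlowReturn
gst_toy_dec_parse (GstVideoDecoder * dec, GstVideoCodecFrame * frame,
    GstAdapter * adapter, gboolean at_eos)
{
  /* A real parser scans the adapter for a frame boundary and then calls
   * gst_video_decoder_add_to_frame() / gst_video_decoder_have_frame();
   * asking the base class for more input is signalled like this: */
  if (gst_adapter_available (adapter) < 13)  /* 13 = arbitrary header size */
    return GST_VIDEO_DECODER_FLOW_NEED_DATA;

  return GST_FLOW_OK;
}

static GstFlowReturn
gst_toy_dec_handle_frame (GstVideoDecoder * dec, GstVideoCodecFrame * frame)
{
  /* A real decoder would consume frame->input_buffer, set
   * frame->output_buffer and call gst_video_decoder_finish_frame();
   * this stub does nothing with the frame. */
  return GST_FLOW_OK;
}

static void
gst_toy_dec_base_init (gpointer g_class)
{
  /* pad templates and element details are registered here in 0.10 */
}

static void
gst_toy_dec_class_init (GstToyDecClass * klass)
{
  GstVideoDecoderClass *dec_class = GST_VIDEO_DECODER_CLASS (klass);

  dec_class->start = GST_DEBUG_FUNCPTR (gst_toy_dec_start);
  dec_class->parse = GST_DEBUG_FUNCPTR (gst_toy_dec_parse);
  dec_class->handle_frame = GST_DEBUG_FUNCPTR (gst_toy_dec_handle_frame);
}

static void
gst_toy_dec_init (GstToyDec * dec, GstToyDecClass * klass)
{
  /* per-instance setup */
}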

@@ -10,14 +10,12 @@ libgstschro_la_SOURCES = \
gstschroenc.c \ gstschroenc.c \
gstschroutils.c gstschroutils.c
libgstschro_la_CFLAGS = \ libgstschro_la_CFLAGS = \
$(GST_PLUGINS_BAD_CFLAGS) \
$(GST_PLUGINS_BASE_CFLAGS) \ $(GST_PLUGINS_BASE_CFLAGS) \
$(GST_BASE_CFLAGS) \ $(GST_BASE_CFLAGS) \
$(GST_CFLAGS) \ $(GST_CFLAGS) \
-DGST_USE_UNSTABLE_API \ -DGST_USE_UNSTABLE_API \
$(SCHRO_CFLAGS) $(SCHRO_CFLAGS)
libgstschro_la_LIBADD = \ libgstschro_la_LIBADD = \
$(top_builddir)/gst-libs/gst/video/libgstbasevideo-@GST_API_VERSION@.la \
$(GST_PLUGINS_BASE_LIBS) -lgstvideo-@GST_API_VERSION@ \ $(GST_PLUGINS_BASE_LIBS) -lgstvideo-@GST_API_VERSION@ \
$(GST_BASE_LIBS) $(GST_LIBS) \ $(GST_BASE_LIBS) $(GST_LIBS) \
$(SCHRO_LIBS) $(SCHRO_LIBS)

@@ -46,6 +46,6 @@ plugin_init (GstPlugin * plugin)
GST_PLUGIN_DEFINE (GST_VERSION_MAJOR, GST_PLUGIN_DEFINE (GST_VERSION_MAJOR,
GST_VERSION_MINOR, GST_VERSION_MINOR,
schro, "schro",
"Schroedinger plugin", "Schroedinger plugin",
plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN) plugin_init, VERSION, "LGPL", GST_PACKAGE_NAME, GST_PACKAGE_ORIGIN)

@@ -24,7 +24,7 @@
#include <gst/gst.h> #include <gst/gst.h>
#include <gst/base/gstadapter.h> #include <gst/base/gstadapter.h>
#include <gst/video/video.h> #include <gst/video/video.h>
#include <gst/video/gstbasevideodecoder.h> #include <gst/video/gstvideodecoder.h>
#include <string.h> #include <string.h>
#include <schroedinger/schro.h> #include <schroedinger/schro.h>
#include <math.h> #include <math.h>
@@ -51,7 +51,7 @@ typedef struct _GstSchroDecClass GstSchroDecClass;
struct _GstSchroDec struct _GstSchroDec
{ {
GstBaseVideoDecoder base_video_decoder; GstVideoDecoder base_video_decoder;
SchroDecoder *decoder; SchroDecoder *decoder;
@@ -60,7 +60,7 @@ struct _GstSchroDec
struct _GstSchroDecClass struct _GstSchroDecClass
{ {
GstBaseVideoDecoderClass base_video_decoder_class; GstVideoDecoderClass base_video_decoder_class;
}; };
GType gst_schro_dec_get_type (void); GType gst_schro_dec_get_type (void);
@@ -79,17 +79,17 @@ enum
static void gst_schro_dec_finalize (GObject * object); static void gst_schro_dec_finalize (GObject * object);
static gboolean gst_schro_dec_sink_query (GstPad * pad, GstSchroDec * dec, static gboolean gst_schro_dec_sink_query (GstPad * pad, GstQuery * query);
GstQuery * query);
static gboolean gst_schro_dec_start (GstBaseVideoDecoder * dec); static gboolean gst_schro_dec_start (GstVideoDecoder * dec);
static gboolean gst_schro_dec_stop (GstBaseVideoDecoder * dec); static gboolean gst_schro_dec_stop (GstVideoDecoder * dec);
static gboolean gst_schro_dec_reset (GstBaseVideoDecoder * dec); static gboolean gst_schro_dec_reset (GstVideoDecoder * dec, gboolean hard);
static GstFlowReturn gst_schro_dec_parse_data (GstBaseVideoDecoder * static GstFlowReturn gst_schro_dec_parse (GstVideoDecoder *
base_video_decoder, gboolean at_eos); base_video_decoder, GstVideoCodecFrame * frame, GstAdapter * adapter,
static GstFlowReturn gst_schro_dec_handle_frame (GstBaseVideoDecoder * decoder, gboolean at_eos);
GstVideoFrameState * frame); static GstFlowReturn gst_schro_dec_handle_frame (GstVideoDecoder * decoder,
static gboolean gst_schro_dec_finish (GstBaseVideoDecoder * base_video_decoder); GstVideoCodecFrame * frame);
static gboolean gst_schro_dec_finish (GstVideoDecoder * base_video_decoder);
static void gst_schrodec_send_tags (GstSchroDec * schro_dec); static void gst_schrodec_send_tags (GstSchroDec * schro_dec);
static GstStaticPadTemplate gst_schro_dec_sink_template = static GstStaticPadTemplate gst_schro_dec_sink_template =
@@ -100,57 +100,59 @@ GST_STATIC_PAD_TEMPLATE ("sink",
); );
static GstStaticPadTemplate gst_schro_dec_src_template = static GstStaticPadTemplate gst_schro_dec_src_template =
GST_STATIC_PAD_TEMPLATE ("src", GST_STATIC_PAD_TEMPLATE ("src",
GST_PAD_SRC, GST_PAD_SRC,
GST_PAD_ALWAYS, GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (GST_SCHRO_YUV_LIST)) GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV (GST_SCHRO_YUV_LIST) ";"
GST_VIDEO_CAPS_ARGB)
); );
#define gst_schro_dec_parent_class parent_class GST_BOILERPLATE (GstSchroDec, gst_schro_dec, GstVideoDecoder,
G_DEFINE_TYPE (GstSchroDec, gst_schro_dec, GST_TYPE_BASE_VIDEO_DECODER); GST_TYPE_VIDEO_DECODER);
static void
gst_schro_dec_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_add_static_pad_template (element_class,
&gst_schro_dec_src_template);
gst_element_class_add_static_pad_template (element_class,
&gst_schro_dec_sink_template);
gst_element_class_set_details_simple (element_class, "Dirac Decoder",
"Codec/Decoder/Video",
"Decode Dirac streams", "David Schleef <ds@schleef.org>");
}
static void static void
gst_schro_dec_class_init (GstSchroDecClass * klass) gst_schro_dec_class_init (GstSchroDecClass * klass)
{ {
GObjectClass *gobject_class; GObjectClass *gobject_class;
GstElementClass *element_class; GstVideoDecoderClass *base_video_decoder_class;
GstBaseVideoDecoderClass *base_video_decoder_class;
gobject_class = G_OBJECT_CLASS (klass); gobject_class = G_OBJECT_CLASS (klass);
element_class = GST_ELEMENT_CLASS (klass); base_video_decoder_class = GST_VIDEO_DECODER_CLASS (klass);
base_video_decoder_class = GST_BASE_VIDEO_DECODER_CLASS (klass);
gobject_class->finalize = gst_schro_dec_finalize; gobject_class->finalize = gst_schro_dec_finalize;
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_schro_dec_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_schro_dec_sink_template));
gst_element_class_set_details_simple (element_class, "Dirac Decoder",
"Codec/Decoder/Video",
"Decode Dirac streams", "David Schleef <ds@schleef.org>");
base_video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_schro_dec_start); base_video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_schro_dec_start);
base_video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_schro_dec_stop); base_video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_schro_dec_stop);
base_video_decoder_class->reset = GST_DEBUG_FUNCPTR (gst_schro_dec_reset); base_video_decoder_class->reset = GST_DEBUG_FUNCPTR (gst_schro_dec_reset);
base_video_decoder_class->parse_data = base_video_decoder_class->parse = GST_DEBUG_FUNCPTR (gst_schro_dec_parse);
GST_DEBUG_FUNCPTR (gst_schro_dec_parse_data);
base_video_decoder_class->handle_frame = base_video_decoder_class->handle_frame =
GST_DEBUG_FUNCPTR (gst_schro_dec_handle_frame); GST_DEBUG_FUNCPTR (gst_schro_dec_handle_frame);
base_video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_schro_dec_finish); base_video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_schro_dec_finish);
gst_base_video_decoder_class_set_capture_pattern (base_video_decoder_class,
0xffffffff, 0x42424344);
} }
static void static void
gst_schro_dec_init (GstSchroDec * schro_dec) gst_schro_dec_init (GstSchroDec * schro_dec, GstSchroDecClass * klass)
{ {
GST_DEBUG ("gst_schro_dec_init"); GST_DEBUG ("gst_schro_dec_init");
gst_pad_set_query_function (GST_BASE_VIDEO_CODEC_SINK_PAD (schro_dec), gst_pad_set_query_function (GST_VIDEO_DECODER_SINK_PAD (schro_dec),
(GstPadQueryFunction) gst_schro_dec_sink_query); gst_schro_dec_sink_query);
schro_dec->decoder = schro_decoder_new (); schro_dec->decoder = schro_decoder_new ();
} }
@@ -181,42 +183,48 @@ gst_schro_dec_sink_convert (GstPad * pad,
GstFormat src_format, gint64 src_value, GstFormat src_format, gint64 src_value,
GstFormat * dest_format, gint64 * dest_value) GstFormat * dest_format, gint64 * dest_value)
{ {
gboolean res = TRUE; gboolean res = FALSE;
GstSchroDec *dec; GstSchroDec *dec;
GstVideoState *state; GstVideoCodecState *state;
if (src_format == *dest_format) { if (src_format == *dest_format) {
*dest_value = src_value; *dest_value = src_value;
return TRUE; return TRUE;
} }
if (src_format != GST_FORMAT_DEFAULT || *dest_format != GST_FORMAT_TIME)
return FALSE;
dec = GST_SCHRO_DEC (gst_pad_get_parent (pad)); dec = GST_SCHRO_DEC (gst_pad_get_parent (pad));
/* FIXME: check if we are in a decoding state */ /* FIXME: check if we are in a decoding state */
state = gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER (dec)); state = gst_video_decoder_get_output_state (GST_VIDEO_DECODER (dec));
res = FALSE; if (G_UNLIKELY (state == NULL))
if (src_format == GST_FORMAT_DEFAULT && *dest_format == GST_FORMAT_TIME) { goto beach;
if (state->fps_d != 0) {
*dest_value = gst_util_uint64_scale (granulepos_to_frame (src_value),
state->fps_d * GST_SECOND, state->fps_n);
res = TRUE;
} else {
res = FALSE;
}
}
if (state->info.fps_d == 0)
goto beach;
*dest_value = gst_util_uint64_scale (granulepos_to_frame (src_value),
state->info.fps_d * GST_SECOND, state->info.fps_n);
res = TRUE;
beach:
gst_object_unref (dec); gst_object_unref (dec);
return res; return res;
} }
static gboolean static gboolean
gst_schro_dec_sink_query (GstPad * pad, GstSchroDec * dec, GstQuery * query) gst_schro_dec_sink_query (GstPad * pad, GstQuery * query)
{ {
GstSchroDec *dec;
gboolean res = FALSE; gboolean res = FALSE;
dec = GST_SCHRO_DEC (gst_pad_get_parent (pad));
switch (GST_QUERY_TYPE (query)) { switch (GST_QUERY_TYPE (query)) {
case GST_QUERY_CONVERT: case GST_QUERY_CONVERT:
{ {
@@ -232,33 +240,34 @@ gst_schro_dec_sink_query (GstPad * pad, GstSchroDec * dec, GstQuery * query)
break; break;
} }
default: default:
res = gst_pad_query_default (pad, GST_OBJECT (dec), query); res = gst_pad_query_default (pad, query);
break; break;
} }
done: done:
return res; gst_object_unref (dec);
return res;
error: error:
GST_DEBUG_OBJECT (dec, "query failed"); GST_DEBUG_OBJECT (dec, "query failed");
goto done; goto done;
} }
static gboolean static gboolean
gst_schro_dec_start (GstBaseVideoDecoder * dec) gst_schro_dec_start (GstVideoDecoder * dec)
{ {
return TRUE; return TRUE;
} }
static gboolean static gboolean
gst_schro_dec_stop (GstBaseVideoDecoder * dec) gst_schro_dec_stop (GstVideoDecoder * dec)
{ {
return TRUE; return TRUE;
} }
static gboolean static gboolean
gst_schro_dec_reset (GstBaseVideoDecoder * dec) gst_schro_dec_reset (GstVideoDecoder * dec, gboolean hard)
{ {
GstSchroDec *schro_dec; GstSchroDec *schro_dec;
@@ -294,98 +303,108 @@ parse_sequence_header (GstSchroDec * schro_dec, guint8 * data, int size)
{ {
SchroVideoFormat video_format; SchroVideoFormat video_format;
int ret; int ret;
GstVideoState *state; GstVideoCodecState *state;
int bit_depth;
GstVideoFormat fmt = GST_VIDEO_FORMAT_UNKNOWN;
GST_DEBUG_OBJECT (schro_dec, "parse_sequence_header size=%d", size); GST_DEBUG_OBJECT (schro_dec, "parse_sequence_header size=%d", size);
state = gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER (schro_dec));
schro_dec->seq_header_buffer_seen = TRUE; schro_dec->seq_header_buffer_seen = TRUE;
ret = schro_parse_decode_sequence_header (data + 13, size - 13, ret = schro_parse_decode_sequence_header (data + 13, size - 13,
&video_format); &video_format);
if (ret) { if (!ret) {
int bit_depth; /* FIXME : Isn't this meant to be a *fatal* error ? */
#if SCHRO_CHECK_VERSION(1,0,11)
bit_depth = schro_video_format_get_bit_depth (&video_format);
#else
bit_depth = 8;
#endif
if (bit_depth == 8) {
if (video_format.chroma_format == SCHRO_CHROMA_444) {
state->format = GST_VIDEO_FORMAT_AYUV;
} else if (video_format.chroma_format == SCHRO_CHROMA_422) {
state->format = GST_VIDEO_FORMAT_UYVY;
} else if (video_format.chroma_format == SCHRO_CHROMA_420) {
state->format = GST_VIDEO_FORMAT_I420;
}
#if SCHRO_CHECK_VERSION(1,0,11)
} else if (bit_depth <= 10) {
if (video_format.colour_matrix == SCHRO_COLOUR_MATRIX_REVERSIBLE) {
state->format = GST_VIDEO_FORMAT_ARGB;
} else {
state->format = GST_VIDEO_FORMAT_v210;
}
} else if (bit_depth <= 16) {
state->format = GST_VIDEO_FORMAT_AYUV64;
} else {
GST_ERROR ("bit depth too large (%d > 16)", bit_depth);
state->format = GST_VIDEO_FORMAT_AYUV64;
#endif
}
state->fps_n = video_format.frame_rate_numerator;
state->fps_d = video_format.frame_rate_denominator;
GST_DEBUG_OBJECT (schro_dec, "Frame rate is %d/%d", state->fps_n,
state->fps_d);
state->width = video_format.width;
state->height = video_format.height;
GST_DEBUG ("Frame dimensions are %d x %d\n", state->width, state->height);
state->clean_width = video_format.clean_width;
state->clean_height = video_format.clean_height;
state->clean_offset_left = video_format.left_offset;
state->clean_offset_top = video_format.top_offset;
state->par_n = video_format.aspect_ratio_numerator;
state->par_d = video_format.aspect_ratio_denominator;
GST_DEBUG ("Pixel aspect ratio is %d/%d", state->par_n, state->par_d);
gst_base_video_decoder_set_src_caps (GST_BASE_VIDEO_DECODER (schro_dec));
} else {
GST_WARNING ("Failed to get frame rate from sequence header"); GST_WARNING ("Failed to get frame rate from sequence header");
goto beach;
}
#if SCHRO_CHECK_VERSION(1,0,11)
bit_depth = schro_video_format_get_bit_depth (&video_format);
#else
bit_depth = 8;
#endif
if (bit_depth == 8) {
if (video_format.chroma_format == SCHRO_CHROMA_444) {
fmt = GST_VIDEO_FORMAT_AYUV;
} else if (video_format.chroma_format == SCHRO_CHROMA_422) {
fmt = GST_VIDEO_FORMAT_UYVY;
} else if (video_format.chroma_format == SCHRO_CHROMA_420) {
fmt = GST_VIDEO_FORMAT_I420;
}
#if SCHRO_CHECK_VERSION(1,0,11)
} else if (bit_depth <= 10) {
if (video_format.colour_matrix == SCHRO_COLOUR_MATRIX_REVERSIBLE) {
fmt = GST_VIDEO_FORMAT_ARGB;
} else {
fmt = GST_VIDEO_FORMAT_v210;
}
} else if (bit_depth <= 16) {
fmt = GST_VIDEO_FORMAT_AYUV64;
} else {
GST_ERROR ("bit depth too large (%d > 16)", bit_depth);
fmt = GST_VIDEO_FORMAT_AYUV64;
#endif
} }
state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (schro_dec),
fmt, video_format.width, video_format.height, NULL);
GST_DEBUG ("Frame dimensions are %d x %d\n", state->info.width,
state->info.height);
state->info.fps_n = video_format.frame_rate_numerator;
state->info.fps_d = video_format.frame_rate_denominator;
GST_DEBUG_OBJECT (schro_dec, "Frame rate is %d/%d", state->info.fps_n,
state->info.fps_d);
state->info.par_n = video_format.aspect_ratio_numerator;
state->info.par_d = video_format.aspect_ratio_denominator;
GST_DEBUG ("Pixel aspect ratio is %d/%d", state->info.par_n,
state->info.par_d);
beach:
gst_schrodec_send_tags (schro_dec); gst_schrodec_send_tags (schro_dec);
} }
static GstFlowReturn static GstFlowReturn
gst_schro_dec_parse_data (GstBaseVideoDecoder * base_video_decoder, gst_schro_dec_parse (GstVideoDecoder * base_video_decoder,
gboolean at_eos) GstVideoCodecFrame * frame, GstAdapter * adapter, gboolean at_eos)
{ {
GstSchroDec *schro_decoder; GstSchroDec *schro_decoder;
unsigned char header[SCHRO_PARSE_HEADER_SIZE]; unsigned char header[SCHRO_PARSE_HEADER_SIZE];
int next; int next;
int prev; int prev;
int parse_code; int parse_code;
int av, loc;
GST_DEBUG_OBJECT (base_video_decoder, "parse_data"); GST_DEBUG_OBJECT (base_video_decoder, "parse");
schro_decoder = GST_SCHRO_DEC (base_video_decoder); schro_decoder = GST_SCHRO_DEC (base_video_decoder);
av = gst_adapter_available (adapter);
if (gst_adapter_available (base_video_decoder->input_adapter) < if (av < SCHRO_PARSE_HEADER_SIZE) {
SCHRO_PARSE_HEADER_SIZE) { return GST_VIDEO_DECODER_FLOW_NEED_DATA;
return GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA;
} }
GST_DEBUG ("available %d", GST_DEBUG ("available %d", av);
gst_adapter_available (base_video_decoder->input_adapter));
gst_adapter_copy (base_video_decoder->input_adapter, header, 0, /* Check for header */
SCHRO_PARSE_HEADER_SIZE); loc =
gst_adapter_masked_scan_uint32 (adapter, 0xffffffff, 0x42424344, 0,
av - 3);
if (G_UNLIKELY (loc == -1)) {
GST_DEBUG_OBJECT (schro_decoder, "No header");
gst_adapter_flush (adapter, av - 3);
return GST_VIDEO_DECODER_FLOW_NEED_DATA;
}
/* Skip data until header */
if (loc > 0)
gst_adapter_flush (adapter, loc);
gst_adapter_copy (adapter, header, 0, SCHRO_PARSE_HEADER_SIZE);
parse_code = header[4]; parse_code = header[4];
next = GST_READ_UINT32_BE (header + 5); next = GST_READ_UINT32_BE (header + 5);
@@ -396,30 +415,25 @@ gst_schro_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
if (memcmp (header, "BBCD", 4) != 0 || if (memcmp (header, "BBCD", 4) != 0 ||
(next & 0xf0000000) || (prev & 0xf0000000)) { (next & 0xf0000000) || (prev & 0xf0000000)) {
gst_base_video_decoder_lost_sync (base_video_decoder); gst_adapter_flush (adapter, 1);
return GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA; return GST_VIDEO_DECODER_FLOW_NEED_DATA;
} }
if (SCHRO_PARSE_CODE_IS_END_OF_SEQUENCE (parse_code)) { if (SCHRO_PARSE_CODE_IS_END_OF_SEQUENCE (parse_code)) {
GstVideoFrameState *frame;
if (next != 0 && next != SCHRO_PARSE_HEADER_SIZE) { if (next != 0 && next != SCHRO_PARSE_HEADER_SIZE) {
GST_WARNING ("next is not 0 or 13 in EOS packet (%d)", next); GST_WARNING ("next is not 0 or 13 in EOS packet (%d)", next);
} }
gst_base_video_decoder_add_to_frame (base_video_decoder, gst_video_decoder_add_to_frame (base_video_decoder,
SCHRO_PARSE_HEADER_SIZE); SCHRO_PARSE_HEADER_SIZE);
frame = base_video_decoder->current_frame;
frame->is_eos = TRUE;
SCHRO_DEBUG ("eos"); SCHRO_DEBUG ("eos");
return gst_base_video_decoder_have_frame (base_video_decoder); return gst_video_decoder_have_frame (base_video_decoder);
} }
if (gst_adapter_available (base_video_decoder->input_adapter) < next) { if (gst_adapter_available (adapter) < next) {
return GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA; return GST_VIDEO_DECODER_FLOW_NEED_DATA;
} }
if (SCHRO_PARSE_CODE_IS_SEQ_HEADER (parse_code)) { if (SCHRO_PARSE_CODE_IS_SEQ_HEADER (parse_code)) {
@@ -427,19 +441,19 @@ gst_schro_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
data = g_malloc (next); data = g_malloc (next);
gst_adapter_copy (base_video_decoder->input_adapter, data, 0, next); gst_adapter_copy (adapter, data, 0, next);
parse_sequence_header (schro_decoder, data, next); parse_sequence_header (schro_decoder, data, next);
gst_base_video_decoder_set_sync_point (base_video_decoder); GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
#if 0 #if 0
if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_sink_timestamp)) { if (GST_CLOCK_TIME_IS_VALID (base_video_decoder->last_sink_timestamp)) {
base_video_decoder->current_frame->presentation_timestamp = base_video_decoder->current_frame->pts =
base_video_decoder->last_sink_timestamp; base_video_decoder->last_sink_timestamp;
GST_DEBUG ("got timestamp %" G_GINT64_FORMAT, GST_DEBUG ("got timestamp %" G_GINT64_FORMAT,
base_video_decoder->last_sink_timestamp); base_video_decoder->last_sink_timestamp);
} else if (base_video_decoder->last_sink_offset_end != -1) { } else if (base_video_decoder->last_sink_offset_end != -1) {
GstVideoState *state; GstVideoCodecState *state;
#if 0 #if 0
/* FIXME perhaps should use this to determine if the granulepos /* FIXME perhaps should use this to determine if the granulepos
@@ -463,13 +477,13 @@ gst_schro_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
GST_DEBUG ("gp pt %lld dist %d delay %d dt %lld", pt, dist, delay, dt); GST_DEBUG ("gp pt %lld dist %d delay %d dt %lld", pt, dist, delay, dt);
} }
#endif #endif
state = gst_base_video_decoder_get_state (base_video_decoder); state = gst_video_decoder_get_state (base_video_decoder);
base_video_decoder->current_frame->presentation_timestamp = base_video_decoder->current_frame->pts =
gst_util_uint64_scale (granulepos_to_frame gst_util_uint64_scale (granulepos_to_frame
(base_video_decoder->last_sink_offset_end), state->fps_d * GST_SECOND, (base_video_decoder->last_sink_offset_end), state->fps_d * GST_SECOND,
state->fps_n); state->fps_n);
} else { } else {
base_video_decoder->current_frame->presentation_timestamp = -1; base_video_decoder->current_frame->pts = -1;
} }
#endif #endif
@@ -477,26 +491,24 @@ gst_schro_dec_parse_data (GstBaseVideoDecoder * base_video_decoder,
} }
if (!schro_decoder->seq_header_buffer_seen) { if (!schro_decoder->seq_header_buffer_seen) {
gst_adapter_flush (base_video_decoder->input_adapter, next); gst_adapter_flush (adapter, next);
return GST_FLOW_OK; return GST_FLOW_OK;
} }
if (SCHRO_PARSE_CODE_IS_PICTURE (parse_code)) { if (SCHRO_PARSE_CODE_IS_PICTURE (parse_code)) {
GstVideoFrameState *frame;
guint8 tmp[4]; guint8 tmp[4];
frame = base_video_decoder->current_frame; gst_adapter_copy (adapter, tmp, SCHRO_PARSE_HEADER_SIZE, 4);
gst_adapter_copy (base_video_decoder->input_adapter, tmp,
SCHRO_PARSE_HEADER_SIZE, 4);
/* What is the point of this ? BaseVideoDecoder doesn't
* do anything with presentation_frame_number */
frame->presentation_frame_number = GST_READ_UINT32_BE (tmp); frame->presentation_frame_number = GST_READ_UINT32_BE (tmp);
gst_base_video_decoder_add_to_frame (base_video_decoder, next); gst_video_decoder_add_to_frame (base_video_decoder, next);
return gst_base_video_decoder_have_frame (base_video_decoder); return gst_video_decoder_have_frame (base_video_decoder);
} else { } else {
gst_base_video_decoder_add_to_frame (base_video_decoder, next); gst_video_decoder_add_to_frame (base_video_decoder, next);
} }
return GST_FLOW_OK; return GST_FLOW_OK;
@@ -507,12 +519,12 @@ gst_schrodec_send_tags (GstSchroDec * schro_dec)
{ {
GstTagList *list; GstTagList *list;
list = gst_tag_list_new_empty (); list = gst_tag_list_new ();
gst_tag_list_add (list, GST_TAG_MERGE_REPLACE, gst_tag_list_add (list, GST_TAG_MERGE_REPLACE,
GST_TAG_VIDEO_CODEC, "Dirac", NULL); GST_TAG_VIDEO_CODEC, "Dirac", NULL);
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (schro_dec), gst_element_found_tags_for_pad (GST_ELEMENT_CAST (schro_dec),
gst_event_new_tag (gst_tag_list_copy (list))); GST_VIDEO_DECODER_SRC_PAD (schro_dec), list);
} }
static GstFlowReturn static GstFlowReturn
@@ -538,28 +550,28 @@ gst_schro_dec_process (GstSchroDec * schro_dec, gboolean eos)
case SCHRO_DECODER_NEED_FRAME: case SCHRO_DECODER_NEED_FRAME:
{ {
GstBuffer *outbuf; GstBuffer *outbuf;
GstVideoState *state; GstVideoCodecState *state;
SchroFrame *schro_frame; SchroFrame *schro_frame;
GST_DEBUG ("need frame"); GST_DEBUG ("need frame");
state = state =
gst_base_video_decoder_get_state (GST_BASE_VIDEO_DECODER gst_video_decoder_get_output_state (GST_VIDEO_DECODER (schro_dec));
(schro_dec));
outbuf = outbuf =
gst_base_video_decoder_alloc_src_buffer (GST_BASE_VIDEO_DECODER gst_video_decoder_alloc_output_buffer (GST_VIDEO_DECODER
(schro_dec)); (schro_dec));
schro_frame = schro_frame =
gst_schro_buffer_wrap (outbuf, state->format, state->width, gst_schro_buffer_wrap (outbuf, GST_VIDEO_INFO_FORMAT (&state->info),
state->height); state->info.width, state->info.height);
schro_decoder_add_output_picture (schro_dec->decoder, schro_frame); schro_decoder_add_output_picture (schro_dec->decoder, schro_frame);
gst_video_codec_state_unref (state);
break; break;
} }
case SCHRO_DECODER_OK: case SCHRO_DECODER_OK:
{ {
SchroFrame *schro_frame; SchroFrame *schro_frame;
SchroTag *tag; SchroTag *tag;
GstVideoFrameState *frame; GstVideoCodecFrame *frame;
GST_DEBUG ("got frame"); GST_DEBUG ("got frame");
@@ -571,10 +583,11 @@ gst_schro_dec_process (GstSchroDec * schro_dec, gboolean eos)
if (schro_frame->priv) { if (schro_frame->priv) {
GstFlowReturn flow_ret; GstFlowReturn flow_ret;
frame->src_buffer = gst_buffer_ref (GST_BUFFER (schro_frame->priv)); frame->output_buffer =
gst_buffer_ref (GST_BUFFER (schro_frame->priv));
flow_ret = flow_ret =
gst_base_video_decoder_finish_frame (GST_BASE_VIDEO_DECODER gst_video_decoder_finish_frame (GST_VIDEO_DECODER
(schro_dec), frame); (schro_dec), frame);
if (flow_ret != GST_FLOW_OK) { if (flow_ret != GST_FLOW_OK) {
GST_DEBUG ("finish frame returned %d", flow_ret); GST_DEBUG ("finish frame returned %d", flow_ret);
@@ -611,8 +624,8 @@ gst_schro_dec_process (GstSchroDec * schro_dec, gboolean eos)
} }
GstFlowReturn GstFlowReturn
gst_schro_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder, gst_schro_dec_handle_frame (GstVideoDecoder * base_video_decoder,
GstVideoFrameState * frame) GstVideoCodecFrame * frame)
{ {
GstSchroDec *schro_dec; GstSchroDec *schro_dec;
SchroBuffer *input_buffer; SchroBuffer *input_buffer;
@@ -621,8 +634,8 @@ gst_schro_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
GST_DEBUG ("handle frame"); GST_DEBUG ("handle frame");
input_buffer = gst_schro_wrap_gst_buffer (frame->sink_buffer); input_buffer = gst_schro_wrap_gst_buffer (frame->input_buffer);
frame->sink_buffer = NULL; frame->input_buffer = NULL;
input_buffer->tag = schro_tag_new (frame, NULL); input_buffer->tag = schro_tag_new (frame, NULL);
@@ -632,7 +645,7 @@ gst_schro_dec_handle_frame (GstBaseVideoDecoder * base_video_decoder,
} }
gboolean gboolean
gst_schro_dec_finish (GstBaseVideoDecoder * base_video_decoder) gst_schro_dec_finish (GstVideoDecoder * base_video_decoder)
{ {
GstSchroDec *schro_dec; GstSchroDec *schro_dec;

@@ -23,8 +23,8 @@
#include <gst/gst.h> #include <gst/gst.h>
#include <gst/video/video.h> #include <gst/video/video.h>
#include <gst/video/gstbasevideoencoder.h> #include <gst/video/gstvideoencoder.h>
#include <gst/video/gstbasevideoutils.h> #include <gst/video/gstvideoutils.h>
#include <string.h> #include <string.h>
#include <schroedinger/schro.h> #include <schroedinger/schro.h>
@@ -52,7 +52,7 @@ typedef struct _GstSchroEncClass GstSchroEncClass;
struct _GstSchroEnc struct _GstSchroEnc
{ {
GstBaseVideoEncoder base_encoder; GstVideoEncoder base_encoder;
GstPad *sinkpad; GstPad *sinkpad;
GstPad *srcpad; GstPad *srcpad;
@@ -63,11 +63,13 @@ struct _GstSchroEnc
guint64 last_granulepos; guint64 last_granulepos;
guint64 granule_offset; guint64 granule_offset;
GstVideoCodecState *input_state;
}; };
struct _GstSchroEncClass struct _GstSchroEncClass
{ {
GstBaseVideoEncoderClass parent_class; GstVideoEncoderClass parent_class;
}; };
GType gst_schro_enc_get_type (void); GType gst_schro_enc_get_type (void);
@@ -91,23 +93,28 @@ static void gst_schro_enc_get_property (GObject * object, guint prop_id,
static GstFlowReturn gst_schro_enc_process (GstSchroEnc * schro_enc); static GstFlowReturn gst_schro_enc_process (GstSchroEnc * schro_enc);
static gboolean gst_schro_enc_set_format (GstBaseVideoEncoder * static gboolean gst_schro_enc_set_format (GstVideoEncoder *
base_video_encoder, GstVideoInfo * info); base_video_encoder, GstVideoCodecState * state);
static gboolean gst_schro_enc_start (GstBaseVideoEncoder * base_video_encoder); static gboolean gst_schro_enc_start (GstVideoEncoder * base_video_encoder);
static gboolean gst_schro_enc_stop (GstBaseVideoEncoder * base_video_encoder); static gboolean gst_schro_enc_stop (GstVideoEncoder * base_video_encoder);
static GstFlowReturn gst_schro_enc_finish (GstBaseVideoEncoder * static GstFlowReturn gst_schro_enc_finish (GstVideoEncoder *
base_video_encoder); base_video_encoder);
static GstFlowReturn gst_schro_enc_handle_frame (GstBaseVideoEncoder * static GstFlowReturn gst_schro_enc_handle_frame (GstVideoEncoder *
base_video_encoder, GstVideoFrameState * frame); base_video_encoder, GstVideoCodecFrame * frame);
static GstFlowReturn gst_schro_enc_shape_output (GstBaseVideoEncoder * static GstFlowReturn gst_schro_enc_pre_push (GstVideoEncoder *
base_video_encoder, GstVideoFrameState * frame); base_video_encoder, GstVideoCodecFrame * frame);
static void gst_schro_enc_finalize (GObject * object); static void gst_schro_enc_finalize (GObject * object);
#if SCHRO_CHECK_VERSION(1,0,12)
#define ARGB_CAPS ";" GST_VIDEO_CAPS_ARGB
#else
#define ARGB_CAPS
#endif
static GstStaticPadTemplate gst_schro_enc_sink_template = static GstStaticPadTemplate gst_schro_enc_sink_template =
GST_STATIC_PAD_TEMPLATE ("sink", GST_STATIC_PAD_TEMPLATE ("sink",
GST_PAD_SINK, GST_PAD_SINK,
GST_PAD_ALWAYS, GST_PAD_ALWAYS,
GST_STATIC_CAPS (GST_VIDEO_CAPS_MAKE (GST_SCHRO_YUV_LIST)) GST_STATIC_CAPS (GST_VIDEO_CAPS_YUV (GST_SCHRO_YUV_LIST) ARGB_CAPS)
); );
static GstStaticPadTemplate gst_schro_enc_src_template = static GstStaticPadTemplate gst_schro_enc_src_template =
@@ -117,8 +124,24 @@ static GstStaticPadTemplate gst_schro_enc_src_template =
GST_STATIC_CAPS ("video/x-dirac;video/x-qt-part;video/x-mp4-part") GST_STATIC_CAPS ("video/x-dirac;video/x-qt-part;video/x-mp4-part")
); );
#define gst_schro_enc_parent_class parent_class GST_BOILERPLATE (GstSchroEnc, gst_schro_enc, GstVideoEncoder,
G_DEFINE_TYPE (GstSchroEnc, gst_schro_enc, GST_TYPE_BASE_VIDEO_ENCODER); GST_TYPE_VIDEO_ENCODER);
static void
gst_schro_enc_base_init (gpointer g_class)
{
GstElementClass *element_class = GST_ELEMENT_CLASS (g_class);
gst_element_class_add_static_pad_template (element_class,
&gst_schro_enc_src_template);
gst_element_class_add_static_pad_template (element_class,
&gst_schro_enc_sink_template);
gst_element_class_set_details_simple (element_class, "Dirac Encoder",
"Codec/Encoder/Video",
"Encode raw video into Dirac stream", "David Schleef <ds@schleef.org>");
}
static GType static GType
register_enum_list (const SchroEncoderSetting * setting) register_enum_list (const SchroEncoderSetting * setting)
@@ -149,13 +172,11 @@ static void
gst_schro_enc_class_init (GstSchroEncClass * klass) gst_schro_enc_class_init (GstSchroEncClass * klass)
{ {
GObjectClass *gobject_class; GObjectClass *gobject_class;
GstElementClass *element_class; GstVideoEncoderClass *basevideocoder_class;
GstBaseVideoEncoderClass *basevideocoder_class;
int i; int i;
gobject_class = G_OBJECT_CLASS (klass); gobject_class = G_OBJECT_CLASS (klass);
element_class = GST_ELEMENT_CLASS (klass); basevideocoder_class = GST_VIDEO_ENCODER_CLASS (klass);
basevideocoder_class = GST_BASE_VIDEO_ENCODER_CLASS (klass);
gobject_class->set_property = gst_schro_enc_set_property; gobject_class->set_property = gst_schro_enc_set_property;
gobject_class->get_property = gst_schro_enc_get_property; gobject_class->get_property = gst_schro_enc_get_property;
@@ -195,15 +216,6 @@ gst_schro_enc_class_init (GstSchroEncClass * klass)
} }
} }
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_schro_enc_src_template));
gst_element_class_add_pad_template (element_class,
gst_static_pad_template_get (&gst_schro_enc_sink_template));
gst_element_class_set_details_simple (element_class, "Dirac Encoder",
"Codec/Encoder/Video",
"Encode raw video into Dirac stream", "David Schleef <ds@schleef.org>");
basevideocoder_class->set_format = basevideocoder_class->set_format =
GST_DEBUG_FUNCPTR (gst_schro_enc_set_format); GST_DEBUG_FUNCPTR (gst_schro_enc_set_format);
basevideocoder_class->start = GST_DEBUG_FUNCPTR (gst_schro_enc_start); basevideocoder_class->start = GST_DEBUG_FUNCPTR (gst_schro_enc_start);
@@ -211,12 +223,11 @@ gst_schro_enc_class_init (GstSchroEncClass * klass)
basevideocoder_class->finish = GST_DEBUG_FUNCPTR (gst_schro_enc_finish); basevideocoder_class->finish = GST_DEBUG_FUNCPTR (gst_schro_enc_finish);
basevideocoder_class->handle_frame = basevideocoder_class->handle_frame =
GST_DEBUG_FUNCPTR (gst_schro_enc_handle_frame); GST_DEBUG_FUNCPTR (gst_schro_enc_handle_frame);
basevideocoder_class->shape_output = basevideocoder_class->pre_push = GST_DEBUG_FUNCPTR (gst_schro_enc_pre_push);
GST_DEBUG_FUNCPTR (gst_schro_enc_shape_output);
} }
static void static void
gst_schro_enc_init (GstSchroEnc * schro_enc) gst_schro_enc_init (GstSchroEnc * schro_enc, GstSchroEncClass * klass)
{ {
GST_DEBUG ("gst_schro_enc_init"); GST_DEBUG ("gst_schro_enc_init");
@@ -246,20 +257,17 @@ gst_schro_enc_finalize (GObject * object)
} }
static gboolean static gboolean
gst_schro_enc_set_format (GstBaseVideoEncoder * base_video_encoder, gst_schro_enc_set_format (GstVideoEncoder * base_video_encoder,
GstVideoInfo * info) GstVideoCodecState * state)
{ {
GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder); GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder);
GstCaps *caps;
GstBuffer *seq_header_buffer; GstBuffer *seq_header_buffer;
gboolean ret; GstVideoInfo *info = &state->info;
GstVideoCodecState *output_state;
GstClockTime latency;
GST_DEBUG ("set_output_caps"); GST_DEBUG ("set_output_caps");
gst_base_video_encoder_set_latency_fields (base_video_encoder,
2 * (int) schro_encoder_setting_get_double (schro_enc->encoder,
"queue_depth"));
schro_video_format_set_std_video_format (schro_enc->video_format, schro_video_format_set_std_video_format (schro_enc->video_format,
SCHRO_VIDEO_FORMAT_CUSTOM); SCHRO_VIDEO_FORMAT_CUSTOM);
@@ -293,27 +301,21 @@ gst_schro_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
g_assert_not_reached (); g_assert_not_reached ();
} }
schro_enc->video_format->frame_rate_numerator = info->fps_n; schro_enc->video_format->frame_rate_numerator = GST_VIDEO_INFO_FPS_N (info);
schro_enc->video_format->frame_rate_denominator = info->fps_d; schro_enc->video_format->frame_rate_denominator = GST_VIDEO_INFO_FPS_D (info);
schro_enc->video_format->width = info->width; schro_enc->video_format->width = GST_VIDEO_INFO_WIDTH (info);
schro_enc->video_format->height = info->height; schro_enc->video_format->height = GST_VIDEO_INFO_HEIGHT (info);
#if 0 schro_enc->video_format->clean_width = GST_VIDEO_INFO_WIDTH (info);
schro_enc->video_format->clean_width = state->clean_width; schro_enc->video_format->clean_height = GST_VIDEO_INFO_HEIGHT (info);
schro_enc->video_format->clean_height = state->clean_height;
schro_enc->video_format->left_offset = state->clean_offset_left;
schro_enc->video_format->top_offset = state->clean_offset_top;
#else
schro_enc->video_format->clean_width = info->width;
schro_enc->video_format->clean_height = info->height;
schro_enc->video_format->left_offset = 0; schro_enc->video_format->left_offset = 0;
schro_enc->video_format->top_offset = 0; schro_enc->video_format->top_offset = 0;
#endif
schro_enc->video_format->aspect_ratio_numerator = info->par_n; schro_enc->video_format->aspect_ratio_numerator = GST_VIDEO_INFO_PAR_N (info);
schro_enc->video_format->aspect_ratio_denominator = info->par_d; schro_enc->video_format->aspect_ratio_denominator =
GST_VIDEO_INFO_PAR_D (info);
switch (GST_VIDEO_INFO_FORMAT (info)) { switch (GST_VIDEO_INFO_FORMAT (&state->info)) {
default: default:
schro_video_format_set_std_signal_range (schro_enc->video_format, schro_video_format_set_std_signal_range (schro_enc->video_format,
SCHRO_SIGNAL_RANGE_8BIT_VIDEO); SCHRO_SIGNAL_RANGE_8BIT_VIDEO);
@@ -341,6 +343,13 @@ gst_schro_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
#endif #endif
} }
/* Finally set latency */
latency = gst_util_uint64_scale (GST_SECOND,
GST_VIDEO_INFO_FPS_D (info) *
(int) schro_encoder_setting_get_double (schro_enc->encoder,
"queue_depth"), GST_VIDEO_INFO_FPS_N (info));
gst_video_encoder_set_latency (base_video_encoder, latency, latency);
schro_video_format_set_std_colour_spec (schro_enc->video_format, schro_video_format_set_std_colour_spec (schro_enc->video_format,
SCHRO_COLOUR_SPEC_HDTV); SCHRO_COLOUR_SPEC_HDTV);
@@ -353,63 +362,52 @@ gst_schro_enc_set_format (GstBaseVideoEncoder * base_video_encoder,
schro_enc->granule_offset = ~0; schro_enc->granule_offset = ~0;
caps = gst_caps_new_simple ("video/x-dirac", output_state =
"width", G_TYPE_INT, info->width, gst_video_encoder_set_output_state (base_video_encoder,
"height", G_TYPE_INT, info->height, gst_caps_new_simple ("video/x-dirac", NULL), state);
"framerate", GST_TYPE_FRACTION, info->fps_n,
info->fps_d,
"pixel-aspect-ratio", GST_TYPE_FRACTION, info->par_n, info->par_d, NULL);
GST_BUFFER_FLAG_SET (seq_header_buffer, GST_BUFFER_FLAG_HEADER);
GST_BUFFER_FLAG_SET (seq_header_buffer, GST_BUFFER_FLAG_IN_CAPS);
{ {
GValue array = { 0 }; GValue array = { 0 };
GValue value = { 0 }; GValue value = { 0 };
guint8 *outdata;
GstBuffer *buf; GstBuffer *buf;
GstMemory *seq_header_memory, *extra_header; int size;
gsize size;
g_value_init (&array, GST_TYPE_ARRAY); g_value_init (&array, GST_TYPE_ARRAY);
g_value_init (&value, GST_TYPE_BUFFER); g_value_init (&value, GST_TYPE_BUFFER);
size = GST_BUFFER_SIZE (seq_header_buffer);
buf = gst_buffer_new (); buf = gst_buffer_new_and_alloc (size + SCHRO_PARSE_HEADER_SIZE);
/* Add the sequence header */
seq_header_memory = gst_buffer_get_memory (seq_header_buffer, 0);
gst_buffer_append_memory (buf, seq_header_memory);
size = gst_buffer_get_size (buf) + SCHRO_PARSE_HEADER_SIZE;
outdata = g_malloc0 (SCHRO_PARSE_HEADER_SIZE);
GST_WRITE_UINT32_BE (outdata, 0x42424344);
GST_WRITE_UINT8 (outdata + 4, SCHRO_PARSE_CODE_END_OF_SEQUENCE);
GST_WRITE_UINT32_BE (outdata + 5, 0);
GST_WRITE_UINT32_BE (outdata + 9, size);
extra_header = gst_memory_new_wrapped (0, outdata, SCHRO_PARSE_HEADER_SIZE,
0, SCHRO_PARSE_HEADER_SIZE, outdata, g_free);
gst_buffer_append_memory (buf, extra_header);
/* ogg(mux) expects the header buffers to have 0 timestamps - /* ogg(mux) expects the header buffers to have 0 timestamps -
set OFFSET and OFFSET_END accordingly */ set OFFSET and OFFSET_END accordingly */
GST_BUFFER_OFFSET (buf) = 0; GST_BUFFER_OFFSET (buf) = 0;
GST_BUFFER_OFFSET_END (buf) = 0; GST_BUFFER_OFFSET_END (buf) = 0;
GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_HEADER); GST_BUFFER_FLAG_SET (buf, GST_BUFFER_FLAG_IN_CAPS);
memcpy (GST_BUFFER_DATA (buf), GST_BUFFER_DATA (seq_header_buffer), size);
GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 0, 0x42424344);
GST_WRITE_UINT8 (GST_BUFFER_DATA (buf) + size + 4,
SCHRO_PARSE_CODE_END_OF_SEQUENCE);
GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 5, 0);
GST_WRITE_UINT32_BE (GST_BUFFER_DATA (buf) + size + 9, size);
gst_value_set_buffer (&value, buf); gst_value_set_buffer (&value, buf);
gst_buffer_unref (buf); gst_buffer_unref (buf);
gst_value_array_append_value (&array, &value); gst_value_array_append_value (&array, &value);
gst_structure_set_value (gst_caps_get_structure (caps, 0), gst_structure_set_value (gst_caps_get_structure (output_state->caps, 0),
"streamheader", &array); "streamheader", &array);
g_value_unset (&value); g_value_unset (&value);
g_value_unset (&array); g_value_unset (&array);
} }
gst_buffer_unref (seq_header_buffer); gst_buffer_unref (seq_header_buffer);
ret = gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (schro_enc), caps); gst_video_codec_state_unref (output_state);
gst_caps_unref (caps);
return ret; /* And save the input state for later use */
if (schro_enc->input_state)
gst_video_codec_state_unref (schro_enc->input_state);
schro_enc->input_state = gst_video_codec_state_ref (state);
return TRUE;
} }
static void static void
@@ -482,19 +480,19 @@ gst_schro_enc_get_property (GObject * object, guint prop_id, GValue * value,
} }
static gboolean static gboolean
gst_schro_enc_start (GstBaseVideoEncoder * base_video_encoder) gst_schro_enc_start (GstVideoEncoder * base_video_encoder)
{ {
return TRUE; return TRUE;
} }
static gboolean static gboolean
gst_schro_enc_stop (GstBaseVideoEncoder * base_video_encoder) gst_schro_enc_stop (GstVideoEncoder * base_video_encoder)
{ {
return TRUE; return TRUE;
} }
static GstFlowReturn static GstFlowReturn
gst_schro_enc_finish (GstBaseVideoEncoder * base_video_encoder) gst_schro_enc_finish (GstVideoEncoder * base_video_encoder)
{ {
GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder); GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder);
@@ -507,25 +505,25 @@ gst_schro_enc_finish (GstBaseVideoEncoder * base_video_encoder)
} }
static GstFlowReturn static GstFlowReturn
gst_schro_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder, gst_schro_enc_handle_frame (GstVideoEncoder * base_video_encoder,
GstVideoFrameState * frame) GstVideoCodecFrame * frame)
{ {
GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder); GstSchroEnc *schro_enc = GST_SCHRO_ENC (base_video_encoder);
SchroFrame *schro_frame; SchroFrame *schro_frame;
GstFlowReturn ret; GstFlowReturn ret;
const GstVideoState *state; GstVideoInfo *info = &schro_enc->input_state->info;
state = gst_base_video_encoder_get_state (base_video_encoder);
if (schro_enc->granule_offset == ~0LL) { if (schro_enc->granule_offset == ~0LL) {
schro_enc->granule_offset = schro_enc->granule_offset =
gst_util_uint64_scale (frame->presentation_timestamp, gst_util_uint64_scale (frame->pts, 2 * GST_VIDEO_INFO_FPS_N (info),
2 * state->fps_n, GST_SECOND * state->fps_d); GST_SECOND * GST_VIDEO_INFO_FPS_D (info));
GST_DEBUG ("granule offset %" G_GINT64_FORMAT, schro_enc->granule_offset); GST_DEBUG ("granule offset %" G_GINT64_FORMAT, schro_enc->granule_offset);
} }
schro_frame = gst_schro_buffer_wrap (gst_buffer_ref (frame->sink_buffer), /* FIXME : We could make that method just take GstVideoInfo ... */
state->format, state->width, state->height); schro_frame = gst_schro_buffer_wrap (gst_buffer_ref (frame->input_buffer),
GST_VIDEO_INFO_FORMAT (info),
GST_VIDEO_INFO_WIDTH (info), GST_VIDEO_INFO_HEIGHT (info));
GST_DEBUG ("pushing frame %p", frame); GST_DEBUG ("pushing frame %p", frame);
schro_encoder_push_frame_full (schro_enc->encoder, schro_frame, frame); schro_encoder_push_frame_full (schro_enc->encoder, schro_frame, frame);
@@ -536,8 +534,8 @@ gst_schro_enc_handle_frame (GstBaseVideoEncoder * base_video_encoder,
} }
static GstFlowReturn static GstFlowReturn
gst_schro_enc_shape_output (GstBaseVideoEncoder * base_video_encoder, gst_schro_enc_pre_push (GstVideoEncoder * base_video_encoder,
GstVideoFrameState * frame) GstVideoCodecFrame * frame)
{ {
GstSchroEnc *schro_enc; GstSchroEnc *schro_enc;
int delay; int delay;
@@ -546,7 +544,7 @@ gst_schro_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
int dt; int dt;
guint64 granulepos_hi; guint64 granulepos_hi;
guint64 granulepos_low; guint64 granulepos_low;
GstBuffer *buf = frame->src_buffer; GstBuffer *buf = frame->output_buffer;
schro_enc = GST_SCHRO_ENC (base_video_encoder); schro_enc = GST_SCHRO_ENC (base_video_encoder);
@@ -564,25 +562,29 @@ gst_schro_enc_shape_output (GstBaseVideoEncoder * base_video_encoder,
GST_DEBUG ("granulepos %" G_GINT64_FORMAT ":%" G_GINT64_FORMAT, granulepos_hi, GST_DEBUG ("granulepos %" G_GINT64_FORMAT ":%" G_GINT64_FORMAT, granulepos_hi,
granulepos_low); granulepos_low);
#if 0
if (frame->is_eos) { if (frame->is_eos) {
GST_BUFFER_OFFSET_END (buf) = schro_enc->last_granulepos; GST_BUFFER_OFFSET_END (buf) = schro_enc->last_granulepos;
} else { } else {
#endif
schro_enc->last_granulepos = (granulepos_hi << 22) | (granulepos_low); schro_enc->last_granulepos = (granulepos_hi << 22) | (granulepos_low);
GST_BUFFER_OFFSET_END (buf) = schro_enc->last_granulepos; GST_BUFFER_OFFSET_END (buf) = schro_enc->last_granulepos;
#if 0
} }
#endif
return gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (base_video_encoder), buf); return GST_FLOW_OK;
} }
static GstFlowReturn static GstFlowReturn
gst_schro_enc_process (GstSchroEnc * schro_enc) gst_schro_enc_process (GstSchroEnc * schro_enc)
{ {
SchroBuffer *encoded_buffer; SchroBuffer *encoded_buffer;
GstVideoFrameState *frame; GstVideoCodecFrame *frame;
GstFlowReturn ret; GstFlowReturn ret;
int presentation_frame; int presentation_frame;
void *voidptr; void *voidptr;
GstBaseVideoEncoder *base_video_encoder = GST_BASE_VIDEO_ENCODER (schro_enc); GstVideoEncoder *base_video_encoder = GST_VIDEO_ENCODER (schro_enc);
GST_DEBUG ("process"); GST_DEBUG ("process");
@@ -608,12 +610,10 @@ gst_schro_enc_process (GstSchroEnc * schro_enc)
GstMessage *message; GstMessage *message;
GstStructure *structure; GstStructure *structure;
GstBuffer *buf; GstBuffer *buf;
gpointer data;
data = g_malloc (sizeof (double) * 21); buf = gst_buffer_new_and_alloc (sizeof (double) * 21);
schro_encoder_get_frame_stats (schro_enc->encoder, schro_encoder_get_frame_stats (schro_enc->encoder,
(double *) data, 21); (double *) GST_BUFFER_DATA (buf), 21);
buf = gst_buffer_new_wrapped (data, sizeof (double) * 21);
structure = gst_structure_new ("GstSchroEnc", structure = gst_structure_new ("GstSchroEnc",
"frame-stats", GST_TYPE_BUFFER, buf, NULL); "frame-stats", GST_TYPE_BUFFER, buf, NULL);
gst_buffer_unref (buf); gst_buffer_unref (buf);
@@ -632,12 +632,12 @@ gst_schro_enc_process (GstSchroEnc * schro_enc)
/* FIXME: Get the frame from somewhere somehow... */ /* FIXME: Get the frame from somewhere somehow... */
if (frame) { if (frame) {
if (SCHRO_PARSE_CODE_IS_SEQ_HEADER (encoded_buffer->data[4])) { if (SCHRO_PARSE_CODE_IS_SEQ_HEADER (encoded_buffer->data[4])) {
frame->is_sync_point = TRUE; GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
} }
frame->src_buffer = gst_schro_wrap_schro_buffer (encoded_buffer); frame->output_buffer = gst_schro_wrap_schro_buffer (encoded_buffer);
ret = gst_base_video_encoder_finish_frame (base_video_encoder, frame); ret = gst_video_encoder_finish_frame (base_video_encoder, frame);
if (ret != GST_FLOW_OK) { if (ret != GST_FLOW_OK) {
GST_DEBUG ("pad_push returned %d", ret); GST_DEBUG ("pad_push returned %d", ret);

@@ -50,47 +50,54 @@ gst_schro_buffer_wrap (GstBuffer * buf, GstVideoFormat format, int width,
int height) int height)
{ {
SchroFrame *frame; SchroFrame *frame;
GstMapInfo info;
if (!gst_buffer_map (buf, &info, GST_MAP_READ))
return NULL;
switch (format) { switch (format) {
case GST_VIDEO_FORMAT_I420: case GST_VIDEO_FORMAT_I420:
frame = schro_frame_new_from_data_I420 (info.data, width, height); frame =
schro_frame_new_from_data_I420 (GST_BUFFER_DATA (buf), width, height);
break; break;
case GST_VIDEO_FORMAT_YV12: case GST_VIDEO_FORMAT_YV12:
frame = schro_frame_new_from_data_YV12 (info.data, width, height); frame =
schro_frame_new_from_data_YV12 (GST_BUFFER_DATA (buf), width, height);
break; break;
case GST_VIDEO_FORMAT_YUY2: case GST_VIDEO_FORMAT_YUY2:
frame = schro_frame_new_from_data_YUY2 (info.data, width, height); frame =
schro_frame_new_from_data_YUY2 (GST_BUFFER_DATA (buf), width, height);
break; break;
case GST_VIDEO_FORMAT_UYVY: case GST_VIDEO_FORMAT_UYVY:
frame = schro_frame_new_from_data_UYVY (info.data, width, height); frame =
schro_frame_new_from_data_UYVY (GST_BUFFER_DATA (buf), width, height);
break; break;
case GST_VIDEO_FORMAT_AYUV: case GST_VIDEO_FORMAT_AYUV:
frame = schro_frame_new_from_data_AYUV (info.data, width, height); frame =
schro_frame_new_from_data_AYUV (GST_BUFFER_DATA (buf), width, height);
break; break;
#if SCHRO_CHECK_VERSION(1,0,12) #if SCHRO_CHECK_VERSION(1,0,12)
case GST_VIDEO_FORMAT_ARGB: case GST_VIDEO_FORMAT_ARGB:
frame = schro_frame_new_from_data_ARGB (info.data, width, height); frame =
schro_frame_new_from_data_ARGB (GST_BUFFER_DATA (buf), width, height);
break; break;
#endif #endif
#if SCHRO_CHECK_VERSION(1,0,11) #if SCHRO_CHECK_VERSION(1,0,11)
case GST_VIDEO_FORMAT_Y42B: case GST_VIDEO_FORMAT_Y42B:
frame = schro_frame_new_from_data_Y42B (info.data, width, height); frame =
schro_frame_new_from_data_Y42B (GST_BUFFER_DATA (buf), width, height);
break; break;
case GST_VIDEO_FORMAT_Y444: case GST_VIDEO_FORMAT_Y444:
frame = schro_frame_new_from_data_Y444 (info.data, width, height); frame =
schro_frame_new_from_data_Y444 (GST_BUFFER_DATA (buf), width, height);
break; break;
case GST_VIDEO_FORMAT_v210: case GST_VIDEO_FORMAT_v210:
frame = schro_frame_new_from_data_v210 (info.data, width, height); frame =
schro_frame_new_from_data_v210 (GST_BUFFER_DATA (buf), width, height);
break; break;
case GST_VIDEO_FORMAT_v216: case GST_VIDEO_FORMAT_v216:
frame = schro_frame_new_from_data_v216 (info.data, width, height); frame =
schro_frame_new_from_data_v216 (GST_BUFFER_DATA (buf), width, height);
break; break;
case GST_VIDEO_FORMAT_AYUV64: case GST_VIDEO_FORMAT_AYUV64:
frame = schro_frame_new_from_data_AY64 (info.data, width, height); frame =
schro_frame_new_from_data_AY64 (GST_BUFFER_DATA (buf), width, height);
break; break;
#endif #endif
default: default:
@@ -99,11 +106,10 @@ gst_schro_buffer_wrap (GstBuffer * buf, GstVideoFormat format, int width,
} }
schro_frame_set_free_callback (frame, gst_schro_frame_free, buf); schro_frame_set_free_callback (frame, gst_schro_frame_free, buf);
gst_buffer_unmap (buf, &info);
return frame; return frame;
} }
#ifdef GST_BUFFER_FREE_FUNC
static void static void
schro_buf_free_func (gpointer priv) schro_buf_free_func (gpointer priv)
{ {
@@ -111,48 +117,43 @@ schro_buf_free_func (gpointer priv)
schro_buffer_unref (buffer); schro_buffer_unref (buffer);
} }
#endif
/* takes the reference */ /* takes the reference */
GstBuffer * GstBuffer *
gst_schro_wrap_schro_buffer (SchroBuffer * buffer) gst_schro_wrap_schro_buffer (SchroBuffer * buffer)
{ {
GstMemory *mem; GstBuffer *gstbuf;
GstBuffer *buf;
mem = #ifdef GST_BUFFER_FREE_FUNC
gst_memory_new_wrapped (0, buffer->data, buffer->length, 0, gstbuf = gst_buffer_new ();
buffer->length, buffer, schro_buf_free_func); GST_BUFFER_DATA (gstbuf) = buffer->data;
buf = gst_buffer_new (); GST_BUFFER_SIZE (gstbuf) = buffer->length;
gst_buffer_append_memory (buf, mem); GST_BUFFER_MALLOCDATA (gstbuf) = (void *) buffer;
GST_BUFFER_FREE_FUNC (gstbuf) = schro_buf_free_func;
#else
gstbuf = gst_buffer_new_and_alloc (buffer->length);
memcpy (GST_BUFFER_DATA (gstbuf), buffer->data, buffer->length);
#endif
return buf; return gstbuf;
} }
static void static void
gst_schro_buffer_free (SchroBuffer * buffer, void *priv) gst_schro_buffer_free (SchroBuffer * buffer, void *priv)
{ {
gst_memory_unref (GST_MEMORY_CAST (priv)); gst_buffer_unref (GST_BUFFER (priv));
} }
SchroBuffer * SchroBuffer *
gst_schro_wrap_gst_buffer (GstBuffer * buffer) gst_schro_wrap_gst_buffer (GstBuffer * buffer)
{ {
SchroBuffer *schrobuf; SchroBuffer *schrobuf;
GstMemory *mem;
GstMapInfo info;
mem = gst_buffer_get_all_memory (buffer); schrobuf = schro_buffer_new_with_data (GST_BUFFER_DATA (buffer),
if (!gst_memory_map (mem, &info, GST_MAP_READ)) { GST_BUFFER_SIZE (buffer));
GST_ERROR ("Couldn't get readable memory from gstbuffer");
return NULL;
}
/* FIXME : We can't control if data won't be read/write outside
* of schro ... */
schrobuf = schro_buffer_new_with_data (info.data, info.size);
gst_memory_unmap (mem, &info);
schrobuf->free = gst_schro_buffer_free; schrobuf->free = gst_schro_buffer_free;
schrobuf->priv = mem; schrobuf->priv = buffer;
return schrobuf; return schrobuf;
} }

@@ -25,9 +25,9 @@
#include <schroedinger/schro.h> #include <schroedinger/schro.h>
#if SCHRO_CHECK_VERSION(1,0,11) #if SCHRO_CHECK_VERSION(1,0,11)
#define GST_SCHRO_YUV_LIST "{ I420, YV12, YUY2, UYVY, AYUV, Y42B, Y444, v216, v210, AY64, ARGB }" #define GST_SCHRO_YUV_LIST "{ I420, YV12, YUY2, UYVY, AYUV, Y42B, Y444, v216, v210, AY64 }"
#else #else
#define GST_SCHRO_YUV_LIST "{ I420, YV12, YUY2, UYVY, AYUV, ARGB }" #define GST_SCHRO_YUV_LIST "{ I420, YV12, YUY2, UYVY, AYUV }"
#endif #endif
SchroFrame * SchroFrame *