omx: Port to video base classes from -base

This commit is contained in:
Sebastian Dröge 2012-06-20 13:11:58 +01:00
parent 49ba281e8b
commit 5fdb490f1a
20 changed files with 377 additions and 5273 deletions

View file

@ -13,10 +13,6 @@ libgstopenmax_la_SOURCES = \
gstomxh264enc.c \
gstomxh263enc.c \
gstomxaacenc.c \
gstbasevideocodec.c \
gstbasevideodecoder.c \
gstbasevideoencoder.c \
gstbasevideoutils.c \
gstomxrecmutex.c
noinst_HEADERS = \
@ -32,27 +28,14 @@ noinst_HEADERS = \
gstomxh264enc.h \
gstomxh263enc.h \
gstomxaacenc.h \
gstbasevideocodec.h \
gstbasevideodecoder.h \
gstbasevideoencoder.h \
gstbasevideoutils.h \
gstomxrecmutex.h
fixbaseclasses = \
-DGstBaseVideoCodec=OMXBaseVideoCodec \
-DGstBaseVideoCodecClass=OMXBaseVideoCodecClass \
-DGstBaseVideoEncoder=OMXBaseVideoEncoder \
-DGstBaseVideoEncoderClass=OMXBaseVideoEncoderClass \
-DGstBaseVideoDecoder=OMXBaseVideoDecoder \
-DGstBaseVideoDecoderClass=OMXBaseVideoDecoderClass
libgstopenmax_la_CFLAGS = \
-DGST_USE_UNSTABLE_API=1 \
-I$(abs_srcdir)/openmax \
$(GST_PLUGINS_BASE_CFLAGS) \
$(GST_BASE_CFLAGS) \
$(GST_CFLAGS) \
$(fixbaseclasses)
$(GST_CFLAGS)
libgstopenmax_la_LIBADD = \
$(GST_PLUGINS_BASE_LIBS) \
-lgstaudio-@GST_API_VERSION@ \
@ -76,5 +59,5 @@ Android.mk: Makefile.am $(BUILT_SOURCES)
$(libgstopenmax_la_LIBADD) \
-ldl \
-:PASSTHROUGH LOCAL_ARM_MODE:=arm \
LOCAL_MODULE_PATH:='$$(TARGET_OUT)/lib/gstreamer-0.10' \
LOCAL_MODULE_PATH:='$$(TARGET_OUT)/lib/gstreamer-$(GST_API_VERSION)' \
> $@

View file

@ -1,343 +0,0 @@
/* GStreamer
* Copyright (C) 2006 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
/**
* SECTION:gstbasevideocodec
* @short_description: Base class and objects for video codecs
*
**/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
/**
* SECTION:gstbasevideocodec
* @short_description: Base class for video codecs
* @see_also: #GstBaseVideoDecoder , #GstBaseVideoEncoder
*/
/* FIXME 0.11: suppress warnings for deprecated API such as GStaticRecMutex
* with newer GLib versions (>= 2.31.0) */
#define GLIB_DISABLE_DEPRECATION_WARNINGS
#include "gstbasevideocodec.h"
#include <string.h>
#include <math.h>
/* Debug category used by all GST_* logging in this file. */
GST_DEBUG_CATEGORY (basevideocodec_debug);
#define GST_CAT_DEFAULT basevideocodec_debug

/* GstBaseVideoCodec signals and args */

/* No signals are defined yet; LAST_SIGNAL is the customary placeholder. */
enum
{
  LAST_SIGNAL
};

/* No properties are defined yet; ARG_0 is the customary placeholder. */
enum
{
  ARG_0
};

static void gst_base_video_codec_finalize (GObject * object);

static GstStateChangeReturn gst_base_video_codec_change_state (GstElement *
    element, GstStateChange transition);

/* Parent class pointer, saved manually in class_init because the type is
 * registered by hand (no G_DEFINE_TYPE-provided parent_class). */
static GstElementClass *parent_class = NULL;

/* Expose GstVideoFrameState as a boxed GType using its ref/unref pair. */
G_DEFINE_BOXED_TYPE (GstVideoFrameState, gst_video_frame_state,
    (GBoxedCopyFunc) gst_video_frame_state_ref,
    (GBoxedFreeFunc) gst_video_frame_state_unref);

/* NOTE (Edward): Do not use G_DEFINE_* because we need to have
 * a GClassInitFunc called with the target class (which the macros
 * don't handle).
 */
static void gst_base_video_codec_class_init (GstBaseVideoCodecClass * klass);
static void gst_base_video_codec_init (GstBaseVideoCodec * dec,
    GstBaseVideoCodecClass * klass);
/* Registers (once) and returns the GType for GstBaseVideoCodec.
 *
 * Registered manually instead of via G_DEFINE_TYPE because the init
 * functions must be called with the target class (see NOTE above).
 * Thread-safe via the g_once_init_enter()/leave() pair. */
GType
gst_base_video_codec_get_type (void)
{
  /* 0 means "not registered yet"; g_once_init_enter() guarantees exactly
   * one thread performs the registration. */
  static volatile gsize base_video_codec_type = 0;

  if (g_once_init_enter (&base_video_codec_type)) {
    GType _type;
    static const GTypeInfo base_video_codec_info = {
      sizeof (GstBaseVideoCodecClass),
      NULL,                     /* base_init */
      NULL,                     /* base_finalize */
      (GClassInitFunc) gst_base_video_codec_class_init,
      NULL,                     /* class_finalize */
      NULL,                     /* class_data */
      sizeof (GstBaseVideoCodec),
      0,                        /* n_preallocs */
      (GInstanceInitFunc) gst_base_video_codec_init,
    };

    /* Abstract: only subclasses (decoder/encoder) can be instantiated. */
    _type = g_type_register_static (GST_TYPE_ELEMENT,
        "GstBaseVideoCodec", &base_video_codec_info, G_TYPE_FLAG_ABSTRACT);
    g_once_init_leave (&base_video_codec_type, _type);
  }
  return base_video_codec_type;
}
/* GObject class initialisation: installs finalize and the state-change
 * handler, saves the parent class for chaining up, and initialises the
 * debug category. */
static void
gst_base_video_codec_class_init (GstBaseVideoCodecClass * klass)
{
  GObjectClass *gobject_class;
  GstElementClass *element_class;

  gobject_class = G_OBJECT_CLASS (klass);
  element_class = GST_ELEMENT_CLASS (klass);

  /* Needed for chaining up in finalize/change_state; the type is
   * registered manually, so no macro provides parent_class for us. */
  parent_class = g_type_class_peek_parent (klass);

  gobject_class->finalize = gst_base_video_codec_finalize;

  element_class->change_state = gst_base_video_codec_change_state;

  GST_DEBUG_CATEGORY_INIT (basevideocodec_debug, "basevideocodec", 0,
      "Base Video Codec");
}
/* Instance init: creates the sink and source pads from the pad templates
 * that the subclass must have installed under the names "sink" and "src",
 * adds them to the element, and initialises the segment (TIME format)
 * and the recursive stream lock. */
static void
gst_base_video_codec_init (GstBaseVideoCodec * base_video_codec,
    GstBaseVideoCodecClass * klass)
{
  GstPadTemplate *pad_template;

  GST_DEBUG_OBJECT (base_video_codec, "gst_base_video_codec_init");

  /* Subclasses are required to provide a "sink" pad template. */
  pad_template =
      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "sink");
  g_return_if_fail (pad_template != NULL);

  base_video_codec->sinkpad = gst_pad_new_from_template (pad_template, "sink");
  gst_element_add_pad (GST_ELEMENT (base_video_codec),
      base_video_codec->sinkpad);

  /* ... and a "src" pad template as well. */
  pad_template =
      gst_element_class_get_pad_template (GST_ELEMENT_CLASS (klass), "src");
  g_return_if_fail (pad_template != NULL);

  base_video_codec->srcpad = gst_pad_new_from_template (pad_template, "src");
  gst_element_add_pad (GST_ELEMENT (base_video_codec),
      base_video_codec->srcpad);

  gst_segment_init (&base_video_codec->segment, GST_FORMAT_TIME);

  g_rec_mutex_init (&base_video_codec->stream_lock);
}
/* Drops all per-stream state: releases every tracked frame, resets the
 * byte/time accounting, and clears the compressed-video state.
 * Called on READY->PAUSED and PAUSED->READY transitions.
 * Holds the stream lock for the whole operation. */
static void
gst_base_video_codec_reset (GstBaseVideoCodec * base_video_codec)
{
  GList *g;

  GST_DEBUG_OBJECT (base_video_codec, "reset");

  GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_codec);
  /* Drop the list's reference on every still-pending frame. */
  for (g = base_video_codec->frames; g; g = g_list_next (g)) {
    gst_video_frame_state_unref ((GstVideoFrameState *) g->data);
  }
  g_list_free (base_video_codec->frames);
  base_video_codec->frames = NULL;

  base_video_codec->bytes = 0;
  base_video_codec->time = 0;

  /* Release the refs held inside state *before* memset() wipes the
   * pointers, otherwise codec_data/caps would leak. */
  gst_buffer_replace (&base_video_codec->state.codec_data, NULL);
  gst_caps_replace (&base_video_codec->state.caps, NULL);
  memset (&base_video_codec->state, 0, sizeof (GstVideoState));
  base_video_codec->state.format = GST_VIDEO_FORMAT_UNKNOWN;
  GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_codec);
}
/* GObject finalize: destroys the stream lock and chains up to the
 * parent class. */
static void
gst_base_video_codec_finalize (GObject * object)
{
  GstBaseVideoCodec *base_video_codec = GST_BASE_VIDEO_CODEC (object);

  g_rec_mutex_clear (&base_video_codec->stream_lock);

  G_OBJECT_CLASS (parent_class)->finalize (object);
}
/* GstElement state-change handler.
 *
 * Resets all stream bookkeeping both on the upward READY->PAUSED
 * transition (before chaining up) and on the downward PAUSED->READY
 * transition (after chaining up), so each streaming cycle starts
 * from a clean slate. */
static GstStateChangeReturn
gst_base_video_codec_change_state (GstElement * element,
    GstStateChange transition)
{
  GstBaseVideoCodec *base_video_codec = GST_BASE_VIDEO_CODEC (element);
  GstStateChangeReturn ret;

  /* Upward transitions: handled before chaining up to the parent. */
  switch (transition) {
    case GST_STATE_CHANGE_NULL_TO_READY:
      break;
    case GST_STATE_CHANGE_READY_TO_PAUSED:
      gst_base_video_codec_reset (base_video_codec);
      break;
    case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
      break;
    default:
      break;
  }

  ret = parent_class->change_state (element, transition);

  /* Downward transitions: handled after chaining up to the parent. */
  switch (transition) {
    case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
      break;
    case GST_STATE_CHANGE_PAUSED_TO_READY:
      gst_base_video_codec_reset (base_video_codec);
      break;
    case GST_STATE_CHANGE_READY_TO_NULL:
      break;
    default:
      break;
  }

  return ret;
}
/**
 * gst_base_video_codec_append_frame:
 * @codec: a #GstBaseVideoCodec
 * @frame: the #GstVideoFrameState to append
 *
 * Appends a frame to the list of frames handled by the codec.
 *
 * Note: This should normally not be used by implementations.
 **/
void
gst_base_video_codec_append_frame (GstBaseVideoCodec * codec,
    GstVideoFrameState * frame)
{
  GstVideoFrameState *held;

  g_return_if_fail (frame != NULL);

  /* The frame list owns one reference for as long as it tracks the frame;
   * gst_video_frame_state_ref() returns its argument. */
  held = gst_video_frame_state_ref (frame);
  codec->frames = g_list_append (codec->frames, held);
}
/**
 * gst_base_video_codec_remove_frame:
 * @codec: a #GstBaseVideoCodec
 * @frame: the #GstVideoFrameState to remove
 *
 * Removes @frame from the list of frames handled by the codec and drops
 * the reference the list was holding. Does nothing if the frame is not
 * currently tracked.
 */
void
gst_base_video_codec_remove_frame (GstBaseVideoCodec * codec,
    GstVideoFrameState * frame)
{
  GList *node;

  g_return_if_fail (frame != NULL);

  node = g_list_find (codec->frames, frame);
  if (node == NULL)
    return;

  /* Drop the list's reference first, then unlink the node. */
  gst_video_frame_state_unref ((GstVideoFrameState *) node->data);
  codec->frames = g_list_delete_link (codec->frames, node);
}
/* Releases every resource held by @frame and the frame itself.
 * Only called from gst_video_frame_state_unref() when the refcount
 * drops to zero. */
static void
_gst_video_frame_state_free (GstVideoFrameState * frame)
{
  g_return_if_fail (frame != NULL);

  GST_LOG ("Freeing frame %p (sfn:%d)", frame, frame->system_frame_number);

  if (frame->sink_buffer) {
    gst_buffer_unref (frame->sink_buffer);
  }
  if (frame->src_buffer) {
    gst_buffer_unref (frame->src_buffer);
  }

  /* Serialized events that were never pushed downstream are dropped. */
  g_list_foreach (frame->events, (GFunc) gst_event_unref, NULL);
  g_list_free (frame->events);

  /* Let the subclass clean up whatever it attached via coder_hook. */
  if (frame->coder_hook_destroy_notify && frame->coder_hook)
    frame->coder_hook_destroy_notify (frame->coder_hook);

  g_slice_free (GstVideoFrameState, frame);
}
/**
 * gst_base_video_codec_new_frame:
 * @base_video_codec: a #GstBaseVideoCodec
 *
 * Creates a new #GstVideoFrameState for usage in decoders or encoders.
 *
 * The frame gets a unique, monotonically increasing system frame number;
 * allocation of the number is protected by the stream lock.
 *
 * Returns: (transfer full): The new #GstVideoFrameState, call
 * #gst_video_frame_state_unref() when done with it.
 */
GstVideoFrameState *
gst_base_video_codec_new_frame (GstBaseVideoCodec * base_video_codec)
{
  GstVideoFrameState *frame;

  /* Zero-initialised; the caller owns the initial reference. */
  frame = g_slice_new0 (GstVideoFrameState);
  frame->ref_count = 1;

  GST_BASE_VIDEO_CODEC_STREAM_LOCK (base_video_codec);
  /* NOTE(review): the codec counter is guint64 but the frame field is
   * gint; truncation is theoretical for realistic stream lengths. */
  frame->system_frame_number = base_video_codec->system_frame_number;
  base_video_codec->system_frame_number++;
  GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (base_video_codec);

  GST_LOG_OBJECT (base_video_codec, "Created new frame %p (sfn:%d)",
      frame, frame->system_frame_number);

  return frame;
}
/**
 * gst_video_frame_state_ref:
 * @frame: a #GstVideoFrameState
 *
 * Increases the refcount of the given frame by one.
 *
 * Returns: @frame
 */
GstVideoFrameState *
gst_video_frame_state_ref (GstVideoFrameState * frame)
{
  g_return_val_if_fail (frame != NULL, NULL);

  g_atomic_int_inc (&frame->ref_count);

  return frame;
}
/**
 * gst_video_frame_state_unref:
 * @frame: a #GstVideoFrameState
 *
 * Decreases the refcount of the frame. If the refcount reaches 0, the frame
 * will be freed.
 */
void
gst_video_frame_state_unref (GstVideoFrameState * frame)
{
  gboolean was_last;

  g_return_if_fail (frame != NULL);
  g_return_if_fail (frame->ref_count > 0);

  /* Atomically drop our reference; free only if it was the last one. */
  was_last = g_atomic_int_dec_and_test (&frame->ref_count);
  if (was_last)
    _gst_video_frame_state_free (frame);
}

View file

@ -1,289 +0,0 @@
/* GStreamer
* Copyright (C) 2008 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_BASE_VIDEO_CODEC_H_
#define _GST_BASE_VIDEO_CODEC_H_
#ifndef GST_USE_UNSTABLE_API
#warning "GstBaseVideoCodec is unstable API and may change in future."
#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
#endif
#include <gst/gst.h>
#include <gst/base/gstadapter.h>
#include <gst/video/video.h>
#include <gst/video/gstvideopool.h>
#include <gst/video/gstvideometa.h>
G_BEGIN_DECLS
#define GST_TYPE_BASE_VIDEO_CODEC \
(gst_base_video_codec_get_type())
#define GST_BASE_VIDEO_CODEC(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BASE_VIDEO_CODEC,GstBaseVideoCodec))
#define GST_BASE_VIDEO_CODEC_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BASE_VIDEO_CODEC,GstBaseVideoCodecClass))
#define GST_BASE_VIDEO_CODEC_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_BASE_VIDEO_CODEC,GstBaseVideoCodecClass))
#define GST_IS_BASE_VIDEO_CODEC(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BASE_VIDEO_CODEC))
/* Fixed: the parameter was named "obj" while the expansion referenced
 * "klass", so any invocation whose argument was not literally named
 * "klass" failed to compile. */
#define GST_IS_BASE_VIDEO_CODEC_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BASE_VIDEO_CODEC))
/**
* GST_BASE_VIDEO_CODEC_SINK_NAME:
*
* The name of the templates for the sink pad.
*/
#define GST_BASE_VIDEO_CODEC_SINK_NAME "sink"
/**
* GST_BASE_VIDEO_CODEC_SRC_NAME:
*
* The name of the templates for the source pad.
*/
#define GST_BASE_VIDEO_CODEC_SRC_NAME "src"
/**
* GST_BASE_VIDEO_CODEC_SRC_PAD:
* @obj: base video codec instance
*
* Gives the pointer to the source #GstPad object of the element.
*/
#define GST_BASE_VIDEO_CODEC_SRC_PAD(obj) (((GstBaseVideoCodec *) (obj))->srcpad)
/**
* GST_BASE_VIDEO_CODEC_SINK_PAD:
* @obj: base video codec instance
*
* Gives the pointer to the sink #GstPad object of the element.
*/
#define GST_BASE_VIDEO_CODEC_SINK_PAD(obj) (((GstBaseVideoCodec *) (obj))->sinkpad)
/**
* GST_BASE_VIDEO_CODEC_FLOW_NEED_DATA:
*
* Returned while parsing to indicate more data is needed.
*/
#define GST_BASE_VIDEO_CODEC_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS
/**
* GST_BASE_VIDEO_CODEC_STREAM_LOCK:
* @codec: video codec instance
*
* Obtain a lock to protect the codec function from concurrent access.
*
* Since: 0.10.22
*/
#define GST_BASE_VIDEO_CODEC_STREAM_LOCK(codec) g_rec_mutex_lock (&GST_BASE_VIDEO_CODEC (codec)->stream_lock)
/**
* GST_BASE_VIDEO_CODEC_STREAM_UNLOCK:
* @codec: video codec instance
*
* Release the lock that protects the codec function from concurrent access.
*
* Since: 0.10.22
*/
#define GST_BASE_VIDEO_CODEC_STREAM_UNLOCK(codec) g_rec_mutex_unlock (&GST_BASE_VIDEO_CODEC (codec)->stream_lock)
typedef struct _GstVideoState GstVideoState;
typedef struct _GstVideoFrameState GstVideoFrameState;
typedef struct _GstBaseVideoCodec GstBaseVideoCodec;
typedef struct _GstBaseVideoCodecClass GstBaseVideoCodecClass;
/* GstVideoState is only used on the compressed video pad */
/**
* GstVideoState:
* @width: Width in pixels (including borders)
* @height: Height in pixels (including borders)
* @fps_n: Numerator of framerate
* @fps_d: Denominator of framerate
* @par_n: Numerator of Pixel Aspect Ratio
* @par_d: Denominator of Pixel Aspect Ratio
* @have_interlaced: The content of the @interlaced field is present and valid
* @interlaced: %TRUE if the stream is interlaced
* @top_field_first: %TRUE if the interlaced frame is top-field-first
* @clean_width: Useful width of video in pixels (i.e. without borders)
* @clean_height: Useful height of video in pixels (i.e. without borders)
* @clean_offset_left: Horizontal offset (from the left) of useful region in pixels
* @clean_offset_top: Vertical offset (from the top) of useful region in pixels
* @bytes_per_picture: Size in bytes of each picture
* @codec_data: Optional Codec Data for the stream
*
* Information about compressed video stream.
* FIXME: Re-use GstVideoInfo for more fields.
*/
struct _GstVideoState
{
GstCaps *caps;
GstVideoFormat format;
int width, height;
int fps_n, fps_d;
int par_n, par_d;
gboolean have_interlaced;
gboolean interlaced;
gboolean top_field_first;
int clean_width, clean_height;
int clean_offset_left, clean_offset_top;
int bytes_per_picture;
GstBuffer *codec_data;
};
/**
* GstVideoFrameState:
* @decode_timestamp: Decoding timestamp (aka DTS)
* @presentation_timestamp: Presentation timestamp (aka PTS)
* @presentation_duration: Duration of frame
* @system_frame_number: unique ID attributed when #GstVideoFrameState is
* created
* @decode_frame_number: Decoded frame number, increases in decoding order
* @presentation_frame_number: Presentation frame number, increases in
* presentation order.
* @distance_from_sync: Distance of the frame from a sync point, in number
* of frames.
* @is_sync_point: #TRUE if the frame is a synchronization point (like a
* keyframe)
* @is_eos: #TRUE if the frame is the last one of a segment.
* @decode_only: If #TRUE, the frame is only meant to be decoded but not
* pushed downstream
* @sink_buffer: input buffer
* @src_buffer: output buffer
* @field_index: Number of fields since beginning of stream
* @n_fields: Number of fields present in frame (default 2)
* @coder_hook: Private data called with @coder_hook_destroy_notify
* @coder_hook_destroy_notify: Called when frame is destroyed
* @deadline: Target clock time for display (running time)
* @force_keyframe: For encoders, if #TRUE a keyframe must be generated
* @force_keyframe_headers: For encoders, if #TRUE new headers must be generated
* @events: List of #GstEvent that must be pushed before the next @src_buffer
*
* State of a video frame going through the codec
**/
struct _GstVideoFrameState
{
/*< private >*/
gint ref_count;
/*< public >*/
GstClockTime decode_timestamp;
GstClockTime presentation_timestamp;
GstClockTime presentation_duration;
gint system_frame_number;
gint decode_frame_number;
gint presentation_frame_number;
int distance_from_sync;
gboolean is_sync_point;
gboolean is_eos;
/* Frames that should not be pushed downstream and are
* not meant for display */
gboolean decode_only;
GstBuffer *sink_buffer;
GstBuffer *src_buffer;
int field_index;
int n_fields;
void *coder_hook;
GDestroyNotify coder_hook_destroy_notify;
GstClockTime deadline;
gboolean force_keyframe;
gboolean force_keyframe_headers;
/* Events that should be pushed downstream *before*
* the next src_buffer */
GList *events;
};
/**
* GstBaseVideoCodec:
*
* The opaque #GstBaseVideoCodec data structure.
*/
struct _GstBaseVideoCodec
{
/*< private >*/
GstElement element;
/*< protected >*/
GstPad *sinkpad;
GstPad *srcpad;
/* protects all data processing, i.e. is locked
* in the chain function, finish_frame and when
* processing serialized events */
GRecMutex stream_lock;
guint64 system_frame_number;
GList *frames; /* Protected with OBJECT_LOCK */
GstVideoState state; /* Compressed video pad */
GstVideoInfo info; /* Raw video pad */
GstSegment segment;
/* QoS properties */
gdouble proportion;
GstClockTime earliest_time;
gboolean discont;
gint64 bytes;
gint64 time;
/* FIXME before moving to base */
void *padding[GST_PADDING_LARGE];
};
/**
* GstBaseVideoCodecClass:
*
* The opaque #GstBaseVideoCodecClass data structure.
*/
struct _GstBaseVideoCodecClass
{
/*< private >*/
GstElementClass element_class;
/* FIXME before moving to base */
void *padding[GST_PADDING_LARGE];
};
GType gst_video_frame_state_get_type (void);
GType gst_base_video_codec_get_type (void);
void gst_base_video_codec_append_frame (GstBaseVideoCodec *codec, GstVideoFrameState *frame);
void gst_base_video_codec_remove_frame (GstBaseVideoCodec *codec, GstVideoFrameState *frame);
GstVideoFrameState * gst_base_video_codec_new_frame (GstBaseVideoCodec *base_video_codec);
GstVideoFrameState * gst_video_frame_state_ref (GstVideoFrameState * frame);
void gst_video_frame_state_unref (GstVideoFrameState * frame);
G_END_DECLS
#endif

File diff suppressed because it is too large Load diff

View file

@ -1,291 +0,0 @@
/* GStreamer
* Copyright (C) 2008 David Schleef <ds@schleef.org>
* Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
* Copyright (C) 2011 Nokia Corporation. All rights reserved.
* Contact: Stefan Kost <stefan.kost@nokia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_BASE_VIDEO_DECODER_H_
#define _GST_BASE_VIDEO_DECODER_H_
#ifndef GST_USE_UNSTABLE_API
#warning "GstBaseVideoDecoder is unstable API and may change in future."
#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
#endif
#include "gstbasevideocodec.h"
G_BEGIN_DECLS
#define GST_TYPE_BASE_VIDEO_DECODER \
(gst_base_video_decoder_get_type())
#define GST_BASE_VIDEO_DECODER(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoder))
#define GST_BASE_VIDEO_DECODER_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoderClass))
#define GST_BASE_VIDEO_DECODER_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_BASE_VIDEO_DECODER,GstBaseVideoDecoderClass))
#define GST_IS_BASE_VIDEO_DECODER(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BASE_VIDEO_DECODER))
/* Fixed: the parameter was named "obj" while the expansion referenced
 * "klass", so any invocation whose argument was not literally named
 * "klass" failed to compile. */
#define GST_IS_BASE_VIDEO_DECODER_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BASE_VIDEO_DECODER))
/**
* GST_BASE_VIDEO_DECODER_SINK_NAME:
*
* The name of the templates for the sink pad.
*/
#define GST_BASE_VIDEO_DECODER_SINK_NAME "sink"
/**
* GST_BASE_VIDEO_DECODER_SRC_NAME:
*
* The name of the templates for the source pad.
*/
#define GST_BASE_VIDEO_DECODER_SRC_NAME "src"
/**
* GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA:
*
* Returned while parsing to indicate more data is needed.
**/
#define GST_BASE_VIDEO_DECODER_FLOW_NEED_DATA GST_FLOW_CUSTOM_SUCCESS
/**
* GST_BASE_VIDEO_DECODER_FLOW_DROPPED:
*
* Returned when the event/buffer should be dropped.
*/
#define GST_BASE_VIDEO_DECODER_FLOW_DROPPED GST_FLOW_CUSTOM_SUCCESS_1
typedef struct _GstBaseVideoDecoder GstBaseVideoDecoder;
typedef struct _GstBaseVideoDecoderClass GstBaseVideoDecoderClass;
/* do not use this one, use macro below */
GstFlowReturn _gst_base_video_decoder_error (GstBaseVideoDecoder *dec, gint weight,
GQuark domain, gint code,
gchar *txt, gchar *debug,
const gchar *file, const gchar *function,
gint line);
/**
* GST_BASE_VIDEO_DECODER_ERROR:
* @el: the base video decoder element that generates the error
* @weight: element defined weight of the error, added to error count
* @domain: like CORE, LIBRARY, RESOURCE or STREAM (see #gstreamer-GstGError)
* @code: error code defined for that domain (see #gstreamer-GstGError)
* @text: the message to display (format string and args enclosed in
* parentheses)
* @debug: debugging information for the message (format string and args
* enclosed in parentheses)
* @ret: variable to receive return value
*
* Utility function that video decoder elements can use in case they encountered
* a data processing error that may be fatal for the current "data unit" but
* need not prevent subsequent decoding. Such errors are counted and if there
* are too many, as configured in the context's max_errors, the pipeline will
* post an error message and the application will be requested to stop further
* media processing. Otherwise, it is considered a "glitch" and only a warning
* is logged. In either case, @ret is set to the proper value to
* return to upstream/caller (indicating either GST_FLOW_ERROR or GST_FLOW_OK).
*/
#define GST_BASE_VIDEO_DECODER_ERROR(el, w, domain, code, text, debug, ret) \
G_STMT_START { \
gchar *__txt = _gst_element_error_printf text; \
gchar *__dbg = _gst_element_error_printf debug; \
GstBaseVideoDecoder *dec = GST_BASE_VIDEO_DECODER (el); \
ret = _gst_base_video_decoder_error (dec, w, GST_ ## domain ## _ERROR, \
GST_ ## domain ## _ERROR_ ## code, __txt, __dbg, __FILE__, \
GST_FUNCTION, __LINE__); \
} G_STMT_END
/**
* GstBaseVideoDecoder:
*
* The opaque #GstBaseVideoDecoder data structure.
*/
struct _GstBaseVideoDecoder
{
/*< private >*/
GstBaseVideoCodec base_video_codec;
/*< protected >*/
gboolean sink_clipping;
gboolean do_byte_time;
gboolean packetized;
gint max_errors;
/* parse tracking */
/* input data */
GstAdapter *input_adapter;
/* assembles current frame */
GstAdapter *output_adapter;
/*< private >*/
/* FIXME move to real private part ?
* (and introduce a context ?) */
/* ... being tracked here;
* only available during parsing */
/* FIXME remove and add parameter to method */
GstVideoFrameState *current_frame;
/* events that should apply to the current frame */
GList *current_frame_events;
/* relative offset of input data */
guint64 input_offset;
/* relative offset of frame */
guint64 frame_offset;
/* tracking ts and offsets */
GList *timestamps;
/* whether parsing is in sync */
gboolean have_sync;
/* maybe sort-of protected ? */
/* combine to yield (presentation) ts */
GstClockTime timestamp_offset;
int field_index;
/* last outgoing ts */
GstClockTime last_timestamp;
gint error_count;
/* reverse playback */
/* collect input */
GList *gather;
/* to-be-parsed */
GList *parse;
/* collected parsed frames */
GList *parse_gather;
/* frames to be handled == decoded */
GList *decode;
/* collected output */
GList *queued;
gboolean process;
/* no comment ... */
guint64 base_picture_number;
int reorder_depth;
int distance_from_sync;
/* Raw video bufferpool */
GstBufferPool *pool;
/* Indicates whether downstream can handle
* GST_META_API_VIDEO_CROP */
gboolean use_cropping;
/* qos messages: frames dropped/processed */
guint dropped;
guint processed;
/* FIXME before moving to base */
void *padding[GST_PADDING_LARGE];
};
/**
* GstBaseVideoDecoderClass:
* @start: Optional.
* Called when the element starts processing.
* Allows opening external resources.
* @stop: Optional.
* Called when the element stops processing.
* Allows closing external resources.
* @set_format: Notifies subclass of incoming data format (caps).
* @scan_for_sync: Optional.
* Allows subclass to obtain sync for subsequent parsing
* by custom means (above and beyond scanning for a specific
* marker and mask).
* @parse_data: Required for non-packetized input.
* Allows chopping incoming data into manageable units (frames)
* for subsequent decoding.
* @reset: Optional.
* Allows subclass (codec) to perform post-seek semantics reset.
* @handle_frame: Provides input data frame to subclass.
* @finish: Optional.
* Called to request subclass to dispatch any pending remaining
* data (e.g. at EOS).
*
* Subclasses can override any of the available virtual methods or not, as
* needed. At minimum @handle_frame needs to be overridden, and @set_format
* likely as well. If non-packetized input is supported or expected,
* @parse_data needs to be overridden as well.
*/
struct _GstBaseVideoDecoderClass
{
/*< private >*/
GstBaseVideoCodecClass base_video_codec_class;
/*< public >*/
gboolean (*start) (GstBaseVideoDecoder *coder);
gboolean (*stop) (GstBaseVideoDecoder *coder);
int (*scan_for_sync) (GstBaseVideoDecoder *decoder, gboolean at_eos,
int offset, int n);
GstFlowReturn (*parse_data) (GstBaseVideoDecoder *decoder, gboolean at_eos);
gboolean (*set_format) (GstBaseVideoDecoder *coder, GstVideoState * state);
gboolean (*reset) (GstBaseVideoDecoder *coder);
GstFlowReturn (*finish) (GstBaseVideoDecoder *coder);
GstFlowReturn (*handle_frame) (GstBaseVideoDecoder *coder, GstVideoFrameState *frame);
/*< private >*/
guint32 capture_mask;
guint32 capture_pattern;
/* FIXME before moving to base */
void *padding[GST_PADDING_LARGE];
};
void gst_base_video_decoder_class_set_capture_pattern (GstBaseVideoDecoderClass *base_video_decoder_class,
guint32 mask, guint32 pattern);
GstVideoFrameState *gst_base_video_decoder_get_frame (GstBaseVideoDecoder *coder,
int frame_number);
GstVideoFrameState *gst_base_video_decoder_get_oldest_frame (GstBaseVideoDecoder *coder);
void gst_base_video_decoder_add_to_frame (GstBaseVideoDecoder *base_video_decoder,
int n_bytes);
void gst_base_video_decoder_lost_sync (GstBaseVideoDecoder *base_video_decoder);
GstFlowReturn gst_base_video_decoder_have_frame (GstBaseVideoDecoder *base_video_decoder);
void gst_base_video_decoder_set_sync_point (GstBaseVideoDecoder *base_video_decoder);
gboolean gst_base_video_decoder_set_src_caps (GstBaseVideoDecoder *base_video_decoder);
GstBuffer *gst_base_video_decoder_alloc_src_buffer (GstBaseVideoDecoder * base_video_decoder);
GstFlowReturn gst_base_video_decoder_alloc_src_frame (GstBaseVideoDecoder *base_video_decoder,
GstVideoFrameState *frame);
GstVideoState *gst_base_video_decoder_get_state (GstBaseVideoDecoder *base_video_decoder);
GstClockTimeDiff gst_base_video_decoder_get_max_decode_time (
GstBaseVideoDecoder *base_video_decoder,
GstVideoFrameState *frame);
GstFlowReturn gst_base_video_decoder_finish_frame (GstBaseVideoDecoder *base_video_decoder,
GstVideoFrameState *frame);
GstFlowReturn gst_base_video_decoder_drop_frame (GstBaseVideoDecoder *dec,
GstVideoFrameState *frame);
GType gst_base_video_decoder_get_type (void);
G_END_DECLS
#endif

File diff suppressed because it is too large Load diff

View file

@ -1,185 +0,0 @@
/* GStreamer
* Copyright (C) 2008 David Schleef <ds@schleef.org>
* Copyright (C) 2011 Mark Nauwelaerts <mark.nauwelaerts@collabora.co.uk>.
* Copyright (C) 2011 Nokia Corporation. All rights reserved.
* Contact: Stefan Kost <stefan.kost@nokia.com>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_BASE_VIDEO_ENCODER_H_
#define _GST_BASE_VIDEO_ENCODER_H_
#ifndef GST_USE_UNSTABLE_API
#warning "GstBaseVideoEncoder is unstable API and may change in future."
#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
#endif
#include "gstbasevideocodec.h"
G_BEGIN_DECLS
#define GST_TYPE_BASE_VIDEO_ENCODER \
(gst_base_video_encoder_get_type())
#define GST_BASE_VIDEO_ENCODER(obj) \
(G_TYPE_CHECK_INSTANCE_CAST((obj),GST_TYPE_BASE_VIDEO_ENCODER,GstBaseVideoEncoder))
#define GST_BASE_VIDEO_ENCODER_CLASS(klass) \
(G_TYPE_CHECK_CLASS_CAST((klass),GST_TYPE_BASE_VIDEO_ENCODER,GstBaseVideoEncoderClass))
#define GST_BASE_VIDEO_ENCODER_GET_CLASS(obj) \
(G_TYPE_INSTANCE_GET_CLASS((obj),GST_TYPE_BASE_VIDEO_ENCODER,GstBaseVideoEncoderClass))
#define GST_IS_BASE_VIDEO_ENCODER(obj) \
(G_TYPE_CHECK_INSTANCE_TYPE((obj),GST_TYPE_BASE_VIDEO_ENCODER))
/* Fixed: the parameter was named "obj" while the expansion referenced
 * "klass", so any invocation whose argument was not literally named
 * "klass" failed to compile. */
#define GST_IS_BASE_VIDEO_ENCODER_CLASS(klass) \
  (G_TYPE_CHECK_CLASS_TYPE((klass),GST_TYPE_BASE_VIDEO_ENCODER))
/**
* GST_BASE_VIDEO_ENCODER_SINK_NAME:
*
* The name of the templates for the sink pad.
*/
#define GST_BASE_VIDEO_ENCODER_SINK_NAME "sink"
/**
* GST_BASE_VIDEO_ENCODER_SRC_NAME:
*
* The name of the templates for the source pad.
*/
#define GST_BASE_VIDEO_ENCODER_SRC_NAME "src"
/**
* GST_BASE_VIDEO_ENCODER_FLOW_DROPPED:
*
* Returned when the event/buffer should be dropped.
*/
#define GST_BASE_VIDEO_ENCODER_FLOW_DROPPED GST_FLOW_CUSTOM_SUCCESS_1
typedef struct _GstBaseVideoEncoder GstBaseVideoEncoder;
typedef struct _GstBaseVideoEncoderClass GstBaseVideoEncoderClass;
/**
* GstBaseVideoEncoder:
*
* The opaque #GstBaseVideoEncoder data structure.
*/
struct _GstBaseVideoEncoder
{
  /*< private >*/
  GstBaseVideoCodec base_video_codec;   /* parent instance */

  /*< protected >*/
  /* NOTE(review): presumably clips input buffers to the configured segment
   * on the sink pad — confirm against gstbasevideoencoder.c */
  gboolean sink_clipping;
  guint64 presentation_frame_number;
  int distance_from_sync;               /* presumably frame count since the
                                         * last sync point — TODO confirm */

  /*< private >*/
  /* FIXME move to real private part ?
   * (and introduce a context ?) */
  gboolean drained;                     /* set once pending data was dispatched */
  gboolean at_eos;                      /* EOS was received on the sink pad */
  gint64 min_latency;                   /* latency pair reported upstream; set
                                         * via gst_base_video_encoder_set_latency() */
  gint64 max_latency;
  GList *current_frame_events;          /* serialized events received before
                                         * the frame they belong to */
  GstBuffer *headers;                   /* stream headers; set via
                                         * gst_base_video_encoder_set_headers() */
  GList *force_key_unit; /* List of pending forced keyunits */
  void *padding[GST_PADDING_LARGE];     /* ABI padding for future extension */
};
/**
* GstBaseVideoEncoderClass:
* @start: Optional.
* Called when the element starts processing.
* Allows opening external resources.
* @stop: Optional.
* Called when the element stops processing.
* Allows closing external resources.
* @set_format: Optional.
* Notifies subclass of incoming data format.
* GstVideoInfo fields have already been
* set according to provided caps.
* @handle_frame: Provides input frame to subclass.
* @reset: Optional.
* Allows subclass (codec) to perform post-seek semantics reset.
* @finish: Optional.
* Called to request subclass to dispatch any pending remaining
* data (e.g. at EOS).
* @shape_output: Optional.
* Allows subclass to push frame downstream in whatever
* shape or form it deems appropriate. If not provided,
* provided encoded frame data is simply pushed downstream.
* @event: Optional.
* Event handler on the sink pad. This function should return
* TRUE if the event was handled and should be discarded
* (i.e. not unref'ed).
*
* Subclasses can override any of the available virtual methods or not, as
* needed. At minimum @handle_frame needs to be overridden, and @set_format
* and @get_caps are likely needed as well.
*/
struct _GstBaseVideoEncoderClass
{
  /*< private >*/
  GstBaseVideoCodecClass base_video_codec_class;  /* parent class */

  /*< public >*/
  /* virtual methods for subclasses; each vmethod's contract is documented
   * in the GstBaseVideoEncoderClass gtk-doc comment preceding this struct */
  gboolean      (*start)        (GstBaseVideoEncoder *coder);
  gboolean      (*stop)         (GstBaseVideoEncoder *coder);
  gboolean      (*set_format)   (GstBaseVideoEncoder *coder,
                                 GstVideoInfo *info);
  GstFlowReturn (*handle_frame) (GstBaseVideoEncoder *coder,
                                 GstVideoFrameState *frame);
  gboolean      (*reset)        (GstBaseVideoEncoder *coder);
  GstFlowReturn (*finish)       (GstBaseVideoEncoder *coder);
  GstFlowReturn (*shape_output) (GstBaseVideoEncoder *coder,
                                 GstVideoFrameState *frame);
  gboolean      (*event)        (GstBaseVideoEncoder *coder,
                                 GstEvent *event);

  /*< private >*/
  /* FIXME before moving to base */
  gpointer _gst_reserved[GST_PADDING_LARGE];      /* ABI padding */
};
GType gst_base_video_encoder_get_type (void);
const GstVideoState* gst_base_video_encoder_get_state (GstBaseVideoEncoder *base_video_encoder);
GstVideoFrameState* gst_base_video_encoder_get_oldest_frame (GstBaseVideoEncoder *coder);
GstFlowReturn gst_base_video_encoder_finish_frame (GstBaseVideoEncoder *base_video_encoder,
GstVideoFrameState *frame);
void gst_base_video_encoder_set_latency (GstBaseVideoEncoder *base_video_encoder,
GstClockTime min_latency, GstClockTime max_latency);
void gst_base_video_encoder_set_latency_fields (GstBaseVideoEncoder *base_video_encoder,
int n_fields);
void gst_base_video_encoder_set_headers (GstBaseVideoEncoder *base_video_encoder,
GstBuffer *headers);
G_END_DECLS
#endif

View file

@ -1,159 +0,0 @@
/* GStreamer
* Copyright (C) 2008 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "gstbasevideoutils.h"
#include <string.h>
GST_DEBUG_CATEGORY_EXTERN (basevideocodec_debug);
#define GST_CAT_DEFAULT basevideocodec_debug
/* Convert @src_value between GST_FORMAT_BYTES / GST_FORMAT_DEFAULT (frames) /
 * GST_FORMAT_TIME for raw video, using the picture size and framerate stored
 * in @state.  Identity conversions and the sentinel values 0 and -1 pass
 * through unchanged.  Returns TRUE if the conversion was performed, FALSE if
 * the format pair is unsupported or @state lacks the needed fields. */
gboolean
gst_base_video_rawvideo_convert (GstVideoState * state,
    GstFormat src_format, gint64 src_value,
    GstFormat * dest_format, gint64 * dest_value)
{
  gboolean res = FALSE;

  g_return_val_if_fail (dest_format != NULL, FALSE);
  g_return_val_if_fail (dest_value != NULL, FALSE);

  /* identity conversion, or pass sentinel values through untouched */
  if (src_format == *dest_format || src_value == 0 || src_value == -1) {
    *dest_value = src_value;
    return TRUE;
  }

  if (src_format == GST_FORMAT_BYTES &&
      *dest_format == GST_FORMAT_DEFAULT && state->bytes_per_picture != 0) {
    /* convert bytes to frames */
    *dest_value = gst_util_uint64_scale_int (src_value, 1,
        state->bytes_per_picture);
    res = TRUE;
  } else if (src_format == GST_FORMAT_DEFAULT &&
      *dest_format == GST_FORMAT_BYTES && state->bytes_per_picture != 0) {
    /* convert frames to bytes */
    *dest_value = src_value * state->bytes_per_picture;
    res = TRUE;
  } else if (src_format == GST_FORMAT_DEFAULT &&
      *dest_format == GST_FORMAT_TIME && state->fps_n != 0) {
    /* convert frames to time */
    /* FIXME add segment time? */
    *dest_value = gst_util_uint64_scale (src_value,
        GST_SECOND * state->fps_d, state->fps_n);
    res = TRUE;
  } else if (src_format == GST_FORMAT_TIME &&
      *dest_format == GST_FORMAT_DEFAULT && state->fps_d != 0) {
    /* convert time to frames */
    /* FIXME subtract segment time? */
    *dest_value = gst_util_uint64_scale (src_value, state->fps_n,
        GST_SECOND * state->fps_d);
    res = TRUE;
  } else if (src_format == GST_FORMAT_TIME &&
      *dest_format == GST_FORMAT_BYTES && state->fps_d != 0 &&
      state->bytes_per_picture != 0) {
    /* convert time to bytes */
    /* FIXME subtract segment time? */
    *dest_value = gst_util_uint64_scale (src_value,
        state->fps_n * state->bytes_per_picture, GST_SECOND * state->fps_d);
    res = TRUE;
  } else if (src_format == GST_FORMAT_BYTES &&
      *dest_format == GST_FORMAT_TIME && state->fps_n != 0 &&
      state->bytes_per_picture != 0) {
    /* convert bytes to time */
    /* FIXME add segment time? */
    *dest_value = gst_util_uint64_scale (src_value,
        GST_SECOND * state->fps_d, state->fps_n * state->bytes_per_picture);
    res = TRUE;
  }

  return res;
}
/* Convert @src_value between GST_FORMAT_BYTES and GST_FORMAT_TIME for encoded
 * video, interpolating linearly from the totals @bytes and @time accumulated
 * so far (@state is unused, kept for API symmetry with the rawvideo variant).
 * Identity conversions and the sentinel values 0 and -1 pass through.
 * Returns TRUE on success, FALSE if the format pair is unsupported or no
 * byte/time metadata has been gathered yet. */
gboolean
gst_base_video_encoded_video_convert (GstVideoState * state,
    gint64 bytes, gint64 time, GstFormat src_format,
    gint64 src_value, GstFormat * dest_format, gint64 * dest_value)
{
  gboolean res = FALSE;

  g_return_val_if_fail (dest_format != NULL, FALSE);
  g_return_val_if_fail (dest_value != NULL, FALSE);

  /* identity conversion, or pass sentinel values through untouched.
   * dest_value is guaranteed non-NULL by the precondition above, so no
   * additional NULL check is needed before writing through it. */
  if (G_UNLIKELY (src_format == *dest_format || src_value == 0 ||
          src_value == -1)) {
    *dest_value = src_value;
    return TRUE;
  }

  if (bytes <= 0 || time <= 0) {
    GST_DEBUG ("not enough metadata yet to convert");
    goto exit;
  }

  switch (src_format) {
    case GST_FORMAT_BYTES:
      switch (*dest_format) {
        case GST_FORMAT_TIME:
          /* linear estimate: time per byte observed so far */
          *dest_value = gst_util_uint64_scale (src_value, time, bytes);
          res = TRUE;
          break;
        default:
          res = FALSE;
          break;
      }
      break;
    case GST_FORMAT_TIME:
      switch (*dest_format) {
        case GST_FORMAT_BYTES:
          /* linear estimate: bytes per unit of time observed so far */
          *dest_value = gst_util_uint64_scale (src_value, bytes, time);
          res = TRUE;
          break;
        default:
          res = FALSE;
          break;
      }
      break;
    default:
      GST_DEBUG ("unhandled conversion from %d to %d", src_format,
          *dest_format);
      res = FALSE;
      break;
  }

exit:
  return res;
}
/* Map @frame_number to a timestamp relative to @segment->start, using the
 * framerate in @state.  Negative frame numbers yield timestamps before the
 * segment start. */
GstClockTime
gst_video_state_get_timestamp (const GstVideoState * state,
    GstSegment * segment, int frame_number)
{
  guint64 num = state->fps_d * GST_SECOND;
  guint64 offset;

  if (frame_number >= 0) {
    offset = gst_util_uint64_scale (frame_number, num, state->fps_n);
    return segment->start + offset;
  }

  /* scale the magnitude, then subtract so the unsigned scaling helper
   * never sees a negative value */
  offset = gst_util_uint64_scale (-frame_number, num, state->fps_n);
  return segment->start - (gint64) offset;
}

View file

@ -1,46 +0,0 @@
/* GStreamer
* Copyright (C) 2008 David Schleef <ds@schleef.org>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Library General Public
* License as published by the Free Software Foundation; either
* version 2 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Library General Public License for more details.
*
* You should have received a copy of the GNU Library General Public
* License along with this library; if not, write to the
* Free Software Foundation, Inc., 59 Temple Place - Suite 330,
* Boston, MA 02111-1307, USA.
*/
#ifndef _GST_BASE_VIDEO_UTILS_H_
#define _GST_BASE_VIDEO_UTILS_H_

#ifndef GST_USE_UNSTABLE_API
#warning "GstBaseVideoCodec is unstable API and may change in future."
#warning "You can define GST_USE_UNSTABLE_API to avoid this warning."
#endif

#include <gst/gst.h>
#include <gst/video/video.h>

#include "gstbasevideocodec.h"

G_BEGIN_DECLS

/* Convert a value between bytes / frames / time for raw video, based on the
 * picture size and framerate in @state. */
gboolean gst_base_video_rawvideo_convert (GstVideoState *state,
    GstFormat src_format, gint64 src_value,
    GstFormat * dest_format, gint64 *dest_value);

/* Convert a value between bytes and time for encoded video by linear
 * interpolation from the running totals @bytes and @time. */
gboolean gst_base_video_encoded_video_convert (GstVideoState * state,
    gint64 bytes, gint64 time, GstFormat src_format,
    gint64 src_value, GstFormat * dest_format, gint64 * dest_value);

/* Compute the timestamp of @frame_number relative to @segment->start. */
GstClockTime gst_video_state_get_timestamp (const GstVideoState *state,
    GstSegment *segment, int frame_number);

G_END_DECLS

#endif

View file

@ -31,9 +31,9 @@ GST_DEBUG_CATEGORY_STATIC (gst_omx_h263_dec_debug_category);
/* prototypes */
static gboolean gst_omx_h263_dec_is_format_change (GstOMXVideoDec * dec,
GstOMXPort * port, GstVideoState * state);
GstOMXPort * port, GstVideoCodecState * state);
static gboolean gst_omx_h263_dec_set_format (GstOMXVideoDec * dec,
GstOMXPort * port, GstVideoState * state);
GstOMXPort * port, GstVideoCodecState * state);
enum
{
@ -78,14 +78,14 @@ gst_omx_h263_dec_init (GstOMXH263Dec * self)
static gboolean
gst_omx_h263_dec_is_format_change (GstOMXVideoDec * dec,
GstOMXPort * port, GstVideoState * state)
GstOMXPort * port, GstVideoCodecState * state)
{
return FALSE;
}
static gboolean
gst_omx_h263_dec_set_format (GstOMXVideoDec * dec, GstOMXPort * port,
GstVideoState * state)
GstVideoCodecState * state)
{
gboolean ret;
OMX_PARAM_PORTDEFINITIONTYPE port_def;

View file

@ -31,9 +31,9 @@ GST_DEBUG_CATEGORY_STATIC (gst_omx_h263_enc_debug_category);
/* prototypes */
static gboolean gst_omx_h263_enc_set_format (GstOMXVideoEnc * enc,
GstOMXPort * port, GstVideoInfo * state);
GstOMXPort * port, GstVideoCodecState * state);
static GstCaps *gst_omx_h263_enc_get_caps (GstOMXVideoEnc * enc,
GstOMXPort * port, GstVideoState * state);
GstOMXPort * port, GstVideoCodecState * state);
enum
{
@ -77,7 +77,7 @@ gst_omx_h263_enc_init (GstOMXH263Enc * self)
static gboolean
gst_omx_h263_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port,
GstVideoInfo * info)
GstVideoCodecState * state)
{
GstOMXH263Enc *self = GST_OMX_H263_ENC (enc);
GstCaps *peercaps;
@ -87,8 +87,8 @@ gst_omx_h263_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port,
OMX_ERRORTYPE err;
guint profile_id, level_id;
peercaps = gst_pad_peer_query_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc),
gst_pad_get_pad_template_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc)));
peercaps = gst_pad_peer_query_caps (GST_VIDEO_ENCODER_SRC_PAD (enc),
gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (enc)));
if (peercaps) {
GstStructure *s;
@ -195,7 +195,7 @@ unsupported_level:
static GstCaps *
gst_omx_h263_enc_get_caps (GstOMXVideoEnc * enc, GstOMXPort * port,
GstVideoState * state)
GstVideoCodecState * state)
{
GstOMXH263Enc *self = GST_OMX_H263_ENC (enc);
GstCaps *caps;
@ -203,16 +203,7 @@ gst_omx_h263_enc_get_caps (GstOMXVideoEnc * enc, GstOMXPort * port,
OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
guint profile, level;
caps =
gst_caps_new_simple ("video/x-h263", "width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height, NULL);
if (state->fps_n != 0)
gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d, NULL);
if (state->par_n != 1 || state->par_d != 1)
gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
state->par_n, state->par_d, NULL);
caps = gst_caps_new_empty_simple ("video/x-h263");
GST_OMX_INIT_STRUCT (&param);
param.nPortIndex = GST_OMX_VIDEO_ENC (self)->out_port->index;

View file

@ -31,9 +31,9 @@ GST_DEBUG_CATEGORY_STATIC (gst_omx_h264_dec_debug_category);
/* prototypes */
static gboolean gst_omx_h264_dec_is_format_change (GstOMXVideoDec * dec,
GstOMXPort * port, GstVideoState * state);
GstOMXPort * port, GstVideoCodecState * state);
static gboolean gst_omx_h264_dec_set_format (GstOMXVideoDec * dec,
GstOMXPort * port, GstVideoState * state);
GstOMXPort * port, GstVideoCodecState * state);
enum
{
@ -79,14 +79,14 @@ gst_omx_h264_dec_init (GstOMXH264Dec * self)
static gboolean
gst_omx_h264_dec_is_format_change (GstOMXVideoDec * dec,
GstOMXPort * port, GstVideoState * state)
GstOMXPort * port, GstVideoCodecState * state)
{
return FALSE;
}
static gboolean
gst_omx_h264_dec_set_format (GstOMXVideoDec * dec, GstOMXPort * port,
GstVideoState * state)
GstVideoCodecState * state)
{
gboolean ret;
OMX_PARAM_PORTDEFINITIONTYPE port_def;

View file

@ -31,11 +31,11 @@ GST_DEBUG_CATEGORY_STATIC (gst_omx_h264_enc_debug_category);
/* prototypes */
static gboolean gst_omx_h264_enc_set_format (GstOMXVideoEnc * enc,
GstOMXPort * port, GstVideoInfo * info);
GstOMXPort * port, GstVideoCodecState * state);
static GstCaps *gst_omx_h264_enc_get_caps (GstOMXVideoEnc * enc,
GstOMXPort * port, GstVideoState * state);
GstOMXPort * port, GstVideoCodecState * state);
static GstFlowReturn gst_omx_h264_enc_handle_output_frame (GstOMXVideoEnc *
self, GstOMXPort * port, GstOMXBuffer * buf, GstVideoFrameState * frame);
self, GstOMXPort * port, GstOMXBuffer * buf, GstVideoCodecFrame * frame);
enum
{
@ -81,7 +81,7 @@ gst_omx_h264_enc_init (GstOMXH264Enc * self)
static gboolean
gst_omx_h264_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port,
GstVideoInfo * info)
GstVideoCodecState * state)
{
GstOMXH264Enc *self = GST_OMX_H264_ENC (enc);
GstCaps *peercaps;
@ -91,8 +91,8 @@ gst_omx_h264_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port,
OMX_ERRORTYPE err;
const gchar *profile_string, *level_string;
peercaps = gst_pad_peer_query_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc),
gst_pad_get_pad_template_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc)));
peercaps = gst_pad_peer_query_caps (GST_VIDEO_ENCODER_SRC_PAD (enc),
gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (enc)));
if (peercaps) {
GstStructure *s;
@ -197,7 +197,7 @@ unsupported_level:
static GstCaps *
gst_omx_h264_enc_get_caps (GstOMXVideoEnc * enc, GstOMXPort * port,
GstVideoState * state)
GstVideoCodecState * state)
{
GstOMXH264Enc *self = GST_OMX_H264_ENC (enc);
GstCaps *caps;
@ -205,16 +205,7 @@ gst_omx_h264_enc_get_caps (GstOMXVideoEnc * enc, GstOMXPort * port,
OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
const gchar *profile, *level;
caps =
gst_caps_new_simple ("video/x-h264", "width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height, NULL);
if (state->fps_n != 0)
gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d, NULL);
if (state->par_n != 1 || state->par_d != 1)
gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
state->par_n, state->par_d, NULL);
caps = gst_caps_new_empty_simple ("video/x-h264");
GST_OMX_INIT_STRUCT (&param);
param.nPortIndex = GST_OMX_VIDEO_ENC (self)->out_port->index;
@ -315,7 +306,7 @@ gst_omx_h264_enc_get_caps (GstOMXVideoEnc * enc, GstOMXPort * port,
static GstFlowReturn
gst_omx_h264_enc_handle_output_frame (GstOMXVideoEnc * self, GstOMXPort * port,
GstOMXBuffer * buf, GstVideoFrameState * frame)
GstOMXBuffer * buf, GstVideoCodecFrame * frame)
{
if (buf->omx_buf->nFlags & OMX_BUFFERFLAG_CODECCONFIG) {
/* The codec data is SPS/PPS with a startcode => bytestream stream format
@ -325,6 +316,7 @@ gst_omx_h264_enc_handle_output_frame (GstOMXVideoEnc * self, GstOMXPort * port,
if (buf->omx_buf->nFilledLen >= 4 &&
GST_READ_UINT32_BE (buf->omx_buf->pBuffer +
buf->omx_buf->nOffset) == 0x00000001) {
GList *l = NULL;
GstBuffer *hdrs;
GstMapInfo map = GST_MAP_INFO_INIT;
@ -338,8 +330,8 @@ gst_omx_h264_enc_handle_output_frame (GstOMXVideoEnc * self, GstOMXPort * port,
buf->omx_buf->pBuffer + buf->omx_buf->nOffset,
buf->omx_buf->nFilledLen);
gst_buffer_unmap (hdrs, &map);
gst_base_video_encoder_set_headers (GST_BASE_VIDEO_ENCODER (self), hdrs);
gst_buffer_unref (hdrs);
l = g_list_append (l, hdrs);
gst_video_encoder_set_headers (GST_VIDEO_ENCODER (self), l);
}
}

View file

@ -31,9 +31,9 @@ GST_DEBUG_CATEGORY_STATIC (gst_omx_mpeg4_video_dec_debug_category);
/* prototypes */
static gboolean gst_omx_mpeg4_video_dec_is_format_change (GstOMXVideoDec * dec,
GstOMXPort * port, GstVideoState * state);
GstOMXPort * port, GstVideoCodecState * state);
static gboolean gst_omx_mpeg4_video_dec_set_format (GstOMXVideoDec * dec,
GstOMXPort * port, GstVideoState * state);
GstOMXPort * port, GstVideoCodecState * state);
enum
{
@ -81,14 +81,14 @@ gst_omx_mpeg4_video_dec_init (GstOMXMPEG4VideoDec * self)
static gboolean
gst_omx_mpeg4_video_dec_is_format_change (GstOMXVideoDec * dec,
GstOMXPort * port, GstVideoState * state)
GstOMXPort * port, GstVideoCodecState * state)
{
return FALSE;
}
static gboolean
gst_omx_mpeg4_video_dec_set_format (GstOMXVideoDec * dec, GstOMXPort * port,
GstVideoState * state)
GstVideoCodecState * state)
{
gboolean ret;
OMX_PARAM_PORTDEFINITIONTYPE port_def;

View file

@ -31,9 +31,9 @@ GST_DEBUG_CATEGORY_STATIC (gst_omx_mpeg4_video_enc_debug_category);
/* prototypes */
static gboolean gst_omx_mpeg4_video_enc_set_format (GstOMXVideoEnc * enc,
GstOMXPort * port, GstVideoInfo * info);
GstOMXPort * port, GstVideoCodecState * state);
static GstCaps *gst_omx_mpeg4_video_enc_get_caps (GstOMXVideoEnc * enc,
GstOMXPort * port, GstVideoState * state);
GstOMXPort * port, GstVideoCodecState * state);
enum
{
@ -81,7 +81,7 @@ gst_omx_mpeg4_video_enc_init (GstOMXMPEG4VideoEnc * self)
static gboolean
gst_omx_mpeg4_video_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port,
GstVideoInfo * info)
GstVideoCodecState * state)
{
GstOMXMPEG4VideoEnc *self = GST_OMX_MPEG4_VIDEO_ENC (enc);
GstCaps *peercaps, *intersection;
@ -91,13 +91,13 @@ gst_omx_mpeg4_video_enc_set_format (GstOMXVideoEnc * enc, GstOMXPort * port,
OMX_ERRORTYPE err;
const gchar *profile_string, *level_string;
peercaps = gst_pad_peer_query_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc), NULL);
peercaps = gst_pad_peer_query_caps (GST_VIDEO_ENCODER_SRC_PAD (enc), NULL);
if (peercaps) {
GstStructure *s;
intersection =
gst_caps_intersect (peercaps,
gst_pad_get_pad_template_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (enc)));
gst_pad_get_pad_template_caps (GST_VIDEO_ENCODER_SRC_PAD (enc)));
gst_caps_unref (peercaps);
if (gst_caps_is_empty (intersection)) {
@ -204,7 +204,7 @@ unsupported_level:
static GstCaps *
gst_omx_mpeg4_video_enc_get_caps (GstOMXVideoEnc * enc, GstOMXPort * port,
GstVideoState * state)
GstVideoCodecState * state)
{
GstOMXMPEG4VideoEnc *self = GST_OMX_MPEG4_VIDEO_ENC (enc);
GstCaps *caps;
@ -214,15 +214,7 @@ gst_omx_mpeg4_video_enc_get_caps (GstOMXVideoEnc * enc, GstOMXPort * port,
caps =
gst_caps_new_simple ("video/mpeg", "mpegversion", G_TYPE_INT, 4,
"systemstream", G_TYPE_BOOLEAN, FALSE, "width", G_TYPE_INT, state->width,
"height", G_TYPE_INT, state->height, NULL);
if (state->fps_n != 0)
gst_caps_set_simple (caps, "framerate", GST_TYPE_FRACTION, state->fps_n,
state->fps_d, NULL);
if (state->par_n != 1 || state->par_d != 1)
gst_caps_set_simple (caps, "pixel-aspect-ratio", GST_TYPE_FRACTION,
state->par_n, state->par_d, NULL);
"systemstream", G_TYPE_BOOLEAN, FALSE, NULL);
GST_OMX_INIT_STRUCT (&param);
param.nPortIndex = GST_OMX_VIDEO_ENC (self)->out_port->index;

View file

@ -49,18 +49,18 @@ static GstStateChangeReturn
gst_omx_video_dec_change_state (GstElement * element,
GstStateChange transition);
static gboolean gst_omx_video_dec_start (GstBaseVideoDecoder * decoder);
static gboolean gst_omx_video_dec_stop (GstBaseVideoDecoder * decoder);
static gboolean gst_omx_video_dec_set_format (GstBaseVideoDecoder * decoder,
GstVideoState * state);
static gboolean gst_omx_video_dec_reset (GstBaseVideoDecoder * decoder);
static GstFlowReturn gst_omx_video_dec_parse_data (GstBaseVideoDecoder *
decoder, gboolean at_eos);
static GstFlowReturn gst_omx_video_dec_handle_frame (GstBaseVideoDecoder *
decoder, GstVideoFrameState * frame);
static GstFlowReturn gst_omx_video_dec_finish (GstBaseVideoDecoder * decoder);
static gboolean gst_omx_video_dec_start (GstVideoDecoder * decoder);
static gboolean gst_omx_video_dec_stop (GstVideoDecoder * decoder);
static gboolean gst_omx_video_dec_set_format (GstVideoDecoder * decoder,
GstVideoCodecState * state);
static gboolean gst_omx_video_dec_reset (GstVideoDecoder * decoder,
gboolean hard);
static GstFlowReturn gst_omx_video_dec_handle_frame (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame);
static GstFlowReturn gst_omx_video_dec_finish (GstVideoDecoder * decoder);
static GstFlowReturn gst_omx_video_dec_drain (GstOMXVideoDec * self);
static GstFlowReturn gst_omx_video_dec_drain (GstOMXVideoDec * self,
gboolean is_eos);
enum
{
@ -75,32 +75,28 @@ enum
G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstOMXVideoDec, gst_omx_video_dec,
GST_TYPE_BASE_VIDEO_DECODER, DEBUG_INIT);
GST_TYPE_VIDEO_DECODER, DEBUG_INIT);
static void
gst_omx_video_dec_class_init (GstOMXVideoDecClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstBaseVideoDecoderClass *base_video_decoder_class =
GST_BASE_VIDEO_DECODER_CLASS (klass);
GstVideoDecoderClass *video_decoder_class = GST_VIDEO_DECODER_CLASS (klass);
gobject_class->finalize = gst_omx_video_dec_finalize;
element_class->change_state =
GST_DEBUG_FUNCPTR (gst_omx_video_dec_change_state);
base_video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_omx_video_dec_start);
base_video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_omx_video_dec_stop);
base_video_decoder_class->reset = GST_DEBUG_FUNCPTR (gst_omx_video_dec_reset);
base_video_decoder_class->set_format =
video_decoder_class->start = GST_DEBUG_FUNCPTR (gst_omx_video_dec_start);
video_decoder_class->stop = GST_DEBUG_FUNCPTR (gst_omx_video_dec_stop);
video_decoder_class->reset = GST_DEBUG_FUNCPTR (gst_omx_video_dec_reset);
video_decoder_class->set_format =
GST_DEBUG_FUNCPTR (gst_omx_video_dec_set_format);
base_video_decoder_class->parse_data =
GST_DEBUG_FUNCPTR (gst_omx_video_dec_parse_data);
base_video_decoder_class->handle_frame =
video_decoder_class->handle_frame =
GST_DEBUG_FUNCPTR (gst_omx_video_dec_handle_frame);
base_video_decoder_class->finish =
GST_DEBUG_FUNCPTR (gst_omx_video_dec_finish);
video_decoder_class->finish = GST_DEBUG_FUNCPTR (gst_omx_video_dec_finish);
klass->cdata.default_src_template_caps = "video/x-raw, "
"width = " GST_VIDEO_SIZE_RANGE ", "
@ -110,7 +106,7 @@ gst_omx_video_dec_class_init (GstOMXVideoDecClass * klass)
static void
gst_omx_video_dec_init (GstOMXVideoDec * self)
{
GST_BASE_VIDEO_DECODER (self)->packetized = TRUE;
gst_video_decoder_set_packetized (GST_VIDEO_DECODER (self), TRUE);
self->drain_lock = g_mutex_new ();
self->drain_cond = g_cond_new ();
@ -277,19 +273,22 @@ gst_omx_video_dec_change_state (GstElement * element, GstStateChange transition)
#define MAX_FRAME_DIST_TICKS (5 * OMX_TICKS_PER_SECOND)
#define MAX_FRAME_DIST_FRAMES (100)
static GstVideoFrameState *
static GstVideoCodecFrame *
_find_nearest_frame (GstOMXVideoDec * self, GstOMXBuffer * buf)
{
GList *l, *best_l = NULL;
GList *finish_frames = NULL;
GstVideoFrameState *best = NULL;
GstVideoCodecFrame *best = NULL;
guint64 best_timestamp = 0;
guint64 best_diff = G_MAXUINT64;
BufferIdentification *best_id = NULL;
GList *frames;
for (l = GST_BASE_VIDEO_CODEC (self)->frames; l; l = l->next) {
GstVideoFrameState *tmp = l->data;
BufferIdentification *id = tmp->coder_hook;
frames = gst_video_decoder_get_frames (GST_VIDEO_DECODER (self));
for (l = frames; l; l = l->next) {
GstVideoCodecFrame *tmp = l->data;
BufferIdentification *id = gst_video_codec_frame_get_user_data (tmp);
guint64 timestamp, diff;
/* This happens for frames that were just added but
@ -320,9 +319,9 @@ _find_nearest_frame (GstOMXVideoDec * self, GstOMXBuffer * buf)
}
if (best_id) {
for (l = GST_BASE_VIDEO_CODEC (self)->frames; l && l != best_l; l = l->next) {
GstVideoFrameState *tmp = l->data;
BufferIdentification *id = tmp->coder_hook;
for (l = frames; l && l != best_l; l = l->next) {
GstVideoCodecFrame *tmp = l->data;
BufferIdentification *id = gst_video_codec_frame_get_user_data (tmp);
guint64 diff_ticks, diff_frames;
if (id->timestamp > best_timestamp)
@ -344,11 +343,16 @@ _find_nearest_frame (GstOMXVideoDec * self, GstOMXBuffer * buf)
if (finish_frames) {
g_warning ("Too old frames, bug in decoder -- please file a bug");
for (l = finish_frames; l; l = l->next) {
gst_base_video_decoder_finish_frame (GST_BASE_VIDEO_DECODER (self),
l->data);
gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), l->data);
}
}
if (best)
gst_video_codec_frame_ref (best);
g_list_foreach (frames, (GFunc) gst_video_codec_frame_unref, NULL);
g_list_free (frames);
return best;
}
@ -356,14 +360,15 @@ static gboolean
gst_omx_video_dec_fill_buffer (GstOMXVideoDec * self, GstOMXBuffer * inbuf,
GstBuffer * outbuf)
{
GstVideoState *state = &GST_BASE_VIDEO_CODEC (self)->state;
GstVideoCodecState *state =
gst_video_decoder_get_output_state (GST_VIDEO_DECODER (self));
GstVideoInfo *vinfo = &state->info;
OMX_PARAM_PORTDEFINITIONTYPE *port_def = &self->out_port->port_def;
gboolean ret = FALSE;
GstVideoInfo vinfo;
GstVideoFrame frame;
if (state->width != port_def->format.video.nFrameWidth ||
state->height != port_def->format.video.nFrameHeight) {
if (vinfo->width != port_def->format.video.nFrameWidth ||
vinfo->height != port_def->format.video.nFrameHeight) {
GST_ERROR_OBJECT (self, "Width or height do not match");
goto done;
}
@ -383,25 +388,24 @@ gst_omx_video_dec_fill_buffer (GstOMXVideoDec * self, GstOMXBuffer * inbuf,
/* Different strides */
gst_video_info_from_caps (&vinfo, state->caps);
switch (state->format) {
switch (vinfo->finfo->format) {
case GST_VIDEO_FORMAT_I420:{
gint i, j, height;
guint8 *src, *dest;
gint src_stride, dest_stride;
gst_video_frame_map (&frame, vinfo, outbuf, GST_MAP_WRITE);
for (i = 0; i < 3; i++) {
if (i == 0) {
src_stride = port_def->format.video.nStride;
dest_stride = vinfo.stride[0];
dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, i);
/* XXX: Try this if no stride was set */
if (src_stride == 0)
src_stride = dest_stride;
} else {
src_stride = port_def->format.video.nStride / 2;
dest_stride = vinfo.stride[1];
dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, i);
/* XXX: Try this if no stride was set */
if (src_stride == 0)
@ -418,7 +422,6 @@ gst_omx_video_dec_fill_buffer (GstOMXVideoDec * self, GstOMXBuffer * inbuf,
(port_def->format.video.nSliceHeight / 2) *
(port_def->format.video.nStride / 2);
gst_video_frame_map (&frame, &vinfo, outbuf, GST_MAP_WRITE);
dest = GST_VIDEO_FRAME_COMP_DATA (&frame, i);
height = GST_VIDEO_FRAME_HEIGHT (&frame);
@ -427,8 +430,8 @@ gst_omx_video_dec_fill_buffer (GstOMXVideoDec * self, GstOMXBuffer * inbuf,
src += src_stride;
dest += dest_stride;
}
gst_video_frame_unmap (&frame);
}
gst_video_frame_unmap (&frame);
ret = TRUE;
break;
}
@ -437,17 +440,18 @@ gst_omx_video_dec_fill_buffer (GstOMXVideoDec * self, GstOMXBuffer * inbuf,
guint8 *src, *dest;
gint src_stride, dest_stride;
gst_video_frame_map (&frame, vinfo, outbuf, GST_MAP_WRITE);
for (i = 0; i < 2; i++) {
if (i == 0) {
src_stride = port_def->format.video.nStride;
dest_stride = vinfo.stride[0];
dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, i);
/* XXX: Try this if no stride was set */
if (src_stride == 0)
src_stride = dest_stride;
} else {
src_stride = port_def->format.video.nStride;
dest_stride = vinfo.stride[1];
dest_stride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, i);
/* XXX: Try this if no stride was set */
if (src_stride == 0)
@ -460,7 +464,6 @@ gst_omx_video_dec_fill_buffer (GstOMXVideoDec * self, GstOMXBuffer * inbuf,
port_def->format.video.nSliceHeight *
port_def->format.video.nStride;
gst_video_frame_map (&frame, &vinfo, outbuf, GST_MAP_WRITE);
dest = GST_VIDEO_FRAME_COMP_DATA (&frame, i);
height = GST_VIDEO_FRAME_HEIGHT (&frame);
@ -469,8 +472,8 @@ gst_omx_video_dec_fill_buffer (GstOMXVideoDec * self, GstOMXBuffer * inbuf,
src += src_stride;
dest += dest_stride;
}
gst_video_frame_unmap (&frame);
}
gst_video_frame_unmap (&frame);
ret = TRUE;
break;
}
@ -492,6 +495,8 @@ done:
OMX_TICKS_PER_SECOND);
}
gst_video_codec_state_unref (state);
return ret;
}
@ -501,7 +506,7 @@ gst_omx_video_dec_loop (GstOMXVideoDec * self)
GstOMXVideoDecClass *klass;
GstOMXPort *port = self->out_port;
GstOMXBuffer *buf = NULL;
GstVideoFrameState *frame;
GstVideoCodecFrame *frame;
GstFlowReturn flow_ret = GST_FLOW_OK;
GstOMXAcquireBufferReturn acq_return;
GstClockTimeDiff deadline;
@ -521,48 +526,52 @@ gst_omx_video_dec_loop (GstOMXVideoDec * self)
return;
}
if (!gst_pad_has_current_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (self))
if (!gst_pad_has_current_caps (GST_VIDEO_DECODER_SRC_PAD (self))
|| acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURED) {
GstVideoState *state = &GST_BASE_VIDEO_CODEC (self)->state;
GstVideoCodecState *state;
OMX_PARAM_PORTDEFINITIONTYPE port_def;
GstVideoFormat format;
GST_DEBUG_OBJECT (self, "Port settings have changed, updating caps");
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
GST_VIDEO_DECODER_STREAM_LOCK (self);
gst_omx_port_get_port_definition (port, &port_def);
g_assert (port_def.format.video.eCompressionFormat ==
OMX_VIDEO_CodingUnused);
switch (port_def.format.video.eColorFormat) {
case OMX_COLOR_FormatYUV420Planar:
state->format = GST_VIDEO_FORMAT_I420;
format = GST_VIDEO_FORMAT_I420;
break;
case OMX_COLOR_FormatYUV420SemiPlanar:
state->format = GST_VIDEO_FORMAT_NV12;
format = GST_VIDEO_FORMAT_NV12;
break;
default:
GST_ERROR_OBJECT (self, "Unsupported color format: %d",
port_def.format.video.eColorFormat);
if (buf)
gst_omx_port_release_buffer (self->out_port, buf);
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
GST_VIDEO_DECODER_STREAM_UNLOCK (self);
goto caps_failed;
break;
}
state->width = port_def.format.video.nFrameWidth;
state->height = port_def.format.video.nFrameHeight;
state = gst_video_decoder_set_output_state (GST_VIDEO_DECODER (self),
format, port_def.format.video.nFrameWidth,
port_def.format.video.nFrameHeight, self->input_state);
/* Take framerate and pixel-aspect-ratio from sinkpad caps */
if (!gst_base_video_decoder_set_src_caps (GST_BASE_VIDEO_DECODER (self))) {
if (!gst_video_decoder_negotiate (GST_VIDEO_DECODER (self))) {
if (buf)
gst_omx_port_release_buffer (self->out_port, buf);
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
gst_video_codec_state_unref (state);
GST_VIDEO_DECODER_STREAM_UNLOCK (self);
goto caps_failed;
}
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
gst_video_codec_state_unref (state);
GST_VIDEO_DECODER_STREAM_UNLOCK (self);
/* Now get a buffer */
if (acq_return != GST_OMX_ACQUIRE_BUFFER_OK)
@ -585,20 +594,18 @@ gst_omx_video_dec_loop (GstOMXVideoDec * self)
goto flushing;
}
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
GST_VIDEO_DECODER_STREAM_LOCK (self);
frame = _find_nearest_frame (self, buf);
is_eos = ! !(buf->omx_buf->nFlags & OMX_BUFFERFLAG_EOS);
if (frame
&& (deadline = gst_base_video_decoder_get_max_decode_time
(GST_BASE_VIDEO_DECODER (self), frame)) < 0) {
&& (deadline = gst_video_decoder_get_max_decode_time
(GST_VIDEO_DECODER (self), frame)) < 0) {
GST_WARNING_OBJECT (self,
"Frame is too late, dropping (deadline %" GST_TIME_FORMAT ")",
GST_TIME_ARGS (-deadline));
flow_ret =
gst_base_video_decoder_drop_frame (GST_BASE_VIDEO_DECODER (self),
frame);
flow_ret = gst_video_decoder_drop_frame (GST_VIDEO_DECODER (self), frame);
} else if (!frame && buf->omx_buf->nFilledLen > 0) {
GstBuffer *outbuf;
@ -610,8 +617,7 @@ gst_omx_video_dec_loop (GstOMXVideoDec * self)
GST_ERROR_OBJECT (self, "No corresponding frame found");
outbuf =
gst_base_video_decoder_alloc_src_buffer (GST_BASE_VIDEO_DECODER
(self));
gst_video_decoder_allocate_output_buffer (GST_VIDEO_DECODER (self));
if (!gst_omx_video_dec_fill_buffer (self, buf, outbuf)) {
gst_buffer_unref (outbuf);
@ -619,38 +625,27 @@ gst_omx_video_dec_loop (GstOMXVideoDec * self)
goto invalid_buffer;
}
flow_ret = gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (self), outbuf);
flow_ret = gst_pad_push (GST_VIDEO_DECODER_SRC_PAD (self), outbuf);
} else if (buf->omx_buf->nFilledLen > 0) {
if (GST_BASE_VIDEO_CODEC (self)->state.bytes_per_picture == 0) {
/* FIXME: If the sinkpad caps change we have currently no way
* to allocate new src buffers because basevideodecoder assumes
* that the caps on both pads are equivalent all the time
*/
GST_WARNING_OBJECT (self,
"Caps change pending and still have buffers for old caps -- dropping");
} else
if (gst_base_video_decoder_alloc_src_frame (GST_BASE_VIDEO_DECODER
(self), frame) == GST_FLOW_OK) {
if ((flow_ret = gst_video_decoder_allocate_output_frame (GST_VIDEO_DECODER
(self), frame)) == GST_FLOW_OK) {
/* FIXME: This currently happens because of a race condition too.
* We first need to reconfigure the output port and then the input
* port if both need reconfiguration.
*/
if (!gst_omx_video_dec_fill_buffer (self, buf, frame->src_buffer)) {
gst_buffer_replace (&frame->src_buffer, NULL);
if (!gst_omx_video_dec_fill_buffer (self, buf, frame->output_buffer)) {
gst_buffer_replace (&frame->output_buffer, NULL);
flow_ret =
gst_base_video_decoder_finish_frame (GST_BASE_VIDEO_DECODER
(self), frame);
gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
gst_omx_port_release_buffer (self->out_port, buf);
goto invalid_buffer;
}
flow_ret =
gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
}
flow_ret =
gst_base_video_decoder_finish_frame (GST_BASE_VIDEO_DECODER (self),
frame);
} else if (frame != NULL) {
flow_ret =
gst_base_video_decoder_finish_frame (GST_BASE_VIDEO_DECODER (self),
frame);
gst_video_decoder_finish_frame (GST_VIDEO_DECODER (self), frame);
}
if (is_eos || flow_ret == GST_FLOW_EOS) {
@ -674,14 +669,14 @@ gst_omx_video_dec_loop (GstOMXVideoDec * self)
self->downstream_flow_ret = flow_ret;
} else {
g_assert ((klass->cdata.hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER));
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
GST_VIDEO_DECODER_STREAM_LOCK (self);
flow_ret = GST_FLOW_EOS;
}
if (flow_ret != GST_FLOW_OK)
goto flow_error;
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
GST_VIDEO_DECODER_STREAM_UNLOCK (self);
return;
@ -691,9 +686,8 @@ component_error:
("OpenMAX component in error state %s (0x%08x)",
gst_omx_component_get_last_error_string (self->component),
gst_omx_component_get_last_error (self->component)));
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_event_new_eos ());
gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
self->downstream_flow_ret = GST_FLOW_ERROR;
self->started = FALSE;
return;
@ -702,7 +696,7 @@ component_error:
flushing:
{
GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
self->downstream_flow_ret = GST_FLOW_FLUSHING;
self->started = FALSE;
return;
@ -713,19 +707,19 @@ flow_error:
if (flow_ret == GST_FLOW_EOS) {
GST_DEBUG_OBJECT (self, "EOS");
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self),
gst_event_new_eos ());
gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
} else if (flow_ret == GST_FLOW_NOT_LINKED || flow_ret < GST_FLOW_EOS) {
GST_ELEMENT_ERROR (self, STREAM, FAILED, ("Internal data stream error."),
("stream stopped, reason %s", gst_flow_get_name (flow_ret)));
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self),
gst_event_new_eos ());
gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
}
self->started = FALSE;
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
GST_VIDEO_DECODER_STREAM_UNLOCK (self);
return;
}
@ -733,9 +727,8 @@ reconfigure_error:
{
GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
("Unable to reconfigure output port"));
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_event_new_eos ());
gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
self->downstream_flow_ret = GST_FLOW_ERROR;
self->started = FALSE;
return;
@ -745,21 +738,19 @@ invalid_buffer:
{
GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
("Invalid sized input buffer"));
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_event_new_eos ());
gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
self->started = FALSE;
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
GST_VIDEO_DECODER_STREAM_UNLOCK (self);
return;
}
caps_failed:
{
GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL), ("Failed to set caps"));
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_event_new_eos ());
gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
gst_pad_push_event (GST_VIDEO_DECODER_SRC_PAD (self), gst_event_new_eos ());
gst_pad_pause_task (GST_VIDEO_DECODER_SRC_PAD (self));
self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
self->started = FALSE;
return;
@ -767,7 +758,7 @@ caps_failed:
}
static gboolean
gst_omx_video_dec_start (GstBaseVideoDecoder * decoder)
gst_omx_video_dec_start (GstVideoDecoder * decoder)
{
GstOMXVideoDec *self;
gboolean ret;
@ -778,14 +769,14 @@ gst_omx_video_dec_start (GstBaseVideoDecoder * decoder)
self->eos = FALSE;
self->downstream_flow_ret = GST_FLOW_OK;
ret =
gst_pad_start_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_pad_start_task (GST_VIDEO_DECODER_SRC_PAD (self),
(GstTaskFunction) gst_omx_video_dec_loop, self, NULL);
return ret;
}
static gboolean
gst_omx_video_dec_stop (GstBaseVideoDecoder * decoder)
gst_omx_video_dec_stop (GstVideoDecoder * decoder)
{
GstOMXVideoDec *self;
@ -796,7 +787,7 @@ gst_omx_video_dec_stop (GstBaseVideoDecoder * decoder)
gst_omx_port_set_flushing (self->in_port, TRUE);
gst_omx_port_set_flushing (self->out_port, TRUE);
gst_pad_stop_task (GST_BASE_VIDEO_CODEC_SRC_PAD (decoder));
gst_pad_stop_task (GST_VIDEO_DECODER_SRC_PAD (decoder));
if (gst_omx_component_get_state (self->component, 0) > OMX_StateIdle)
gst_omx_component_set_state (self->component, OMX_StateIdle);
@ -814,6 +805,10 @@ gst_omx_video_dec_stop (GstBaseVideoDecoder * decoder)
gst_buffer_replace (&self->codec_data, NULL);
if (self->input_state)
gst_video_codec_state_unref (self->input_state);
self->input_state = NULL;
GST_DEBUG_OBJECT (self, "Stopped decoder");
return TRUE;
@ -823,7 +818,8 @@ static gboolean
gst_omx_video_dec_negotiate (GstOMXVideoDec * self)
{
GstOMXPort *port = self->out_port;
GstVideoState *state = &GST_BASE_VIDEO_CODEC (self)->state;
GstVideoCodecState *state = self->input_state;
GstVideoInfo *info = &state->info;
OMX_VIDEO_PARAM_PORTFORMATTYPE param;
OMX_ERRORTYPE err;
GstCaps *comp_supported_caps;
@ -835,10 +831,10 @@ gst_omx_video_dec_negotiate (GstOMXVideoDec * self)
const gchar *format_str;
templ_caps =
gst_caps_copy (gst_pad_get_pad_template_caps (GST_BASE_VIDEO_CODEC_SRC_PAD
gst_caps_copy (gst_pad_get_pad_template_caps (GST_VIDEO_DECODER_SRC_PAD
(self)));
peer_caps =
gst_pad_peer_query_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (self), templ_caps);
gst_pad_peer_query_caps (GST_VIDEO_DECODER_SRC_PAD (self), templ_caps);
if (peer_caps) {
intersection = peer_caps;
gst_caps_unref (templ_caps);
@ -849,10 +845,10 @@ gst_omx_video_dec_negotiate (GstOMXVideoDec * self)
GST_OMX_INIT_STRUCT (&param);
param.nPortIndex = port->index;
param.nIndex = 0;
if (state->fps_n == 0)
if (info->fps_n == 0)
param.xFramerate = 0;
else
param.xFramerate = (state->fps_n << 16) / (state->fps_d);
param.xFramerate = (info->fps_n << 16) / (info->fps_d);
old_index = -1;
comp_supported_caps = gst_caps_new_empty ();
@ -895,7 +891,6 @@ gst_omx_video_dec_negotiate (GstOMXVideoDec * self)
intersection = tmp;
}
if (gst_caps_is_empty (intersection)) {
gst_caps_unref (intersection);
GST_ERROR_OBJECT (self, "Empty caps");
@ -942,11 +937,12 @@ gst_omx_video_dec_negotiate (GstOMXVideoDec * self)
}
static gboolean
gst_omx_video_dec_set_format (GstBaseVideoDecoder * decoder,
GstVideoState * state)
gst_omx_video_dec_set_format (GstVideoDecoder * decoder,
GstVideoCodecState * state)
{
GstOMXVideoDec *self;
GstOMXVideoDecClass *klass;
GstVideoInfo *info = &state->info;
gboolean is_format_change = FALSE;
gboolean needs_disable = FALSE;
OMX_PARAM_PORTDEFINITIONTYPE port_def;
@ -961,12 +957,12 @@ gst_omx_video_dec_set_format (GstBaseVideoDecoder * decoder,
/* Check if the caps change is a real format change or if only irrelevant
* parts of the caps have changed or nothing at all.
*/
is_format_change |= port_def.format.video.nFrameWidth != state->width;
is_format_change |= port_def.format.video.nFrameHeight != state->height;
is_format_change |= port_def.format.video.nFrameWidth != info->width;
is_format_change |= port_def.format.video.nFrameHeight != info->height;
is_format_change |= (port_def.format.video.xFramerate == 0
&& state->fps_n != 0)
&& info->fps_n != 0)
|| (port_def.format.video.xFramerate !=
(state->fps_n << 16) / (state->fps_d));
(info->fps_n << 16) / (info->fps_d));
is_format_change |= (self->codec_data != state->codec_data);
if (klass->is_format_change)
is_format_change |= klass->is_format_change (self, self->in_port, state);
@ -981,18 +977,21 @@ gst_omx_video_dec_set_format (GstBaseVideoDecoder * decoder,
if (needs_disable && !is_format_change) {
GST_DEBUG_OBJECT (self,
"Already running and caps did not change the format");
if (self->input_state)
gst_video_codec_state_unref (self->input_state);
self->input_state = gst_video_codec_state_ref (state);
return TRUE;
}
if (needs_disable && is_format_change) {
gst_omx_video_dec_drain (self);
gst_omx_video_dec_drain (self, FALSE);
if (klass->cdata.hacks & GST_OMX_HACK_NO_COMPONENT_RECONFIGURE) {
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
gst_omx_video_dec_stop (GST_BASE_VIDEO_DECODER (self));
GST_VIDEO_DECODER_STREAM_UNLOCK (self);
gst_omx_video_dec_stop (GST_VIDEO_DECODER (self));
gst_omx_video_dec_close (self);
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
GST_VIDEO_DECODER_STREAM_LOCK (self);
if (!gst_omx_video_dec_open (self))
return FALSE;
@ -1005,14 +1004,17 @@ gst_omx_video_dec_set_format (GstBaseVideoDecoder * decoder,
if (gst_omx_port_set_enabled (self->in_port, FALSE) != OMX_ErrorNone)
return FALSE;
}
if (self->input_state)
gst_video_codec_state_unref (self->input_state);
self->input_state = NULL;
}
port_def.format.video.nFrameWidth = state->width;
port_def.format.video.nFrameHeight = state->height;
if (state->fps_n == 0)
port_def.format.video.nFrameWidth = info->width;
port_def.format.video.nFrameHeight = info->height;
if (info->fps_n == 0)
port_def.format.video.xFramerate = 0;
else
port_def.format.video.xFramerate = (state->fps_n << 16) / (state->fps_d);
port_def.format.video.xFramerate = (info->fps_n << 16) / (info->fps_d);
if (!gst_omx_port_update_port_definition (self->in_port, &port_def))
return FALSE;
@ -1027,6 +1029,7 @@ gst_omx_video_dec_set_format (GstBaseVideoDecoder * decoder,
}
gst_buffer_replace (&self->codec_data, state->codec_data);
self->input_state = gst_video_codec_state_ref (state);
if (!gst_omx_video_dec_negotiate (self))
return FALSE;
@ -1073,33 +1076,33 @@ gst_omx_video_dec_set_format (GstBaseVideoDecoder * decoder,
/* Start the srcpad loop again */
self->downstream_flow_ret = GST_FLOW_OK;
gst_pad_start_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_pad_start_task (GST_VIDEO_DECODER_SRC_PAD (self),
(GstTaskFunction) gst_omx_video_dec_loop, decoder, NULL);
return TRUE;
}
static gboolean
gst_omx_video_dec_reset (GstBaseVideoDecoder * decoder)
gst_omx_video_dec_reset (GstVideoDecoder * decoder, gboolean hard)
{
GstOMXVideoDec *self;
self = GST_OMX_VIDEO_DEC (decoder);
GST_DEBUG_OBJECT (self, "Resetting decoder");
/* FIXME: Handle different values of hard */
gst_omx_video_dec_drain (self);
GST_DEBUG_OBJECT (self, "Resetting decoder");
gst_omx_port_set_flushing (self->in_port, TRUE);
gst_omx_port_set_flushing (self->out_port, TRUE);
/* Wait until the srcpad loop is finished,
* unlock GST_BASE_VIDEO_CODEC_STREAM_LOCK to prevent deadlocks
* unlock GST_VIDEO_DECODER_STREAM_LOCK to prevent deadlocks
* caused by using this lock from inside the loop function */
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
GST_PAD_STREAM_LOCK (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
GST_PAD_STREAM_UNLOCK (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
GST_VIDEO_DECODER_STREAM_UNLOCK (self);
GST_PAD_STREAM_LOCK (GST_VIDEO_DECODER_SRC_PAD (self));
GST_PAD_STREAM_UNLOCK (GST_VIDEO_DECODER_SRC_PAD (self));
GST_VIDEO_DECODER_STREAM_LOCK (self);
gst_omx_port_set_flushing (self->in_port, FALSE);
gst_omx_port_set_flushing (self->out_port, FALSE);
@ -1108,7 +1111,7 @@ gst_omx_video_dec_reset (GstBaseVideoDecoder * decoder)
self->last_upstream_ts = 0;
self->eos = FALSE;
self->downstream_flow_ret = GST_FLOW_OK;
gst_pad_start_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_pad_start_task (GST_VIDEO_DECODER_SRC_PAD (self),
(GstTaskFunction) gst_omx_video_dec_loop, decoder, NULL);
GST_DEBUG_OBJECT (self, "Reset decoder");
@ -1117,14 +1120,8 @@ gst_omx_video_dec_reset (GstBaseVideoDecoder * decoder)
}
static GstFlowReturn
gst_omx_video_dec_parse_data (GstBaseVideoDecoder * decoder, gboolean at_eos)
{
return GST_FLOW_OK;
}
static GstFlowReturn
gst_omx_video_dec_handle_frame (GstBaseVideoDecoder * decoder,
GstVideoFrameState * frame)
gst_omx_video_dec_handle_frame (GstVideoDecoder * decoder,
GstVideoCodecFrame * frame)
{
GstOMXAcquireBufferReturn acq_ret = GST_OMX_ACQUIRE_BUFFER_ERROR;
GstOMXVideoDec *self;
@ -1144,8 +1141,8 @@ gst_omx_video_dec_handle_frame (GstBaseVideoDecoder * decoder,
return GST_FLOW_EOS;
}
timestamp = frame->presentation_timestamp;
duration = frame->presentation_duration;
timestamp = frame->pts;
duration = frame->duration;
if (self->downstream_flow_ret != GST_FLOW_OK) {
GST_ERROR_OBJECT (self, "Downstream returned %s",
@ -1165,13 +1162,13 @@ gst_omx_video_dec_handle_frame (GstBaseVideoDecoder * decoder,
}
}
while (offset < gst_buffer_get_size (frame->sink_buffer)) {
while (offset < gst_buffer_get_size (frame->input_buffer)) {
/* Make sure to release the base class stream lock, otherwise
* _loop() can't call _finish_frame() and we might block forever
* because no input buffers are released */
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
GST_VIDEO_DECODER_STREAM_UNLOCK (self);
acq_ret = gst_omx_port_acquire_buffer (self->in_port, &buf);
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
GST_VIDEO_DECODER_STREAM_LOCK (self);
if (acq_ret == GST_OMX_ACQUIRE_BUFFER_ERROR) {
goto component_error;
@ -1237,7 +1234,7 @@ gst_omx_video_dec_handle_frame (GstBaseVideoDecoder * decoder,
if (offset != 0 && duration != GST_CLOCK_TIME_NONE) {
timestamp_offset =
gst_util_uint64_scale (offset, duration,
gst_buffer_get_size (frame->sink_buffer));
gst_buffer_get_size (frame->input_buffer));
}
if (timestamp != GST_CLOCK_TIME_NONE) {
@ -1249,21 +1246,19 @@ gst_omx_video_dec_handle_frame (GstBaseVideoDecoder * decoder,
if (duration != GST_CLOCK_TIME_NONE) {
buf->omx_buf->nTickCount =
gst_util_uint64_scale (buf->omx_buf->nFilledLen, duration,
gst_buffer_get_size (frame->sink_buffer));
gst_buffer_get_size (frame->input_buffer));
self->last_upstream_ts += duration;
}
if (offset == 0) {
BufferIdentification *id = g_slice_new0 (BufferIdentification);
if (!GST_BUFFER_FLAG_IS_SET (frame->sink_buffer,
GST_BUFFER_FLAG_DELTA_UNIT))
if (GST_VIDEO_CODEC_FRAME_IS_SYNC_POINT (frame))
buf->omx_buf->nFlags |= OMX_BUFFERFLAG_SYNCFRAME;
id->timestamp = buf->omx_buf->nTimeStamp;
frame->coder_hook = id;
frame->coder_hook_destroy_notify =
(GDestroyNotify) buffer_identification_free;
gst_video_codec_frame_set_user_data (frame, id,
(GDestroyNotify) buffer_identification_free);
}
/* TODO: Set flags
@ -1319,66 +1314,17 @@ reconfigure_error:
}
static GstFlowReturn
gst_omx_video_dec_finish (GstBaseVideoDecoder * decoder)
gst_omx_video_dec_finish (GstVideoDecoder * decoder)
{
GstOMXVideoDec *self;
GstOMXVideoDecClass *klass;
GstOMXBuffer *buf;
GstOMXAcquireBufferReturn acq_ret;
self = GST_OMX_VIDEO_DEC (decoder);
klass = GST_OMX_VIDEO_DEC_GET_CLASS (self);
GST_DEBUG_OBJECT (self, "Sending EOS to the component");
/* Don't send EOS buffer twice, this doesn't work */
if (self->eos) {
GST_DEBUG_OBJECT (self, "Component is already EOS");
return GST_BASE_VIDEO_DECODER_FLOW_DROPPED;
}
self->eos = TRUE;
if ((klass->cdata.hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)) {
GST_WARNING_OBJECT (self, "Component does not support empty EOS buffers");
/* Insert a NULL into the queue to signal EOS */
gst_omx_rec_mutex_lock (&self->out_port->port_lock);
g_queue_push_tail (self->out_port->pending_buffers, NULL);
g_cond_broadcast (self->out_port->port_cond);
gst_omx_rec_mutex_unlock (&self->out_port->port_lock);
return GST_BASE_VIDEO_DECODER_FLOW_DROPPED;
}
/* Make sure to release the base class stream lock, otherwise
* _loop() can't call _finish_frame() and we might block forever
* because no input buffers are released */
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
/* Send an EOS buffer to the component and let the base
* class drop the EOS event. We will send it later when
* the EOS buffer arrives on the output port. */
acq_ret = gst_omx_port_acquire_buffer (self->in_port, &buf);
if (acq_ret == GST_OMX_ACQUIRE_BUFFER_OK) {
buf->omx_buf->nFilledLen = 0;
buf->omx_buf->nTimeStamp =
gst_util_uint64_scale (self->last_upstream_ts, OMX_TICKS_PER_SECOND,
GST_SECOND);
buf->omx_buf->nTickCount = 0;
buf->omx_buf->nFlags |= OMX_BUFFERFLAG_EOS;
gst_omx_port_release_buffer (self->in_port, buf);
GST_DEBUG_OBJECT (self, "Sent EOS to the component");
} else {
GST_ERROR_OBJECT (self, "Failed to acquire buffer for EOS: %d", acq_ret);
}
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
return GST_BASE_VIDEO_DECODER_FLOW_DROPPED;
return gst_omx_video_dec_drain (self, TRUE);
}
static GstFlowReturn
gst_omx_video_dec_drain (GstOMXVideoDec * self)
gst_omx_video_dec_drain (GstOMXVideoDec * self, gboolean is_eos)
{
GstOMXVideoDecClass *klass;
GstOMXBuffer *buf;
@ -1399,6 +1345,8 @@ gst_omx_video_dec_drain (GstOMXVideoDec * self)
GST_DEBUG_OBJECT (self, "Component is EOS already");
return GST_FLOW_OK;
}
if (is_eos)
self->eos = TRUE;
if ((klass->cdata.hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)) {
GST_WARNING_OBJECT (self, "Component does not support empty EOS buffers");
@ -1408,14 +1356,14 @@ gst_omx_video_dec_drain (GstOMXVideoDec * self)
/* Make sure to release the base class stream lock, otherwise
* _loop() can't call _finish_frame() and we might block forever
* because no input buffers are released */
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
GST_VIDEO_DECODER_STREAM_UNLOCK (self);
/* Send an EOS buffer to the component and let the base
* class drop the EOS event. We will send it later when
* the EOS buffer arrives on the output port. */
acq_ret = gst_omx_port_acquire_buffer (self->in_port, &buf);
if (acq_ret != GST_OMX_ACQUIRE_BUFFER_OK) {
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
GST_VIDEO_DECODER_STREAM_LOCK (self);
GST_ERROR_OBJECT (self, "Failed to acquire buffer for draining: %d",
acq_ret);
return GST_FLOW_ERROR;
@ -1446,7 +1394,7 @@ gst_omx_video_dec_drain (GstOMXVideoDec * self)
}
g_mutex_unlock (self->drain_lock);
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
GST_VIDEO_DECODER_STREAM_LOCK (self);
self->started = FALSE;

View file

@ -22,7 +22,7 @@
#define __GST_OMX_VIDEO_DEC_H__
#include <gst/gst.h>
#include "gstbasevideodecoder.h"
#include <gst/video/gstvideodecoder.h>
#include "gstomx.h"
@ -46,7 +46,7 @@ typedef struct _GstOMXVideoDecClass GstOMXVideoDecClass;
struct _GstOMXVideoDec
{
GstBaseVideoDecoder parent;
GstVideoDecoder parent;
/* < protected > */
GstOMXCore *core;
@ -54,6 +54,7 @@ struct _GstOMXVideoDec
GstOMXPort *in_port, *out_port;
/* < private > */
GstVideoCodecState *input_state;
GstBuffer *codec_data;
/* TRUE if the component is configured and saw
* the first buffer */
@ -75,13 +76,13 @@ struct _GstOMXVideoDec
struct _GstOMXVideoDecClass
{
GstBaseVideoDecoderClass parent_class;
GstVideoDecoderClass parent_class;
GstOMXClassData cdata;
gboolean (*is_format_change) (GstOMXVideoDec * self, GstOMXPort * port, GstVideoState * state);
gboolean (*set_format) (GstOMXVideoDec * self, GstOMXPort * port, GstVideoState * state);
GstFlowReturn (*prepare_frame) (GstOMXVideoDec * self, GstVideoFrameState *frame);
gboolean (*is_format_change) (GstOMXVideoDec * self, GstOMXPort * port, GstVideoCodecState * state);
gboolean (*set_format) (GstOMXVideoDec * self, GstOMXPort * port, GstVideoCodecState * state);
GstFlowReturn (*prepare_frame) (GstOMXVideoDec * self, GstVideoCodecFrame *frame);
};
GType gst_omx_video_dec_get_type (void);

View file

@ -78,19 +78,21 @@ static GstStateChangeReturn
gst_omx_video_enc_change_state (GstElement * element,
GstStateChange transition);
static gboolean gst_omx_video_enc_start (GstBaseVideoEncoder * encoder);
static gboolean gst_omx_video_enc_stop (GstBaseVideoEncoder * encoder);
static gboolean gst_omx_video_enc_set_format (GstBaseVideoEncoder * encoder,
GstVideoInfo * info);
static gboolean gst_omx_video_enc_reset (GstBaseVideoEncoder * encoder);
static GstFlowReturn gst_omx_video_enc_handle_frame (GstBaseVideoEncoder *
encoder, GstVideoFrameState * frame);
static gboolean gst_omx_video_enc_finish (GstBaseVideoEncoder * encoder);
static gboolean gst_omx_video_enc_start (GstVideoEncoder * encoder);
static gboolean gst_omx_video_enc_stop (GstVideoEncoder * encoder);
static gboolean gst_omx_video_enc_set_format (GstVideoEncoder * encoder,
GstVideoCodecState * state);
static gboolean gst_omx_video_enc_reset (GstVideoEncoder * encoder,
gboolean hard);
static GstFlowReturn gst_omx_video_enc_handle_frame (GstVideoEncoder * encoder,
GstVideoCodecFrame * frame);
static gboolean gst_omx_video_enc_finish (GstVideoEncoder * encoder);
static GstFlowReturn gst_omx_video_enc_drain (GstOMXVideoEnc * self);
static GstFlowReturn gst_omx_video_enc_drain (GstOMXVideoEnc * self,
gboolean at_eos);
static GstFlowReturn gst_omx_video_enc_handle_output_frame (GstOMXVideoEnc *
self, GstOMXPort * port, GstOMXBuffer * buf, GstVideoFrameState * frame);
self, GstOMXPort * port, GstOMXBuffer * buf, GstVideoCodecFrame * frame);
enum
{
@ -116,15 +118,14 @@ enum
"debug category for gst-omx video encoder base class");
G_DEFINE_ABSTRACT_TYPE_WITH_CODE (GstOMXVideoEnc, gst_omx_video_enc,
GST_TYPE_BASE_VIDEO_ENCODER, DEBUG_INIT);
GST_TYPE_VIDEO_ENCODER, DEBUG_INIT);
static void
gst_omx_video_enc_class_init (GstOMXVideoEncClass * klass)
{
GObjectClass *gobject_class = G_OBJECT_CLASS (klass);
GstElementClass *element_class = GST_ELEMENT_CLASS (klass);
GstBaseVideoEncoderClass *base_video_encoder_class =
GST_BASE_VIDEO_ENCODER_CLASS (klass);
GstVideoEncoderClass *video_encoder_class = GST_VIDEO_ENCODER_CLASS (klass);
gobject_class->finalize = gst_omx_video_enc_finalize;
@ -170,15 +171,14 @@ gst_omx_video_enc_class_init (GstOMXVideoEncClass * klass)
element_class->change_state =
GST_DEBUG_FUNCPTR (gst_omx_video_enc_change_state);
base_video_encoder_class->start = GST_DEBUG_FUNCPTR (gst_omx_video_enc_start);
base_video_encoder_class->stop = GST_DEBUG_FUNCPTR (gst_omx_video_enc_stop);
base_video_encoder_class->reset = GST_DEBUG_FUNCPTR (gst_omx_video_enc_reset);
base_video_encoder_class->set_format =
video_encoder_class->start = GST_DEBUG_FUNCPTR (gst_omx_video_enc_start);
video_encoder_class->stop = GST_DEBUG_FUNCPTR (gst_omx_video_enc_stop);
video_encoder_class->reset = GST_DEBUG_FUNCPTR (gst_omx_video_enc_reset);
video_encoder_class->set_format =
GST_DEBUG_FUNCPTR (gst_omx_video_enc_set_format);
base_video_encoder_class->handle_frame =
video_encoder_class->handle_frame =
GST_DEBUG_FUNCPTR (gst_omx_video_enc_handle_frame);
base_video_encoder_class->finish =
GST_DEBUG_FUNCPTR (gst_omx_video_enc_finish);
video_encoder_class->finish = GST_DEBUG_FUNCPTR (gst_omx_video_enc_finish);
klass->cdata.default_sink_template_caps = "video/x-raw, "
"width = " GST_VIDEO_SIZE_RANGE ", "
@ -512,19 +512,22 @@ gst_omx_video_enc_change_state (GstElement * element, GstStateChange transition)
#define MAX_FRAME_DIST_TICKS (5 * OMX_TICKS_PER_SECOND)
#define MAX_FRAME_DIST_FRAMES (100)
static GstVideoFrameState *
static GstVideoCodecFrame *
_find_nearest_frame (GstOMXVideoEnc * self, GstOMXBuffer * buf)
{
GList *l, *best_l = NULL;
GList *finish_frames = NULL;
GstVideoFrameState *best = NULL;
GstVideoCodecFrame *best = NULL;
guint64 best_timestamp = 0;
guint64 best_diff = G_MAXUINT64;
BufferIdentification *best_id = NULL;
GList *frames;
for (l = GST_BASE_VIDEO_CODEC (self)->frames; l; l = l->next) {
GstVideoFrameState *tmp = l->data;
BufferIdentification *id = tmp->coder_hook;
frames = gst_video_encoder_get_frames (GST_VIDEO_ENCODER (self));
for (l = frames; l; l = l->next) {
GstVideoCodecFrame *tmp = l->data;
BufferIdentification *id = gst_video_codec_frame_get_user_data (tmp);
guint64 timestamp, diff;
/* This happens for frames that were just added but
@ -555,9 +558,9 @@ _find_nearest_frame (GstOMXVideoEnc * self, GstOMXBuffer * buf)
}
if (best_id) {
for (l = GST_BASE_VIDEO_CODEC (self)->frames; l && l != best_l; l = l->next) {
GstVideoFrameState *tmp = l->data;
BufferIdentification *id = tmp->coder_hook;
for (l = frames; l && l != best_l; l = l->next) {
GstVideoCodecFrame *tmp = l->data;
BufferIdentification *id = gst_video_codec_frame_get_user_data (tmp);
guint64 diff_ticks, diff_frames;
if (id->timestamp > best_timestamp)
@ -579,30 +582,34 @@ _find_nearest_frame (GstOMXVideoEnc * self, GstOMXBuffer * buf)
if (finish_frames) {
g_warning ("Too old frames, bug in encoder -- please file a bug");
for (l = finish_frames; l; l = l->next) {
gst_base_video_encoder_finish_frame (GST_BASE_VIDEO_ENCODER (self),
l->data);
gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (self), l->data);
}
}
if (best)
gst_video_codec_frame_ref (best);
g_list_foreach (frames, (GFunc) gst_video_codec_frame_unref, NULL);
g_list_free (frames);
return best;
}
static GstFlowReturn
gst_omx_video_enc_handle_output_frame (GstOMXVideoEnc * self, GstOMXPort * port,
GstOMXBuffer * buf, GstVideoFrameState * frame)
GstOMXBuffer * buf, GstVideoCodecFrame * frame)
{
GstOMXVideoEncClass *klass = GST_OMX_VIDEO_ENC_GET_CLASS (self);
GstFlowReturn flow_ret = GST_FLOW_OK;
if ((buf->omx_buf->nFlags & OMX_BUFFERFLAG_CODECCONFIG)
&& buf->omx_buf->nFilledLen > 0) {
GstCaps *caps;
GstVideoCodecState *state;
GstBuffer *codec_data;
GstMapInfo map = GST_MAP_INFO_INIT;
GstCaps *caps;
caps =
gst_caps_copy (gst_pad_get_current_caps (GST_BASE_VIDEO_CODEC_SRC_PAD
(self)));
caps = klass->get_caps (self, self->out_port, self->input_state);
codec_data = gst_buffer_new_and_alloc (buf->omx_buf->nFilledLen);
gst_buffer_map (codec_data, &map, GST_MAP_WRITE);
@ -610,13 +617,12 @@ gst_omx_video_enc_handle_output_frame (GstOMXVideoEnc * self, GstOMXPort * port,
buf->omx_buf->pBuffer + buf->omx_buf->nOffset,
buf->omx_buf->nFilledLen);
gst_buffer_unmap (codec_data, &map);
gst_caps_set_simple (caps, "codec_data", GST_TYPE_BUFFER, codec_data, NULL);
if (!gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (self), caps)) {
gst_caps_unref (caps);
state =
gst_video_encoder_set_output_state (GST_VIDEO_ENCODER (self), caps,
self->input_state);
state->codec_data = codec_data;
if (!gst_video_encoder_negotiate (GST_VIDEO_ENCODER (self)))
return GST_FLOW_NOT_NEGOTIATED;
}
gst_caps_unref (caps);
flow_ret = GST_FLOW_OK;
} else if (buf->omx_buf->nFilledLen > 0) {
GstBuffer *outbuf;
@ -645,29 +651,26 @@ gst_omx_video_enc_handle_output_frame (GstOMXVideoEnc * self, GstOMXPort * port,
if ((klass->cdata.hacks & GST_OMX_HACK_SYNCFRAME_FLAG_NOT_USED)
|| (buf->omx_buf->nFlags & OMX_BUFFERFLAG_SYNCFRAME)) {
if (frame)
frame->is_sync_point = TRUE;
GST_VIDEO_CODEC_FRAME_SET_SYNC_POINT (frame);
else
GST_BUFFER_FLAG_UNSET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
} else {
if (frame)
frame->is_sync_point = FALSE;
GST_VIDEO_CODEC_FRAME_UNSET_SYNC_POINT (frame);
else
GST_BUFFER_FLAG_SET (outbuf, GST_BUFFER_FLAG_DELTA_UNIT);
}
if (frame) {
frame->src_buffer = outbuf;
frame->output_buffer = outbuf;
flow_ret =
gst_base_video_encoder_finish_frame (GST_BASE_VIDEO_ENCODER (self),
frame);
gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (self), frame);
} else {
GST_ERROR_OBJECT (self, "No corresponding frame found");
flow_ret = gst_pad_push (GST_BASE_VIDEO_CODEC_SRC_PAD (self), outbuf);
flow_ret = gst_pad_push (GST_VIDEO_ENCODER_SRC_PAD (self), outbuf);
}
} else if (frame != NULL) {
flow_ret =
gst_base_video_encoder_finish_frame (GST_BASE_VIDEO_ENCODER (self),
frame);
flow_ret = gst_video_encoder_finish_frame (GST_VIDEO_ENCODER (self), frame);
}
return flow_ret;
@ -679,7 +682,7 @@ gst_omx_video_enc_loop (GstOMXVideoEnc * self)
GstOMXVideoEncClass *klass;
GstOMXPort *port = self->out_port;
GstOMXBuffer *buf = NULL;
GstVideoFrameState *frame;
GstVideoCodecFrame *frame;
GstFlowReturn flow_ret = GST_FLOW_OK;
GstOMXAcquireBufferReturn acq_return;
gboolean is_eos;
@ -698,33 +701,35 @@ gst_omx_video_enc_loop (GstOMXVideoEnc * self)
return;
}
if (!gst_pad_has_current_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (self))
if (!gst_pad_has_current_caps (GST_VIDEO_ENCODER_SRC_PAD (self))
|| acq_return == GST_OMX_ACQUIRE_BUFFER_RECONFIGURED) {
GstVideoState *state = &GST_BASE_VIDEO_CODEC (self)->state;
GstCaps *caps;
GstVideoCodecState *state;
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
GST_VIDEO_ENCODER_STREAM_LOCK (self);
GST_DEBUG_OBJECT (self, "Port settings have changed, updating caps");
caps = klass->get_caps (self, self->out_port, state);
caps = klass->get_caps (self, self->out_port, self->input_state);
if (!caps) {
if (buf)
gst_omx_port_release_buffer (self->out_port, buf);
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
GST_VIDEO_ENCODER_STREAM_UNLOCK (self);
goto caps_failed;
}
state =
gst_video_encoder_set_output_state (GST_VIDEO_ENCODER (self), caps,
self->input_state);
gst_video_codec_state_unref (state);
if (!gst_pad_set_caps (GST_BASE_VIDEO_CODEC_SRC_PAD (self), caps)) {
gst_caps_unref (caps);
if (!gst_video_encoder_negotiate (GST_VIDEO_ENCODER (self))) {
if (buf)
gst_omx_port_release_buffer (self->out_port, buf);
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
GST_VIDEO_ENCODER_STREAM_UNLOCK (self);
goto caps_failed;
}
gst_caps_unref (caps);
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
GST_VIDEO_ENCODER_STREAM_UNLOCK (self);
/* Now get a buffer */
if (acq_return != GST_OMX_ACQUIRE_BUFFER_OK)
@ -747,7 +752,7 @@ gst_omx_video_enc_loop (GstOMXVideoEnc * self)
goto flushing;
}
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
GST_VIDEO_ENCODER_STREAM_LOCK (self);
frame = _find_nearest_frame (self, buf);
is_eos = ! !(buf->omx_buf->nFlags & OMX_BUFFERFLAG_EOS);
@ -777,14 +782,14 @@ gst_omx_video_enc_loop (GstOMXVideoEnc * self)
} else {
g_assert ((klass->cdata.hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER));
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
GST_VIDEO_ENCODER_STREAM_LOCK (self);
flow_ret = GST_FLOW_EOS;
}
if (flow_ret != GST_FLOW_OK)
goto flow_error;
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
GST_VIDEO_ENCODER_STREAM_UNLOCK (self);
return;
@ -794,9 +799,8 @@ component_error:
("OpenMAX component in error state %s (0x%08x)",
gst_omx_component_get_last_error_string (self->component),
gst_omx_component_get_last_error (self->component)));
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_event_new_eos ());
gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
gst_pad_push_event (GST_VIDEO_ENCODER_SRC_PAD (self), gst_event_new_eos ());
gst_pad_pause_task (GST_VIDEO_ENCODER_SRC_PAD (self));
self->downstream_flow_ret = GST_FLOW_ERROR;
self->started = FALSE;
return;
@ -804,7 +808,7 @@ component_error:
flushing:
{
GST_DEBUG_OBJECT (self, "Flushing -- stopping task");
gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
gst_pad_pause_task (GST_VIDEO_ENCODER_SRC_PAD (self));
self->downstream_flow_ret = GST_FLOW_FLUSHING;
self->started = FALSE;
return;
@ -814,28 +818,27 @@ flow_error:
if (flow_ret == GST_FLOW_EOS) {
GST_DEBUG_OBJECT (self, "EOS");
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_pad_push_event (GST_VIDEO_ENCODER_SRC_PAD (self),
gst_event_new_eos ());
gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
gst_pad_pause_task (GST_VIDEO_ENCODER_SRC_PAD (self));
} else if (flow_ret == GST_FLOW_NOT_LINKED || flow_ret < GST_FLOW_EOS) {
GST_ELEMENT_ERROR (self, STREAM, FAILED, ("Internal data stream error."),
("stream stopped, reason %s", gst_flow_get_name (flow_ret)));
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_pad_push_event (GST_VIDEO_ENCODER_SRC_PAD (self),
gst_event_new_eos ());
gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
gst_pad_pause_task (GST_VIDEO_ENCODER_SRC_PAD (self));
}
self->started = FALSE;
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
GST_VIDEO_ENCODER_STREAM_UNLOCK (self);
return;
}
reconfigure_error:
{
GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL),
("Unable to reconfigure output port"));
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_event_new_eos ());
gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
gst_pad_push_event (GST_VIDEO_ENCODER_SRC_PAD (self), gst_event_new_eos ());
gst_pad_pause_task (GST_VIDEO_ENCODER_SRC_PAD (self));
self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
self->started = FALSE;
return;
@ -843,9 +846,8 @@ reconfigure_error:
caps_failed:
{
GST_ELEMENT_ERROR (self, LIBRARY, SETTINGS, (NULL), ("Failed to set caps"));
gst_pad_push_event (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_event_new_eos ());
gst_pad_pause_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
gst_pad_push_event (GST_VIDEO_ENCODER_SRC_PAD (self), gst_event_new_eos ());
gst_pad_pause_task (GST_VIDEO_ENCODER_SRC_PAD (self));
self->downstream_flow_ret = GST_FLOW_NOT_NEGOTIATED;
self->started = FALSE;
return;
@ -853,7 +855,7 @@ caps_failed:
}
static gboolean
gst_omx_video_enc_start (GstBaseVideoEncoder * encoder)
gst_omx_video_enc_start (GstVideoEncoder * encoder)
{
GstOMXVideoEnc *self;
gboolean ret;
@ -864,14 +866,14 @@ gst_omx_video_enc_start (GstBaseVideoEncoder * encoder)
self->eos = FALSE;
self->downstream_flow_ret = GST_FLOW_OK;
ret =
gst_pad_start_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_pad_start_task (GST_VIDEO_ENCODER_SRC_PAD (self),
(GstTaskFunction) gst_omx_video_enc_loop, self, NULL);
return ret;
}
static gboolean
gst_omx_video_enc_stop (GstBaseVideoEncoder * encoder)
gst_omx_video_enc_stop (GstVideoEncoder * encoder)
{
GstOMXVideoEnc *self;
@ -882,7 +884,7 @@ gst_omx_video_enc_stop (GstBaseVideoEncoder * encoder)
gst_omx_port_set_flushing (self->in_port, TRUE);
gst_omx_port_set_flushing (self->out_port, TRUE);
gst_pad_stop_task (GST_BASE_VIDEO_CODEC_SRC_PAD (encoder));
gst_pad_stop_task (GST_VIDEO_ENCODER_SRC_PAD (encoder));
if (gst_omx_component_get_state (self->component, 0) > OMX_StateIdle)
gst_omx_component_set_state (self->component, OMX_StateIdle);
@ -891,6 +893,10 @@ gst_omx_video_enc_stop (GstBaseVideoEncoder * encoder)
self->started = FALSE;
self->eos = FALSE;
if (self->input_state)
gst_video_codec_state_unref (self->input_state);
self->input_state = NULL;
g_mutex_lock (self->drain_lock);
self->draining = FALSE;
g_cond_broadcast (self->drain_cond);
@ -902,13 +908,14 @@ gst_omx_video_enc_stop (GstBaseVideoEncoder * encoder)
}
static gboolean
gst_omx_video_enc_set_format (GstBaseVideoEncoder * encoder,
GstVideoInfo * info)
gst_omx_video_enc_set_format (GstVideoEncoder * encoder,
GstVideoCodecState * state)
{
GstOMXVideoEnc *self;
GstOMXVideoEncClass *klass;
gboolean needs_disable = FALSE;
OMX_PARAM_PORTDEFINITIONTYPE port_def;
GstVideoInfo *info = &state->info;
self = GST_OMX_VIDEO_ENC (encoder);
klass = GST_OMX_VIDEO_ENC_GET_CLASS (encoder);
@ -926,7 +933,7 @@ gst_omx_video_enc_set_format (GstBaseVideoEncoder * encoder,
* format change happened we can just exit here.
*/
if (needs_disable) {
gst_omx_video_enc_drain (self);
gst_omx_video_enc_drain (self, FALSE);
if (gst_omx_port_manual_reconfigure (self->in_port, TRUE) != OMX_ErrorNone)
return FALSE;
@ -964,7 +971,7 @@ gst_omx_video_enc_set_format (GstBaseVideoEncoder * encoder,
return FALSE;
if (klass->set_format) {
if (!klass->set_format (self, self->in_port, info)) {
if (!klass->set_format (self, self->in_port, state)) {
GST_ERROR_OBJECT (self, "Subclass failed to set the new format");
return FALSE;
}
@ -1010,16 +1017,20 @@ gst_omx_video_enc_set_format (GstBaseVideoEncoder * encoder,
return FALSE;
}
if (self->input_state)
gst_video_codec_state_unref (self->input_state);
self->input_state = gst_video_codec_state_ref (self->input_state);
/* Start the srcpad loop again */
self->downstream_flow_ret = GST_FLOW_OK;
gst_pad_start_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_pad_start_task (GST_VIDEO_ENCODER_SRC_PAD (self),
(GstTaskFunction) gst_omx_video_enc_loop, encoder, NULL);
return TRUE;
}
static gboolean
gst_omx_video_enc_reset (GstBaseVideoEncoder * encoder)
gst_omx_video_enc_reset (GstVideoEncoder * encoder, gboolean hard)
{
GstOMXVideoEnc *self;
@ -1027,18 +1038,16 @@ gst_omx_video_enc_reset (GstBaseVideoEncoder * encoder)
GST_DEBUG_OBJECT (self, "Resetting encoder");
gst_omx_video_enc_drain (self);
gst_omx_port_set_flushing (self->in_port, TRUE);
gst_omx_port_set_flushing (self->out_port, TRUE);
/* Wait until the srcpad loop is finished,
* unlock GST_BASE_VIDEO_CODEC_STREAM_LOCK to prevent deadlocks
* unlock GST_VIDEO_ENCODER_STREAM_LOCK to prevent deadlocks
* caused by using this lock from inside the loop function */
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
GST_PAD_STREAM_LOCK (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
GST_PAD_STREAM_UNLOCK (GST_BASE_VIDEO_CODEC_SRC_PAD (self));
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
GST_VIDEO_ENCODER_STREAM_UNLOCK (self);
GST_PAD_STREAM_LOCK (GST_VIDEO_ENCODER_SRC_PAD (self));
GST_PAD_STREAM_UNLOCK (GST_VIDEO_ENCODER_SRC_PAD (self));
GST_VIDEO_ENCODER_STREAM_LOCK (self);
gst_omx_port_set_flushing (self->in_port, FALSE);
gst_omx_port_set_flushing (self->out_port, FALSE);
@ -1047,7 +1056,7 @@ gst_omx_video_enc_reset (GstBaseVideoEncoder * encoder)
self->last_upstream_ts = 0;
self->eos = FALSE;
self->downstream_flow_ret = GST_FLOW_OK;
gst_pad_start_task (GST_BASE_VIDEO_CODEC_SRC_PAD (self),
gst_pad_start_task (GST_VIDEO_ENCODER_SRC_PAD (self),
(GstTaskFunction) gst_omx_video_enc_loop, encoder, NULL);
return TRUE;
@ -1057,14 +1066,15 @@ static gboolean
gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf,
GstOMXBuffer * outbuf)
{
GstVideoState *state = &GST_BASE_VIDEO_CODEC (self)->state;
GstVideoCodecState *state =
gst_video_encoder_get_output_state (GST_VIDEO_ENCODER (self));
GstVideoInfo *info = &state->info;
OMX_PARAM_PORTDEFINITIONTYPE *port_def = &self->in_port->port_def;
gboolean ret = FALSE;
GstVideoInfo vinfo;
GstVideoFrame frame;
if (state->width != port_def->format.video.nFrameWidth ||
state->height != port_def->format.video.nFrameHeight) {
if (info->width != port_def->format.video.nFrameWidth ||
info->height != port_def->format.video.nFrameHeight) {
GST_ERROR_OBJECT (self, "Width or height do not match");
goto done;
}
@ -1083,9 +1093,7 @@ gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf,
/* Different strides */
gst_video_info_from_caps (&vinfo, state->caps);
switch (state->format) {
switch (info->finfo->format) {
case GST_VIDEO_FORMAT_I420:{
gint i, j, height;
guint8 *src, *dest;
@ -1093,17 +1101,23 @@ gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf,
outbuf->omx_buf->nFilledLen = 0;
if (!gst_video_frame_map (&frame, info, inbuf, GST_MAP_READ)) {
GST_ERROR_OBJECT (self, "Invalid input buffer size");
ret = FALSE;
break;
}
for (i = 0; i < 3; i++) {
if (i == 0) {
dest_stride = port_def->format.video.nStride;
src_stride = vinfo.stride[0];
src_stride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
/* XXX: Try this if no stride was set */
if (dest_stride == 0)
dest_stride = src_stride;
} else {
dest_stride = port_def->format.video.nStride / 2;
src_stride = vinfo.stride[1];
src_stride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
/* XXX: Try this if no stride was set */
if (dest_stride == 0)
@ -1120,11 +1134,6 @@ gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf,
(port_def->format.video.nSliceHeight / 2) *
(port_def->format.video.nStride / 2);
if (!gst_video_frame_map (&frame, &vinfo, inbuf, GST_MAP_READ)) {
GST_ERROR_OBJECT (self, "Invalid input buffer size");
ret = FALSE;
break;
}
src = GST_VIDEO_FRAME_COMP_DATA (&frame, i);
height = GST_VIDEO_FRAME_HEIGHT (&frame);
@ -1141,9 +1150,8 @@ gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf,
src += src_stride;
dest += dest_stride;
}
gst_video_frame_unmap (&frame);
}
gst_video_frame_unmap (&frame);
ret = TRUE;
break;
}
@ -1154,16 +1162,22 @@ gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf,
outbuf->omx_buf->nFilledLen = 0;
if (!gst_video_frame_map (&frame, info, inbuf, GST_MAP_READ)) {
GST_ERROR_OBJECT (self, "Invalid input buffer size");
ret = FALSE;
break;
}
for (i = 0; i < 2; i++) {
if (i == 0) {
dest_stride = port_def->format.video.nStride;
src_stride = vinfo.stride[0];
src_stride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 0);
/* XXX: Try this if no stride was set */
if (dest_stride == 0)
dest_stride = src_stride;
} else {
dest_stride = port_def->format.video.nStride;
src_stride = vinfo.stride[1];
src_stride = GST_VIDEO_FRAME_COMP_STRIDE (&frame, 1);
/* XXX: Try this if no stride was set */
if (dest_stride == 0)
@ -1176,13 +1190,6 @@ gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf,
port_def->format.video.nSliceHeight *
port_def->format.video.nStride;
if (!gst_video_frame_map (&frame, &vinfo, inbuf, GST_MAP_READ)) {
GST_ERROR_OBJECT (self, "Invalid input buffer size");
ret = FALSE;
break;
}
src = GST_VIDEO_FRAME_COMP_DATA (&frame, i);
height = GST_VIDEO_FRAME_HEIGHT (&frame);
@ -1200,8 +1207,8 @@ gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf,
dest += dest_stride;
}
gst_video_frame_unmap (&frame);
}
gst_video_frame_unmap (&frame);
ret = TRUE;
break;
}
@ -1212,12 +1219,15 @@ gst_omx_video_enc_fill_buffer (GstOMXVideoEnc * self, GstBuffer * inbuf,
}
done:
gst_video_codec_state_unref (state);
return ret;
}
static GstFlowReturn
gst_omx_video_enc_handle_frame (GstBaseVideoEncoder * encoder,
GstVideoFrameState * frame)
gst_omx_video_enc_handle_frame (GstVideoEncoder * encoder,
GstVideoCodecFrame * frame)
{
GstOMXAcquireBufferReturn acq_ret = GST_OMX_ACQUIRE_BUFFER_ERROR;
GstOMXVideoEnc *self;
@ -1246,9 +1256,9 @@ gst_omx_video_enc_handle_frame (GstBaseVideoEncoder * encoder,
/* Make sure to release the base class stream lock, otherwise
* _loop() can't call _finish_frame() and we might block forever
* because no input buffers are released */
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
GST_VIDEO_ENCODER_STREAM_UNLOCK (self);
acq_ret = gst_omx_port_acquire_buffer (self->in_port, &buf);
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
GST_VIDEO_ENCODER_STREAM_LOCK (self);
if (acq_ret == GST_OMX_ACQUIRE_BUFFER_ERROR) {
goto component_error;
@ -1280,7 +1290,7 @@ gst_omx_video_enc_handle_frame (GstBaseVideoEncoder * encoder,
}
/* Now handle the frame */
if (frame->force_keyframe) {
if (GST_VIDEO_CODEC_FRAME_IS_FORCE_KEYFRAME (frame)) {
OMX_ERRORTYPE err;
OMX_CONFIG_INTRAREFRESHVOPTYPE config;
@ -1294,37 +1304,34 @@ gst_omx_video_enc_handle_frame (GstBaseVideoEncoder * encoder,
if (err != OMX_ErrorNone)
GST_ERROR_OBJECT (self, "Failed to force a keyframe: %s (0x%08x)",
gst_omx_error_to_string (err), err);
frame->force_keyframe = FALSE;
}
/* Copy the buffer content in chunks of size as requested
* by the port */
if (!gst_omx_video_enc_fill_buffer (self, frame->sink_buffer, buf)) {
if (!gst_omx_video_enc_fill_buffer (self, frame->input_buffer, buf)) {
gst_omx_port_release_buffer (self->in_port, buf);
goto buffer_fill_error;
}
timestamp = frame->presentation_timestamp;
timestamp = frame->pts;
if (timestamp != GST_CLOCK_TIME_NONE) {
buf->omx_buf->nTimeStamp =
gst_util_uint64_scale (timestamp, OMX_TICKS_PER_SECOND, GST_SECOND);
self->last_upstream_ts = timestamp;
}
duration = frame->presentation_duration;
duration = frame->duration;
if (duration != GST_CLOCK_TIME_NONE) {
buf->omx_buf->nTickCount =
gst_util_uint64_scale (buf->omx_buf->nFilledLen, duration,
gst_buffer_get_size (frame->sink_buffer));
gst_buffer_get_size (frame->input_buffer));
self->last_upstream_ts += duration;
}
id = g_slice_new0 (BufferIdentification);
id->timestamp = buf->omx_buf->nTimeStamp;
frame->coder_hook = id;
frame->coder_hook_destroy_notify =
(GDestroyNotify) buffer_identification_free;
gst_video_codec_frame_set_user_data (frame, id,
(GDestroyNotify) buffer_identification_free);
self->started = TRUE;
gst_omx_port_release_buffer (self->in_port, buf);
@ -1369,66 +1376,17 @@ buffer_fill_error:
}
static GstFlowReturn
gst_omx_video_enc_finish (GstBaseVideoEncoder * encoder)
gst_omx_video_enc_finish (GstVideoEncoder * encoder)
{
GstOMXVideoEnc *self;
GstOMXVideoEncClass *klass;
GstOMXBuffer *buf;
GstOMXAcquireBufferReturn acq_ret;
self = GST_OMX_VIDEO_ENC (encoder);
klass = GST_OMX_VIDEO_ENC_GET_CLASS (self);
GST_DEBUG_OBJECT (self, "Sending EOS to the component");
/* Don't send EOS buffer twice, this doesn't work */
if (self->eos) {
GST_DEBUG_OBJECT (self, "Component is already EOS");
return GST_BASE_VIDEO_ENCODER_FLOW_DROPPED;
}
self->eos = TRUE;
if ((klass->cdata.hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)) {
GST_WARNING_OBJECT (self, "Component does not support empty EOS buffers");
/* Insert a NULL into the queue to signal EOS */
gst_omx_rec_mutex_lock (&self->out_port->port_lock);
g_queue_push_tail (self->out_port->pending_buffers, NULL);
g_cond_broadcast (self->out_port->port_cond);
gst_omx_rec_mutex_unlock (&self->out_port->port_lock);
return GST_BASE_VIDEO_ENCODER_FLOW_DROPPED;
}
/* Make sure to release the base class stream lock, otherwise
* _loop() can't call _finish_frame() and we might block forever
* because no input buffers are released */
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
/* Send an EOS buffer to the component and let the base
* class drop the EOS event. We will send it later when
* the EOS buffer arrives on the output port. */
acq_ret = gst_omx_port_acquire_buffer (self->in_port, &buf);
if (acq_ret == GST_OMX_ACQUIRE_BUFFER_OK) {
buf->omx_buf->nFilledLen = 0;
buf->omx_buf->nTimeStamp =
gst_util_uint64_scale (self->last_upstream_ts, OMX_TICKS_PER_SECOND,
GST_SECOND);
buf->omx_buf->nTickCount = 0;
buf->omx_buf->nFlags |= OMX_BUFFERFLAG_EOS;
gst_omx_port_release_buffer (self->in_port, buf);
GST_DEBUG_OBJECT (self, "Sent EOS to the component");
} else {
GST_ERROR_OBJECT (self, "Failed to acquire buffer for EOS: %d", acq_ret);
}
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
return GST_BASE_VIDEO_ENCODER_FLOW_DROPPED;
return gst_omx_video_enc_drain (self, TRUE);
}
static GstFlowReturn
gst_omx_video_enc_drain (GstOMXVideoEnc * self)
gst_omx_video_enc_drain (GstOMXVideoEnc * self, gboolean at_eos)
{
GstOMXVideoEncClass *klass;
GstOMXBuffer *buf;
@ -1449,6 +1407,8 @@ gst_omx_video_enc_drain (GstOMXVideoEnc * self)
GST_DEBUG_OBJECT (self, "Component is EOS already");
return GST_FLOW_OK;
}
if (at_eos)
self->eos = TRUE;
if ((klass->cdata.hacks & GST_OMX_HACK_NO_EMPTY_EOS_BUFFER)) {
GST_WARNING_OBJECT (self, "Component does not support empty EOS buffers");
@ -1458,14 +1418,14 @@ gst_omx_video_enc_drain (GstOMXVideoEnc * self)
/* Make sure to release the base class stream lock, otherwise
* _loop() can't call _finish_frame() and we might block forever
* because no input buffers are released */
GST_BASE_VIDEO_CODEC_STREAM_UNLOCK (self);
GST_VIDEO_ENCODER_STREAM_UNLOCK (self);
/* Send an EOS buffer to the component and let the base
* class drop the EOS event. We will send it later when
* the EOS buffer arrives on the output port. */
acq_ret = gst_omx_port_acquire_buffer (self->in_port, &buf);
if (acq_ret != GST_OMX_ACQUIRE_BUFFER_OK) {
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
GST_VIDEO_ENCODER_STREAM_LOCK (self);
GST_ERROR_OBJECT (self, "Failed to acquire buffer for draining: %d",
acq_ret);
return GST_FLOW_ERROR;
@ -1484,7 +1444,7 @@ gst_omx_video_enc_drain (GstOMXVideoEnc * self)
g_cond_wait (self->drain_cond, self->drain_lock);
GST_DEBUG_OBJECT (self, "Drained component");
g_mutex_unlock (self->drain_lock);
GST_BASE_VIDEO_CODEC_STREAM_LOCK (self);
GST_VIDEO_ENCODER_STREAM_LOCK (self);
self->started = FALSE;

View file

@ -22,7 +22,7 @@
#define __GST_OMX_VIDEO_ENC_H__
#include <gst/gst.h>
#include "gstbasevideoencoder.h"
#include <gst/video/gstvideoencoder.h>
#include "gstomx.h"
@ -46,7 +46,7 @@ typedef struct _GstOMXVideoEncClass GstOMXVideoEncClass;
struct _GstOMXVideoEnc
{
GstBaseVideoEncoder parent;
GstVideoEncoder parent;
/* < protected > */
@ -57,6 +57,7 @@ struct _GstOMXVideoEnc
GstOMXPort *in_port, *out_port;
/* < private > */
GstVideoCodecState *input_state;
/* TRUE if the component is configured and saw
* the first buffer */
gboolean started;
@ -84,13 +85,13 @@ struct _GstOMXVideoEnc
struct _GstOMXVideoEncClass
{
GstBaseVideoEncoderClass parent_class;
GstVideoEncoderClass parent_class;
GstOMXClassData cdata;
gboolean (*set_format) (GstOMXVideoEnc * self, GstOMXPort * port, GstVideoInfo * info );
GstCaps *(*get_caps) (GstOMXVideoEnc * self, GstOMXPort * port, GstVideoState * state);
GstFlowReturn (*handle_output_frame) (GstOMXVideoEnc * self, GstOMXPort * port, GstOMXBuffer * buffer, GstVideoFrameState * frame);
gboolean (*set_format) (GstOMXVideoEnc * self, GstOMXPort * port, GstVideoCodecState * state);
GstCaps *(*get_caps) (GstOMXVideoEnc * self, GstOMXPort * port, GstVideoCodecState * state);
GstFlowReturn (*handle_output_frame) (GstOMXVideoEnc * self, GstOMXPort * port, GstOMXBuffer * buffer, GstVideoCodecFrame * frame);
};
GType gst_omx_video_enc_get_type (void);

View file

@ -31,9 +31,9 @@ GST_DEBUG_CATEGORY_STATIC (gst_omx_wmv_dec_debug_category);
/* prototypes */
static gboolean gst_omx_wmv_dec_is_format_change (GstOMXVideoDec * dec,
GstOMXPort * port, GstVideoState * state);
GstOMXPort * port, GstVideoCodecState * state);
static gboolean gst_omx_wmv_dec_set_format (GstOMXVideoDec * dec,
GstOMXPort * port, GstVideoState * state);
GstOMXPort * port, GstVideoCodecState * state);
enum
{
@ -78,14 +78,14 @@ gst_omx_wmv_dec_init (GstOMXWMVDec * self)
static gboolean
gst_omx_wmv_dec_is_format_change (GstOMXVideoDec * dec,
GstOMXPort * port, GstVideoState * state)
GstOMXPort * port, GstVideoCodecState * state)
{
return FALSE;
}
static gboolean
gst_omx_wmv_dec_set_format (GstOMXVideoDec * dec, GstOMXPort * port,
GstVideoState * state)
GstVideoCodecState * state)
{
gboolean ret;
OMX_PARAM_PORTDEFINITIONTYPE port_def;